Merge branch 'master' into revert-13487-revert-13463-BUDI-8157

commit 2d3fcae2e9
Author: Mike Sealey, 2024-04-19 16:11:54 +01:00 (committed by GitHub)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
125 changed files with 1850 additions and 1251 deletions

@@ -42,7 +42,17 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
+    "local-rules/no-budibase-imports": "error",
+    "local-rules/no-console-error": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
     "local-rules/no-budibase-imports": "error"
   }
 },
@@ -59,7 +69,15 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
     "local-rules/no-test-com": "error",
     "local-rules/email-domain-example-com": "error",
     "no-console": "warn",
@@ -89,7 +107,8 @@
     {
       "varsIgnorePattern": "^_",
       "argsIgnorePattern": "^_",
-      "destructuredArrayIgnorePattern": "^_"
+      "destructuredArrayIgnorePattern": "^_",
+      "ignoreRestSiblings": true
     }
   ],
   "import/no-relative-packages": "error",

@@ -24,5 +24,8 @@
   },
   "[svelte]": {
     "editor.defaultFormatter": "svelte.svelte-vscode"
+  },
+  "[handlebars]": {
+    "editor.formatOnSave": false
   }
 }

@@ -1,4 +1,25 @@
 module.exports = {
+  "no-console-error": {
+    create: function(context) {
+      return {
+        CallExpression(node) {
+          if (
+            node.callee.type === "MemberExpression" &&
+            node.callee.object.name === "console" &&
+            node.callee.property.name === "error" &&
+            node.arguments.length === 1 &&
+            node.arguments[0].name &&
+            node.arguments[0].name.startsWith("err")
+          ) {
+            context.report({
+              node,
+              message: 'Using console.error(err) on its own is not allowed. Either provide context to the error (console.error(msg, err)) or throw it.',
+            })
+          }
+        },
+      };
+    },
+  },
   "no-budibase-imports": {
     create: function (context) {
       return {

@@ -17,6 +17,7 @@ APP_PORT=4002
 WORKER_PORT=4003
 MINIO_PORT=4004
 COUCH_DB_PORT=4005
+COUCH_DB_SQS_PORT=4006
 REDIS_PORT=6379
 WATCHTOWER_PORT=6161
 BUDIBASE_ENVIRONMENT=PRODUCTION
@@ -28,4 +29,4 @@ BB_ADMIN_USER_PASSWORD=
 # A path that is watched for plugin bundles. Any bundles found are imported automatically/
 PLUGINS_DIR=
 ROLLING_LOG_MAX_SIZE=

@@ -1,5 +1,5 @@
 {
-  "version": "2.23.5",
+  "version": "2.23.10",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -9,10 +9,7 @@
   },
   "targetDefaults": {
     "build": {
-      "inputs": [
-        "{workspaceRoot}/scripts/build.js",
-        "{workspaceRoot}/lerna.json"
-      ]
+      "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
     }
   }
 }

@@ -56,9 +56,10 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
     "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
     "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
+    "dev:camunda": "./scripts/deploy-camunda.sh",
     "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "./scripts/devDocker.sh",
     "test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",

@@ -1 +1 @@
-Subproject commit bd0e01d639ec3b2547e7c859a1c43b622dce8344
+Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964

@@ -64,7 +64,6 @@ async function refreshOIDCAccessToken(
     }
     strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
   } catch (err) {
-    console.error(err)
     throw new Error("Could not refresh OAuth Token")
   }
@@ -99,7 +98,6 @@ async function refreshGoogleAccessToken(
       ssoSaveUserNoOp
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC refresh strategy: message=${err.message}`
     )

@@ -8,19 +8,9 @@ import {
   SearchParams,
   WithRequired,
 } from "@budibase/types"
+import { dataFilters } from "@budibase/shared-core"
-const QUERY_START_REGEX = /\d[0-9]*:/g
+export const removeKeyNumbering = dataFilters.removeKeyNumbering
-export function removeKeyNumbering(key: any): string {
-  if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
-    const parts = key.split(":")
-    // remove the number
-    parts.shift()
-    return parts.join(":")
-  } else {
-    return key
-  }
-}
 /**
  * Class to build lucene query URLs.
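Note: removeKeyNumbering keeps its previous behaviour, now delegated to dataFilters in @budibase/shared-core; it strips a leading "<number>:" prefix from a filter key and returns anything else unchanged. A small sketch (import path illustrative):

import { removeKeyNumbering } from "@budibase/backend-core" // illustrative path
removeKeyNumbering("1:name")        // "name"
removeKeyNumbering("2:1:equal:age") // "1:equal:age" (only the first segment is dropped)
removeKeyNumbering("name")          // "name" (no numeric prefix, returned as-is)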

@@ -107,7 +107,7 @@ const environment = {
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
-  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
+  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
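Note: the new default lines up with the COUCH_DB_SQS_PORT=4006 entry added to hosting/.env above. A sketch of how the fallback resolves when no override is present (assumption: the variable is unset):

// Mirrors the fallback expression in the config above.
const sqlUrl = process.env.COUCH_DB_SQL_URL || "http://localhost:4006"
// With COUCH_DB_SQL_URL unset, SQL-backed CouchDB queries now target port 4006
// (the SQS port) instead of the previous 4984 default.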

@@ -138,7 +138,6 @@ export default function (
     } catch (err: any) {
       authenticated = false
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // remove the cookie as the user does not exist anymore
       clearCookie(ctx, Cookie.Auth)
     }
@@ -187,7 +186,6 @@ export default function (
       }
     } catch (err: any) {
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // invalid token, clear the cookie
       if (err?.name === "JsonWebTokenError") {
         clearCookie(ctx, Cookie.Auth)

@@ -12,7 +12,7 @@ export async function errorHandling(ctx: any, next: any) {
   if (status >= 400 && status < 500) {
     console.warn(err)
   } else {
-    console.error(err)
+    console.error("Got 400 response code", err)
   }
   let error: APIError = {

@@ -68,7 +68,6 @@ export async function strategyFactory(
       verify
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing google authentication strategy: ${err}`)
   }
 }

@@ -103,7 +103,6 @@ export async function strategyFactory(
     strategy.name = "oidc"
     return strategy
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
   }
 }
@@ -142,7 +141,6 @@ export async function fetchStrategyConfig(
       callbackURL: callbackUrl,
     }
   } catch (err) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC authentication configuration - ${err}`
     )

@@ -26,7 +26,6 @@ export const getMigrationsDoc = async (db: any) => {
     if (err.status && err.status === 404) {
       return { _id: DocumentType.MIGRATIONS }
     } else {
-      console.error(err)
       throw err
     }
   }

@@ -115,7 +115,6 @@ class InMemoryQueue implements Partial<Queue> {
    * a JSON message as this is required by Bull.
    * @param repeat serves no purpose for the import queue.
    */
-  // eslint-disable-next-line no-unused-vars
   async add(data: any, opts?: JobOptions) {
     const jobId = opts?.jobId?.toString()
     if (jobId && this._queuedJobIds.has(jobId)) {
@@ -166,8 +165,7 @@ class InMemoryQueue implements Partial<Queue> {
     return []
   }
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async removeJobs(pattern: string) {
+  async removeJobs(_pattern: string) {
     // no-op
   }

@@ -50,6 +50,8 @@ type CreateAdminUserOpts = {
   hashPassword?: boolean
   requirePassword?: boolean
   skipPasswordValidation?: boolean
+  firstName?: string
+  lastName?: string
 }
 type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }
@@ -517,6 +519,8 @@ export class UserDB {
         global: true,
       },
       tenantId,
+      firstName: opts?.firstName,
+      lastName: opts?.lastName,
     }
     if (opts?.ssoId) {
       user.ssoId = opts.ssoId
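Note: a sketch of how the new firstName/lastName options might be supplied by a caller; the admin-user creation signature is not shown in this diff, so the call shape here is an assumption:

// Hypothetical call site: method name and argument order are assumptions.
await UserDB.createAdminUser("admin@example.com", tenantId, {
  password: "initial-password",
  firstName: "Grace",
  lastName: "Hopper",
})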

@@ -17,8 +17,8 @@ import {
   ContextUser,
   CouchFindOptions,
   DatabaseQueryOpts,
-  SearchQuery,
-  SearchQueryOperators,
+  SearchFilters,
+  SearchFilterOperator,
   SearchUsersRequest,
   User,
 } from "@budibase/types"
@@ -44,11 +44,11 @@ function removeUserPassword(users: User | User[]) {
   return users
 }
-export function isSupportedUserSearch(query: SearchQuery) {
+export function isSupportedUserSearch(query: SearchFilters) {
   const allowed = [
-    { op: SearchQueryOperators.STRING, key: "email" },
-    { op: SearchQueryOperators.EQUAL, key: "_id" },
-    { op: SearchQueryOperators.ONE_OF, key: "_id" },
+    { op: SearchFilterOperator.STRING, key: "email" },
+    { op: SearchFilterOperator.EQUAL, key: "_id" },
+    { op: SearchFilterOperator.ONE_OF, key: "_id" },
   ]
   for (let [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {

@@ -14,6 +14,7 @@
     notifications,
     Checkbox,
     DatePicker,
+    DrawerContent,
   } from "@budibase/bbui"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
   import { automationStore, selectedAutomation, tables } from "stores/builder"
@@ -37,7 +38,7 @@
     hbAutocomplete,
     EditorModes,
   } from "components/common/CodeEditor"
-  import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+  import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
   import { LuceneUtils, Utils } from "@budibase/frontend-core"
   import {
     getSchemaForDatasourcePlus,
@@ -442,15 +443,16 @@
           <Button cta slot="buttons" on:click={() => saveFilters(key)}>
             Save
           </Button>
-          <FilterDrawer
-            slot="body"
+          <DrawerContent slot="body">
+            <FilterBuilder
               {filters}
               {bindings}
               {schemaFields}
               datasource={{ type: "table", tableId }}
               panel={AutomationBindingPanel}
               on:change={e => (tempFilters = e.detail)}
             />
+          </DrawerContent>
         </Drawer>
       {:else if value.customType === "password"}
         <Input

@@ -1,7 +1,7 @@
 <script>
   import { createEventDispatcher } from "svelte"
   import { ActionButton, Modal, ModalContent } from "@budibase/bbui"
-  import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+  import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
   export let schema
   export let filters
@@ -40,7 +40,7 @@
   onConfirm={() => dispatch("change", tempValue)}
 >
   <div class="wrapper">
-    <FilterDrawer
+    <FilterBuilder
       allowBindings={false}
       {filters}
       {schemaFields}

@@ -13,6 +13,7 @@
     Layout,
     AbsTooltip,
   } from "@budibase/bbui"
+  import { SWITCHABLE_TYPES, ValidColumnNameRegex } from "@budibase/shared-core"
   import { createEventDispatcher, getContext, onMount } from "svelte"
   import { cloneDeep } from "lodash/fp"
   import { tables, datasources } from "stores/builder"
@@ -20,11 +21,6 @@
   import {
     FIELDS,
     RelationshipType,
-    ALLOWABLE_STRING_OPTIONS,
-    ALLOWABLE_NUMBER_OPTIONS,
-    ALLOWABLE_STRING_TYPES,
-    ALLOWABLE_NUMBER_TYPES,
-    SWITCHABLE_TYPES,
     PrettyRelationshipDefinitions,
     DB_TYPE_EXTERNAL,
   } from "constants/backend"
@@ -33,21 +29,20 @@
   import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
   import { getBindings } from "components/backend/DataTable/formula"
   import JSONSchemaModal from "./JSONSchemaModal.svelte"
-  import { ValidColumnNameRegex } from "@budibase/shared-core"
   import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
   import RelationshipSelector from "components/common/RelationshipSelector.svelte"
   import { RowUtils } from "@budibase/frontend-core"
   import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
-  const AUTO_TYPE = FIELDS.AUTO.type
-  const FORMULA_TYPE = FIELDS.FORMULA.type
-  const LINK_TYPE = FIELDS.LINK.type
-  const STRING_TYPE = FIELDS.STRING.type
-  const NUMBER_TYPE = FIELDS.NUMBER.type
-  const JSON_TYPE = FIELDS.JSON.type
-  const DATE_TYPE = FIELDS.DATETIME.type
-  const USER_TYPE = FIELDS.USER.subtype
-  const USERS_TYPE = FIELDS.USERS.subtype
+  const AUTO_TYPE = FieldType.AUTO
+  const FORMULA_TYPE = FieldType.FORMULA
+  const LINK_TYPE = FieldType.LINK
+  const STRING_TYPE = FieldType.STRING
+  const NUMBER_TYPE = FieldType.NUMBER
+  const JSON_TYPE = FieldType.JSON
+  const DATE_TYPE = FieldType.DATETIME
+  const USER_TYPE = FieldSubtype.USER
+  const USERS_TYPE = FieldSubtype.USERS
   const dispatch = createEventDispatcher()
   const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@@ -61,8 +56,8 @@
   let primaryDisplay
   let indexes = [...($tables.selected.indexes || [])]
   let isCreating = undefined
-  let relationshipPart1 = PrettyRelationshipDefinitions.Many
-  let relationshipPart2 = PrettyRelationshipDefinitions.One
+  let relationshipPart1 = PrettyRelationshipDefinitions.MANY
+  let relationshipPart2 = PrettyRelationshipDefinitions.ONE
   let relationshipTableIdPrimary = null
   let relationshipTableIdSecondary = null
   let table = $tables.selected
@@ -175,7 +170,7 @@
   $: typeEnabled =
     !originalName ||
    (originalName &&
-      SWITCHABLE_TYPES.indexOf(editableColumn.type) !== -1 &&
+      SWITCHABLE_TYPES[field.type] &&
       !editableColumn?.autocolumn)
   const fieldDefinitions = Object.values(FIELDS).reduce(
@@ -367,16 +362,15 @@
   }
   function getAllowedTypes() {
-    if (
-      originalName &&
-      ALLOWABLE_STRING_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_STRING_OPTIONS
-    } else if (
-      originalName &&
-      ALLOWABLE_NUMBER_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_NUMBER_OPTIONS
+    if (originalName) {
+      const possibleTypes = (
+        SWITCHABLE_TYPES[field.type] || [editableColumn.type]
+      ).map(t => t.toLowerCase())
+      return Object.entries(FIELDS)
+        .filter(([fieldType]) =>
+          possibleTypes.includes(fieldType.toLowerCase())
+        )
+        .map(([_, fieldDefinition]) => fieldDefinition)
     }
     const isUsers =
@@ -632,7 +626,7 @@
       />
     </div>
   </div>
-{:else if editableColumn.type === FieldType.LINK}
+{:else if editableColumn.type === FieldType.LINK && !editableColumn.autocolumn}
   <RelationshipSelector
     bind:relationshipPart1
     bind:relationshipPart2

@@ -27,14 +27,6 @@
       return []
     }
   }
-
-  async function deleteAttachments(fileList) {
-    try {
-      return await API.deleteBuilderAttachments(fileList)
-    } catch (error) {
-      return []
-    }
-  }
 </script>

 <Dropzone
@@ -42,6 +34,5 @@
   {label}
   {...$$restProps}
   {processFiles}
-  {deleteAttachments}
   {handleFileTooLarge}
 />

@@ -9,7 +9,6 @@
     "",
     requiredValidator
   )
-  // eslint-disable-next-line no-unused-vars
   const [repeatPassword, _, repeatTouched] = createValidationStore(
     "",
     requiredValidator

@@ -0,0 +1,84 @@
<script>
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { dataFilters } from "@budibase/shared-core"
import { FilterBuilder } from "@budibase/frontend-core"
import { createEventDispatcher, onMount } from "svelte"
export let schemaFields
export let filters = []
export let bindings = []
export let panel = ClientBindingPanel
export let allowBindings = true
export let datasource
const dispatch = createEventDispatcher()
let rawFilters
$: parseFilters(rawFilters)
$: dispatch("change", enrichFilters(rawFilters))
// Remove field key prefixes and determine which behaviours to use
const parseFilters = filters => {
rawFilters = (filters || []).map(filter => {
const { field } = filter
let newFilter = { ...filter }
delete newFilter.allOr
newFilter.field = dataFilters.removeKeyNumbering(field)
return newFilter
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// how to handle filter behaviour
const enrichFilters = rawFilters => {
let count = 1
return rawFilters
.filter(filter => filter.field)
.map(filter => ({
...filter,
field: `${count++}:${filter.field}`,
}))
.concat(...rawFilters.filter(filter => !filter.field))
}
</script>
<FilterBuilder
bind:filters={rawFilters}
behaviourFilters={true}
{schemaFields}
{datasource}
{allowBindings}
>
<div slot="filtering-hero-content" />
<DrawerBindableInput
let:filter
slot="binding"
disabled={filter.noValue}
title={filter.field}
value={filter.value}
placeholder="Value"
{panel}
{bindings}
on:change={event => {
const indexToUpdate = rawFilters.findIndex(f => f.id === filter.id)
rawFilters[indexToUpdate] = {
...rawFilters[indexToUpdate],
value: event.detail,
}
}}
/>
</FilterBuilder>
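Note: the parse/enrich pair above round-trips the numeric key prefixes that the backend expects. A small illustration of enrichFilters output for hypothetical input:

const edited = [
  { id: "a1", field: "name", operator: "string", value: "foo" },
  { id: "a2", field: "age", operator: "equal", value: 21 },
]
// enrichFilters(edited) returns the same filters with re-numbered field keys:
// [
//   { id: "a1", field: "1:name", operator: "string", value: "foo" },
//   { id: "a2", field: "2:age", operator: "equal", value: 21 },
// ]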

@@ -1,8 +1,14 @@
 <script>
-  import { notifications, ActionButton, Button, Drawer } from "@budibase/bbui"
+  import {
+    notifications,
+    ActionButton,
+    Button,
+    Drawer,
+    DrawerContent,
+  } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
-  import FilterDrawer from "./FilterDrawer.svelte"
+  import FilterBuilder from "./FilterBuilder.svelte"
   import { selectedScreen } from "stores/builder"
   const dispatch = createEventDispatcher()
@@ -40,14 +46,15 @@
 </div>
 <Drawer bind:this={drawer} title="Filtering" on:drawerHide on:drawerShow>
   <Button cta slot="buttons" on:click={saveFilter}>Save</Button>
-  <FilterDrawer
-    slot="body"
+  <DrawerContent slot="body">
+    <FilterBuilder
       filters={value}
       {bindings}
       {schemaFields}
       {datasource}
       on:change={e => (tempValue = e.detail)}
     />
+  </DrawerContent>
 </Drawer>

 <style>

@@ -202,26 +202,6 @@ export const PrettyRelationshipDefinitions = {
   ONE: "One row",
 }
-export const ALLOWABLE_STRING_OPTIONS = [
-  FIELDS.STRING,
-  FIELDS.OPTIONS,
-  FIELDS.LONGFORM,
-  FIELDS.BARCODEQR,
-]
-export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map(
-  opt => opt.type
-)
-export const ALLOWABLE_NUMBER_OPTIONS = [FIELDS.NUMBER, FIELDS.BOOLEAN]
-export const ALLOWABLE_NUMBER_TYPES = ALLOWABLE_NUMBER_OPTIONS.map(
-  opt => opt.type
-)
-export const SWITCHABLE_TYPES = [
-  ...ALLOWABLE_STRING_TYPES,
-  ...ALLOWABLE_NUMBER_TYPES,
-]
 export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
 export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"

@@ -4,8 +4,6 @@ import {
   createDatasourceCreationStore,
 } from "./datasourceCreation"
 import { get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
-import { shouldIntegrationFetchTableNames } from "stores/selectors"

 vi.mock("stores/selectors", () => ({
   shouldIntegrationFetchTableNames: vi.fn(),

@@ -1,9 +1,9 @@
 import { it, expect, describe, beforeEach, vi } from "vitest"
 import { createOnGoogleAuthStore } from "./onGoogleAuth"
 import { writable, get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { params } from "@roxi/routify"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { integrations } from "stores/builder"
 import { IntegrationTypes } from "constants/backend"

@@ -189,6 +189,7 @@
             <Select
               options={settingOptions}
               bind:value={condition.setting}
+              on:change={() => delete condition.settingValue}
             />
             <div>TO</div>
             {#if definition}

@@ -1,8 +1,8 @@
 import { FieldType } from "@budibase/types"
+import { SWITCHABLE_TYPES } from "@budibase/shared-core"
 import { get, writable, derived } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
-import { SWITCHABLE_TYPES } from "constants/backend"

 export function createTablesStore() {
   const store = writable({
@@ -64,7 +64,7 @@ export function createTablesStore() {
         if (
           oldField != null &&
           oldField?.type !== field.type &&
-          SWITCHABLE_TYPES.indexOf(oldField?.type) === -1
+          !SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)
         ) {
           updatedTable.schema[key] = oldField
         }
@@ -148,12 +148,6 @@ export function createTablesStore() {
       if (indexes) {
        draft.indexes = indexes
       }
-
-      // Add object to indicate if column is being added
-      if (draft.schema[field.name] === undefined) {
-        draft._add = {
-          name: field.name,
-        }
-      }

       draft.schema = {
         ...draft.schema,
         [field.name]: cloneDeep(field),
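Note: SWITCHABLE_TYPES is consumed here as a map from a column's current type to the types it may switch to (shape inferred from this usage and the column editor above; the example entries are illustrative, not the real table):

// Inferred shape: Record<FieldType, FieldType[]>.
const SWITCHABLE_TYPES: Record<string, string[]> = {
  string: ["options", "longform", "barcodeqr"],
  number: ["boolean"],
}

// Mirrors the guard above: revert the schema change unless the old type
// explicitly allows switching to the new one.
function shouldRevert(oldType: string, newType: string): boolean {
  return !SWITCHABLE_TYPES[oldType]?.includes(newType)
}

shouldRevert("string", "options") // false: switch allowed
shouldRevert("string", "json")    // true: field reverts to the old definition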

@@ -105,7 +105,6 @@ export function getAppService(path: string) {
 }

 export function updateDockerComposeService(
-  // eslint-disable-next-line no-unused-vars
   updateFn: (service: DockerCompose) => void
 ) {
   const opts = ["docker-compose.yaml", "docker-compose.yml"]

@@ -119,140 +119,142 @@
{/if} {/if}
</svelte:head> </svelte:head>
<div {#if dataLoaded}
id="spectrum-root" <div
lang="en" id="spectrum-root"
dir="ltr" lang="en"
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}" dir="ltr"
class:builder={$builderStore.inBuilder} class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
class:show={fontsLoaded && dataLoaded} class:builder={$builderStore.inBuilder}
> class:show={fontsLoaded && dataLoaded}
{#if $environmentStore.maintenance.length > 0} >
<MaintenanceScreen maintenanceList={$environmentStore.maintenance} /> {#if $environmentStore.maintenance.length > 0}
{:else} <MaintenanceScreen maintenanceList={$environmentStore.maintenance} />
<DeviceBindingsProvider> {:else}
<UserBindingsProvider> <DeviceBindingsProvider>
<StateBindingsProvider> <UserBindingsProvider>
<RowSelectionProvider> <StateBindingsProvider>
<QueryParamsProvider> <RowSelectionProvider>
<SnippetsProvider> <QueryParamsProvider>
<!-- Settings bar can be rendered outside of device preview --> <SnippetsProvider>
<!-- Key block needs to be outside the if statement or it breaks --> <!-- Settings bar can be rendered outside of device preview -->
{#key $builderStore.selectedComponentId} <!-- Key block needs to be outside the if statement or it breaks -->
{#if $builderStore.inBuilder} {#key $builderStore.selectedComponentId}
<SettingsBar /> {#if $builderStore.inBuilder}
{/if} <SettingsBar />
{/key}
<!-- Clip boundary for selection indicators -->
<div
id="clip-root"
class:preview={$builderStore.inBuilder}
class:tablet-preview={$builderStore.previewDevice ===
"tablet"}
class:mobile-preview={$builderStore.previewDevice ===
"mobile"}
>
<!-- Actual app -->
<div id="app-root">
{#if showDevTools}
<DevToolsHeader />
{/if} {/if}
{/key}
<div id="app-body"> <!-- Clip boundary for selection indicators -->
{#if permissionError} <div
<div class="error"> id="clip-root"
<Layout justifyItems="center" gap="S"> class:preview={$builderStore.inBuilder}
<!-- eslint-disable-next-line svelte/no-at-html-tags --> class:tablet-preview={$builderStore.previewDevice ===
{@html ErrorSVG} "tablet"}
<Heading size="L"> class:mobile-preview={$builderStore.previewDevice ===
You don't have permission to use this app "mobile"}
</Heading> >
<Body size="S"> <!-- Actual app -->
Ask your administrator to grant you access <div id="app-root">
</Body> {#if showDevTools}
</Layout> <DevToolsHeader />
</div> {/if}
{:else if !$screenStore.activeLayout}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
Something went wrong rendering your app
</Heading>
<Body size="S">
Get in touch with support if this issue persists
</Body>
</Layout>
</div>
{:else if embedNoScreens}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
This Budibase app is not publicly accessible
</Heading>
</Layout>
</div>
{:else}
<CustomThemeWrapper>
{#key $screenStore.activeLayout._id}
<Component
isLayout
instance={$screenStore.activeLayout.props}
/>
{/key}
<!-- <div id="app-body">
{#if permissionError}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
You don't have permission to use this app
</Heading>
<Body size="S">
Ask your administrator to grant you access
</Body>
</Layout>
</div>
{:else if !$screenStore.activeLayout}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
Something went wrong rendering your app
</Heading>
<Body size="S">
Get in touch with support if this issue persists
</Body>
</Layout>
</div>
{:else if embedNoScreens}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
This Budibase app is not publicly accessible
</Heading>
</Layout>
</div>
{:else}
<CustomThemeWrapper>
{#key $screenStore.activeLayout._id}
<Component
isLayout
instance={$screenStore.activeLayout.props}
/>
{/key}
<!--
Flatpickr needs to be inside the theme wrapper. Flatpickr needs to be inside the theme wrapper.
It also needs its own container because otherwise it hijacks It also needs its own container because otherwise it hijacks
key events on the whole page. It is painful to work with. key events on the whole page. It is painful to work with.
--> -->
<div id="flatpickr-root" /> <div id="flatpickr-root" />
<!-- Modal container to ensure they sit on top --> <!-- Modal container to ensure they sit on top -->
<div class="modal-container" /> <div class="modal-container" />
<!-- Layers on top of app --> <!-- Layers on top of app -->
<NotificationDisplay /> <NotificationDisplay />
<ConfirmationDisplay /> <ConfirmationDisplay />
<PeekScreenDisplay /> <PeekScreenDisplay />
</CustomThemeWrapper> </CustomThemeWrapper>
{/if} {/if}
{#if showDevTools} {#if showDevTools}
<DevTools /> <DevTools />
{/if}
</div>
{#if !$builderStore.inBuilder && $featuresStore.logoEnabled}
<FreeFooter />
{/if} {/if}
</div> </div>
{#if !$builderStore.inBuilder && $featuresStore.logoEnabled} <!-- Preview and dev tools utilities -->
<FreeFooter /> {#if $appStore.isDevApp}
<SelectionIndicator />
{/if}
{#if $builderStore.inBuilder || $devToolsStore.allowSelection}
<HoverIndicator />
{/if}
{#if $builderStore.inBuilder}
<DNDHandler />
<GridDNDHandler />
{/if} {/if}
</div> </div>
</SnippetsProvider>
<!-- Preview and dev tools utilities --> </QueryParamsProvider>
{#if $appStore.isDevApp} </RowSelectionProvider>
<SelectionIndicator /> </StateBindingsProvider>
{/if} </UserBindingsProvider>
{#if $builderStore.inBuilder || $devToolsStore.allowSelection} </DeviceBindingsProvider>
<HoverIndicator /> {/if}
{/if} </div>
{#if $builderStore.inBuilder} <KeyboardManager />
<DNDHandler /> {/if}
<GridDNDHandler />
{/if}
</div>
</SnippetsProvider>
</QueryParamsProvider>
</RowSelectionProvider>
</StateBindingsProvider>
</UserBindingsProvider>
</DeviceBindingsProvider>
{/if}
</div>
<KeyboardManager />
<style> <style>
#spectrum-root { #spectrum-root {

@@ -157,6 +157,11 @@
     width: 100%;
   }

+  /* Use normal theme colors for links when using a top nav */
+  .dropdown:not(.left) .sublinks a {
+    color: var(--spectrum-alias-text-color);
+  }
+
   /* Left dropdowns */
   .dropdown.left .sublinks-wrapper {
     display: none;

@@ -1,216 +1,14 @@
<script> <script>
import { import { FilterBuilder } from "@budibase/frontend-core"
Body,
Button,
Combobox,
DatePicker,
Icon,
Input,
Layout,
Select,
} from "@budibase/bbui"
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getContext } from "svelte"
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
export let datasource export let datasource
const context = getContext("context")
const BannedTypes = ["link", "attachment", "json"]
$: fieldOptions = (schemaFields ?? [])
.filter(
field =>
!BannedTypes.includes(field.type) ||
(field.type === "formula" && field.formulaType === "static")
)
.map(field => ({
label: field.displayName || field.name,
value: field.name,
}))
const addFilter = () => {
filters = [
...filters,
{
id: generate(),
field: null,
operator: Constants.OperatorOptions.Equals.value,
value: null,
valueType: "Value",
},
]
}
const removeFilter = id => {
filters = filters.filter(field => field.id !== id)
}
const duplicateFilter = id => {
const existingFilter = filters.find(filter => filter.id === id)
const duplicate = { ...existingFilter, id: generate() }
filters = [...filters, duplicate]
}
const onFieldChange = (expression, field) => {
// Update the field type
expression.type = schemaFields.find(x => x.name === field)?.type
expression.externalType = schemaFields.find(
x => x.name === field
)?.externalType
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(
{ type: expression.type },
expression.field,
datasource
).map(x => x.value)
if (!validOperators.includes(expression.operator)) {
expression.operator =
validOperators[0] ?? Constants.OperatorOptions.Equals.value
onOperatorChange(expression, expression.operator)
}
// if changed to an array, change default value to empty array
const idx = filters.findIndex(x => x.field === field)
if (expression.type === "array") {
filters[idx].value = []
} else {
filters[idx].value = null
}
}
const onOperatorChange = (expression, operator) => {
const noValueOptions = [
Constants.OperatorOptions.Empty.value,
Constants.OperatorOptions.NotEmpty.value,
]
expression.noValue = noValueOptions.includes(operator)
if (expression.noValue) {
expression.value = null
}
}
const getFieldOptions = field => {
const schema = schemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getSchema = filter => {
return schemaFields.find(field => field.name === filter.field)
}
</script> </script>
<div class="container" class:mobile={$context.device.mobile}> <FilterBuilder bind:filters {schemaFields} {datasource} filtersLabel={null}>
<Layout noPadding> <div slot="filtering-hero-content">
<Body size="S"> Results are filtered to only those which match all of the following
{#if !filters?.length} constraints.
Add your first filter expression. </div>
{:else} </FilterBuilder>
Results are filtered to only those which match all of the following
constraints.
{/if}
</Body>
{#if filters?.length}
<div class="fields">
{#each filters as filter}
<Select
bind:value={filter.field}
options={fieldOptions}
on:change={e => onFieldChange(filter, e.detail)}
placeholder="Column"
/>
<Select
disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType(
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)}
bind:value={filter.operator}
on:change={e => onOperatorChange(filter, e.detail)}
placeholder={null}
/>
{#if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} />
{:else if ["options", "array"].includes(filter.type)}
<Combobox
disabled={filter.noValue}
options={getFieldOptions(filter.field)}
bind:value={filter.value}
/>
{:else if filter.type === "boolean"}
<Combobox
disabled={filter.noValue}
options={[
{ label: "True", value: "true" },
{ label: "False", value: "false" },
]}
bind:value={filter.value}
/>
{:else if filter.type === "datetime"}
<DatePicker
disabled={filter.noValue}
enableTime={!getSchema(filter).dateOnly}
timeOnly={getSchema(filter).timeOnly}
bind:value={filter.value}
/>
{:else}
<Input disabled />
{/if}
<div class="controls">
<Icon
name="Duplicate"
hoverable
size="S"
on:click={() => duplicateFilter(filter.id)}
/>
<Icon
name="Close"
hoverable
size="S"
on:click={() => removeFilter(filter.id)}
/>
</div>
{/each}
</div>
{/if}
<div>
<Button icon="AddCircle" size="M" secondary on:click={addFilter}>
Add filter
</Button>
</div>
</Layout>
</div>
<style>
.container {
width: 100%;
max-width: 1000px;
margin: 0 auto;
}
.fields {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
align-items: center;
grid-template-columns: 1fr 120px 1fr auto auto;
}
.controls {
display: contents;
}
.container.mobile .fields {
grid-template-columns: 1fr;
}
.container.mobile .controls {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s) 0;
gap: var(--spacing-s);
}
</style>

@@ -58,17 +58,6 @@
     }
   }
-
-  const deleteAttachments = async fileList => {
-    try {
-      return await API.deleteAttachments({
-        keys: fileList,
-        tableId: formContext?.dataSource?.tableId,
-      })
-    } catch (error) {
-      return []
-    }
-  }

   const handleChange = e => {
     const value = fieldApiMapper.set(e.detail)
     const changed = fieldApi.setValue(value)
@@ -98,7 +87,6 @@
     error={fieldState.error}
     on:change={handleChange}
     {processFiles}
-    {deleteAttachments}
     {handleFileTooLarge}
     {handleTooManyFiles}
     {maximum}

@@ -11,6 +11,7 @@
     "@budibase/types": "0.0.0",
     "dayjs": "^1.10.8",
     "lodash": "4.17.21",
+    "shortid": "2.2.15",
     "socket.io-client": "^4.6.1"
   }
 }

@@ -61,34 +61,6 @@ export const buildAttachmentEndpoints = API => {
     })
     return { publicUrl }
   },
-
-  /**
-   * Deletes attachments from the bucket.
-   * @param keys the attachments to delete
-   * @param tableId the associated table ID
-   */
-  deleteAttachments: async ({ keys, tableId }) => {
-    return await API.post({
-      url: `/api/attachments/${tableId}/delete`,
-      body: {
-        keys,
-      },
-    })
-  },
-
-  /**
-   * Deletes attachments from the builder bucket.
-   * @param keys the attachments to delete
-   */
-  deleteBuilderAttachments: async keys => {
-    return await API.post({
-      url: `/api/attachments/delete`,
-      body: {
-        keys,
-      },
-    })
-  },
-
   /**
    * Download an attachment from a row given its column name.
    * @param datasourceId the ID of the datasource to download from

@@ -4,33 +4,36 @@
Button, Button,
Combobox, Combobox,
DatePicker, DatePicker,
DrawerContent,
Icon, Icon,
Input, Input,
Label,
Layout, Layout,
Multiselect,
Select, Select,
Label,
Multiselect,
} from "@budibase/bbui" } from "@budibase/bbui"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte" import { FieldType, SearchFilterOperator } from "@budibase/types"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { generate } from "shortid" import { generate } from "shortid"
import { Constants, LuceneUtils } from "@budibase/frontend-core" import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields" import { getContext } from "svelte"
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte" import FilterUsers from "./FilterUsers.svelte"
const { OperatorOptions } = Constants
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
export let bindings = []
export let panel = ClientBindingPanel
export let allowBindings = true
export let datasource export let datasource
export let behaviourFilters = false
export let allowBindings = false
export let filtersLabel = "Filters"
$: matchAny = filters?.find(filter => filter.operator === "allOr") != null
$: onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
$: fieldFilters = filters.filter(
filter => filter.operator !== "allOr" && !filter.onEmptyFilter
)
const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants
const KeyedFieldRegex = /\d[0-9]*:/g
const behaviourOptions = [ const behaviourOptions = [
{ value: "and", label: "Match all filters" }, { value: "and", label: "Match all filters" },
{ value: "or", label: "Match any filter" }, { value: "or", label: "Match any filter" },
@ -40,62 +43,18 @@
{ value: "none", label: "Return no rows" }, { value: "none", label: "Return no rows" },
] ]
let rawFilters const context = getContext("context")
let matchAny = false
let onEmptyFilter = "all"
$: parseFilters(filters) $: fieldOptions = (schemaFields ?? [])
$: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter)) .filter(field => getValidOperatorsForType(field).length)
$: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true }) .map(field => ({
$: fieldOptions = enrichedSchemaFields.map(field => field.name) || [] label: field.displayName || field.name,
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"] value: field.name,
}))
// Remove field key prefixes and determine which behaviours to use
const parseFilters = filters => {
matchAny = filters?.find(filter => filter.operator === "allOr") != null
onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
rawFilters = (filters || [])
.filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter)
.map(filter => {
const { field } = filter
let newFilter = { ...filter }
delete newFilter.allOr
if (typeof field === "string" && field.match(KeyedFieldRegex) != null) {
const parts = field.split(":")
parts.shift()
newFilter.field = parts.join(":")
}
return newFilter
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// how to handle filter behaviour
const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {
let count = 1
return rawFilters
.filter(filter => filter.field)
.map(filter => ({
...filter,
field: `${count++}:${filter.field}`,
}))
.concat(matchAny ? [{ operator: "allOr" }] : [])
.concat([{ onEmptyFilter }])
}
const addFilter = () => { const addFilter = () => {
rawFilters = [ filters = [
...rawFilters, ...(filters || []),
{ {
id: generate(), id: generate(),
field: null, field: null,
@ -107,22 +66,57 @@
} }
const removeFilter = id => { const removeFilter = id => {
rawFilters = rawFilters.filter(field => field.id !== id) filters = filters.filter(field => field.id !== id)
} }
const duplicateFilter = id => { const duplicateFilter = id => {
const existingFilter = rawFilters.find(filter => filter.id === id) const existingFilter = filters.find(filter => filter.id === id)
const duplicate = { ...existingFilter, id: generate() } const duplicate = { ...existingFilter, id: generate() }
rawFilters = [...rawFilters, duplicate] filters = [...filters, duplicate]
}
const onFieldChange = filter => {
const previousType = filter.type
sanitizeTypes(filter)
sanitizeOperator(filter)
sanitizeValue(filter, previousType)
}
const onOperatorChange = filter => {
sanitizeOperator(filter)
sanitizeValue(filter, filter.type)
}
const onValueTypeChange = filter => {
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = schemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
} }
const getSchema = filter => { const getSchema = filter => {
return enrichedSchemaFields.find(field => field.name === filter.field) return schemaFields.find(field => field.name === filter.field)
} }
const getValidOperatorsForType = filter => {
if (!filter?.field && !filter?.name) {
return []
}
return LuceneUtils.getValidOperatorsForType(
filter,
filter.field || filter.name,
datasource
)
}
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
const sanitizeTypes = filter => { const sanitizeTypes = filter => {
// Update type based on field // Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field) const fieldSchema = schemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type filter.type = fieldSchema?.type
filter.subtype = fieldSchema?.subtype filter.subtype = fieldSchema?.subtype
@ -154,88 +148,79 @@
// Ensure array values are properly set and cleared // Ensure array values are properly set and cleared
if (Array.isArray(filter.value)) { if (Array.isArray(filter.value)) {
if (filter.valueType !== "Value" || filter.type !== "array") { if (filter.valueType !== "Value" || filter.type !== FieldType.ARRAY) {
filter.value = null filter.value = null
} }
} else if (filter.type === "array" && filter.valueType === "Value") { } else if (
filter.type === FieldType.ARRAY &&
filter.valueType === "Value"
) {
filter.value = [] filter.value = []
} else if ( } else if (
previousType !== filter.type && previousType !== filter.type &&
(previousType === FieldType.BB_REFERENCE || (previousType === FieldType.BB_REFERENCE ||
filter.type === FieldType.BB_REFERENCE) filter.type === FieldType.BB_REFERENCE)
) { ) {
filter.value = filter.type === "array" ? [] : null filter.value = filter.type === FieldType.ARRAY ? [] : null
} }
} }
const onFieldChange = filter => { function handleAllOr(option) {
const previousType = filter.type filters = filters.filter(f => f.operator !== "allOr")
sanitizeTypes(filter) if (option === "or") {
sanitizeOperator(filter) filters.push({ operator: "allOr" })
sanitizeValue(filter, previousType)
}
const onOperatorChange = filter => {
sanitizeOperator(filter)
sanitizeValue(filter, filter.type)
}
const onValueTypeChange = filter => {
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = enrichedSchemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getValidOperatorsForType = filter => {
if (!filter?.field) {
return []
} }
}
return LuceneUtils.getValidOperatorsForType( function handleOnEmptyFilter(value) {
{ type: filter.type, subtype: filter.subtype }, filters = filters?.filter(filter => !filter.onEmptyFilter)
filter.field, filters.push({ onEmptyFilter: value })
datasource
)
} }
</script> </script>
<DrawerContent> <div class="container" class:mobile={$context?.device?.mobile}>
<div class="container"> <Layout noPadding>
<Layout noPadding> {#if fieldOptions?.length}
{#if !rawFilters?.length} <Body size="S">
<Body size="S">Add your first filter expression.</Body> {#if !fieldFilters?.length}
{:else} Add your first filter expression.
<div class="fields"> {:else}
<Select <slot name="filtering-hero-content" />
label="Behaviour" {#if behaviourFilters}
value={matchAny ? "or" : "and"} <div class="behaviour-filters">
options={behaviourOptions} <Select
getOptionLabel={opt => opt.label} label="Behaviour"
getOptionValue={opt => opt.value} value={matchAny ? "or" : "and"}
on:change={e => (matchAny = e.detail === "or")} options={behaviourOptions}
placeholder={null} getOptionLabel={opt => opt.label}
/> getOptionValue={opt => opt.value}
{#if datasource?.type === "table"} on:change={e => handleAllOr(e.detail)}
<Select placeholder={null}
label="When filter empty" />
value={onEmptyFilter} {#if datasource?.type === "table"}
options={onEmptyOptions} <Select
getOptionLabel={opt => opt.label} label="When filter empty"
getOptionValue={opt => opt.value} value={onEmptyFilter}
on:change={e => (onEmptyFilter = e.detail)} options={onEmptyOptions}
placeholder={null} getOptionLabel={opt => opt.label}
/> getOptionValue={opt => opt.value}
on:change={e => handleOnEmptyFilter(e.detail)}
placeholder={null}
/>
{/if}
</div>
{/if} {/if}
</div> {/if}
</Body>
{#if fieldFilters?.length}
<div> <div>
<div class="filter-label"> {#if filtersLabel}
<Label>Filters</Label> <div class="filter-label">
</div> <Label>{filtersLabel}</Label>
<div class="fields"> </div>
{#each rawFilters as filter} {/if}
<div class="fields" class:with-bindings={allowBindings}>
{#each fieldFilters as filter}
<Select <Select
bind:value={filter.field} bind:value={filter.field}
options={fieldOptions} options={fieldOptions}
@ -249,38 +234,32 @@
on:change={() => onOperatorChange(filter)} on:change={() => onOperatorChange(filter)}
placeholder={null} placeholder={null}
/> />
<Select {#if allowBindings}
disabled={filter.noValue || !filter.field} <Select
options={valueTypeOptions} disabled={filter.noValue || !filter.field}
bind:value={filter.valueType} options={valueTypeOptions}
on:change={() => onValueTypeChange(filter)} bind:value={filter.valueType}
placeholder={null} on:change={() => onValueTypeChange(filter)}
/> placeholder={null}
{#if filter.field && filter.valueType === "Binding"}
<DrawerBindableInput
disabled={filter.noValue}
title={filter.field}
value={filter.value}
placeholder="Value"
{panel}
{bindings}
on:change={event => (filter.value = event.detail)}
/> />
{:else if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)} {/if}
{#if allowBindings && filter.field && filter.valueType === "Binding"}
<slot name="binding" {filter} />
{:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} /> <Input disabled={filter.noValue} bind:value={filter.value} />
{:else if filter.type === "array" || (filter.type === "options" && filter.operator === "oneOf")} {:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
<Multiselect <Multiselect
disabled={filter.noValue} disabled={filter.noValue}
options={getFieldOptions(filter.field)} options={getFieldOptions(filter.field)}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "options"} {:else if filter.type === FieldType.OPTIONS}
<Combobox <Combobox
disabled={filter.noValue} disabled={filter.noValue}
options={getFieldOptions(filter.field)} options={getFieldOptions(filter.field)}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "boolean"} {:else if filter.type === FieldType.BOOLEAN}
<Combobox <Combobox
disabled={filter.noValue} disabled={filter.noValue}
options={[ options={[
@ -289,7 +268,7 @@
]} ]}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "datetime"} {:else if filter.type === FieldType.DATETIME}
<DatePicker <DatePicker
disabled={filter.noValue} disabled={filter.noValue}
enableTime={!getSchema(filter)?.dateOnly} enableTime={!getSchema(filter)?.dateOnly}
@ -306,32 +285,36 @@
disabled={filter.noValue} disabled={filter.noValue}
/> />
{:else} {:else}
<DrawerBindableInput disabled /> <Input disabled />
{/if} {/if}
<Icon <div class="controls">
name="Duplicate" <Icon
hoverable name="Duplicate"
size="S" hoverable
on:click={() => duplicateFilter(filter.id)} size="S"
/> on:click={() => duplicateFilter(filter.id)}
<Icon />
name="Close" <Icon
hoverable name="Close"
size="S" hoverable
on:click={() => removeFilter(filter.id)} size="S"
/> on:click={() => removeFilter(filter.id)}
/>
</div>
{/each} {/each}
</div> </div>
</div> </div>
{/if} {/if}
<div class="bottom"> <div>
<Button icon="AddCircle" size="M" secondary on:click={addFilter}> <Button icon="AddCircle" size="M" secondary on:click={addFilter}>
Add filter Add filter
</Button> </Button>
</div> </div>
</Layout> {:else}
</div> <Body size="S">None of the table column can be used for filtering.</Body>
</DrawerContent> {/if}
</Layout>
</div>
<style> <style>
.container { .container {
@ -339,22 +322,42 @@
max-width: 1000px; max-width: 1000px;
margin: 0 auto; margin: 0 auto;
} }
.fields { .fields {
display: grid; display: grid;
column-gap: var(--spacing-l); column-gap: var(--spacing-l);
row-gap: var(--spacing-s); row-gap: var(--spacing-s);
align-items: center; align-items: center;
grid-template-columns: 1fr 120px 1fr auto auto;
}
.fields.with-bindings {
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px; grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
} }
.controls {
display: contents;
}
.container.mobile .fields {
grid-template-columns: 1fr;
}
.container.mobile .controls {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s) 0;
gap: var(--spacing-s);
}
.filter-label { .filter-label {
margin-bottom: var(--spacing-s); margin-bottom: var(--spacing-s);
} }
.bottom { .behaviour-filters {
display: flex; display: grid;
justify-content: space-between; column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
align-items: center; align-items: center;
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
} }
</style> </style>

@@ -1,9 +1,9 @@
 <script>
   import { Select, Multiselect } from "@budibase/bbui"
   import { fetchData } from "@budibase/frontend-core"
+  import { createAPIClient } from "../api"
-  import { API } from "api"
+  export let API = createAPIClient()
   export let value = null
   export let disabled
   export let multiselect = false

View File

@ -61,14 +61,6 @@
} }
} }
const deleteAttachments = async fileList => {
try {
return await API.deleteBuilderAttachments(fileList)
} catch (error) {
return []
}
}
onMount(() => { onMount(() => {
api = { api = {
focus: () => open(), focus: () => open(),
@ -101,7 +93,6 @@
on:change={e => onChange(e.detail)} on:change={e => onChange(e.detail)}
maximum={maximum || schema.constraints?.length?.maximum} maximum={maximum || schema.constraints?.length?.maximum}
{processFiles} {processFiles}
{deleteAttachments}
{handleFileTooLarge} {handleFileTooLarge}
/> />
</div> </div>

View File

@ -6,3 +6,4 @@ export { default as UserAvatars } from "./UserAvatars.svelte"
export { default as Updating } from "./Updating.svelte" export { default as Updating } from "./Updating.svelte"
export { Grid } from "./grid" export { Grid } from "./grid"
export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte" export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte"
export { default as FilterBuilder } from "./FilterBuilder.svelte"

View File

@ -348,8 +348,7 @@ export default class DataFetch {
* Determine the feature flag for this datasource definition * Determine the feature flag for this datasource definition
* @param definition * @param definition
*/ */
// eslint-disable-next-line no-unused-vars determineFeatureFlags(_definition) {
determineFeatureFlags(definition) {
return { return {
supportsSearch: false, supportsSearch: false,
supportsSort: false, supportsSort: false,

@ -1 +1 @@
Subproject commit ef186d00241f96037f9fd34d7a3826041977ab3a Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c

View File

@ -125,7 +125,7 @@
"@babel/preset-env": "7.16.11", "@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71", "@swc/core": "1.3.71",
"@swc/jest": "0.2.27", "@swc/jest": "0.2.27",
"@types/archiver": "^6.0.2", "@types/archiver": "6.0.2",
"@types/global-agent": "2.1.1", "@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5", "@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.5", "@types/jest": "29.5.5",

View File

@ -4,6 +4,7 @@ services:
# user: sa # user: sa
# database: master # database: master
mssql: mssql:
# platform: linux/amd64
image: bb/mssql image: bb/mssql
build: build:
context: . context: .

View File

@ -76,7 +76,7 @@ function writeFile(output: any, filename: string) {
console.log(`Wrote spec to ${path}`) console.log(`Wrote spec to ${path}`)
return path return path
} catch (err) { } catch (err) {
console.error(err) console.error("Error writing spec file", err)
} }
} }

View File

@ -320,6 +320,7 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
"theme", "theme",
"customTheme", "customTheme",
"icon", "icon",
"snippets",
] ]
keys.forEach(key => { keys.forEach(key => {
if (existing[key]) { if (existing[key]) {

View File

@ -36,7 +36,6 @@ import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates" import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import AliasTables from "./alias"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import env from "../../../environment" import env from "../../../environment"
@ -120,6 +119,9 @@ async function removeManyToManyRelationships(
endpoint: getEndpoint(tableId, Operation.DELETE), endpoint: getEndpoint(tableId, Operation.DELETE),
body: { [colName]: null }, body: { [colName]: null },
filters, filters,
meta: {
table,
},
}) })
} else { } else {
return [] return []
@ -134,6 +136,9 @@ async function removeOneToManyRelationships(rowId: string, table: Table) {
return getDatasourceAndQuery({ return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE), endpoint: getEndpoint(tableId, Operation.UPDATE),
filters, filters,
meta: {
table,
},
}) })
} else { } else {
return [] return []
@ -249,6 +254,9 @@ export class ExternalRequest<T extends Operation> {
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ), endpoint: getEndpoint(table._id!, Operation.READ),
filters: buildFilters(rowId, {}, table), filters: buildFilters(rowId, {}, table),
meta: {
table,
},
}) })
if (Array.isArray(response) && response.length > 0) { if (Array.isArray(response) && response.length > 0) {
return response[0] return response[0]
@ -366,36 +374,45 @@ export class ExternalRequest<T extends Operation> {
) { ) {
continue continue
} }
let tableId: string | undefined, let relatedTableId: string | undefined,
lookupField: string | undefined, lookupField: string | undefined,
fieldName: string | undefined fieldName: string | undefined
if (isManyToMany(field)) { if (isManyToMany(field)) {
tableId = field.through relatedTableId = field.through
lookupField = primaryKey lookupField = primaryKey
fieldName = field.throughTo || primaryKey fieldName = field.throughTo || primaryKey
} else if (isManyToOne(field)) { } else if (isManyToOne(field)) {
tableId = field.tableId relatedTableId = field.tableId
lookupField = field.foreignKey lookupField = field.foreignKey
fieldName = field.fieldName fieldName = field.fieldName
} }
if (!tableId || !lookupField || !fieldName) { if (!relatedTableId || !lookupField || !fieldName) {
throw new Error( throw new Error(
"Unable to lookup relationships - undefined column properties." "Unable to lookup relationships - undefined column properties."
) )
} }
const { tableName: relatedTableName } = breakExternalTableId(tableId) const { tableName: relatedTableName } =
breakExternalTableId(relatedTableId)
// @ts-ignore // @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0] const linkPrimaryKey = this.tables[relatedTableName].primary[0]
if (!lookupField || !row[lookupField]) { if (!lookupField || !row[lookupField]) {
continue continue
} }
const endpoint = getEndpoint(relatedTableId, Operation.READ)
const relatedTable = this.tables[endpoint.entityId]
if (!relatedTable) {
throw new Error("unable to find related table")
}
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.READ), endpoint: endpoint,
filters: { filters: {
equal: { equal: {
[fieldName]: row[lookupField], [fieldName]: row[lookupField],
}, },
}, },
meta: {
table: relatedTable,
},
}) })
// this is the response from knex if no rows found // this is the response from knex if no rows found
const rows: Row[] = const rows: Row[] =
@ -403,7 +420,11 @@ export class ExternalRequest<T extends Operation> {
const storeTo = isManyToMany(field) const storeTo = isManyToMany(field)
? field.throughFrom || linkPrimaryKey ? field.throughFrom || linkPrimaryKey
: fieldName : fieldName
related[storeTo] = { rows, isMany: isManyToMany(field), tableId } related[storeTo] = {
rows,
isMany: isManyToMany(field),
tableId: relatedTableId,
}
} }
return related return related
} }
@ -471,6 +492,9 @@ export class ExternalRequest<T extends Operation> {
// if we're doing many relationships then we're writing, only one response // if we're doing many relationships then we're writing, only one response
body, body,
filters: buildFilters(id, {}, linkTable), filters: buildFilters(id, {}, linkTable),
meta: {
table: linkTable,
},
}) })
) )
} else { } else {
@ -618,7 +642,7 @@ export class ExternalRequest<T extends Operation> {
if (env.SQL_ALIASING_DISABLE) { if (env.SQL_ALIASING_DISABLE) {
response = await getDatasourceAndQuery(json) response = await getDatasourceAndQuery(json)
} else { } else {
const aliasing = new AliasTables(Object.keys(this.tables)) const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
response = await aliasing.queryWithAliasing(json) response = await aliasing.queryWithAliasing(json)
} }

View File

@ -62,12 +62,12 @@ export function basicProcessing({
row, row,
table, table,
isLinked, isLinked,
internal, sqs,
}: { }: {
row: Row row: Row
table: Table table: Table
isLinked: boolean isLinked: boolean
internal?: boolean sqs?: boolean
}): Row { }): Row {
const thisRow: Row = {} const thisRow: Row = {}
// filter the row down to what is actually the row (not joined) // filter the row down to what is actually the row (not joined)
@ -84,12 +84,13 @@ export function basicProcessing({
thisRow[fieldName] = value thisRow[fieldName] = value
} }
} }
if (!internal) { if (!sqs) {
thisRow._id = generateIdForRow(row, table, isLinked) thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id thisRow.tableId = table._id
thisRow._rev = "rev" thisRow._rev = "rev"
} else { } else {
for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) { const columns = Object.keys(table.schema)
for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({ thisRow[internalColumn] = extractFieldValue({
row, row,
tableName: table._id!, tableName: table._id!,

View File

@ -51,11 +51,11 @@ export async function updateRelationshipColumns(
continue continue
} }
let linked = await basicProcessing({ let linked = basicProcessing({
row, row,
table: linkedTable, table: linkedTable,
isLinked: true, isLinked: true,
internal: opts?.sqs, sqs: opts?.sqs,
}) })
if (!linked._id) { if (!linked._id) {
continue continue

View File

@ -132,6 +132,7 @@ export async function sqlOutputProcessing(
let rowId = row._id let rowId = row._id
if (opts?.sqs) { if (opts?.sqs) {
rowId = getInternalRowId(row, table) rowId = getInternalRowId(row, table)
row._id = rowId
} else if (!rowId) { } else if (!rowId) {
rowId = generateIdForRow(row, table) rowId = generateIdForRow(row, table)
row._id = rowId row._id = rowId
@ -153,7 +154,7 @@ export async function sqlOutputProcessing(
row, row,
table, table,
isLinked: false, isLinked: false,
internal: opts?.sqs, sqs: opts?.sqs,
}), }),
table table
) )
@ -167,7 +168,8 @@ export async function sqlOutputProcessing(
tables, tables,
row, row,
finalRows, finalRows,
relationships relationships,
opts
) )
} }

View File

@ -127,13 +127,6 @@ export const uploadFile = async function (
) )
} }
export const deleteObjects = async function (ctx: Ctx) {
ctx.body = await objectStore.deleteFiles(
ObjectStoreBuckets.APPS,
ctx.request.body.keys
)
}
const requiresMigration = async (ctx: Ctx) => { const requiresMigration = async (ctx: Ctx) => {
const appId = context.getAppId() const appId = context.getAppId()
if (!appId) { if (!appId) {

View File

@ -22,6 +22,7 @@ export async function makeTableRequest(
operation, operation,
}, },
meta: { meta: {
table,
tables, tables,
}, },
table, table,

View File

@ -31,7 +31,6 @@ export async function save(
renaming?: RenameColumn renaming?: RenameColumn
) { ) {
const inputs = ctx.request.body const inputs = ctx.request.body
const adding = inputs?._add
// can't do this right now // can't do this right now
delete inputs.rows delete inputs.rows
const tableId = ctx.request.body._id const tableId = ctx.request.body._id
@ -44,7 +43,7 @@ export async function save(
const { datasource, table } = await sdk.tables.external.save( const { datasource, table } = await sdk.tables.external.save(
datasourceId!, datasourceId!,
inputs, inputs,
{ tableId, renaming, adding } { tableId, renaming }
) )
builderSocket?.emitDatasourceUpdate(ctx, datasource) builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table return table

View File

@ -77,11 +77,6 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const renaming = ctx.request.body._rename const renaming = ctx.request.body._rename
const api = pickApi({ table }) const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming) let savedTable = await api.save(ctx, renaming)
if (!table._id) { if (!table._id) {
savedTable = sdk.tables.enrichViewSchemas(savedTable) savedTable = sdk.tables.enrichViewSchemas(savedTable)

View File

@ -16,7 +16,7 @@ export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>, ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn renaming?: RenameColumn
) { ) {
const { rows, ...rest } = ctx.request.body const { _rename, rows, ...rest } = ctx.request.body
let tableToSave: Table = { let tableToSave: Table = {
_id: generateTableID(), _id: generateTableID(),
...rest, ...rest,

View File

@ -32,11 +32,6 @@ router
.get("/builder/:file*", controller.serveBuilder) .get("/builder/:file*", controller.serveBuilder)
.get("/api/assets/client", controller.serveClientLibrary) .get("/api/assets/client", controller.serveClientLibrary)
.post("/api/attachments/process", authorized(BUILDER), controller.uploadFile) .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
.post(
"/api/attachments/delete",
authorized(BUILDER),
controller.deleteObjects
)
.post("/api/beta/:feature", controller.toggleBetaUiFeature) .post("/api/beta/:feature", controller.toggleBetaUiFeature)
.post( .post(
"/api/attachments/:tableId/upload", "/api/attachments/:tableId/upload",
@ -44,12 +39,6 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
controller.uploadFile controller.uploadFile
) )
.post(
"/api/attachments/:tableId/delete",
paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
controller.deleteObjects
)
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview) .get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/app/:appUrl/:path*", controller.serveApp) .get("/app/:appUrl/:path*", controller.serveApp)
.get("/:appId/:path*", controller.serveApp) .get("/:appId/:path*", controller.serveApp)

View File

@ -6,7 +6,19 @@ import sdk from "../../../sdk"
import tk from "timekeeper" import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview, SourceName } from "@budibase/types" import {
Datasource,
FieldSchema,
FieldSubtype,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
TableSchema,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
@ -223,4 +235,152 @@ describe("/datasources", () => {
}) })
}) })
}) })
describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
})
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR
| FieldType.LONGFORM
| FieldType.OPTIONS
| FieldType.DATETIME
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.FORMULA
| FieldType.BIGINT
| FieldType.BB_REFERENCE
| FieldType.LINK
| FieldType.ARRAY
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),
_rev: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
}) })

View File

@ -4,6 +4,7 @@ import {
Query, Query,
QueryPreview, QueryPreview,
SourceName, SourceName,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import * as setup from "../utilities" import * as setup from "../utilities"
import { import {
@ -740,12 +741,25 @@ describe.each(
}) })
describe("query through datasource", () => { describe("query through datasource", () => {
it("should be able to query a pg datasource", async () => { it("should be able to query the datasource", async () => {
const entityId = "test_table"
await config.api.datasource.update({
...datasource,
entities: {
[entityId]: {
name: entityId,
schema: {},
type: "table",
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},
},
})
const res = await config.api.datasource.query({ const res = await config.api.datasource.query({
endpoint: { endpoint: {
datasourceId: datasource._id!, datasourceId: datasource._id!,
operation: Operation.READ, operation: Operation.READ,
entityId: "test_table", entityId,
}, },
resource: { resource: {
fields: ["id", "name"], fields: ["id", "name"],

View File

@ -26,6 +26,7 @@ describe.each([
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/api/:sourceId/search (%s)", (name, dsProvider) => { ])("/api/:sourceId/search (%s)", (name, dsProvider) => {
const isSqs = name === "internal-sqs" const isSqs = name === "internal-sqs"
const isInternal = name === "internal"
const config = setup.getConfig() const config = setup.getConfig()
let envCleanup: (() => void) | undefined let envCleanup: (() => void) | undefined
@ -336,6 +337,20 @@ describe.each([
expectQuery({ expectQuery({
range: { age: { low: 5, high: 9 } }, range: { age: { low: 5, high: 9 } },
}).toFindNothing()) }).toFindNothing())
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { age: { low: 5 } },
}).toContainExactly([{ age: 10 }]))
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { age: { high: 5 } },
}).toContainExactly([{ age: 1 }]))
}) })
describe("sort", () => { describe("sort", () => {
@ -440,6 +455,20 @@ describe.each([
expectQuery({ expectQuery({
range: { dob: { low: JAN_5TH, high: JAN_9TH } }, range: { dob: { low: JAN_5TH, high: JAN_9TH } },
}).toFindNothing()) }).toFindNothing())
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { dob: { low: JAN_5TH } },
}).toContainExactly([{ dob: JAN_10TH }]))
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { dob: { high: JAN_5TH } },
}).toContainExactly([{ dob: JAN_1ST }]))
}) })
describe("sort", () => { describe("sort", () => {
@ -476,4 +505,174 @@ describe.each([
}) })
}) })
}) })
describe("array of strings", () => {
beforeAll(async () => {
await createTable({
numbers: {
name: "numbers",
type: FieldType.ARRAY,
constraints: { inclusion: ["one", "two", "three"] },
},
})
await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
})
describe("contains", () => {
it("successfully finds a row", () =>
expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([
{ numbers: ["one", "two"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({ contains: { numbers: ["none"] } }).toFindNothing())
it("fails to find row containing all", () =>
expectQuery({
contains: { numbers: ["one", "two", "three"] },
}).toFindNothing())
it("finds all with empty list", () =>
expectQuery({ contains: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
describe("notContains", () => {
it("successfully finds a row", () =>
expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([
{ numbers: ["three"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({
notContains: { numbers: ["one", "two", "three"] },
}).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
it("finds all with empty list", () =>
expectQuery({ notContains: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
describe("containsAny", () => {
it("successfully finds rows", () =>
expectQuery({
containsAny: { numbers: ["one", "two", "three"] },
}).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing())
it("finds all with empty list", () =>
expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
})
describe("bigints", () => {
const SMALL = "1"
const MEDIUM = "10000000"
// Our bigints are int64s in most datasources.
const BIG = "9223372036854775807"
beforeAll(async () => {
await createTable({
num: { name: "num", type: FieldType.BIGINT },
})
await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
})
describe("equal", () => {
it("successfully finds a row", () =>
expectQuery({ equal: { num: SMALL } }).toContainExactly([
{ num: SMALL },
]))
it("successfully finds a big value", () =>
expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { num: "2" } }).toFindNothing())
})
describe("notEqual", () => {
it("successfully finds a row", () =>
expectQuery({ notEqual: { num: SMALL } }).toContainExactly([
{ num: MEDIUM },
{ num: BIG },
]))
it("fails to find nonexistent row", () =>
expectQuery({ notEqual: { num: 10 } }).toContainExactly([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([
{ num: SMALL },
]))
it("successfully finds all rows", () =>
expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { num: [2] } }).toFindNothing())
})
// Range searches against bigints don't seem to work at all in Lucene, and I
// couldn't figure out why. Given that we're replacing Lucene with SQS,
// we've decided not to spend time on it.
!isInternal &&
describe("range", () => {
it("successfully finds a row", () =>
expectQuery({
range: { num: { low: SMALL, high: "5" } },
}).toContainExactly([{ num: SMALL }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { num: { low: SMALL, high: MEDIUM } },
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { num: { low: MEDIUM, high: BIG } },
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
it("successfully finds no rows", () =>
expectQuery({
range: { num: { low: "5", high: "5" } },
}).toFindNothing())
it("can search using just a low value", () =>
expectQuery({
range: { num: { low: MEDIUM } },
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
it("can search using just a high value", () =>
expectQuery({
range: { num: { high: MEDIUM } },
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
})
})
}) })

View File

@ -34,7 +34,7 @@ describe.each([
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => { ])("/tables (%s)", (_, dsProvider) => {
let isInternal: boolean const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined let datasource: Datasource | undefined
let config = setup.getConfig() let config = setup.getConfig()
@ -44,9 +44,6 @@ describe.each([
await config.init() await config.init()
if (dsProvider) { if (dsProvider) {
datasource = await config.api.datasource.create(await dsProvider) datasource = await config.api.datasource.create(await dsProvider)
isInternal = false
} else {
isInternal = true
} }
}) })
@ -219,9 +216,6 @@ describe.each([
it("should add a new column for an internal DB table", async () => { it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = { const saveTableRequest: SaveTableRequest = {
_add: {
name: "NEW_COLUMN",
},
...basicTable(), ...basicTable(),
} }
@ -235,7 +229,6 @@ describe.each([
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {}, views: {},
} }
delete expectedResponse._add
expect(response).toEqual(expectedResponse) expect(response).toEqual(expectedResponse)
}) })
}) })

View File

@ -8,7 +8,7 @@ import {
PermissionLevel, PermissionLevel,
QuotaUsageType, QuotaUsageType,
SaveTableRequest, SaveTableRequest,
SearchQueryOperators, SearchFilterOperator,
SortOrder, SortOrder,
SortType, SortType,
StaticQuotaName, StaticQuotaName,
@ -132,7 +132,7 @@ describe.each([
primaryDisplay: generator.word(), primaryDisplay: generator.word(),
query: [ query: [
{ {
operator: SearchQueryOperators.EQUAL, operator: SearchFilterOperator.EQUAL,
field: "field", field: "field",
value: "value", value: "value",
}, },
@ -236,7 +236,7 @@ describe.each([
...view, ...view,
query: [ query: [
{ {
operator: SearchQueryOperators.EQUAL, operator: SearchFilterOperator.EQUAL,
field: "newField", field: "newField",
value: "thatValue", value: "thatValue",
}, },
@ -263,7 +263,7 @@ describe.each([
primaryDisplay: generator.word(), primaryDisplay: generator.word(),
query: [ query: [
{ {
operator: SearchQueryOperators.EQUAL, operator: SearchFilterOperator.EQUAL,
field: generator.word(), field: generator.word(),
value: generator.word(), value: generator.word(),
}, },
@ -341,7 +341,7 @@ describe.each([
tableId: generator.guid(), tableId: generator.guid(),
query: [ query: [
{ {
operator: SearchQueryOperators.EQUAL, operator: SearchFilterOperator.EQUAL,
field: "newField", field: "newField",
value: "thatValue", value: "thatValue",
}, },
@ -671,7 +671,7 @@ describe.each([
name: generator.guid(), name: generator.guid(),
query: [ query: [
{ {
operator: SearchQueryOperators.EQUAL, operator: SearchFilterOperator.EQUAL,
field: "two", field: "two",
value: "bar2", value: "bar2",
}, },

View File

@ -62,7 +62,6 @@ export const definition: AutomationStepSchema = {
} }
export async function run({ inputs }: AutomationStepInput) { export async function run({ inputs }: AutomationStepInput) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { automationId, ...fieldParams } = inputs.automation const { automationId, ...fieldParams } = inputs.automation
if (await features.isTriggerAutomationRunEnabled()) { if (await features.isTriggerAutomationRunEnabled()) {

View File

@ -20,6 +20,7 @@ export enum FilterTypes {
NOT_EMPTY = "notEmpty", NOT_EMPTY = "notEmpty",
CONTAINS = "contains", CONTAINS = "contains",
NOT_CONTAINS = "notContains", NOT_CONTAINS = "notContains",
CONTAINS_ANY = "containsAny",
ONE_OF = "oneOf", ONE_OF = "oneOf",
} }
@ -30,6 +31,7 @@ export const NoEmptyFilterStrings = [
FilterTypes.NOT_EQUAL, FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS, FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS, FilterTypes.NOT_CONTAINS,
FilterTypes.CONTAINS_ANY,
] ]
export const CanSwitchTypes = [ export const CanSwitchTypes = [

View File

@ -40,6 +40,7 @@ export const USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${dbCore.Inte
export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}` export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}`
export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}` export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}` export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}`
export const SQS_DATASOURCE_INTERNAL = "internal"
export const ViewName = dbCore.ViewName export const ViewName = dbCore.ViewName
export const InternalTables = dbCore.InternalTable export const InternalTables = dbCore.InternalTable
export const UNICODE_MAX = dbCore.UNICODE_MAX export const UNICODE_MAX = dbCore.UNICODE_MAX

View File

@ -28,6 +28,7 @@ const DEFAULTS = {
PLUGINS_DIR: "/plugins", PLUGINS_DIR: "/plugins",
FORKED_PROCESS_NAME: "main", FORKED_PROCESS_NAME: "main",
JS_RUNNER_MEMORY_LIMIT: 64, JS_RUNNER_MEMORY_LIMIT: 64,
COUCH_DB_SQL_URL: "http://localhost:4006",
} }
const QUERY_THREAD_TIMEOUT = const QUERY_THREAD_TIMEOUT =
@ -39,6 +40,7 @@ const environment = {
// important - prefer app port to generic port // important - prefer app port to generic port
PORT: process.env.APP_PORT || process.env.PORT, PORT: process.env.APP_PORT || process.env.PORT,
COUCH_DB_URL: process.env.COUCH_DB_URL, COUCH_DB_URL: process.env.COUCH_DB_URL,
COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || DEFAULTS.COUCH_DB_SQL_URL,
MINIO_URL: process.env.MINIO_URL, MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL, WORKER_URL: process.env.WORKER_URL,
AWS_REGION: process.env.AWS_REGION, AWS_REGION: process.env.AWS_REGION,

View File

@ -1,11 +1,8 @@
import { features } from "@budibase/backend-core" import { features } from "@budibase/backend-core"
import env from "./environment" import env from "./environment"
// eslint-disable-next-line no-unused-vars
enum AppFeature { enum AppFeature {
// eslint-disable-next-line no-unused-vars
API = "api", API = "api",
// eslint-disable-next-line no-unused-vars
AUTOMATIONS = "automations", AUTOMATIONS = "automations",
} }

View File

@ -16,7 +16,6 @@ import {
getDatasource, getDatasource,
rawQuery, rawQuery,
} from "../integrations/tests/utils" } from "../integrations/tests/utils"
import { builderSocket } from "../websockets"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
// @ts-ignore // @ts-ignore
fetch.mockSearch() fetch.mockSearch()
@ -233,72 +232,6 @@ describe("mysql integrations", () => {
}) })
describe("POST /api/tables/", () => { describe("POST /api/tables/", () => {
const emitDatasourceUpdateMock = jest.fn()
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: uniqueTableName(),
sourceId: datasource._id!,
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
},
},
_add: {
name: "new_column",
},
}
jest
.spyOn(builderSocket!, "emitDatasourceUpdate")
.mockImplementation(emitDatasourceUpdateMock)
await makeRequest("post", "/api/tables/", addColumnToTable)
const expectedTable: TableRequest = {
...addColumnToTable,
schema: {
id: {
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
presence: false,
},
externalType: "int unsigned",
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
autocolumn: false,
constraints: {
presence: false,
},
externalType: "float(8,2)",
},
},
created: true,
_id: `${datasource._id}__${addColumnToTable.name}`,
}
delete expectedTable._add
expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1]
expect(emittedDatasource.entities![expectedTable.name]).toEqual(
expectedTable
)
})
it("will rename a column", async () => { it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable) await makeRequest("post", "/api/tables/", primaryMySqlTable)

View File

@ -2,6 +2,7 @@ import {
QueryJson, QueryJson,
Datasource, Datasource,
DatasourcePlusQueryResponse, DatasourcePlusQueryResponse,
RowOperations,
} from "@budibase/types" } from "@budibase/types"
import { getIntegration } from "../index" import { getIntegration } from "../index"
import sdk from "../../sdk" import sdk from "../../sdk"
@ -10,6 +11,17 @@ export async function makeExternalQuery(
datasource: Datasource, datasource: Datasource,
json: QueryJson json: QueryJson
): Promise<DatasourcePlusQueryResponse> { ): Promise<DatasourcePlusQueryResponse> {
const entityId = json.endpoint.entityId,
tableName = json.meta.table.name,
tableId = json.meta.table._id
// case found during testing - make sure this doesn't happen again
if (
RowOperations.includes(json.endpoint.operation) &&
entityId !== tableId &&
entityId !== tableName
) {
throw new Error("Entity ID and table metadata do not align")
}
datasource = await sdk.datasources.enrich(datasource) datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source) const Integration = await getIntegration(datasource.source)
// query is the opinionated function // query is the opinionated function
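The check added above means every row-level QueryJson must now carry the table it targets in meta.table, and the endpoint's entityId has to match that table by name or by _id. A minimal sketch of a payload that satisfies the guard, assuming a hypothetical datasource and table (all identifiers below are illustrative, not taken from this commit):

import { Operation } from "@budibase/types"

// Illustrative table - in real code this comes from the datasource's entities.
const table = { _id: "datasource_plus_abc123__people", name: "people", primary: ["id"] }

const json = {
  endpoint: {
    datasourceId: "datasource_plus_abc123",
    entityId: table.name, // must equal table.name or table._id for row operations
    operation: Operation.READ,
  },
  filters: { equal: { name: "Alice" } },
  meta: {
    table, // now required so the integration layer can build schema-aware queries
  },
}

// makeExternalQuery(datasource, json) would throw "Entity ID and table metadata
// do not align" if entityId pointed at a different table.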

View File

@ -6,6 +6,7 @@ import {
SqlClient, SqlClient,
isValidFilter, isValidFilter,
getNativeSql, getNativeSql,
SqlStatements,
} from "../utils" } from "../utils"
import SqlTableQueryBuilder from "./sqlTable" import SqlTableQueryBuilder from "./sqlTable"
import { import {
@ -22,6 +23,8 @@ import {
SortDirection, SortDirection,
SqlQueryBinding, SqlQueryBinding,
Table, Table,
TableSourceType,
INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types" } from "@budibase/types"
import environment from "../../environment" import environment from "../../environment"
@ -135,6 +138,18 @@ function generateSelectStatement(
}) })
} }
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
class InternalBuilder { class InternalBuilder {
private readonly client: string private readonly client: string
@ -146,10 +161,20 @@ class InternalBuilder {
addFilters( addFilters(
query: Knex.QueryBuilder, query: Knex.QueryBuilder,
filters: SearchFilters | undefined, filters: SearchFilters | undefined,
tableName: string, table: Table,
opts: { aliases?: Record<string, string>; relationship?: boolean } opts: { aliases?: Record<string, string>; relationship?: boolean }
): Knex.QueryBuilder { ): Knex.QueryBuilder {
function getTableName(name: string) { if (!filters) {
return query
}
filters = parseFilters(filters)
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
const sqlStatements = new SqlStatements(this.client, table, { allOr })
const tableName =
this.client === SqlClient.SQL_LITE ? table._id! : table.name
function getTableAlias(name: string) {
const alias = opts.aliases?.[name] const alias = opts.aliases?.[name]
return alias || name return alias || name
} }
@ -161,11 +186,11 @@ class InternalBuilder {
const updatedKey = dbCore.removeKeyNumbering(key) const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".") const isRelationshipField = updatedKey.includes(".")
if (!opts.relationship && !isRelationshipField) { if (!opts.relationship && !isRelationshipField) {
fn(`${getTableName(tableName)}.${updatedKey}`, value) fn(`${getTableAlias(tableName)}.${updatedKey}`, value)
} }
if (opts.relationship && isRelationshipField) { if (opts.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".") const [filterTableName, property] = updatedKey.split(".")
fn(`${getTableName(filterTableName)}.${property}`, value) fn(`${getTableAlias(filterTableName)}.${property}`, value)
} }
} }
} }
@ -233,18 +258,17 @@ class InternalBuilder {
(statement ? andOr : "") + (statement ? andOr : "") +
`LOWER(${likeKey(this.client, key)}) LIKE ?` `LOWER(${likeKey(this.client, key)}) LIKE ?`
} }
if (statement === "") {
return
}
// @ts-ignore // @ts-ignore
query = query[rawFnc](`${not}(${statement})`, value) query = query[rawFnc](`${not}(${statement})`, value)
}) })
} }
} }
if (!filters) {
return query
}
filters = parseFilters(filters)
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
if (filters.oneOf) { if (filters.oneOf) {
iterate(filters.oneOf, (key, array) => { iterate(filters.oneOf, (key, array) => {
const fnc = allOr ? "orWhereIn" : "whereIn" const fnc = allOr ? "orWhereIn" : "whereIn"
@ -287,17 +311,11 @@ class InternalBuilder {
const lowValid = isValidFilter(value.low), const lowValid = isValidFilter(value.low),
highValid = isValidFilter(value.high) highValid = isValidFilter(value.high)
if (lowValid && highValid) { if (lowValid && highValid) {
// Use a between operator if we have 2 valid range values query = sqlStatements.between(query, key, value.low, value.high)
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
} else if (lowValid) { } else if (lowValid) {
// Use just a single greater than operator if we only have a low query = sqlStatements.lte(query, key, value.low)
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, ">", value.low)
} else if (highValid) { } else if (highValid) {
// Use just a single less than operator if we only have a high query = sqlStatements.gte(query, key, value.high)
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "<", value.high)
} }
}) })
} }
@ -340,10 +358,11 @@ class InternalBuilder {
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort, paginate } = json let { sort, paginate } = json
const table = json.meta?.table const table = json.meta.table
const tableName = getTableName(table)
const aliases = json.tableAliases const aliases = json.tableAliases
const aliased = const aliased =
table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
if (sort && Object.keys(sort || {}).length > 0) { if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) { for (let [key, value] of Object.entries(sort)) {
const direction = const direction =
@ -453,14 +472,13 @@ class InternalBuilder {
): Knex.QueryBuilder { ): Knex.QueryBuilder {
const tableName = endpoint.entityId const tableName = endpoint.entityId
const tableAlias = aliases?.[tableName] const tableAlias = aliases?.[tableName]
let table: string | Record<string, string> = tableName
if (tableAlias) { const query = knex(
table = { [tableAlias]: tableName } this.tableNameWithSchema(tableName, {
} alias: tableAlias,
let query = knex(table) schema: endpoint.schema,
if (endpoint.schema) { })
query = query.withSchema(endpoint.schema) )
}
return query return query
} }
@ -527,7 +545,7 @@ class InternalBuilder {
if (foundOffset) { if (foundOffset) {
query = query.offset(foundOffset) query = query.offset(foundOffset)
} }
query = this.addFilters(query, filters, tableName, { query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases, aliases: tableAliases,
}) })
// add sorting to pre-query // add sorting to pre-query
@ -548,7 +566,7 @@ class InternalBuilder {
endpoint.schema, endpoint.schema,
tableAliases tableAliases
) )
return this.addFilters(query, filters, tableName, { return this.addFilters(query, filters, json.meta.table, {
relationship: true, relationship: true,
aliases: tableAliases, aliases: tableAliases,
}) })
@ -558,7 +576,7 @@ class InternalBuilder {
const { endpoint, body, filters, tableAliases } = json const { endpoint, body, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases) let query = this.knexWithAlias(knex, endpoint, tableAliases)
const parsedBody = parseBody(body) const parsedBody = parseBody(body)
query = this.addFilters(query, filters, endpoint.entityId, { query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases, aliases: tableAliases,
}) })
// mysql can't use returning // mysql can't use returning
@ -572,7 +590,7 @@ class InternalBuilder {
delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, filters, tableAliases } = json const { endpoint, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases) let query = this.knexWithAlias(knex, endpoint, tableAliases)
query = this.addFilters(query, filters, endpoint.entityId, { query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases, aliases: tableAliases,
}) })
// mysql can't use returning // mysql can't use returning
@ -664,7 +682,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// when creating if an ID has been inserted need to make sure // when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row // the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) { checkLookupKeys(id: any, json: QueryJson) {
if (!id || !json.meta?.table || !json.meta.table.primary) { if (!id || !json.meta.table || !json.meta.table.primary) {
return json return json
} }
const primaryKey = json.meta.table.primary?.[0] const primaryKey = json.meta.table.primary?.[0]
@ -724,12 +742,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
results: Record<string, any>[], results: Record<string, any>[],
aliases?: Record<string, string> aliases?: Record<string, string>
): Record<string, any>[] { ): Record<string, any>[] {
const tableName = getTableName(table)
for (const [name, field] of Object.entries(table.schema)) { for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) { if (!this._isJsonColumn(field)) {
continue continue
} }
const tableName = aliases?.[table.name] || table.name const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
const fullName = `${tableName}.${name}` const fullName = `${aliasedTableName}.${name}`
for (let row of results) { for (let row of results) {
if (typeof row[fullName] === "string") { if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName]) row[fullName] = JSON.parse(row[fullName])

View File

@ -1,19 +1,20 @@
import { Knex, knex } from "knex" import { Knex, knex } from "knex"
import { import {
RelationshipType,
FieldSubtype, FieldSubtype,
FieldType,
NumberFieldMetadata, NumberFieldMetadata,
Operation, Operation,
QueryJson, QueryJson,
RelationshipType,
RenameColumn, RenameColumn,
Table,
FieldType,
SqlQuery, SqlQuery,
Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { breakExternalTableId, getNativeSql, SqlClient } from "../utils" import { breakExternalTableId, getNativeSql, SqlClient } from "../utils"
import { utils } from "@budibase/shared-core"
import SchemaBuilder = Knex.SchemaBuilder import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder import CreateTableBuilder = Knex.CreateTableBuilder
import { utils } from "@budibase/shared-core"
function isIgnoredType(type: FieldType) { function isIgnoredType(type: FieldType) {
const ignored = [FieldType.LINK, FieldType.FORMULA] const ignored = [FieldType.LINK, FieldType.FORMULA]
@ -105,13 +106,13 @@ function generateSchema(
column.relationshipType !== RelationshipType.MANY_TO_MANY column.relationshipType !== RelationshipType.MANY_TO_MANY
) { ) {
if (!column.foreignKey || !column.tableId) { if (!column.foreignKey || !column.tableId) {
throw "Invalid relationship schema" throw new Error("Invalid relationship schema")
} }
const { tableName } = breakExternalTableId(column.tableId) const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore // @ts-ignore
const relatedTable = tables[tableName] const relatedTable = tables[tableName]
if (!relatedTable) { if (!relatedTable) {
throw "Referenced table doesn't exist" throw new Error("Referenced table doesn't exist")
} }
const relatedPrimary = relatedTable.primary[0] const relatedPrimary = relatedTable.primary[0]
const externalType = relatedTable.schema[relatedPrimary].externalType const externalType = relatedTable.schema[relatedPrimary].externalType
@ -209,15 +210,19 @@ class SqlTableQueryBuilder {
let query: Knex.SchemaBuilder let query: Knex.SchemaBuilder
if (!json.table || !json.meta || !json.meta.tables) { if (!json.table || !json.meta || !json.meta.tables) {
throw "Cannot execute without table being specified" throw new Error("Cannot execute without table being specified")
} }
if (json.table.sourceType === TableSourceType.INTERNAL) {
throw new Error("Cannot perform table actions for SQS.")
}
switch (this._operation(json)) { switch (this._operation(json)) {
case Operation.CREATE_TABLE: case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables) query = buildCreateTable(client, json.table, json.meta.tables)
break break
case Operation.UPDATE_TABLE: case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) { if (!json.meta || !json.meta.table) {
throw "Must specify old table for update" throw new Error("Must specify old table for update")
} }
// renameColumn does not work for MySQL, so return a raw query // renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) { if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
@ -264,7 +269,7 @@ class SqlTableQueryBuilder {
query = buildDeleteTable(client, json.table) query = buildDeleteTable(client, json.table)
break break
default: default:
throw "Table operation is of unknown type" throw new Error("Table operation is of unknown type")
} }
return getNativeSql(query) return getNativeSql(query)
} }

View File

@ -168,8 +168,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
return "" return ""
} }
// eslint-disable-next-line @typescript-eslint/no-unused-vars getStringConcat(_parts: string[]) {
getStringConcat(parts: string[]) {
return "" return ""
} }

View File

@ -378,7 +378,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
try { try {
await connection.close() await connection.close()
} catch (err) { } catch (err) {
console.error(err) console.error("Error connecting to Oracle", err)
} }
} }
} }

View File

@ -1,14 +1,27 @@
import { SqlClient } from "../utils" import { SqlClient } from "../utils"
import Sql from "../base/sql" import Sql from "../base/sql"
import { import {
FieldType,
Operation, Operation,
QueryJson, QueryJson,
TableSourceType,
Table, Table,
FieldType, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
const TABLE_NAME = "test" const TABLE_NAME = "test"
const TABLE: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {
id: {
name: "id",
type: FieldType.NUMBER,
},
},
name: TABLE_NAME,
primary: ["id"],
}
function endpoint(table: any, operation: any) { function endpoint(table: any, operation: any) {
return { return {
@ -25,6 +38,10 @@ function generateReadJson({
sort, sort,
paginate, paginate,
}: any = {}): QueryJson { }: any = {}): QueryJson {
const tableObj = { ...TABLE }
if (table) {
tableObj.name = table
}
return { return {
endpoint: endpoint(table || TABLE_NAME, "READ"), endpoint: endpoint(table || TABLE_NAME, "READ"),
resource: { resource: {
@ -34,14 +51,7 @@ function generateReadJson({
sort: sort || {}, sort: sort || {},
paginate: paginate || {}, paginate: paginate || {},
meta: { meta: {
table: { table: tableObj,
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {},
name: table || TABLE_NAME,
primary: ["id"],
} as any,
}, },
} }
} }
@ -49,6 +59,9 @@ function generateReadJson({
function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson { function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
return { return {
endpoint: endpoint(table, "CREATE"), endpoint: endpoint(table, "CREATE"),
meta: {
table: TABLE,
},
body, body,
} }
} }
@ -58,7 +71,15 @@ function generateUpdateJson({
body = {}, body = {},
filters = {}, filters = {},
meta = {}, meta = {},
}: {
table: string
body?: any
filters?: any
meta?: any
}): QueryJson { }): QueryJson {
if (!meta.table) {
meta.table = TABLE
}
return { return {
endpoint: endpoint(table, "UPDATE"), endpoint: endpoint(table, "UPDATE"),
filters, filters,
@ -70,6 +91,9 @@ function generateUpdateJson({
function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson { function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
return { return {
endpoint: endpoint(table, "DELETE"), endpoint: endpoint(table, "DELETE"),
meta: {
table: TABLE,
},
filters, filters,
} }
} }
@ -102,6 +126,9 @@ function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
}, },
], ],
extra: { idFilter: {} }, extra: { idFilter: {} },
meta: {
table: TABLE,
},
} }
} }
@ -136,6 +163,9 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) {
}, },
], ],
extra: { idFilter: {} }, extra: { idFilter: {} },
meta: {
table: TABLE,
},
} }
} }
@ -319,7 +349,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: [date, limit], bindings: [date, limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" > $1 limit $2) as "${TABLE_NAME}"`, sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`,
}) })
}) })
@ -338,7 +368,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: [date, limit], bindings: [date, limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" < $1 limit $2) as "${TABLE_NAME}"`, sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`,
}) })
}) })
@ -572,7 +602,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["2000-01-01 00:00:00", 500], bindings: ["2000-01-01 00:00:00", 500],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" > $1 limit $2) as "${TABLE_NAME}"`, sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" >= $1 limit $2) as "${TABLE_NAME}"`,
}) })
}) })
@ -591,7 +621,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["2010-01-01 00:00:00", 500], bindings: ["2010-01-01 00:00:00", 500],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" < $1 limit $2) as "${TABLE_NAME}"`, sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" <= $1 limit $2) as "${TABLE_NAME}"`,
}) })
}) })

View File

@ -4,12 +4,26 @@ import {
QueryJson, QueryJson,
SourceName, SourceName,
SqlQuery, SqlQuery,
Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { join } from "path" import { join } from "path"
import Sql from "../base/sql" import Sql from "../base/sql"
import { SqlClient } from "../utils" import { SqlClient } from "../utils"
import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import sdk from "../../sdk"
// this table doesn't strictly exist - it is a stub used by these tests
const TABLE: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {},
name: "tableName",
primary: ["id"],
}
const AliasTables = sdk.rows.AliasTables
function multiline(sql: string) { function multiline(sql: string) {
return sql.replace(/\n/g, "").replace(/ +/g, " ") return sql.replace(/\n/g, "").replace(/ +/g, " ")
@ -103,7 +117,8 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
const filters = queryJson.filters const filters = queryJson.filters
const notEqualsValue = Object.values(filters?.notEqual!)[0] const notEqualsValue = Object.values(filters?.notEqual!)[0]
const rangeValue = Object.values(filters?.range!)[0] const rangeValue: { high?: string | number; low?: string | number } =
Object.values(filters?.range!)[0]
const equalValue = Object.values(filters?.equal!)[0] const equalValue = Object.values(filters?.equal!)[0]
expect(query).toEqual({ expect(query).toEqual({
@ -220,6 +235,9 @@ describe("Captures of real examples", () => {
resource: { resource: {
fields, fields,
}, },
meta: {
table: TABLE,
},
} }
} }

View File

@ -0,0 +1,2 @@
export * from "./utils"
export { SqlStatements } from "./sqlStatements"

View File

@ -0,0 +1,80 @@
import { FieldType, Table, FieldSchema } from "@budibase/types"
import { SqlClient } from "./utils"
import { Knex } from "knex"
export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
constructor(
client: string,
table: Table,
{ allOr }: { allOr?: boolean } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
return this.table.schema[fieldName]
}
between(
query: Knex.QueryBuilder,
key: string,
low: number | string,
high: number | string
) {
// Use a between operator if we have 2 valid range values
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[low, high]
)
} else {
const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [low, high])
}
return query
}
lte(query: Knex.QueryBuilder, key: string, low: number | string) {
// Use a single greater-than-or-equal operator if we only have a low bound
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
low,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", low)
}
return query
}
gte(query: Knex.QueryBuilder, key: string, high: number | string) {
const field = this.getField(key)
// Use a single less-than-or-equal operator if we only have a high bound
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
high,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", high)
}
return query
}
}
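A short usage sketch of the helper above, assuming a knex instance and a Table shaped like the stubs used elsewhere in this commit (the table name, client string and bounds are illustrative, not part of the change):

import { knex } from "knex"
import { FieldType, Table, TableSourceType } from "@budibase/types"
// SqlStatements refers to the class defined above (re-exported from the new sql index)

const table: Table = {
  type: "table",
  name: "orders", // illustrative
  sourceId: "SOURCE_ID",
  sourceType: TableSourceType.EXTERNAL,
  primary: ["id"],
  schema: {
    total: { name: "total", type: FieldType.BIGINT },
  },
}

const db = knex({ client: "pg" })
const statements = new SqlStatements("pg", table, { allOr: false }) // "pg" i.e. SqlClient.POSTGRES

// Non-SQLite clients get a plain whereBetween; under SqlClient.SQL_LITE the
// bigint column would instead be CAST to INTEGER so the comparison is numeric.
let query = db("orders").select("*")
query = statements.between(query, "orders.total", "10", "100")
// -> select * from "orders" where "orders"."total" between ? and ?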

View File

@ -4,18 +4,40 @@ import {
   Datasource,
   FieldType,
   TableSourceType,
+  FieldSchema,
 } from "@budibase/types"
-import { DocumentType, SEPARATOR } from "../db/utils"
-import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants"
-import { helpers } from "@budibase/shared-core"
-import env from "../environment"
+import { DocumentType, SEPARATOR } from "../../db/utils"
+import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
+import { helpers, utils } from "@budibase/shared-core"
+import env from "../../environment"
 import { Knex } from "knex"

 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
 const ENCODED_SPACE = encodeURIComponent(" ")

-const SQL_NUMBER_TYPE_MAP = {
+type PrimitiveTypes =
+  | FieldType.STRING
+  | FieldType.NUMBER
+  | FieldType.BOOLEAN
+  | FieldType.DATETIME
+  | FieldType.JSON
+  | FieldType.BIGINT
+  | FieldType.OPTIONS
+
+function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
+  return [
+    FieldType.STRING,
+    FieldType.NUMBER,
+    FieldType.BOOLEAN,
+    FieldType.DATETIME,
+    FieldType.JSON,
+    FieldType.BIGINT,
+    FieldType.OPTIONS,
+  ].includes(type)
+}
+
+const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
   integer: FieldType.NUMBER,
   int: FieldType.NUMBER,
   decimal: FieldType.NUMBER,
@ -35,7 +57,7 @@ const SQL_NUMBER_TYPE_MAP = {
   smallmoney: FieldType.NUMBER,
 }

-const SQL_DATE_TYPE_MAP = {
+const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
   timestamp: FieldType.DATETIME,
   time: FieldType.DATETIME,
   datetime: FieldType.DATETIME,
@ -46,7 +68,7 @@ const SQL_DATE_TYPE_MAP = {
 const SQL_DATE_ONLY_TYPES = ["date"]
 const SQL_TIME_ONLY_TYPES = ["time"]

-const SQL_STRING_TYPE_MAP = {
+const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
   varchar: FieldType.STRING,
   char: FieldType.STRING,
   nchar: FieldType.STRING,
@ -58,22 +80,22 @@ const SQL_STRING_TYPE_MAP = {
   text: FieldType.STRING,
 }

-const SQL_BOOLEAN_TYPE_MAP = {
+const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
   boolean: FieldType.BOOLEAN,
   bit: FieldType.BOOLEAN,
   tinyint: FieldType.BOOLEAN,
 }

-const SQL_OPTIONS_TYPE_MAP = {
+const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
   "user-defined": FieldType.OPTIONS,
 }

-const SQL_MISC_TYPE_MAP = {
+const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
   json: FieldType.JSON,
   bigint: FieldType.BIGINT,
 }

-const SQL_TYPE_MAP = {
+const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
   ...SQL_NUMBER_TYPE_MAP,
   ...SQL_DATE_TYPE_MAP,
   ...SQL_STRING_TYPE_MAP,
@ -239,14 +261,14 @@ export function generateColumnDefinition(config: {
     constraints.inclusion = options
   }

-  const schema: any = {
+  const schema: FieldSchema = {
     type: foundType,
     externalType,
     autocolumn,
     name,
     constraints,
   }
-  if (foundType === FieldType.DATETIME) {
+  if (schema.type === FieldType.DATETIME) {
     schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
     schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
   }
@ -274,59 +296,6 @@ export function isIsoDateString(str: string) {
   return d.toISOString() === trimmedValue
 }
/**
* This function will determine whether a column is a relationship and whether it
* is currently valid. The reason for the validity check is that tables can be deleted
* outside of Budibase control and if this is the case it will break Budibase relationships.
* The tableIds is a list passed down from the main finalise tables function, which is
* based on the tables that have just been fetched. This will only really be used on subsequent
* fetches to the first one - if the user is periodically refreshing Budibase knowledge of tables.
* @param column The column to check, to see if it is a valid relationship.
* @param tableIds The IDs of the tables which currently exist.
*/
export function shouldCopyRelationship(
column: { type: string; tableId?: string },
tableIds: string[]
) {
return (
column.type === FieldType.LINK &&
column.tableId &&
tableIds.includes(column.tableId)
)
}
/**
* Similar function to the shouldCopyRelationship function, but instead this looks for options and boolean
* types. It is possible to switch a string -> options and a number -> boolean (and vice versus) need to make
* sure that these get copied over when tables are fetched. Also checks whether they are still valid, if a
* column has changed type in the external database then copying it over may not be possible.
* @param column The column to check for options or boolean type.
* @param fetchedColumn The fetched column to check for the type in the external database.
*/
export function shouldCopySpecialColumn(
column: { type: string },
fetchedColumn: { type: string } | undefined
) {
const isFormula = column.type === FieldType.FORMULA
const specialTypes = [
FieldType.OPTIONS,
FieldType.LONGFORM,
FieldType.ARRAY,
FieldType.FORMULA,
FieldType.BB_REFERENCE,
]
// column has been deleted, remove - formulas will never exist, always copy
if (!isFormula && column && !fetchedColumn) {
return false
}
const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldType.NUMBER
return (
specialTypes.indexOf(column.type as FieldType) !== -1 ||
(fetchedIsNumber && column.type === FieldType.BOOLEAN)
)
}
 /**
  * Looks for columns which need to be copied over into the new table definitions, like relationships,
  * options types and views.
@ -348,6 +317,9 @@ function copyExistingPropsOver(
     if (entities[tableName]?.created) {
       table.created = entities[tableName]?.created
     }
+    if (entities[tableName]?.constrained) {
+      table.constrained = entities[tableName]?.constrained
+    }
     table.views = entities[tableName].views
@ -356,12 +328,73 @@
       if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
         continue
       }

       const column = existingTableSchema[key]
-      if (
-        shouldCopyRelationship(column, tableIds) ||
-        shouldCopySpecialColumn(column, table.schema[key])
-      ) {
-        table.schema[key] = existingTableSchema[key]
+
+      const existingColumnType = column?.type
+      const updatedColumnType = table.schema[key]?.type
+
+      const keepIfType = (...validTypes: PrimitiveTypes[]) => {
+        return (
+          isPrimitiveType(updatedColumnType) &&
+          table.schema[key] &&
+          validTypes.includes(updatedColumnType)
+        )
+      }
+
+      let shouldKeepSchema = false
+      switch (existingColumnType) {
+        case FieldType.FORMULA:
+        case FieldType.AUTO:
+        case FieldType.INTERNAL:
+          shouldKeepSchema = true
+          break
+
+        case FieldType.LINK:
+          shouldKeepSchema =
+            existingColumnType === FieldType.LINK &&
+            tableIds.includes(column.tableId)
+          break
+
+        case FieldType.STRING:
+        case FieldType.OPTIONS:
+        case FieldType.LONGFORM:
+        case FieldType.BARCODEQR:
+          shouldKeepSchema = keepIfType(FieldType.STRING)
+          break
+
+        case FieldType.NUMBER:
+        case FieldType.BOOLEAN:
+          shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
+          break
+
+        case FieldType.ARRAY:
+        case FieldType.ATTACHMENTS:
+        case FieldType.ATTACHMENT_SINGLE:
+        case FieldType.JSON:
+        case FieldType.BB_REFERENCE:
+          shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
+          break
+
+        case FieldType.DATETIME:
+          shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
+          break
+
+        case FieldType.BIGINT:
+          shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
+          break
+
+        default:
+          utils.unreachable(existingColumnType)
+      }
+
+      if (shouldKeepSchema) {
+        table.schema[key] = {
+          ...existingTableSchema[key],
+          externalType:
+            existingTableSchema[key].externalType ||
+            table.schema[key]?.externalType,
+        }
       }
     }
   }
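
The switch above decides whether a previously saved Budibase column schema survives a re-fetch of the external schema, by checking whether the freshly fetched primitive type is still compatible with the stored one. A simplified re-statement of that compatibility rule for illustration; this is not a function the codebase exports.

import { FieldType } from "@budibase/types"

// Compatibility pairs mirrored from the switch above, reduced to a lookup.
const compatible: Partial<Record<FieldType, FieldType[]>> = {
  [FieldType.OPTIONS]: [FieldType.STRING],
  [FieldType.BOOLEAN]: [FieldType.BOOLEAN, FieldType.NUMBER],
  [FieldType.DATETIME]: [FieldType.DATETIME, FieldType.STRING],
  [FieldType.BIGINT]: [FieldType.BIGINT, FieldType.NUMBER],
}

function shouldKeep(existing: FieldType, fetched: FieldType): boolean {
  return compatible[existing]?.includes(fetched) ?? false
}

console.log(shouldKeep(FieldType.OPTIONS, FieldType.STRING)) // true - options survive a string column
console.log(shouldKeep(FieldType.BOOLEAN, FieldType.DATETIME)) // false - schema falls back to the fetched type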

View File

@ -35,7 +35,6 @@ export function init() {
       // Because we can't pass functions into an Isolate, we remove them from
       // the passed context and rely on the withHelpers() method to add them
       // back in.
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { helpers, snippets, ...rest } = ctx
       return vm.withContext(rest, () => vm.execute(js))
     } catch (error: any) {

View File

@ -348,8 +348,7 @@ const preSaveAction: Partial<Record<SourceName, any>> = {
  * Make sure all datasource entities have a display name selected
  */
 export function setDefaultDisplayColumns(datasource: Datasource) {
   // for (const entity of Object.values(datasource.entities || {})) {
   for (let entity of Object.values(datasource.entities || {})) {
     if (entity.primaryDisplay) {
       continue
     }

View File

@ -3,6 +3,7 @@ import * as rows from "./rows"
 import * as search from "./search"
 import * as utils from "./utils"
 import * as external from "./external"
+import AliasTables from "./sqlAlias"

 export default {
   ...attachments,
@ -10,4 +11,5 @@ export default {
   ...search,
   utils,
   external,
+  AliasTables,
 }

View File

@ -13,6 +13,8 @@ import * as sqs from "./search/sqs"
 import env from "../../../environment"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
+import sdk from "../../index"
+import { searchInputMapping } from "./search/utils"

 export { isValidFilter } from "../../../integrations/utils"
@ -29,6 +31,10 @@ function pickApi(tableId: any) {
   return internal
 }

+function isEmptyArray(value: any) {
+  return Array.isArray(value) && value.length === 0
+}
+
 // don't do a pure falsy check, as 0 is included
 // https://github.com/Budibase/budibase/issues/10118
 export function removeEmptyFilters(filters: SearchFilters) {
@ -47,7 +53,7 @@ export function removeEmptyFilters(filters: SearchFilters) {
       for (let [key, value] of Object.entries(
         filters[filterType] as object
       )) {
-        if (value == null || value === "") {
+        if (value == null || value === "" || isEmptyArray(value)) {
           // @ts-ignore
           delete filters[filterField][key]
         }
@ -72,12 +78,15 @@ export async function search(
     }
   }

+  const table = await sdk.tables.getTable(options.tableId)
+  options = searchInputMapping(table, options)
+
   if (isExternalTable) {
-    return external.search(options)
+    return external.search(options, table)
   } else if (env.SQS_SEARCH_ENABLE) {
-    return sqs.search(options)
+    return sqs.search(options, table)
   } else {
-    return internal.search(options)
+    return internal.search(options, table)
   }
 }
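
With the isEmptyArray check added above, a filter whose value is an empty array is now stripped along with null and empty-string values. A rough sketch of the effect; the import path is assumed, and the exact shape of the cleaned object depends on parts of removeEmptyFilters not shown in this hunk.

import { SearchFilters } from "@budibase/types"
import { removeEmptyFilters } from "../sdk/app/rows" // path assumed for illustration

const filters: SearchFilters = {
  equal: { name: "" }, // empty string - already stripped before this change
  oneOf: { tags: [] }, // empty array - now stripped by the isEmptyArray check
  range: { age: { low: 18, high: 65 } }, // kept
}

const cleaned = removeEmptyFilters(filters)
// cleaned.equal and cleaned.oneOf no longer contain the empty entries,
// while the range filter is left untouched.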

View File

@ -8,6 +8,7 @@ import {
   SearchFilters,
   RowSearchParams,
   SearchResponse,
+  Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@ -18,13 +19,13 @@ import {
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
 import { HTTPError, db } from "@budibase/backend-core"
-import { searchInputMapping } from "./utils"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
   const { paginate, query, ...params } = options
@ -68,8 +69,6 @@ export async function search(
   }

   try {
-    const table = await sdk.tables.getTable(tableId)
-    options = searchInputMapping(table, options)
     let rows = await handleRequest(Operation.READ, tableId, {
       filters: query,
       sort,
@ -150,11 +149,15 @@ export async function exportRows(
   }

   const datasource = await sdk.datasources.get(datasourceId!)
+  const table = await sdk.tables.getTable(tableId)
   if (!datasource || !datasource.entities) {
     throw new HTTPError("Datasource has not been configured for plus API.", 400)
   }

-  let result = await search({ tableId, query: requestQuery, sort, sortOrder })
+  let result = await search(
+    { tableId, query: requestQuery, sort, sortOrder },
+    table
+  )
   let rows: Row[] = []
   let headers

View File

@ -1,6 +1,6 @@
 import { context, db, HTTPError } from "@budibase/backend-core"
 import env from "../../../../environment"
-import { fullSearch, paginatedSearch, searchInputMapping } from "./utils"
+import { fullSearch, paginatedSearch } from "./utils"
 import { getRowParams, InternalTables } from "../../../../db/utils"
 import {
   Database,
@ -33,7 +33,8 @@ import pick from "lodash/pick"
 import { breakRowIdField } from "../../../../integrations/utils"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
@ -51,8 +52,6 @@ export async function search(
     query: {},
   }

-  let table = await sdk.tables.getTable(tableId)
-  options = searchInputMapping(table, options)
   if (params.sort && !params.sortType) {
     const schema = table.schema
     const sortField = schema[params.sort]
@ -122,12 +121,15 @@ export async function exportRows(
     result = await outputProcessing<Row[]>(table, response)
   } else if (query) {
-    let searchResponse = await search({
-      tableId,
-      query,
-      sort,
-      sortOrder,
-    })
+    let searchResponse = await search(
+      {
+        tableId,
+        query,
+        sort,
+        sortOrder,
+      },
+      table
+    )
     result = searchResponse.rows
   }

View File

@ -20,7 +20,12 @@ import {
 } from "../../../../api/controllers/row/utils"
 import sdk from "../../../index"
 import { context } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  SQS_DATASOURCE_INTERNAL,
+} from "../../../../db/utils"
+import AliasTables from "../sqlAlias"
+import { outputProcessing } from "../../../../utilities/rowProcessor"

 function buildInternalFieldList(
   table: Table,
@ -31,19 +36,19 @@ function buildInternalFieldList(
   fieldList = fieldList.concat(
     CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
   )
-  if (opts.relationships) {
-    for (let col of Object.values(table.schema)) {
-      if (col.type === FieldType.LINK) {
-        const linkCol = col as RelationshipFieldMetadata
-        const relatedTable = tables.find(
-          table => table._id === linkCol.tableId
-        )!
-        fieldList = fieldList.concat(
-          buildInternalFieldList(relatedTable, tables, { relationships: false })
-        )
-      } else {
-        fieldList.push(`${table._id}.${col.name}`)
-      }
-    }
-  }
+  for (let col of Object.values(table.schema)) {
+    const isRelationship = col.type === FieldType.LINK
+    if (!opts.relationships && isRelationship) {
+      continue
+    }
+    if (isRelationship) {
+      const linkCol = col as RelationshipFieldMetadata
+      const relatedTable = tables.find(table => table._id === linkCol.tableId)!
+      fieldList = fieldList.concat(
+        buildInternalFieldList(relatedTable, tables, { relationships: false })
+      )
+    } else {
+      fieldList.push(`${table._id}.${col.name}`)
+    }
+  }
   return fieldList
@ -94,14 +99,14 @@ function buildTableMap(tables: Table[]) {
 }

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
-  const { tableId, paginate, query, ...params } = options
+  const { paginate, query, ...params } = options

   const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
-  const table = allTables.find(table => table._id === tableId)
   if (!table) {
     throw new Error("Unable to find table")
   }
@ -111,7 +116,7 @@ export async function search(
   const request: QueryJson = {
     endpoint: {
       // not important, we query ourselves
-      datasourceId: "internal",
+      datasourceId: SQS_DATASOURCE_INTERNAL,
       entityId: table._id!,
       operation: Operation.READ,
     },
@ -154,37 +159,47 @@ export async function search(
     }
   }

   try {
-    const query = builder._query(request, {
-      disableReturning: true,
-    })
-    if (Array.isArray(query)) {
-      throw new Error("SQS cannot currently handle multiple queries")
-    }
-
-    let sql = query.sql,
-      bindings = query.bindings
-
-    // quick hack for docIds
-    sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
-    sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
-
-    const db = context.getAppDB()
-    const rows = await db.sql<Row>(sql, bindings)
-
-    return {
-      rows: await sqlOutputProcessing(
-        rows,
-        table!,
-        allTablesMap,
-        relationships,
-        {
-          sqs: true,
-        }
-      ),
-    }
+    const alias = new AliasTables(allTables.map(table => table.name))
+    const rows = await alias.queryWithAliasing(request, async json => {
+      const query = builder._query(json, {
+        disableReturning: true,
+      })
+      if (Array.isArray(query)) {
+        throw new Error("SQS cannot currently handle multiple queries")
+      }
+      let sql = query.sql,
+        bindings = query.bindings
+      // quick hack for docIds
+      sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
+      sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
+      const db = context.getAppDB()
+      return await db.sql<Row>(sql, bindings)
+    })
+    // process from the format of tableId.column to expected format
+    const processed = await sqlOutputProcessing(
+      rows,
+      table!,
+      allTablesMap,
+      relationships,
+      {
+        sqs: true,
+      }
+    )
+    return {
+      // final row processing for response
+      rows: await outputProcessing<Row[]>(table, processed, {
+        preserveLinks: true,
+        squash: true,
+      }),
+    }
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
-    throw new Error(`Unable to search by SQL - ${msg}`)
+    throw new Error(`Unable to search by SQL - ${msg}`, { cause: err })
   }
 }

View File

@ -112,7 +112,7 @@ describe("external search", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@ -130,7 +130,7 @@ describe("external search", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@ -157,7 +157,7 @@ describe("external search", () => {
         },
       },
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(3)
     expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])

View File

@ -81,7 +81,7 @@ describe("internal", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@ -99,7 +99,7 @@ describe("internal", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(

View File

@ -160,7 +160,7 @@ describe("internal search", () => {
     const response = await search.paginatedSearch(
       {
         contains: {
-          column: "a",
+          column: ["a"],
           colArr: [1, 2, 3],
         },
       },
@ -168,7 +168,7 @@ describe("internal search", () => {
     )
     checkLucene(
       response,
-      `(*:* AND column:a AND colArr:(1 AND 2 AND 3))`,
+      `(*:* AND column:(a) AND colArr:(1 AND 2 AND 3))`,
       PARAMS
     )
   })

View File

@ -6,11 +6,12 @@ import {
   Row,
   SearchFilters,
 } from "@budibase/types"
-import { getSQLClient } from "../../../sdk/app/rows/utils"
+import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
-import sdk from "../../../sdk"
+import datasources from "../datasources"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { SqlClient } from "../../../integrations/utils"
+import { SQS_DATASOURCE_INTERNAL } from "../../../db/utils"

 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@ -156,12 +157,19 @@ export default class AliasTables {
   }

   async queryWithAliasing(
-    json: QueryJson
+    json: QueryJson,
+    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
-    const datasource = await sdk.datasources.get(datasourceId)
+    const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
+    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    if (isSqs) {
+      aliasingEnabled = true
+    } else {
+      datasource = await datasources.get(datasourceId)
+      aliasingEnabled = this.isAliasingEnabled(json, datasource)
+    }

-    const aliasingEnabled = this.isAliasingEnabled(json, datasource)
     if (aliasingEnabled) {
       json = cloneDeep(json)
       // run through the query json to update anywhere a table may be used
@ -207,7 +215,15 @@ export default class AliasTables {
       }
       json.tableAliases = invertedTableAliases
     }

-    const response = await makeExternalQuery(datasource, json)
+    let response: DatasourcePlusQueryResponse
+    if (datasource && !isSqs) {
+      response = await makeExternalQuery(datasource, json)
+    } else if (queryFn) {
+      response = await queryFn(json)
+    } else {
+      throw new Error("No supplied method to perform aliased query")
+    }
+
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
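
The optional queryFn parameter is what lets the internal (SQS) search above push its own SQL execution through the aliasing layer, while external datasources still fall through to makeExternalQuery. A rough sketch of a caller under assumed names; the executor and table names are illustrative, not part of the diff.

import { DatasourcePlusQueryResponse, QueryJson } from "@budibase/types"
import AliasTables from "./sqlAlias"

// Hypothetical executor - in the real SQS path this is where the aliased
// QueryJson is compiled to SQL and run against the app's SQLite database.
async function runSql(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
  console.log("executing against", json.endpoint.entityId)
  return []
}

export async function runInternalQuery(json: QueryJson) {
  // the table names seed the alias map and are illustrative here
  const alias = new AliasTables(["orders", "customers"])
  // with no external datasource to resolve (SQS), the callback is required
  return alias.queryWithAliasing(json, runSql)
}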

View File

@ -52,6 +52,12 @@ export async function getDatasourceAndQuery(
 ): Promise<DatasourcePlusQueryResponse> {
   const datasourceId = json.endpoint.datasourceId
   const datasource = await sdk.datasources.get(datasourceId)
+  const table = datasource.entities?.[json.endpoint.entityId]
+  if (!json.meta && table) {
+    json.meta = {
+      table,
+    }
+  }
   return makeExternalQuery(datasource, json)
 }

View File

@ -3,7 +3,6 @@ import {
   Operation,
   RelationshipType,
   RenameColumn,
-  AddColumn,
   Table,
   TableRequest,
   ViewV2,
@ -33,7 +32,7 @@ import * as viewSdk from "../../views"
 export async function save(
   datasourceId: string,
   update: Table,
-  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
+  opts?: { tableId?: string; renaming?: RenameColumn }
 ) {
   let tableToSave: TableRequest = {
     ...update,
@ -52,6 +51,12 @@ export async function save(
     !oldTable &&
     (tableToSave.primary == null || tableToSave.primary.length === 0)
   ) {
+    if (tableToSave.schema.id) {
+      throw new Error(
+        "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
+      )
+    }
+
     tableToSave.primary = ["id"]
     tableToSave.schema.id = {
       type: FieldType.NUMBER,
@ -179,14 +184,7 @@ export async function save(
   // remove the rename prop
   delete tableToSave._rename
-  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
-  if (opts?.adding) {
-    datasource.entities[tableToSave.name] = (
-      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
-    ).tables[tableToSave.name]
-  } else {
-    datasource.entities[tableToSave.name] = tableToSave
-  }
+  datasource.entities[tableToSave.name] = tableToSave

   // store it into couch now for budibase reference
   await db.put(populateExternalTableSchemas(datasource))
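
The new guard refuses to auto-generate a primary `id` column when the incoming schema already defines one. A simplified illustration of the condition using plain objects rather than the real SDK call; all names are illustrative.

// Mirrors the guard above rather than calling the SDK.
const incoming = {
  name: "orders",
  primary: [] as string[],
  schema: {
    id: { name: "id", type: "number" },
  } as Record<string, { name: string; type: string }>,
}

const missingPrimary =
  incoming.primary == null || incoming.primary.length === 0
if (missingPrimary && incoming.schema.id) {
  // Budibase would otherwise generate its own numeric `id` primary column,
  // silently clashing with the user-supplied one - so it now throws instead.
  throw new Error("Set a `primary` column or remove the `id` column.")
}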

View File

@ -1,8 +1,19 @@
 import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
-import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types"
+import {
+  FieldType,
+  RelationshipFieldMetadata,
+  SQLiteDefinition,
+  SQLiteTable,
+  SQLiteTables,
+  SQLiteType,
+  Table,
+} from "@budibase/types"
 import { cloneDeep } from "lodash"
 import tablesSdk from "../"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  generateJunctionTableID,
+} from "../../../../db/utils"

 const BASIC_SQLITE_DOC: SQLiteDefinition = {
   _id: SQLITE_DESIGN_DOC_ID,
@ -31,14 +42,47 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
   [FieldType.ARRAY]: SQLiteType.BLOB,
   [FieldType.LINK]: SQLiteType.BLOB,
-  [FieldType.BIGINT]: SQLiteType.REAL,
+  [FieldType.BIGINT]: SQLiteType.TEXT,
   // TODO: consider the difference between multi-user and single user types (subtyping)
   [FieldType.BB_REFERENCE]: SQLiteType.TEXT,
 }

-function mapTable(table: Table): { [key: string]: SQLiteType } {
+function buildRelationshipDefinitions(
+  table: Table,
+  relationshipColumn: RelationshipFieldMetadata
+): {
+  tableId: string
+  definition: SQLiteTable
+} {
+  const tableId = table._id!,
+    relatedTableId = relationshipColumn.tableId
+  return {
+    tableId: generateJunctionTableID(tableId, relatedTableId),
+    definition: {
+      ["doc1.rowId"]: SQLiteType.TEXT,
+      ["doc1.tableId"]: SQLiteType.TEXT,
+      ["doc1.fieldName"]: SQLiteType.TEXT,
+      ["doc2.rowId"]: SQLiteType.TEXT,
+      ["doc2.tableId"]: SQLiteType.TEXT,
+      ["doc2.fieldName"]: SQLiteType.TEXT,
+      tableId: SQLiteType.TEXT,
+    },
+  }
+}
+
+// this can generate relationship tables as part of the mapping
+function mapTable(table: Table): SQLiteTables {
+  const tables: SQLiteTables = {}
   const fields: Record<string, SQLiteType> = {}
   for (let [key, column] of Object.entries(table.schema)) {
+    // relationships should be handled differently
+    if (column.type === FieldType.LINK) {
+      const { tableId, definition } = buildRelationshipDefinitions(
+        table,
+        column
+      )
+      tables[tableId] = { fields: definition }
+    }
     if (!FieldTypeMap[column.type]) {
       throw new Error(`Unable to map type "${column.type}" to SQLite type`)
     }
@ -49,10 +93,12 @@ function mapTable(table: Table): { [key: string]: SQLiteType } {
   CONSTANT_INTERNAL_ROW_COLS.forEach(col => {
     constantMap[col] = SQLiteType.TEXT
   })
-  return {
+  const thisTable: SQLiteTable = {
     ...constantMap,
     ...fields,
   }
+  tables[table._id!] = { fields: thisTable }
+  return tables
 }

 // nothing exists, need to iterate though existing tables
@ -60,8 +106,9 @@ async function buildBaseDefinition(): Promise<SQLiteDefinition> {
   const tables = await tablesSdk.getAllInternalTables()
   const definition = cloneDeep(BASIC_SQLITE_DOC)
   for (let table of tables) {
-    definition.sql.tables[table._id!] = {
-      fields: mapTable(table),
+    definition.sql.tables = {
+      ...definition.sql.tables,
+      ...mapTable(table),
     }
   }
   return definition
@ -75,8 +122,9 @@ export async function addTableToSqlite(table: Table) {
   } catch (err) {
     definition = await buildBaseDefinition()
   }
-  definition.sql.tables[table._id!] = {
-    fields: mapTable(table),
+  definition.sql.tables = {
+    ...definition.sql.tables,
+    ...mapTable(table),
   }
   await db.put(definition)
 }
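
With relationships in play, mapTable() now returns a map of table id to definition rather than a single field map, so one user table can also contribute junction-table entries. A rough sketch of the resulting shape; the keys are placeholders, since the real ones come from table._id and generateJunctionTableID().

import { SQLiteTables, SQLiteType } from "@budibase/types"

export const exampleSqliteTables: SQLiteTables = {
  // junction entry produced for a LINK column
  "junction-id-placeholder": {
    fields: {
      "doc1.rowId": SQLiteType.TEXT,
      "doc1.tableId": SQLiteType.TEXT,
      "doc1.fieldName": SQLiteType.TEXT,
      "doc2.rowId": SQLiteType.TEXT,
      "doc2.tableId": SQLiteType.TEXT,
      "doc2.fieldName": SQLiteType.TEXT,
      tableId: SQLiteType.TEXT,
    },
  },
  // the user table itself, keyed by its _id
  "table-id-placeholder": {
    fields: {
      _id: SQLiteType.TEXT, // constant internal columns all map to TEXT
      customer: SQLiteType.BLOB, // the LINK column stays on the table as a BLOB
    },
  },
}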

View File

@ -125,7 +125,7 @@ describe("validation and update of external table schemas", () => {
   }

   it("should correctly set utilised foreign keys to autocolumns", () => {
-    const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
+    const response = populateExternalTableSchemas(cloneDeep(SCHEMA))
     const foreignKey = getForeignKeyColumn(response)
     expect(foreignKey.autocolumn).toBe(true)
     expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
@ -133,7 +133,7 @@ describe("validation and update of external table schemas", () => {
   })

   it("should correctly unset foreign keys when no longer used", () => {
-    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
+    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA))
     const beforeFk = getForeignKeyColumn(setResponse)
     delete setResponse.entities!.client.schema.project
     delete setResponse.entities!.project.schema.client

View File

@ -44,7 +44,10 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
       if (shouldBeForeign && !column.autocolumn) {
         column.autocolumn = true
         column.autoReason = AutoReason.FOREIGN_KEY
-      } else if (column.autoReason === AutoReason.FOREIGN_KEY) {
+      } else if (
+        !shouldBeForeign &&
+        column.autoReason === AutoReason.FOREIGN_KEY
+      ) {
         delete column.autocolumn
         delete column.autoReason
       }

View File

@ -351,7 +351,6 @@ describe("table sdk", () => {
     const view: ViewV2 = {
       ...basicView,
     }
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { name, description, ...newTableSchema } = basicTable.schema

     const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
@ -365,7 +364,6 @@ describe("table sdk", () => {
     const view: ViewV2 = {
       ...basicView,
     }
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { description, ...newTableSchema } = {
       ...basicTable.schema,
       updatedDescription: {
@ -450,7 +448,6 @@ describe("table sdk", () => {
         hiddenField: { visible: false },
       },
     }
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { name, description, ...newTableSchema } = basicTable.schema

     const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
@ -474,7 +471,6 @@ describe("table sdk", () => {
         hiddenField: { visible: false },
       },
     }
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { name, description, ...newTableSchema } = {
       ...basicTable.schema,
       newField1: {
@ -506,7 +502,6 @@ describe("table sdk", () => {
         hiddenField: { visible: false },
       },
     }
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { description, ...newTableSchema } = {
       ...basicTable.schema,
       updatedDescription: {

View File

@ -5,6 +5,7 @@ import {
   UpdateDatasourceResponse,
   UpdateDatasourceRequest,
   QueryJson,
+  BuildSchemaFromSourceResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
@ -60,10 +61,22 @@ export class DatasourceAPI extends TestAPI {
     })
   }

-  query = async (query: QueryJson, expectations?: Expectations) => {
+  query = async (
+    query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
+    expectations?: Expectations
+  ) => {
     return await this._post<any>(`/api/datasources/query`, {
       body: query,
       expectations,
     })
   }
+
+  fetchSchema = async (id: string, expectations?: Expectations) => {
+    return await this._post<BuildSchemaFromSourceResponse>(
+      `/api/datasources/${id}/schema`,
+      {
+        expectations,
+      }
+    )
+  }
 }

View File

@ -374,11 +374,13 @@ class Orchestrator {
           for (let [innerObject, innerValue] of Object.entries(
             originalStepInput[key][innerKey]
           )) {
-            originalStepInput[key][innerKey][innerObject] =
-              automationUtils.substituteLoopStep(
-                innerValue as string,
-                `steps.${loopStepNumber}`
-              )
+            if (typeof innerValue === "string") {
+              originalStepInput[key][innerKey][innerObject] =
+                automationUtils.substituteLoopStep(
+                  innerValue,
+                  `steps.${loopStepNumber}`
+                )
+            }
           }
         }
       }
@ -458,7 +460,6 @@ class Orchestrator {
         inputs,
         step.schema.inputs
       )
       try {
         // appId is always passed
         const outputs = await stepFn({
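
The added typeof guard means only string values inside the nested loop input are rewritten; numbers and other non-string values are now left untouched instead of being coerced. A small standalone sketch of the same guard, with a stand-in substitute function since automationUtils is not shown in this hunk.

// Stand-in for automationUtils.substituteLoopStep - purely illustrative.
function substituteLoopStep(value: string, binding: string): string {
  return value.replace(/{{\s*loop\./g, `{{ ${binding}.`)
}

const input: Record<string, any> = {
  rows: [{ name: "{{ loop.currentItem.name }}", attempts: 3 }],
}

for (const [innerObject, innerValue] of Object.entries(input.rows[0])) {
  if (typeof innerValue === "string") {
    // only strings are rewritten; the number is left as-is
    input.rows[0][innerObject] = substituteLoopStep(innerValue, "steps.2")
  }
}

console.log(input.rows[0])
// -> { name: "{{ steps.2.currentItem.name }}", attempts: 3 }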

View File

@ -43,7 +43,7 @@ export const checkDevelopmentEnvironment = () => {
     error = "Must run via yarn once to generate environment."
   }
   if (error) {
-    console.error(error)
+    console.error("Error during development environment check", error)
     process.exit(-1)
   }
 }

View File

@ -262,13 +262,11 @@ export class BaseSocket {
     }
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async onConnect(socket: Socket) {
+  async onConnect(_socket: Socket) {
     // Override
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async onDisconnect(socket: Socket) {
+  async onDisconnect(_socket: Socket) {
     // Override
   }

Some files were not shown because too many files have changed in this diff Show More