diff --git a/.eslintrc.json b/.eslintrc.json index 525072dc6c..624c2b8f26 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -42,7 +42,15 @@ }, "rules": { "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": "error", + "@typescript-eslint/no-unused-vars": [ + "error", + { + "varsIgnorePattern": "^_", + "argsIgnorePattern": "^_", + "destructuredArrayIgnorePattern": "^_", + "ignoreRestSiblings": true + } + ], "local-rules/no-budibase-imports": "error" } }, @@ -59,7 +67,15 @@ }, "rules": { "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": "error", + "@typescript-eslint/no-unused-vars": [ + "error", + { + "varsIgnorePattern": "^_", + "argsIgnorePattern": "^_", + "destructuredArrayIgnorePattern": "^_", + "ignoreRestSiblings": true + } + ], "local-rules/no-test-com": "error", "local-rules/email-domain-example-com": "error", "no-console": "warn", @@ -89,7 +105,8 @@ { "varsIgnorePattern": "^_", "argsIgnorePattern": "^_", - "destructuredArrayIgnorePattern": "^_" + "destructuredArrayIgnorePattern": "^_", + "ignoreRestSiblings": true } ], "import/no-relative-packages": "error", diff --git a/.vscode/settings.json b/.vscode/settings.json index e22d5a8866..0723219a8b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -24,5 +24,8 @@ }, "[svelte]": { "editor.defaultFormatter": "svelte.svelte-vscode" + }, + "[handlebars]": { + "editor.formatOnSave": false } } diff --git a/hosting/.env b/hosting/.env index 8a0756c0e3..173d409d04 100644 --- a/hosting/.env +++ b/hosting/.env @@ -17,6 +17,7 @@ APP_PORT=4002 WORKER_PORT=4003 MINIO_PORT=4004 COUCH_DB_PORT=4005 +COUCH_DB_SQS_PORT=4006 REDIS_PORT=6379 WATCHTOWER_PORT=6161 BUDIBASE_ENVIRONMENT=PRODUCTION @@ -28,4 +29,4 @@ BB_ADMIN_USER_PASSWORD= # A path that is watched for plugin bundles. 
Any bundles found are imported automatically/ PLUGINS_DIR= -ROLLING_LOG_MAX_SIZE= \ No newline at end of file +ROLLING_LOG_MAX_SIZE= diff --git a/lerna.json b/lerna.json index 9839b8b166..99a3e46ab2 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.23.5", + "version": "2.23.8", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/nx.json b/nx.json index 618395ec90..8ba8798946 100644 --- a/nx.json +++ b/nx.json @@ -9,10 +9,7 @@ }, "targetDefaults": { "build": { - "inputs": [ - "{workspaceRoot}/scripts/build.js", - "{workspaceRoot}/lerna.json" - ] + "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"] } } } diff --git a/package.json b/package.json index e520b7c2cf..e60a086e17 100644 --- a/package.json +++ b/package.json @@ -59,7 +59,7 @@ "dev:camunda": "./scripts/deploy-camunda.sh", "dev:all": "yarn run kill-all && lerna run --stream dev", "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built", - "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0", + "dev:docker": "./scripts/devDocker.sh", "test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream", "lint:eslint": "eslint packages --max-warnings=0", "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"", diff --git a/packages/account-portal b/packages/account-portal index bd0e01d639..eb7d5da233 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit bd0e01d639ec3b2547e7c859a1c43b622dce8344 +Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts index 4a2cfd34e2..d9dddd0097 100644 --- a/packages/backend-core/src/db/lucene.ts +++ b/packages/backend-core/src/db/lucene.ts @@ -8,19 +8,9 @@ import { SearchParams, WithRequired, } from "@budibase/types" +import { dataFilters } from "@budibase/shared-core" -const QUERY_START_REGEX = /\d[0-9]*:/g - -export function removeKeyNumbering(key: any): string { - if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) { - const parts = key.split(":") - // remove the number - parts.shift() - return parts.join(":") - } else { - return key - } -} +export const removeKeyNumbering = dataFilters.removeKeyNumbering /** * Class to build lucene query URLs. 
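The lucene.ts hunk above replaces the module-local `removeKeyNumbering` with the shared `dataFilters.removeKeyNumbering` from `@budibase/shared-core`, so the same key-unprefixing logic can be reused elsewhere in this changeset (for example the `dbCore.removeKeyNumbering` call in the SQL builder). As a minimal sketch of the behaviour, mirroring the deleted implementation and assuming the shared-core export behaves the same way:

```ts
// Mirrors the helper deleted above: strip a leading "<n>:" ordinal that the
// filter builder prepends to keep duplicate field keys distinct.
const QUERY_START_REGEX = /\d[0-9]*:/g

export function removeKeyNumbering(key: any): string {
  if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
    const parts = key.split(":")
    parts.shift() // drop the numeric prefix
    return parts.join(":")
  }
  return key
}

// removeKeyNumbering("1:email") === "email"
// removeKeyNumbering("2:address:line1") === "address:line1"
// removeKeyNumbering("email") === "email"
```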
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 2da2a77d67..8dbc904643 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -107,7 +107,7 @@ const environment = { ENCRYPTION_KEY: process.env.ENCRYPTION_KEY, API_ENCRYPTION_KEY: getAPIEncryptionKey(), COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005", - COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984", + COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006", COUCH_DB_USERNAME: process.env.COUCH_DB_USER, COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD, GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID, diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 87e43b324d..333accc985 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -115,7 +115,6 @@ class InMemoryQueue implements Partial { * a JSON message as this is required by Bull. * @param repeat serves no purpose for the import queue. */ - // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { const jobId = opts?.jobId?.toString() if (jobId && this._queuedJobIds.has(jobId)) { @@ -166,8 +165,7 @@ class InMemoryQueue implements Partial { return [] } - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async removeJobs(pattern: string) { + async removeJobs(_pattern: string) { // no-op } diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts index 6165a68e57..f77c6385ba 100644 --- a/packages/backend-core/src/users/db.ts +++ b/packages/backend-core/src/users/db.ts @@ -50,6 +50,8 @@ type CreateAdminUserOpts = { hashPassword?: boolean requirePassword?: boolean skipPasswordValidation?: boolean + firstName?: string + lastName?: string } type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn } @@ -517,6 +519,8 @@ export class UserDB { global: true, }, tenantId, + firstName: opts?.firstName, + lastName: opts?.lastName, } if (opts?.ssoId) { user.ssoId = opts.ssoId diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts index 48920a3771..7d62a6ef39 100644 --- a/packages/backend-core/src/users/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -17,8 +17,8 @@ import { ContextUser, CouchFindOptions, DatabaseQueryOpts, - SearchQuery, - SearchQueryOperators, + SearchFilters, + SearchFilterOperator, SearchUsersRequest, User, } from "@budibase/types" @@ -44,11 +44,11 @@ function removeUserPassword(users: User | User[]) { return users } -export function isSupportedUserSearch(query: SearchQuery) { +export function isSupportedUserSearch(query: SearchFilters) { const allowed = [ - { op: SearchQueryOperators.STRING, key: "email" }, - { op: SearchQueryOperators.EQUAL, key: "_id" }, - { op: SearchQueryOperators.ONE_OF, key: "_id" }, + { op: SearchFilterOperator.STRING, key: "email" }, + { op: SearchFilterOperator.EQUAL, key: "_id" }, + { op: SearchFilterOperator.ONE_OF, key: "_id" }, ] for (let [key, operation] of Object.entries(query)) { if (typeof operation !== "object") { diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index 0632993cf0..6434c7710d 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ 
b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -14,6 +14,7 @@ notifications, Checkbox, DatePicker, + DrawerContent, } from "@budibase/bbui" import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte" import { automationStore, selectedAutomation, tables } from "stores/builder" @@ -37,7 +38,7 @@ hbAutocomplete, EditorModes, } from "components/common/CodeEditor" - import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte" + import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte" import { LuceneUtils, Utils } from "@budibase/frontend-core" import { getSchemaForDatasourcePlus, @@ -442,15 +443,16 @@ - (tempFilters = e.detail)} - /> + + (tempFilters = e.detail)} + /> + {:else if value.customType === "password"} import { createEventDispatcher } from "svelte" import { ActionButton, Drawer, Button } from "@budibase/bbui" - import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte" + import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte" import { getUserBindings } from "dataBinding" import { makePropSafe } from "@budibase/string-templates" @@ -64,7 +64,7 @@ > Save - t.toLowerCase()) + return Object.entries(FIELDS) + .filter(([fieldType]) => + possibleTypes.includes(fieldType.toLowerCase()) + ) + .map(([_, fieldDefinition]) => fieldDefinition) } const isUsers = @@ -632,7 +626,7 @@ /> - {:else if editableColumn.type === FieldType.LINK} + {:else if editableColumn.type === FieldType.LINK && !editableColumn.autocolumn} diff --git a/packages/builder/src/components/common/users/PasswordRepeatInput.svelte b/packages/builder/src/components/common/users/PasswordRepeatInput.svelte index 496bee14ec..4a453ef049 100644 --- a/packages/builder/src/components/common/users/PasswordRepeatInput.svelte +++ b/packages/builder/src/components/common/users/PasswordRepeatInput.svelte @@ -9,7 +9,6 @@ "", requiredValidator ) - // eslint-disable-next-line no-unused-vars const [repeatPassword, _, repeatTouched] = createValidationStore( "", requiredValidator diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterBuilder.svelte b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterBuilder.svelte new file mode 100644 index 0000000000..0ab67cbada --- /dev/null +++ b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterBuilder.svelte @@ -0,0 +1,84 @@ + + + +
+ + { + const indexToUpdate = rawFilters.findIndex(f => f.id === filter.id) + rawFilters[indexToUpdate] = { + ...rawFilters[indexToUpdate], + value: event.detail, + } + }} + /> + diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte index 0f1f08d823..e481bb4381 100644 --- a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte +++ b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte @@ -1,8 +1,14 @@ -
- - - {#if !filters?.length} - Add your first filter expression. - {:else} - Results are filtered to only those which match all of the following - constraints. - {/if} - - {#if filters?.length} -
- {#each filters as filter} - onOperatorChange(filter, e.detail)} - placeholder={null} - /> - {#if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)} - - {:else if ["options", "array"].includes(filter.type)} - - {:else if filter.type === "boolean"} - - {:else if filter.type === "datetime"} - - {:else} - - {/if} -
- duplicateFilter(filter.id)} - /> - removeFilter(filter.id)} - /> -
- {/each} -
- {/if} -
- -
-
-
- - + +
+ Results are filtered to only those which match all of the following + constraints. +
+
diff --git a/packages/client/src/components/app/forms/AttachmentField.svelte b/packages/client/src/components/app/forms/AttachmentField.svelte index 644630810d..3489fd809c 100644 --- a/packages/client/src/components/app/forms/AttachmentField.svelte +++ b/packages/client/src/components/app/forms/AttachmentField.svelte @@ -58,17 +58,6 @@ } } - const deleteAttachments = async fileList => { - try { - return await API.deleteAttachments({ - keys: fileList, - tableId: formContext?.dataSource?.tableId, - }) - } catch (error) { - return [] - } - } - const handleChange = e => { const value = fieldApiMapper.set(e.detail) const changed = fieldApi.setValue(value) @@ -98,7 +87,6 @@ error={fieldState.error} on:change={handleChange} {processFiles} - {deleteAttachments} {handleFileTooLarge} {handleTooManyFiles} {maximum} diff --git a/packages/frontend-core/package.json b/packages/frontend-core/package.json index 4ca88de8f2..3f97573d4a 100644 --- a/packages/frontend-core/package.json +++ b/packages/frontend-core/package.json @@ -11,6 +11,7 @@ "@budibase/types": "0.0.0", "dayjs": "^1.10.8", "lodash": "4.17.21", + "shortid": "2.2.15", "socket.io-client": "^4.6.1" } } diff --git a/packages/frontend-core/src/api/attachments.js b/packages/frontend-core/src/api/attachments.js index cff466c86f..72f280d99d 100644 --- a/packages/frontend-core/src/api/attachments.js +++ b/packages/frontend-core/src/api/attachments.js @@ -61,34 +61,6 @@ export const buildAttachmentEndpoints = API => { }) return { publicUrl } }, - - /** - * Deletes attachments from the bucket. - * @param keys the attachments to delete - * @param tableId the associated table ID - */ - deleteAttachments: async ({ keys, tableId }) => { - return await API.post({ - url: `/api/attachments/${tableId}/delete`, - body: { - keys, - }, - }) - }, - - /** - * Deletes attachments from the builder bucket. - * @param keys the attachments to delete - */ - deleteBuilderAttachments: async keys => { - return await API.post({ - url: `/api/attachments/delete`, - body: { - keys, - }, - }) - }, - /** * Download an attachment from a row given its column name. 
* @param datasourceId the ID of the datasource to download from diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte b/packages/frontend-core/src/components/FilterBuilder.svelte similarity index 50% rename from packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte rename to packages/frontend-core/src/components/FilterBuilder.svelte index 74c081cd5b..104821aa0f 100644 --- a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte +++ b/packages/frontend-core/src/components/FilterBuilder.svelte @@ -4,33 +4,36 @@ Button, Combobox, DatePicker, - DrawerContent, Icon, Input, - Label, Layout, - Multiselect, Select, + Label, + Multiselect, } from "@budibase/bbui" - import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte" - import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte" + import { FieldType, SearchFilterOperator } from "@budibase/types" import { generate } from "shortid" - import { Constants, LuceneUtils } from "@budibase/frontend-core" - import { getFields } from "helpers/searchFields" - import { FieldType } from "@budibase/types" - import { createEventDispatcher, onMount } from "svelte" + import { LuceneUtils, Constants } from "@budibase/frontend-core" + import { getContext } from "svelte" import FilterUsers from "./FilterUsers.svelte" + const { OperatorOptions } = Constants + export let schemaFields export let filters = [] - export let bindings = [] - export let panel = ClientBindingPanel - export let allowBindings = true export let datasource + export let behaviourFilters = false + export let allowBindings = false + export let filtersLabel = "Filters" + + $: matchAny = filters?.find(filter => filter.operator === "allOr") != null + $: onEmptyFilter = + filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all" + + $: fieldFilters = filters.filter( + filter => filter.operator !== "allOr" && !filter.onEmptyFilter + ) - const dispatch = createEventDispatcher() - const { OperatorOptions } = Constants - const KeyedFieldRegex = /\d[0-9]*:/g const behaviourOptions = [ { value: "and", label: "Match all filters" }, { value: "or", label: "Match any filter" }, @@ -40,62 +43,18 @@ { value: "none", label: "Return no rows" }, ] - let rawFilters - let matchAny = false - let onEmptyFilter = "all" + const context = getContext("context") - $: parseFilters(filters) - $: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter)) - $: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true }) - $: fieldOptions = enrichedSchemaFields.map(field => field.name) || [] - $: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"] - - // Remove field key prefixes and determine which behaviours to use - const parseFilters = filters => { - matchAny = filters?.find(filter => filter.operator === "allOr") != null - onEmptyFilter = - filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? 
"all" - rawFilters = (filters || []) - .filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter) - .map(filter => { - const { field } = filter - let newFilter = { ...filter } - delete newFilter.allOr - if (typeof field === "string" && field.match(KeyedFieldRegex) != null) { - const parts = field.split(":") - parts.shift() - newFilter.field = parts.join(":") - } - return newFilter - }) - } - - onMount(() => { - parseFilters(filters) - rawFilters.forEach(filter => { - filter.type = - schemaFields.find(field => field.name === filter.field)?.type || - filter.type - }) - }) - - // Add field key prefixes and a special metadata filter object to indicate - // how to handle filter behaviour - const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => { - let count = 1 - return rawFilters - .filter(filter => filter.field) - .map(filter => ({ - ...filter, - field: `${count++}:${filter.field}`, - })) - .concat(matchAny ? [{ operator: "allOr" }] : []) - .concat([{ onEmptyFilter }]) - } + $: fieldOptions = (schemaFields ?? []) + .filter(field => getValidOperatorsForType(field).length) + .map(field => ({ + label: field.displayName || field.name, + value: field.name, + })) const addFilter = () => { - rawFilters = [ - ...rawFilters, + filters = [ + ...(filters || []), { id: generate(), field: null, @@ -107,22 +66,57 @@ } const removeFilter = id => { - rawFilters = rawFilters.filter(field => field.id !== id) + filters = filters.filter(field => field.id !== id) } const duplicateFilter = id => { - const existingFilter = rawFilters.find(filter => filter.id === id) + const existingFilter = filters.find(filter => filter.id === id) const duplicate = { ...existingFilter, id: generate() } - rawFilters = [...rawFilters, duplicate] + filters = [...filters, duplicate] + } + + const onFieldChange = filter => { + const previousType = filter.type + sanitizeTypes(filter) + sanitizeOperator(filter) + sanitizeValue(filter, previousType) + } + + const onOperatorChange = filter => { + sanitizeOperator(filter) + sanitizeValue(filter, filter.type) + } + + const onValueTypeChange = filter => { + sanitizeValue(filter) + } + + const getFieldOptions = field => { + const schema = schemaFields.find(x => x.name === field) + return schema?.constraints?.inclusion || [] } const getSchema = filter => { - return enrichedSchemaFields.find(field => field.name === filter.field) + return schemaFields.find(field => field.name === filter.field) } + const getValidOperatorsForType = filter => { + if (!filter?.field && !filter?.name) { + return [] + } + + return LuceneUtils.getValidOperatorsForType( + filter, + filter.field || filter.name, + datasource + ) + } + + $: valueTypeOptions = allowBindings ? 
["Value", "Binding"] : ["Value"] + const sanitizeTypes = filter => { // Update type based on field - const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field) + const fieldSchema = schemaFields.find(x => x.name === filter.field) filter.type = fieldSchema?.type filter.subtype = fieldSchema?.subtype @@ -154,88 +148,79 @@ // Ensure array values are properly set and cleared if (Array.isArray(filter.value)) { - if (filter.valueType !== "Value" || filter.type !== "array") { + if (filter.valueType !== "Value" || filter.type !== FieldType.ARRAY) { filter.value = null } - } else if (filter.type === "array" && filter.valueType === "Value") { + } else if ( + filter.type === FieldType.ARRAY && + filter.valueType === "Value" + ) { filter.value = [] } else if ( previousType !== filter.type && (previousType === FieldType.BB_REFERENCE || filter.type === FieldType.BB_REFERENCE) ) { - filter.value = filter.type === "array" ? [] : null + filter.value = filter.type === FieldType.ARRAY ? [] : null } } - const onFieldChange = filter => { - const previousType = filter.type - sanitizeTypes(filter) - sanitizeOperator(filter) - sanitizeValue(filter, previousType) - } - - const onOperatorChange = filter => { - sanitizeOperator(filter) - sanitizeValue(filter, filter.type) - } - - const onValueTypeChange = filter => { - sanitizeValue(filter) - } - - const getFieldOptions = field => { - const schema = enrichedSchemaFields.find(x => x.name === field) - return schema?.constraints?.inclusion || [] - } - - const getValidOperatorsForType = filter => { - if (!filter?.field) { - return [] + function handleAllOr(option) { + filters = filters.filter(f => f.operator !== "allOr") + if (option === "or") { + filters.push({ operator: "allOr" }) } + } - return LuceneUtils.getValidOperatorsForType( - { type: filter.type, subtype: filter.subtype }, - filter.field, - datasource - ) + function handleOnEmptyFilter(value) { + filters = filters?.filter(filter => !filter.onEmptyFilter) + filters.push({ onEmptyFilter: value }) } - -
- - {#if !rawFilters?.length} - Add your first filter expression. - {:else} -
- opt.label} - getOptionValue={opt => opt.value} - on:change={e => (onEmptyFilter = e.detail)} - placeholder={null} - /> +
+ + {#if fieldOptions?.length} + + {#if !fieldFilters?.length} + Add your first filter expression. + {:else} + + {#if behaviourFilters} +
+ opt.label} + getOptionValue={opt => opt.value} + on:change={e => handleOnEmptyFilter(e.detail)} + placeholder={null} + /> + {/if} +
{/if} -
+ {/if} + + {#if fieldFilters?.length}
-
- -
-
- {#each rawFilters as filter} + {#if filtersLabel} +
+ +
+ {/if} +
+ {#each fieldFilters as filter} onValueTypeChange(filter)} - placeholder={null} - /> - {#if filter.field && filter.valueType === "Binding"} - (filter.value = event.detail)} + {#if allowBindings} + - {:else if filter.type === "array" || (filter.type === "options" && filter.operator === "oneOf")} + {:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)} - {:else if filter.type === "options"} + {:else if filter.type === FieldType.OPTIONS} - {:else if filter.type === "boolean"} + {:else if filter.type === FieldType.BOOLEAN} - {:else if filter.type === "datetime"} + {:else if filter.type === FieldType.DATETIME} {:else} - + {/if} - duplicateFilter(filter.id)} - /> - removeFilter(filter.id)} - /> +
+ duplicateFilter(filter.id)} + /> + removeFilter(filter.id)} + /> +
{/each}
{/if} -
+
- -
- + {:else} + None of the table column can be used for filtering. + {/if} + +
diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterUsers.svelte b/packages/frontend-core/src/components/FilterUsers.svelte similarity index 100% rename from packages/builder/src/components/design/settings/controls/FilterEditor/FilterUsers.svelte rename to packages/frontend-core/src/components/FilterUsers.svelte diff --git a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte index 3a1f165b6e..e7dc51e5d5 100644 --- a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte +++ b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte @@ -61,14 +61,6 @@ } } - const deleteAttachments = async fileList => { - try { - return await API.deleteBuilderAttachments(fileList) - } catch (error) { - return [] - } - } - onMount(() => { api = { focus: () => open(), @@ -101,7 +93,6 @@ on:change={e => onChange(e.detail)} maximum={maximum || schema.constraints?.length?.maximum} {processFiles} - {deleteAttachments} {handleFileTooLarge} />
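The FilterBuilder component relocated into frontend-core above keeps two behaviour markers alongside ordinary field filters: an `{ operator: "allOr" }` entry switches matching from "all filters" to "any filter", and an `{ onEmptyFilter }` entry records what to return when no filter has a value (see `fieldFilters`, `handleAllOr` and `handleOnEmptyFilter` in the new file). A rough sketch of that data shape, using made-up field names and values:

```ts
// Hypothetical filters array; field names and values are illustrative only.
const filters: Record<string, any>[] = [
  { id: "f1", field: "name", operator: "string", type: "string", valueType: "Value", value: "Jo" },
  { id: "f2", field: "age", operator: "equal", type: "number", valueType: "Value", value: 30 },
  { operator: "allOr" },    // behaviour marker: match any filter instead of all
  { onEmptyFilter: "all" }, // behaviour marker: return all rows when filters are empty
]

// Derived state, as in the component:
const matchAny = filters.find(f => f.operator === "allOr") != null
const onEmptyFilter = filters.find(f => f.onEmptyFilter)?.onEmptyFilter ?? "all"
const fieldFilters = filters.filter(
  f => f.operator !== "allOr" && !f.onEmptyFilter
)

console.log({ matchAny, onEmptyFilter, fieldFilters })
```

Keeping the markers in the same array is what lets the component round-trip the whole filter state through a single `filters` prop rather than separate bindings.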
diff --git a/packages/frontend-core/src/components/index.js b/packages/frontend-core/src/components/index.js index f71420b12b..0d4ff8ea35 100644 --- a/packages/frontend-core/src/components/index.js +++ b/packages/frontend-core/src/components/index.js @@ -6,3 +6,4 @@ export { default as UserAvatars } from "./UserAvatars.svelte" export { default as Updating } from "./Updating.svelte" export { Grid } from "./grid" export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte" +export { default as FilterBuilder } from "./FilterBuilder.svelte" diff --git a/packages/frontend-core/src/fetch/DataFetch.js b/packages/frontend-core/src/fetch/DataFetch.js index 92115efef0..3a45543c60 100644 --- a/packages/frontend-core/src/fetch/DataFetch.js +++ b/packages/frontend-core/src/fetch/DataFetch.js @@ -348,8 +348,7 @@ export default class DataFetch { * Determine the feature flag for this datasource definition * @param definition */ - // eslint-disable-next-line no-unused-vars - determineFeatureFlags(definition) { + determineFeatureFlags(_definition) { return { supportsSearch: false, supportsSort: false, diff --git a/packages/pro b/packages/pro index ef186d0024..06b1064f7e 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit ef186d00241f96037f9fd34d7a3826041977ab3a +Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c diff --git a/packages/server/package.json b/packages/server/package.json index ad03033e67..76402785d7 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -125,7 +125,7 @@ "@babel/preset-env": "7.16.11", "@swc/core": "1.3.71", "@swc/jest": "0.2.27", - "@types/archiver": "^6.0.2", + "@types/archiver": "6.0.2", "@types/global-agent": "2.1.1", "@types/google-spreadsheet": "3.1.5", "@types/jest": "29.5.5", diff --git a/packages/server/scripts/integrations/mssql/docker-compose.yaml b/packages/server/scripts/integrations/mssql/docker-compose.yaml index 89222eddaa..0f10eee456 100644 --- a/packages/server/scripts/integrations/mssql/docker-compose.yaml +++ b/packages/server/scripts/integrations/mssql/docker-compose.yaml @@ -4,6 +4,7 @@ services: # user: sa # database: master mssql: + # platform: linux/amd64 image: bb/mssql build: context: . 
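Several hunks in this changeset rename deliberately unused parameters to an underscore prefix (`removeJobs(_pattern)`, `determineFeatureFlags(_definition)`, `getStringConcat(_parts)`) instead of carrying `eslint-disable` comments; that relies on the `varsIgnorePattern`, `argsIgnorePattern` and `ignoreRestSiblings` options added to `@typescript-eslint/no-unused-vars` in `.eslintrc.json` at the top of the diff. A small illustration of the convention, with made-up function names:

```ts
// With "@typescript-eslint/no-unused-vars" configured as
// { argsIgnorePattern: "^_", varsIgnorePattern: "^_", ignoreRestSiblings: true }:

// Flagged: "pattern" is declared but never read.
export async function removeAll(pattern: string) {
  return []
}

// Not flagged: the leading underscore opts the parameter out of the check.
export async function removeAllIgnored(_pattern: string) {
  return []
}

// ignoreRestSiblings: "password" is destructured only to exclude it from
// "rest", so it is not reported as unused either.
export function withoutPassword(user: { password: string; email: string }) {
  const { password, ...rest } = user
  return rest
}
```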
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 7fc0333de1..be6ac885df 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -36,7 +36,6 @@ import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { db as dbCore } from "@budibase/backend-core" -import AliasTables from "./alias" import sdk from "../../../sdk" import env from "../../../environment" @@ -120,6 +119,9 @@ async function removeManyToManyRelationships( endpoint: getEndpoint(tableId, Operation.DELETE), body: { [colName]: null }, filters, + meta: { + table, + }, }) } else { return [] @@ -134,6 +136,9 @@ async function removeOneToManyRelationships(rowId: string, table: Table) { return getDatasourceAndQuery({ endpoint: getEndpoint(tableId, Operation.UPDATE), filters, + meta: { + table, + }, }) } else { return [] @@ -249,6 +254,9 @@ export class ExternalRequest { const response = await getDatasourceAndQuery({ endpoint: getEndpoint(table._id!, Operation.READ), filters: buildFilters(rowId, {}, table), + meta: { + table, + }, }) if (Array.isArray(response) && response.length > 0) { return response[0] @@ -366,36 +374,45 @@ export class ExternalRequest { ) { continue } - let tableId: string | undefined, + let relatedTableId: string | undefined, lookupField: string | undefined, fieldName: string | undefined if (isManyToMany(field)) { - tableId = field.through + relatedTableId = field.through lookupField = primaryKey fieldName = field.throughTo || primaryKey } else if (isManyToOne(field)) { - tableId = field.tableId + relatedTableId = field.tableId lookupField = field.foreignKey fieldName = field.fieldName } - if (!tableId || !lookupField || !fieldName) { + if (!relatedTableId || !lookupField || !fieldName) { throw new Error( "Unable to lookup relationships - undefined column properties." ) } - const { tableName: relatedTableName } = breakExternalTableId(tableId) + const { tableName: relatedTableName } = + breakExternalTableId(relatedTableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] if (!lookupField || !row[lookupField]) { continue } + const endpoint = getEndpoint(relatedTableId, Operation.READ) + const relatedTable = this.tables[endpoint.entityId] + if (!relatedTable) { + throw new Error("unable to find related table") + } const response = await getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, Operation.READ), + endpoint: endpoint, filters: { equal: { [fieldName]: row[lookupField], }, }, + meta: { + table: relatedTable, + }, }) // this is the response from knex if no rows found const rows: Row[] = @@ -403,7 +420,11 @@ export class ExternalRequest { const storeTo = isManyToMany(field) ? 
field.throughFrom || linkPrimaryKey : fieldName - related[storeTo] = { rows, isMany: isManyToMany(field), tableId } + related[storeTo] = { + rows, + isMany: isManyToMany(field), + tableId: relatedTableId, + } } return related } @@ -471,6 +492,9 @@ export class ExternalRequest { // if we're doing many relationships then we're writing, only one response body, filters: buildFilters(id, {}, linkTable), + meta: { + table: linkTable, + }, }) ) } else { @@ -618,7 +642,7 @@ export class ExternalRequest { if (env.SQL_ALIASING_DISABLE) { response = await getDatasourceAndQuery(json) } else { - const aliasing = new AliasTables(Object.keys(this.tables)) + const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables)) response = await aliasing.queryWithAliasing(json) } diff --git a/packages/server/src/api/controllers/row/utils/basic.ts b/packages/server/src/api/controllers/row/utils/basic.ts index 1fc84de9c7..6255e13c1c 100644 --- a/packages/server/src/api/controllers/row/utils/basic.ts +++ b/packages/server/src/api/controllers/row/utils/basic.ts @@ -62,12 +62,12 @@ export function basicProcessing({ row, table, isLinked, - internal, + sqs, }: { row: Row table: Table isLinked: boolean - internal?: boolean + sqs?: boolean }): Row { const thisRow: Row = {} // filter the row down to what is actually the row (not joined) @@ -84,12 +84,13 @@ export function basicProcessing({ thisRow[fieldName] = value } } - if (!internal) { + if (!sqs) { thisRow._id = generateIdForRow(row, table, isLinked) thisRow.tableId = table._id thisRow._rev = "rev" } else { - for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) { + const columns = Object.keys(table.schema) + for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) { thisRow[internalColumn] = extractFieldValue({ row, tableName: table._id!, diff --git a/packages/server/src/api/controllers/row/utils/sqlUtils.ts b/packages/server/src/api/controllers/row/utils/sqlUtils.ts index 6f9837e0ab..372b8394ff 100644 --- a/packages/server/src/api/controllers/row/utils/sqlUtils.ts +++ b/packages/server/src/api/controllers/row/utils/sqlUtils.ts @@ -51,11 +51,11 @@ export async function updateRelationshipColumns( continue } - let linked = await basicProcessing({ + let linked = basicProcessing({ row, table: linkedTable, isLinked: true, - internal: opts?.sqs, + sqs: opts?.sqs, }) if (!linked._id) { continue diff --git a/packages/server/src/api/controllers/row/utils/utils.ts b/packages/server/src/api/controllers/row/utils/utils.ts index 503f139783..8aa30a86ee 100644 --- a/packages/server/src/api/controllers/row/utils/utils.ts +++ b/packages/server/src/api/controllers/row/utils/utils.ts @@ -134,6 +134,7 @@ export async function sqlOutputProcessing( let rowId = row._id if (opts?.sqs) { rowId = getInternalRowId(row, table) + row._id = rowId } else if (!rowId) { rowId = generateIdForRow(row, table) row._id = rowId @@ -155,7 +156,7 @@ export async function sqlOutputProcessing( row, table, isLinked: false, - internal: opts?.sqs, + sqs: opts?.sqs, }), table ) @@ -169,7 +170,8 @@ export async function sqlOutputProcessing( tables, row, finalRows, - relationships + relationships, + opts ) } diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index d767ca9e98..7cc08e1b5c 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -127,13 +127,6 @@ export const uploadFile = async function ( ) } -export const deleteObjects = async function (ctx: Ctx) 
{ - ctx.body = await objectStore.deleteFiles( - ObjectStoreBuckets.APPS, - ctx.request.body.keys - ) -} - const requiresMigration = async (ctx: Ctx) => { const appId = context.getAppId() if (!appId) { diff --git a/packages/server/src/api/controllers/table/ExternalRequest.ts b/packages/server/src/api/controllers/table/ExternalRequest.ts index 65cead3a1d..1e57ea3294 100644 --- a/packages/server/src/api/controllers/table/ExternalRequest.ts +++ b/packages/server/src/api/controllers/table/ExternalRequest.ts @@ -22,6 +22,7 @@ export async function makeTableRequest( operation, }, meta: { + table, tables, }, table, diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts index 7c036bec9d..e526af4ecb 100644 --- a/packages/server/src/api/controllers/table/external.ts +++ b/packages/server/src/api/controllers/table/external.ts @@ -31,7 +31,6 @@ export async function save( renaming?: RenameColumn ) { const inputs = ctx.request.body - const adding = inputs?._add // can't do this right now delete inputs.rows const tableId = ctx.request.body._id @@ -44,7 +43,7 @@ export async function save( const { datasource, table } = await sdk.tables.external.save( datasourceId!, inputs, - { tableId, renaming, adding } + { tableId, renaming } ) builderSocket?.emitDatasourceUpdate(ctx, datasource) return table diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index f799113333..63ce00c5ef 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -77,11 +77,6 @@ export async function save(ctx: UserCtx) { const renaming = ctx.request.body._rename const api = pickApi({ table }) - // do not pass _rename or _add if saving to CouchDB - if (api === internal) { - delete ctx.request.body._add - delete ctx.request.body._rename - } let savedTable = await api.save(ctx, renaming) if (!table._id) { savedTable = sdk.tables.enrichViewSchemas(savedTable) diff --git a/packages/server/src/api/controllers/table/internal.ts b/packages/server/src/api/controllers/table/internal.ts index eb5e4b6c41..a06cc4dee3 100644 --- a/packages/server/src/api/controllers/table/internal.ts +++ b/packages/server/src/api/controllers/table/internal.ts @@ -16,7 +16,7 @@ export async function save( ctx: UserCtx, renaming?: RenameColumn ) { - const { rows, ...rest } = ctx.request.body + const { _rename, rows, ...rest } = ctx.request.body let tableToSave: Table = { _id: generateTableID(), ...rest, diff --git a/packages/server/src/api/routes/static.ts b/packages/server/src/api/routes/static.ts index a5c421b2e6..f331609923 100644 --- a/packages/server/src/api/routes/static.ts +++ b/packages/server/src/api/routes/static.ts @@ -32,11 +32,6 @@ router .get("/builder/:file*", controller.serveBuilder) .get("/api/assets/client", controller.serveClientLibrary) .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile) - .post( - "/api/attachments/delete", - authorized(BUILDER), - controller.deleteObjects - ) .post("/api/beta/:feature", controller.toggleBetaUiFeature) .post( "/api/attachments/:tableId/upload", @@ -44,12 +39,6 @@ router authorized(PermissionType.TABLE, PermissionLevel.WRITE), controller.uploadFile ) - .post( - "/api/attachments/:tableId/delete", - paramResource("tableId"), - authorized(PermissionType.TABLE, PermissionLevel.WRITE), - controller.deleteObjects - ) .get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview) 
.get("/app/:appUrl/:path*", controller.serveApp) .get("/:appId/:path*", controller.serveApp) diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts index 7790f909e7..e8a38dcfaa 100644 --- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts +++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts @@ -4,6 +4,7 @@ import { Query, QueryPreview, SourceName, + TableSourceType, } from "@budibase/types" import * as setup from "../utilities" import { @@ -740,12 +741,25 @@ describe.each( }) describe("query through datasource", () => { - it("should be able to query a pg datasource", async () => { + it("should be able to query the datasource", async () => { + const entityId = "test_table" + await config.api.datasource.update({ + ...datasource, + entities: { + [entityId]: { + name: entityId, + schema: {}, + type: "table", + sourceId: datasource._id!, + sourceType: TableSourceType.EXTERNAL, + }, + }, + }) const res = await config.api.datasource.query({ endpoint: { datasourceId: datasource._id!, operation: Operation.READ, - entityId: "test_table", + entityId, }, resource: { fields: ["id", "name"], diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 5b71ec9044..698ea0c10b 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -26,6 +26,7 @@ describe.each([ [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], ])("/api/:sourceId/search (%s)", (name, dsProvider) => { const isSqs = name === "internal-sqs" + const isInternal = name === "internal" const config = setup.getConfig() let envCleanup: (() => void) | undefined @@ -336,6 +337,20 @@ describe.each([ expectQuery({ range: { age: { low: 5, high: 9 } }, }).toFindNothing()) + + // We never implemented half-open ranges in Lucene. + !isInternal && + it("can search using just a low value", () => + expectQuery({ + range: { age: { low: 5 } }, + }).toContainExactly([{ age: 10 }])) + + // We never implemented half-open ranges in Lucene. + !isInternal && + it("can search using just a high value", () => + expectQuery({ + range: { age: { high: 5 } }, + }).toContainExactly([{ age: 1 }])) }) describe("sort", () => { @@ -440,6 +455,20 @@ describe.each([ expectQuery({ range: { dob: { low: JAN_5TH, high: JAN_9TH } }, }).toFindNothing()) + + // We never implemented half-open ranges in Lucene. + !isInternal && + it("can search using just a low value", () => + expectQuery({ + range: { dob: { low: JAN_5TH } }, + }).toContainExactly([{ dob: JAN_10TH }])) + + // We never implemented half-open ranges in Lucene. + !isInternal && + it("can search using just a high value", () => + expectQuery({ + range: { dob: { high: JAN_5TH } }, + }).toContainExactly([{ dob: JAN_1ST }])) }) describe("sort", () => { @@ -550,4 +579,100 @@ describe.each([ ])) }) }) + + describe("bigints", () => { + const SMALL = "1" + const MEDIUM = "10000000" + + // Our bigints are int64s in most datasources. 
+ const BIG = "9223372036854775807" + + beforeAll(async () => { + await createTable({ + num: { name: "num", type: FieldType.BIGINT }, + }) + await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }]) + }) + + describe("equal", () => { + it("successfully finds a row", () => + expectQuery({ equal: { num: SMALL } }).toContainExactly([ + { num: SMALL }, + ])) + + it("successfully finds a big value", () => + expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }])) + + it("fails to find nonexistent row", () => + expectQuery({ equal: { num: "2" } }).toFindNothing()) + }) + + describe("notEqual", () => { + it("successfully finds a row", () => + expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ + { num: MEDIUM }, + { num: BIG }, + ])) + + it("fails to find nonexistent row", () => + expectQuery({ notEqual: { num: 10 } }).toContainExactly([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ])) + }) + + describe("oneOf", () => { + it("successfully finds a row", () => + expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ + { num: SMALL }, + ])) + + it("successfully finds all rows", () => + expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ])) + + it("fails to find nonexistent row", () => + expectQuery({ oneOf: { num: [2] } }).toFindNothing()) + }) + + // Range searches against bigints don't seem to work at all in Lucene, and I + // couldn't figure out why. Given that we're replacing Lucene with SQS, + // we've decided not to spend time on it. + !isInternal && + describe("range", () => { + it("successfully finds a row", () => + expectQuery({ + range: { num: { low: SMALL, high: "5" } }, + }).toContainExactly([{ num: SMALL }])) + + it("successfully finds multiple rows", () => + expectQuery({ + range: { num: { low: SMALL, high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + + it("successfully finds a row with a high bound", () => + expectQuery({ + range: { num: { low: MEDIUM, high: BIG } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + + it("successfully finds no rows", () => + expectQuery({ + range: { num: { low: "5", high: "5" } }, + }).toFindNothing()) + + it("can search using just a low value", () => + expectQuery({ + range: { num: { low: MEDIUM } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + + it("can search using just a high value", () => + expectQuery({ + range: { num: { high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + }) + }) }) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 7639b840dc..77e05b8e07 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -219,9 +219,6 @@ describe.each([ it("should add a new column for an internal DB table", async () => { const saveTableRequest: SaveTableRequest = { - _add: { - name: "NEW_COLUMN", - }, ...basicTable(), } @@ -235,7 +232,6 @@ describe.each([ updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), views: {}, } - delete expectedResponse._add expect(response).toEqual(expectedResponse) }) }) diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index 780fcb070e..3bc5ab722f 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -8,7 +8,7 @@ import { PermissionLevel, QuotaUsageType, 
SaveTableRequest, - SearchQueryOperators, + SearchFilterOperator, SortOrder, SortType, StaticQuotaName, @@ -132,7 +132,7 @@ describe.each([ primaryDisplay: generator.word(), query: [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "field", value: "value", }, @@ -236,7 +236,7 @@ describe.each([ ...view, query: [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "newField", value: "thatValue", }, @@ -263,7 +263,7 @@ describe.each([ primaryDisplay: generator.word(), query: [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: generator.word(), value: generator.word(), }, @@ -341,7 +341,7 @@ describe.each([ tableId: generator.guid(), query: [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "newField", value: "thatValue", }, @@ -671,7 +671,7 @@ describe.each([ name: generator.guid(), query: [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "two", value: "bar2", }, diff --git a/packages/server/src/automations/steps/triggerAutomationRun.ts b/packages/server/src/automations/steps/triggerAutomationRun.ts index f45a60600f..83e1722877 100644 --- a/packages/server/src/automations/steps/triggerAutomationRun.ts +++ b/packages/server/src/automations/steps/triggerAutomationRun.ts @@ -62,7 +62,6 @@ export const definition: AutomationStepSchema = { } export async function run({ inputs }: AutomationStepInput) { - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { automationId, ...fieldParams } = inputs.automation if (await features.isTriggerAutomationRunEnabled()) { diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts index b1c02b1764..ce8d0accbb 100644 --- a/packages/server/src/db/utils.ts +++ b/packages/server/src/db/utils.ts @@ -40,6 +40,7 @@ export const USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${dbCore.Inte export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}` export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}` export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}` +export const SQS_DATASOURCE_INTERNAL = "internal" export const ViewName = dbCore.ViewName export const InternalTables = dbCore.InternalTable export const UNICODE_MAX = dbCore.UNICODE_MAX diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index f8adcbe0ee..d9d299d5fa 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -28,6 +28,7 @@ const DEFAULTS = { PLUGINS_DIR: "/plugins", FORKED_PROCESS_NAME: "main", JS_RUNNER_MEMORY_LIMIT: 64, + COUCH_DB_SQL_URL: "http://localhost:4006", } const QUERY_THREAD_TIMEOUT = @@ -39,6 +40,7 @@ const environment = { // important - prefer app port to generic port PORT: process.env.APP_PORT || process.env.PORT, COUCH_DB_URL: process.env.COUCH_DB_URL, + COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || DEFAULTS.COUCH_DB_SQL_URL, MINIO_URL: process.env.MINIO_URL, WORKER_URL: process.env.WORKER_URL, AWS_REGION: process.env.AWS_REGION, diff --git a/packages/server/src/features.ts b/packages/server/src/features.ts index 3ab9410f53..f040cf82a2 100644 --- a/packages/server/src/features.ts +++ b/packages/server/src/features.ts @@ -1,11 +1,8 @@ import { features } from "@budibase/backend-core" import env from "./environment" -// eslint-disable-next-line no-unused-vars 
enum AppFeature { - // eslint-disable-next-line no-unused-vars API = "api", - // eslint-disable-next-line no-unused-vars AUTOMATIONS = "automations", } diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts index 7e54b53b15..b4eb1035d6 100644 --- a/packages/server/src/integration-test/mysql.spec.ts +++ b/packages/server/src/integration-test/mysql.spec.ts @@ -16,7 +16,6 @@ import { getDatasource, rawQuery, } from "../integrations/tests/utils" -import { builderSocket } from "../websockets" import { generator } from "@budibase/backend-core/tests" // @ts-ignore fetch.mockSearch() @@ -233,72 +232,6 @@ describe("mysql integrations", () => { }) describe("POST /api/tables/", () => { - const emitDatasourceUpdateMock = jest.fn() - - it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => { - const addColumnToTable: TableRequest = { - type: "table", - sourceType: TableSourceType.EXTERNAL, - name: uniqueTableName(), - sourceId: datasource._id!, - primary: ["id"], - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - }, - new_column: { - type: FieldType.NUMBER, - name: "new_column", - }, - }, - _add: { - name: "new_column", - }, - } - - jest - .spyOn(builderSocket!, "emitDatasourceUpdate") - .mockImplementation(emitDatasourceUpdateMock) - - await makeRequest("post", "/api/tables/", addColumnToTable) - - const expectedTable: TableRequest = { - ...addColumnToTable, - schema: { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: false, - }, - externalType: "int unsigned", - }, - new_column: { - type: FieldType.NUMBER, - name: "new_column", - autocolumn: false, - constraints: { - presence: false, - }, - externalType: "float(8,2)", - }, - }, - created: true, - _id: `${datasource._id}__${addColumnToTable.name}`, - } - delete expectedTable._add - - expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1) - const emittedDatasource: Datasource = - emitDatasourceUpdateMock.mock.calls[0][1] - expect(emittedDatasource.entities![expectedTable.name]).toEqual( - expectedTable - ) - }) - it("will rename a column", async () => { await makeRequest("post", "/api/tables/", primaryMySqlTable) diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index 03e6028e32..371592bece 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -2,6 +2,7 @@ import { QueryJson, Datasource, DatasourcePlusQueryResponse, + RowOperations, } from "@budibase/types" import { getIntegration } from "../index" import sdk from "../../sdk" @@ -10,6 +11,17 @@ export async function makeExternalQuery( datasource: Datasource, json: QueryJson ): Promise { + const entityId = json.endpoint.entityId, + tableName = json.meta.table.name, + tableId = json.meta.table._id + // case found during testing - make sure this doesn't happen again + if ( + RowOperations.includes(json.endpoint.operation) && + entityId !== tableId && + entityId !== tableName + ) { + throw new Error("Entity ID and table metadata do not align") + } datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 259abec106..28b7eb910b 100644 --- a/packages/server/src/integrations/base/sql.ts +++ 
b/packages/server/src/integrations/base/sql.ts @@ -6,6 +6,7 @@ import { SqlClient, isValidFilter, getNativeSql, + SqlStatements, } from "../utils" import SqlTableQueryBuilder from "./sqlTable" import { @@ -22,6 +23,8 @@ import { SortDirection, SqlQueryBinding, Table, + TableSourceType, + INTERNAL_TABLE_SOURCE_ID, } from "@budibase/types" import environment from "../../environment" @@ -135,6 +138,18 @@ function generateSelectStatement( }) } +function getTableName(table?: Table): string | undefined { + // SQS uses the table ID rather than the table name + if ( + table?.sourceType === TableSourceType.INTERNAL || + table?.sourceId === INTERNAL_TABLE_SOURCE_ID + ) { + return table?._id + } else { + return table?.name + } +} + class InternalBuilder { private readonly client: string @@ -146,10 +161,20 @@ class InternalBuilder { addFilters( query: Knex.QueryBuilder, filters: SearchFilters | undefined, - tableName: string, + table: Table, opts: { aliases?: Record; relationship?: boolean } ): Knex.QueryBuilder { - function getTableName(name: string) { + if (!filters) { + return query + } + filters = parseFilters(filters) + // if all or specified in filters, then everything is an or + const allOr = filters.allOr + const sqlStatements = new SqlStatements(this.client, table, { allOr }) + const tableName = + this.client === SqlClient.SQL_LITE ? table._id! : table.name + + function getTableAlias(name: string) { const alias = opts.aliases?.[name] return alias || name } @@ -161,11 +186,11 @@ class InternalBuilder { const updatedKey = dbCore.removeKeyNumbering(key) const isRelationshipField = updatedKey.includes(".") if (!opts.relationship && !isRelationshipField) { - fn(`${getTableName(tableName)}.${updatedKey}`, value) + fn(`${getTableAlias(tableName)}.${updatedKey}`, value) } if (opts.relationship && isRelationshipField) { const [filterTableName, property] = updatedKey.split(".") - fn(`${getTableName(filterTableName)}.${property}`, value) + fn(`${getTableAlias(filterTableName)}.${property}`, value) } } } @@ -244,12 +269,6 @@ class InternalBuilder { } } - if (!filters) { - return query - } - filters = parseFilters(filters) - // if all or specified in filters, then everything is an or - const allOr = filters.allOr if (filters.oneOf) { iterate(filters.oneOf, (key, array) => { const fnc = allOr ? "orWhereIn" : "whereIn" @@ -292,17 +311,11 @@ class InternalBuilder { const lowValid = isValidFilter(value.low), highValid = isValidFilter(value.high) if (lowValid && highValid) { - // Use a between operator if we have 2 valid range values - const fnc = allOr ? "orWhereBetween" : "whereBetween" - query = query[fnc](key, [value.low, value.high]) + query = sqlStatements.between(query, key, value.low, value.high) } else if (lowValid) { - // Use just a single greater than operator if we only have a low - const fnc = allOr ? "orWhere" : "where" - query = query[fnc](key, ">", value.low) + query = sqlStatements.lte(query, key, value.low) } else if (highValid) { - // Use just a single less than operator if we only have a high - const fnc = allOr ? "orWhere" : "where" - query = query[fnc](key, "<", value.high) + query = sqlStatements.gte(query, key, value.high) } }) } @@ -345,10 +358,11 @@ class InternalBuilder { addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { let { sort, paginate } = json - const table = json.meta?.table + const table = json.meta.table + const tableName = getTableName(table) const aliases = json.tableAliases const aliased = - table?.name && aliases?.[table.name] ? 
aliases[table.name] : table?.name + tableName && aliases?.[tableName] ? aliases[tableName] : table?.name if (sort && Object.keys(sort || {}).length > 0) { for (let [key, value] of Object.entries(sort)) { const direction = @@ -458,14 +472,13 @@ class InternalBuilder { ): Knex.QueryBuilder { const tableName = endpoint.entityId const tableAlias = aliases?.[tableName] - let table: string | Record = tableName - if (tableAlias) { - table = { [tableAlias]: tableName } - } - let query = knex(table) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } + + const query = knex( + this.tableNameWithSchema(tableName, { + alias: tableAlias, + schema: endpoint.schema, + }) + ) return query } @@ -532,7 +545,7 @@ class InternalBuilder { if (foundOffset) { query = query.offset(foundOffset) } - query = this.addFilters(query, filters, tableName, { + query = this.addFilters(query, filters, json.meta.table, { aliases: tableAliases, }) // add sorting to pre-query @@ -553,7 +566,7 @@ class InternalBuilder { endpoint.schema, tableAliases ) - return this.addFilters(query, filters, tableName, { + return this.addFilters(query, filters, json.meta.table, { relationship: true, aliases: tableAliases, }) @@ -563,7 +576,7 @@ class InternalBuilder { const { endpoint, body, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) - query = this.addFilters(query, filters, endpoint.entityId, { + query = this.addFilters(query, filters, json.meta.table, { aliases: tableAliases, }) // mysql can't use returning @@ -577,7 +590,7 @@ class InternalBuilder { delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = this.addFilters(query, filters, endpoint.entityId, { + query = this.addFilters(query, filters, json.meta.table, { aliases: tableAliases, }) // mysql can't use returning @@ -669,7 +682,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { // when creating if an ID has been inserted need to make sure // the id filter is enriched with it before trying to retrieve the row checkLookupKeys(id: any, json: QueryJson) { - if (!id || !json.meta?.table || !json.meta.table.primary) { + if (!id || !json.meta.table || !json.meta.table.primary) { return json } const primaryKey = json.meta.table.primary?.[0] @@ -729,12 +742,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { results: Record[], aliases?: Record ): Record[] { + const tableName = getTableName(table) for (const [name, field] of Object.entries(table.schema)) { if (!this._isJsonColumn(field)) { continue } - const tableName = aliases?.[table.name] || table.name - const fullName = `${tableName}.${name}` + const aliasedTableName = (tableName && aliases?.[tableName]) || tableName + const fullName = `${aliasedTableName}.${name}` for (let row of results) { if (typeof row[fullName] === "string") { row[fullName] = JSON.parse(row[fullName]) diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts index 4ff336421f..3c55d75b8b 100644 --- a/packages/server/src/integrations/base/sqlTable.ts +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -1,19 +1,20 @@ import { Knex, knex } from "knex" import { - RelationshipType, FieldSubtype, + FieldType, NumberFieldMetadata, Operation, QueryJson, + RelationshipType, RenameColumn, - Table, - FieldType, SqlQuery, + Table, + TableSourceType, } from 
"@budibase/types" import { breakExternalTableId, getNativeSql, SqlClient } from "../utils" +import { utils } from "@budibase/shared-core" import SchemaBuilder = Knex.SchemaBuilder import CreateTableBuilder = Knex.CreateTableBuilder -import { utils } from "@budibase/shared-core" function isIgnoredType(type: FieldType) { const ignored = [FieldType.LINK, FieldType.FORMULA] @@ -105,13 +106,13 @@ function generateSchema( column.relationshipType !== RelationshipType.MANY_TO_MANY ) { if (!column.foreignKey || !column.tableId) { - throw "Invalid relationship schema" + throw new Error("Invalid relationship schema") } const { tableName } = breakExternalTableId(column.tableId) // @ts-ignore const relatedTable = tables[tableName] if (!relatedTable) { - throw "Referenced table doesn't exist" + throw new Error("Referenced table doesn't exist") } const relatedPrimary = relatedTable.primary[0] const externalType = relatedTable.schema[relatedPrimary].externalType @@ -209,15 +210,19 @@ class SqlTableQueryBuilder { let query: Knex.SchemaBuilder if (!json.table || !json.meta || !json.meta.tables) { - throw "Cannot execute without table being specified" + throw new Error("Cannot execute without table being specified") } + if (json.table.sourceType === TableSourceType.INTERNAL) { + throw new Error("Cannot perform table actions for SQS.") + } + switch (this._operation(json)) { case Operation.CREATE_TABLE: query = buildCreateTable(client, json.table, json.meta.tables) break case Operation.UPDATE_TABLE: if (!json.meta || !json.meta.table) { - throw "Must specify old table for update" + throw new Error("Must specify old table for update") } // renameColumn does not work for MySQL, so return a raw query if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) { @@ -264,7 +269,7 @@ class SqlTableQueryBuilder { query = buildDeleteTable(client, json.table) break default: - throw "Table operation is of unknown type" + throw new Error("Table operation is of unknown type") } return getNativeSql(query) } diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 1573c98f16..7215c337d7 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -168,8 +168,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { return "" } - // eslint-disable-next-line @typescript-eslint/no-unused-vars - getStringConcat(parts: string[]) { + getStringConcat(_parts: string[]) { return "" } diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index dc2a06446b..5de9cc4fbc 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -1,14 +1,27 @@ import { SqlClient } from "../utils" import Sql from "../base/sql" import { + FieldType, Operation, QueryJson, - TableSourceType, Table, - FieldType, + TableSourceType, } from "@budibase/types" const TABLE_NAME = "test" +const TABLE: Table = { + type: "table", + sourceType: TableSourceType.EXTERNAL, + sourceId: "SOURCE_ID", + schema: { + id: { + name: "id", + type: FieldType.NUMBER, + }, + }, + name: TABLE_NAME, + primary: ["id"], +} function endpoint(table: any, operation: any) { return { @@ -25,6 +38,10 @@ function generateReadJson({ sort, paginate, }: any = {}): QueryJson { + const tableObj = { ...TABLE } + if (table) { + tableObj.name = table + } return { endpoint: endpoint(table || TABLE_NAME, "READ"), resource: { @@ -34,14 +51,7 @@ function 
generateReadJson({ sort: sort || {}, paginate: paginate || {}, meta: { - table: { - type: "table", - sourceType: TableSourceType.EXTERNAL, - sourceId: "SOURCE_ID", - schema: {}, - name: table || TABLE_NAME, - primary: ["id"], - } as any, + table: tableObj, }, } } @@ -49,6 +59,9 @@ function generateReadJson({ function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson { return { endpoint: endpoint(table, "CREATE"), + meta: { + table: TABLE, + }, body, } } @@ -58,7 +71,15 @@ function generateUpdateJson({ body = {}, filters = {}, meta = {}, +}: { + table: string + body?: any + filters?: any + meta?: any }): QueryJson { + if (!meta.table) { + meta.table = TABLE + } return { endpoint: endpoint(table, "UPDATE"), filters, @@ -70,6 +91,9 @@ function generateUpdateJson({ function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson { return { endpoint: endpoint(table, "DELETE"), + meta: { + table: TABLE, + }, filters, } } @@ -102,6 +126,9 @@ function generateRelationshipJson(config: { schema?: string } = {}): QueryJson { }, ], extra: { idFilter: {} }, + meta: { + table: TABLE, + }, } } @@ -136,6 +163,9 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) { }, ], extra: { idFilter: {} }, + meta: { + table: TABLE, + }, } } @@ -319,7 +349,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [date, limit], - sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" > $1 limit $2) as "${TABLE_NAME}"`, + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`, }) }) @@ -338,7 +368,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [date, limit], - sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" < $1 limit $2) as "${TABLE_NAME}"`, + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`, }) }) @@ -572,7 +602,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: ["2000-01-01 00:00:00", 500], - sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" > $1 limit $2) as "${TABLE_NAME}"`, + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" >= $1 limit $2) as "${TABLE_NAME}"`, }) }) @@ -591,7 +621,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: ["2010-01-01 00:00:00", 500], - sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" < $1 limit $2) as "${TABLE_NAME}"`, + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" <= $1 limit $2) as "${TABLE_NAME}"`, }) }) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index bfca24ff7d..fda2a091fa 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -4,12 +4,26 @@ import { QueryJson, SourceName, SqlQuery, + Table, + TableSourceType, } from "@budibase/types" import { join } from "path" import Sql from "../base/sql" import { SqlClient } from "../utils" -import AliasTables from "../../api/controllers/row/alias" import { generator } from "@budibase/backend-core/tests" +import sdk from "../../sdk" + +// this doesn't exist strictly +const TABLE: Table = { + type: "table", + sourceType: TableSourceType.EXTERNAL, + sourceId: "SOURCE_ID", + schema: {}, + name: "tableName", + primary: 
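Note: the updated expectations above (> becomes >= and < becomes <=) reflect that one-sided range filters are now inclusive. A quick sketch of the SQL shape the tests assert, built with knex directly; the outer pagination subquery is omitted, and only the query compiler is used, so no database connection is made:

// Sketch: the inclusive one-sided range now produced for a low-only filter.
import { knex } from "knex"

const kn = knex({ client: "pg" })

const native = kn("test")
  .where("test.property", ">=", "2000-01-01 00:00:00")
  .limit(500)
  .toSQL()
  .toNative()

// native.sql      -> select * from "test" where "test"."property" >= $1 limit $2
// native.bindings -> [ "2000-01-01 00:00:00", 500 ]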
["id"], +} + +const AliasTables = sdk.rows.AliasTables function multiline(sql: string) { return sql.replace(/\n/g, "").replace(/ +/g, " ") @@ -103,7 +117,8 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) const filters = queryJson.filters const notEqualsValue = Object.values(filters?.notEqual!)[0] - const rangeValue = Object.values(filters?.range!)[0] + const rangeValue: { high?: string | number; low?: string | number } = + Object.values(filters?.range!)[0] const equalValue = Object.values(filters?.equal!)[0] expect(query).toEqual({ @@ -220,6 +235,9 @@ describe("Captures of real examples", () => { resource: { fields, }, + meta: { + table: TABLE, + }, } } diff --git a/packages/server/src/integrations/utils/index.ts b/packages/server/src/integrations/utils/index.ts new file mode 100644 index 0000000000..a9c2019ba2 --- /dev/null +++ b/packages/server/src/integrations/utils/index.ts @@ -0,0 +1,2 @@ +export * from "./utils" +export { SqlStatements } from "./sqlStatements" diff --git a/packages/server/src/integrations/utils/sqlStatements.ts b/packages/server/src/integrations/utils/sqlStatements.ts new file mode 100644 index 0000000000..7a5482830b --- /dev/null +++ b/packages/server/src/integrations/utils/sqlStatements.ts @@ -0,0 +1,80 @@ +import { FieldType, Table, FieldSchema } from "@budibase/types" +import { SqlClient } from "./utils" +import { Knex } from "knex" + +export class SqlStatements { + client: string + table: Table + allOr: boolean | undefined + constructor( + client: string, + table: Table, + { allOr }: { allOr?: boolean } = {} + ) { + this.client = client + this.table = table + this.allOr = allOr + } + + getField(key: string): FieldSchema | undefined { + const fieldName = key.split(".")[1] + return this.table.schema[fieldName] + } + + between( + query: Knex.QueryBuilder, + key: string, + low: number | string, + high: number | string + ) { + // Use a between operator if we have 2 valid range values + const field = this.getField(key) + if ( + field?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw( + `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`, + [low, high] + ) + } else { + const fnc = this.allOr ? "orWhereBetween" : "whereBetween" + query = query[fnc](key, [low, high]) + } + return query + } + + lte(query: Knex.QueryBuilder, key: string, low: number | string) { + // Use just a single greater than operator if we only have a low + const field = this.getField(key) + if ( + field?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [ + low, + ]) + } else { + const fnc = this.allOr ? "orWhere" : "where" + query = query[fnc](key, ">=", low) + } + return query + } + + gte(query: Knex.QueryBuilder, key: string, high: number | string) { + const field = this.getField(key) + // Use just a single less than operator if we only have a high + if ( + field?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [ + high, + ]) + } else { + const fnc = this.allOr ? 
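Note: the SqlStatements helpers being added here special-case BIGINT columns when the client is SQLite. SQS stores BIGINT values as TEXT (see the FieldTypeMap change later in this diff), so both sides of the comparison are cast to INTEGER to get numeric rather than lexicographic ordering. A standalone knex sketch of that technique; the table and column names are hypothetical and only the query compiler is exercised:

// Sketch of the CAST technique used for BIGINT range filters on SQLite.
import { knex } from "knex"

const kn = knex({ client: "sqlite3", useNullAsDefault: true })

const key = "ta_users.salary"
const query = kn("ta_users").whereRaw(
  `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
  [1000, 2000]
)

console.log(query.toString())
// select * from `ta_users` where CAST(ta_users.salary AS INTEGER)
//   BETWEEN CAST(1000 AS INTEGER) AND CAST(2000 AS INTEGER)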
"orWhere" : "where" + query = query[fnc](key, "<=", high) + } + return query + } +} diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils/utils.ts similarity index 89% rename from packages/server/src/integrations/utils.ts rename to packages/server/src/integrations/utils/utils.ts index d5f6d191e1..cc75f0444d 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils/utils.ts @@ -5,10 +5,10 @@ import { FieldType, TableSourceType, } from "@budibase/types" -import { DocumentType, SEPARATOR } from "../db/utils" -import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants" -import { helpers } from "@budibase/shared-core" -import env from "../environment" +import { DocumentType, SEPARATOR } from "../../db/utils" +import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants" +import { SWITCHABLE_TYPES, helpers } from "@budibase/shared-core" +import env from "../../environment" import { Knex } from "knex" const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` @@ -284,8 +284,8 @@ export function isIsoDateString(str: string) { * @param column The column to check, to see if it is a valid relationship. * @param tableIds The IDs of the tables which currently exist. */ -export function shouldCopyRelationship( - column: { type: string; tableId?: string }, +function shouldCopyRelationship( + column: { type: FieldType.LINK; tableId?: string }, tableIds: string[] ) { return ( @@ -303,28 +303,18 @@ export function shouldCopyRelationship( * @param column The column to check for options or boolean type. * @param fetchedColumn The fetched column to check for the type in the external database. */ -export function shouldCopySpecialColumn( - column: { type: string }, - fetchedColumn: { type: string } | undefined +function shouldCopySpecialColumn( + column: { type: FieldType }, + fetchedColumn: { type: FieldType } | undefined ) { const isFormula = column.type === FieldType.FORMULA - const specialTypes = [ - FieldType.OPTIONS, - FieldType.LONGFORM, - FieldType.ARRAY, - FieldType.FORMULA, - FieldType.BB_REFERENCE, - ] // column has been deleted, remove - formulas will never exist, always copy if (!isFormula && column && !fetchedColumn) { return false } const fetchedIsNumber = !fetchedColumn || fetchedColumn.type === FieldType.NUMBER - return ( - specialTypes.indexOf(column.type as FieldType) !== -1 || - (fetchedIsNumber && column.type === FieldType.BOOLEAN) - ) + return fetchedIsNumber && column.type === FieldType.BOOLEAN } /** @@ -357,11 +347,44 @@ function copyExistingPropsOver( continue } const column = existingTableSchema[key] + + const existingColumnType = column?.type + const updatedColumnType = table.schema[key]?.type + + // If the db column type changed to a non-compatible one, we want to re-fetch it if ( - shouldCopyRelationship(column, tableIds) || - shouldCopySpecialColumn(column, table.schema[key]) + updatedColumnType !== existingColumnType && + !SWITCHABLE_TYPES[updatedColumnType]?.includes(existingColumnType) ) { - table.schema[key] = existingTableSchema[key] + continue + } + + if ( + column.type === FieldType.LINK && + !shouldCopyRelationship(column, tableIds) + ) { + continue + } + + const specialTypes = [ + FieldType.OPTIONS, + FieldType.LONGFORM, + FieldType.ARRAY, + FieldType.FORMULA, + FieldType.BB_REFERENCE, + ] + if ( + specialTypes.includes(column.type) && + !shouldCopySpecialColumn(column, table.schema[key]) + ) { + continue + } + + table.schema[key] = { + ...existingTableSchema[key], + 
externalType: + existingTableSchema[key].externalType || + table.schema[key].externalType, } } } diff --git a/packages/server/src/jsRunner/index.ts b/packages/server/src/jsRunner/index.ts index 7065febcb4..b2cae1cfbc 100644 --- a/packages/server/src/jsRunner/index.ts +++ b/packages/server/src/jsRunner/index.ts @@ -35,7 +35,6 @@ export function init() { // Because we can't pass functions into an Isolate, we remove them from // the passed context and rely on the withHelpers() method to add them // back in. - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { helpers, snippets, ...rest } = ctx return vm.withContext(rest, () => vm.execute(js)) } catch (error: any) { diff --git a/packages/server/src/sdk/app/datasources/datasources.ts b/packages/server/src/sdk/app/datasources/datasources.ts index 336a94636b..84e1601152 100644 --- a/packages/server/src/sdk/app/datasources/datasources.ts +++ b/packages/server/src/sdk/app/datasources/datasources.ts @@ -348,8 +348,7 @@ const preSaveAction: Partial> = { * Make sure all datasource entities have a display name selected */ export function setDefaultDisplayColumns(datasource: Datasource) { - // - for (let entity of Object.values(datasource.entities || {})) { + for (const entity of Object.values(datasource.entities || {})) { if (entity.primaryDisplay) { continue } diff --git a/packages/server/src/sdk/app/rows/index.ts b/packages/server/src/sdk/app/rows/index.ts index ea501e93d9..c117941419 100644 --- a/packages/server/src/sdk/app/rows/index.ts +++ b/packages/server/src/sdk/app/rows/index.ts @@ -3,6 +3,7 @@ import * as rows from "./rows" import * as search from "./search" import * as utils from "./utils" import * as external from "./external" +import AliasTables from "./sqlAlias" export default { ...attachments, @@ -10,4 +11,5 @@ export default { ...search, utils, external, + AliasTables, } diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index 5a016c821f..e347a8657d 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -13,6 +13,8 @@ import * as sqs from "./search/sqs" import env from "../../../environment" import { ExportRowsParams, ExportRowsResult } from "./search/types" import { dataFilters } from "@budibase/shared-core" +import sdk from "../../index" +import { searchInputMapping } from "./search/utils" export { isValidFilter } from "../../../integrations/utils" @@ -76,12 +78,15 @@ export async function search( } } + const table = await sdk.tables.getTable(options.tableId) + options = searchInputMapping(table, options) + if (isExternalTable) { - return external.search(options) + return external.search(options, table) } else if (env.SQS_SEARCH_ENABLE) { - return sqs.search(options) + return sqs.search(options, table) } else { - return internal.search(options) + return internal.search(options, table) } } diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index e0a3bad94e..077f971903 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -8,6 +8,7 @@ import { SearchFilters, RowSearchParams, SearchResponse, + Table, } from "@budibase/types" import * as exporters from "../../../../api/controllers/view/exporters" import { handleRequest } from "../../../../api/controllers/row/external" @@ -18,13 +19,13 @@ import { import { utils } from "@budibase/shared-core" import { ExportRowsParams, 
ExportRowsResult } from "./types" import { HTTPError, db } from "@budibase/backend-core" -import { searchInputMapping } from "./utils" import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" export async function search( - options: RowSearchParams + options: RowSearchParams, + table: Table ): Promise> { const { tableId } = options const { paginate, query, ...params } = options @@ -68,8 +69,6 @@ export async function search( } try { - const table = await sdk.tables.getTable(tableId) - options = searchInputMapping(table, options) let rows = await handleRequest(Operation.READ, tableId, { filters: query, sort, @@ -150,11 +149,15 @@ export async function exportRows( } const datasource = await sdk.datasources.get(datasourceId!) + const table = await sdk.tables.getTable(tableId) if (!datasource || !datasource.entities) { throw new HTTPError("Datasource has not been configured for plus API.", 400) } - let result = await search({ tableId, query: requestQuery, sort, sortOrder }) + let result = await search( + { tableId, query: requestQuery, sort, sortOrder }, + table + ) let rows: Row[] = [] let headers diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index 610807a10e..906ca016d1 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -1,6 +1,6 @@ import { context, db, HTTPError } from "@budibase/backend-core" import env from "../../../../environment" -import { fullSearch, paginatedSearch, searchInputMapping } from "./utils" +import { fullSearch, paginatedSearch } from "./utils" import { getRowParams, InternalTables } from "../../../../db/utils" import { Database, @@ -33,7 +33,8 @@ import pick from "lodash/pick" import { breakRowIdField } from "../../../../integrations/utils" export async function search( - options: RowSearchParams + options: RowSearchParams, + table: Table ): Promise> { const { tableId } = options @@ -51,8 +52,6 @@ export async function search( query: {}, } - let table = await sdk.tables.getTable(tableId) - options = searchInputMapping(table, options) if (params.sort && !params.sortType) { const schema = table.schema const sortField = schema[params.sort] @@ -122,12 +121,15 @@ export async function exportRows( result = await outputProcessing(table, response) } else if (query) { - let searchResponse = await search({ - tableId, - query, - sort, - sortOrder, - }) + let searchResponse = await search( + { + tableId, + query, + sort, + sortOrder, + }, + table + ) result = searchResponse.rows } diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 7abd7d9e72..10cc5aa6c6 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -20,7 +20,12 @@ import { } from "../../../../api/controllers/row/utils" import sdk from "../../../index" import { context } from "@budibase/backend-core" -import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils" +import { + CONSTANT_INTERNAL_ROW_COLS, + SQS_DATASOURCE_INTERNAL, +} from "../../../../db/utils" +import AliasTables from "../sqlAlias" +import { outputProcessing } from "../../../../utilities/rowProcessor" function buildInternalFieldList( table: Table, @@ -31,19 +36,19 @@ function buildInternalFieldList( fieldList = fieldList.concat( CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`) ) - if (opts.relationships) { 
- for (let col of Object.values(table.schema)) { - if (col.type === FieldType.LINK) { - const linkCol = col as RelationshipFieldMetadata - const relatedTable = tables.find( - table => table._id === linkCol.tableId - )! - fieldList = fieldList.concat( - buildInternalFieldList(relatedTable, tables, { relationships: false }) - ) - } else { - fieldList.push(`${table._id}.${col.name}`) - } + for (let col of Object.values(table.schema)) { + const isRelationship = col.type === FieldType.LINK + if (!opts.relationships && isRelationship) { + continue + } + if (isRelationship) { + const linkCol = col as RelationshipFieldMetadata + const relatedTable = tables.find(table => table._id === linkCol.tableId)! + fieldList = fieldList.concat( + buildInternalFieldList(relatedTable, tables, { relationships: false }) + ) + } else { + fieldList.push(`${table._id}.${col.name}`) } } return fieldList @@ -94,14 +99,14 @@ function buildTableMap(tables: Table[]) { } export async function search( - options: RowSearchParams + options: RowSearchParams, + table: Table ): Promise> { - const { tableId, paginate, query, ...params } = options + const { paginate, query, ...params } = options const builder = new SqlQueryBuilder(SqlClient.SQL_LITE) const allTables = await sdk.tables.getAllInternalTables() const allTablesMap = buildTableMap(allTables) - const table = allTables.find(table => table._id === tableId) if (!table) { throw new Error("Unable to find table") } @@ -111,7 +116,7 @@ export async function search( const request: QueryJson = { endpoint: { // not important, we query ourselves - datasourceId: "internal", + datasourceId: SQS_DATASOURCE_INTERNAL, entityId: table._id!, operation: Operation.READ, }, @@ -154,37 +159,47 @@ export async function search( } } try { - const query = builder._query(request, { - disableReturning: true, + const alias = new AliasTables(allTables.map(table => table.name)) + const rows = await alias.queryWithAliasing(request, async json => { + const query = builder._query(json, { + disableReturning: true, + }) + + if (Array.isArray(query)) { + throw new Error("SQS cannot currently handle multiple queries") + } + + let sql = query.sql, + bindings = query.bindings + + // quick hack for docIds + sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`") + sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`") + + const db = context.getAppDB() + return await db.sql(sql, bindings) }) - if (Array.isArray(query)) { - throw new Error("SQS cannot currently handle multiple queries") - } - - let sql = query.sql, - bindings = query.bindings - - // quick hack for docIds - sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`") - sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`") - - const db = context.getAppDB() - const rows = await db.sql(sql, bindings) + // process from the format of tableId.column to expected format + const processed = await sqlOutputProcessing( + rows, + table!, + allTablesMap, + relationships, + { + sqs: true, + } + ) return { - rows: await sqlOutputProcessing( - rows, - table!, - allTablesMap, - relationships, - { - sqs: true, - } - ), + // final row processing for response + rows: await outputProcessing(table, processed, { + preserveLinks: true, + squash: true, + }), } } catch (err: any) { const msg = typeof err === "string" ? 
err : err.message - throw new Error(`Unable to search by SQL - ${msg}`) + throw new Error(`Unable to search by SQL - ${msg}`, { cause: err }) } } diff --git a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts index f2bdec4692..53bc049a9b 100644 --- a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts @@ -112,7 +112,7 @@ describe("external search", () => { tableId, query: {}, } - const result = await search(searchParams) + const result = await search(searchParams, config.table!) expect(result.rows).toHaveLength(10) expect(result.rows).toEqual( @@ -130,7 +130,7 @@ describe("external search", () => { query: {}, fields: ["name", "age"], } - const result = await search(searchParams) + const result = await search(searchParams, config.table!) expect(result.rows).toHaveLength(10) expect(result.rows).toEqual( @@ -157,7 +157,7 @@ describe("external search", () => { }, }, } - const result = await search(searchParams) + const result = await search(searchParams, config.table!) expect(result.rows).toHaveLength(3) expect(result.rows.map(row => row.id)).toEqual([1, 4, 8]) diff --git a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts index 5be0f4a258..1c5f396737 100644 --- a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts @@ -81,7 +81,7 @@ describe("internal", () => { tableId, query: {}, } - const result = await search(searchParams) + const result = await search(searchParams, config.table!) expect(result.rows).toHaveLength(10) expect(result.rows).toEqual( @@ -99,7 +99,7 @@ describe("internal", () => { query: {}, fields: ["name", "age"], } - const result = await search(searchParams) + const result = await search(searchParams, config.table!) 
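Note: the re-thrown search error above now carries the original failure via the cause option (available since Node 16.9 / ES2022), so logs keep the underlying SQLite error while the surfaced message stays readable. A minimal sketch of the pattern:

// Sketch: re-throwing with a readable message while preserving the original
// failure on `cause` (requires Node 16.9+ / an ES2022 lib target).
try {
  JSON.parse("not json")
} catch (err: any) {
  const wrapped = new Error(`Unable to search by SQL - ${err.message}`, {
    cause: err,
  })
  // wrapped.cause is the original SyntaxError; wrapped.stack points here
  console.error(wrapped)
}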
expect(result.rows).toHaveLength(10) expect(result.rows).toEqual( diff --git a/packages/server/src/sdk/app/rows/search/tests/lucene.ts b/packages/server/src/sdk/app/rows/search/tests/lucene.spec.ts similarity index 98% rename from packages/server/src/sdk/app/rows/search/tests/lucene.ts rename to packages/server/src/sdk/app/rows/search/tests/lucene.spec.ts index 708f362198..d9c1c79177 100644 --- a/packages/server/src/sdk/app/rows/search/tests/lucene.ts +++ b/packages/server/src/sdk/app/rows/search/tests/lucene.spec.ts @@ -160,7 +160,7 @@ describe("internal search", () => { const response = await search.paginatedSearch( { contains: { - column: "a", + column: ["a"], colArr: [1, 2, 3], }, }, @@ -168,7 +168,7 @@ describe("internal search", () => { ) checkLucene( response, - `(*:* AND column:a AND colArr:(1 AND 2 AND 3))`, + `(*:* AND column:(a) AND colArr:(1 AND 2 AND 3))`, PARAMS ) }) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/sdk/app/rows/sqlAlias.ts similarity index 87% rename from packages/server/src/api/controllers/row/alias.ts rename to packages/server/src/sdk/app/rows/sqlAlias.ts index 0ec9d1a09c..0fc338ecbe 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/sdk/app/rows/sqlAlias.ts @@ -6,11 +6,12 @@ import { Row, SearchFilters, } from "@budibase/types" -import { getSQLClient } from "../../../sdk/app/rows/utils" +import { getSQLClient } from "./utils" import { cloneDeep } from "lodash" -import sdk from "../../../sdk" +import datasources from "../datasources" import { makeExternalQuery } from "../../../integrations/base/query" import { SqlClient } from "../../../integrations/utils" +import { SQS_DATASOURCE_INTERNAL } from "../../../db/utils" const WRITE_OPERATIONS: Operation[] = [ Operation.CREATE, @@ -156,12 +157,19 @@ export default class AliasTables { } async queryWithAliasing( - json: QueryJson + json: QueryJson, + queryFn?: (json: QueryJson) => Promise ): Promise { const datasourceId = json.endpoint.datasourceId - const datasource = await sdk.datasources.get(datasourceId) + const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL + let aliasingEnabled: boolean, datasource: Datasource | undefined + if (isSqs) { + aliasingEnabled = true + } else { + datasource = await datasources.get(datasourceId) + aliasingEnabled = this.isAliasingEnabled(json, datasource) + } - const aliasingEnabled = this.isAliasingEnabled(json, datasource) if (aliasingEnabled) { json = cloneDeep(json) // run through the query json to update anywhere a table may be used @@ -207,7 +215,15 @@ export default class AliasTables { } json.tableAliases = invertedTableAliases } - const response = await makeExternalQuery(datasource, json) + + let response: DatasourcePlusQueryResponse + if (datasource && !isSqs) { + response = await makeExternalQuery(datasource, json) + } else if (queryFn) { + response = await queryFn(json) + } else { + throw new Error("No supplied method to perform aliased query") + } if (Array.isArray(response) && aliasingEnabled) { return this.reverse(response) } else { diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index d307b17947..a9df4f89cd 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -52,6 +52,12 @@ export async function getDatasourceAndQuery( ): Promise { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) + const table = 
datasource.entities?.[json.endpoint.entityId] + if (!json.meta && table) { + json.meta = { + table, + } + } return makeExternalQuery(datasource, json) } diff --git a/packages/server/src/sdk/app/tables/external/index.ts b/packages/server/src/sdk/app/tables/external/index.ts index 65cd4a07c1..2a78600cfc 100644 --- a/packages/server/src/sdk/app/tables/external/index.ts +++ b/packages/server/src/sdk/app/tables/external/index.ts @@ -3,7 +3,6 @@ import { Operation, RelationshipType, RenameColumn, - AddColumn, Table, TableRequest, ViewV2, @@ -33,7 +32,7 @@ import * as viewSdk from "../../views" export async function save( datasourceId: string, update: Table, - opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn } + opts?: { tableId?: string; renaming?: RenameColumn } ) { let tableToSave: TableRequest = { ...update, @@ -52,6 +51,12 @@ export async function save( !oldTable && (tableToSave.primary == null || tableToSave.primary.length === 0) ) { + if (tableToSave.schema.id) { + throw new Error( + "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column." + ) + } + tableToSave.primary = ["id"] tableToSave.schema.id = { type: FieldType.NUMBER, @@ -179,14 +184,7 @@ export async function save( // remove the rename prop delete tableToSave._rename - // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column - if (opts?.adding) { - datasource.entities[tableToSave.name] = ( - await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name]) - ).tables[tableToSave.name] - } else { - datasource.entities[tableToSave.name] = tableToSave - } + datasource.entities[tableToSave.name] = tableToSave // store it into couch now for budibase reference await db.put(populateExternalTableSchemas(datasource)) diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts index 79d9be2348..0726c94962 100644 --- a/packages/server/src/sdk/app/tables/internal/sqs.ts +++ b/packages/server/src/sdk/app/tables/internal/sqs.ts @@ -1,8 +1,19 @@ import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core" -import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types" +import { + FieldType, + RelationshipFieldMetadata, + SQLiteDefinition, + SQLiteTable, + SQLiteTables, + SQLiteType, + Table, +} from "@budibase/types" import { cloneDeep } from "lodash" import tablesSdk from "../" -import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils" +import { + CONSTANT_INTERNAL_ROW_COLS, + generateJunctionTableID, +} from "../../../../db/utils" const BASIC_SQLITE_DOC: SQLiteDefinition = { _id: SQLITE_DESIGN_DOC_ID, @@ -31,14 +42,47 @@ const FieldTypeMap: Record = { [FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB, [FieldType.ARRAY]: SQLiteType.BLOB, [FieldType.LINK]: SQLiteType.BLOB, - [FieldType.BIGINT]: SQLiteType.REAL, + [FieldType.BIGINT]: SQLiteType.TEXT, // TODO: consider the difference between multi-user and single user types (subtyping) [FieldType.BB_REFERENCE]: SQLiteType.TEXT, } -function mapTable(table: Table): { [key: string]: SQLiteType } { +function buildRelationshipDefinitions( + table: Table, + relationshipColumn: RelationshipFieldMetadata +): { + tableId: string + definition: SQLiteTable +} { + const tableId = table._id!, + relatedTableId = relationshipColumn.tableId + return { + tableId: generateJunctionTableID(tableId, relatedTableId), + 
definition: { + ["doc1.rowId"]: SQLiteType.TEXT, + ["doc1.tableId"]: SQLiteType.TEXT, + ["doc1.fieldName"]: SQLiteType.TEXT, + ["doc2.rowId"]: SQLiteType.TEXT, + ["doc2.tableId"]: SQLiteType.TEXT, + ["doc2.fieldName"]: SQLiteType.TEXT, + tableId: SQLiteType.TEXT, + }, + } +} + +// this can generate relationship tables as part of the mapping +function mapTable(table: Table): SQLiteTables { + const tables: SQLiteTables = {} const fields: Record = {} for (let [key, column] of Object.entries(table.schema)) { + // relationships should be handled differently + if (column.type === FieldType.LINK) { + const { tableId, definition } = buildRelationshipDefinitions( + table, + column + ) + tables[tableId] = { fields: definition } + } if (!FieldTypeMap[column.type]) { throw new Error(`Unable to map type "${column.type}" to SQLite type`) } @@ -49,10 +93,12 @@ function mapTable(table: Table): { [key: string]: SQLiteType } { CONSTANT_INTERNAL_ROW_COLS.forEach(col => { constantMap[col] = SQLiteType.TEXT }) - return { + const thisTable: SQLiteTable = { ...constantMap, ...fields, } + tables[table._id!] = { fields: thisTable } + return tables } // nothing exists, need to iterate though existing tables @@ -60,8 +106,9 @@ async function buildBaseDefinition(): Promise { const tables = await tablesSdk.getAllInternalTables() const definition = cloneDeep(BASIC_SQLITE_DOC) for (let table of tables) { - definition.sql.tables[table._id!] = { - fields: mapTable(table), + definition.sql.tables = { + ...definition.sql.tables, + ...mapTable(table), } } return definition @@ -75,8 +122,9 @@ export async function addTableToSqlite(table: Table) { } catch (err) { definition = await buildBaseDefinition() } - definition.sql.tables[table._id!] = { - fields: mapTable(table), + definition.sql.tables = { + ...definition.sql.tables, + ...mapTable(table), } await db.put(definition) } diff --git a/packages/server/src/sdk/app/views/tests/views.spec.ts b/packages/server/src/sdk/app/views/tests/views.spec.ts index a610d34ec2..508285651a 100644 --- a/packages/server/src/sdk/app/views/tests/views.spec.ts +++ b/packages/server/src/sdk/app/views/tests/views.spec.ts @@ -351,7 +351,6 @@ describe("table sdk", () => { const view: ViewV2 = { ...basicView, } - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { name, description, ...newTableSchema } = basicTable.schema const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined) @@ -365,7 +364,6 @@ describe("table sdk", () => { const view: ViewV2 = { ...basicView, } - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { description, ...newTableSchema } = { ...basicTable.schema, updatedDescription: { @@ -450,7 +448,6 @@ describe("table sdk", () => { hiddenField: { visible: false }, }, } - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { name, description, ...newTableSchema } = basicTable.schema const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined) @@ -474,7 +471,6 @@ describe("table sdk", () => { hiddenField: { visible: false }, }, } - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { name, description, ...newTableSchema } = { ...basicTable.schema, newField1: { @@ -506,7 +502,6 @@ describe("table sdk", () => { hiddenField: { visible: false }, }, } - // eslint-disable-next-line @typescript-eslint/no-unused-vars const { description, ...newTableSchema } = { ...basicTable.schema, updatedDescription: { diff --git a/packages/server/src/tests/utilities/api/datasource.ts 
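Note: with the relationship handling above, mapTable now emits an extra definition per LINK column, keyed by the junction table ID, whose columns are literally named doc1.rowId, doc2.rowId and so on. That is also why the SQS search code earlier in this diff rewrites `doc1`.`rowId` to `doc1.rowId` before running the SQL: knex quotes the dot as a table/column separator, but in SQLite it is part of the column name. A sketch of the resulting shape, with a hypothetical junction table ID:

// Sketch: the extra junction-table entry produced for a LINK column. The key
// below is hypothetical (the real one comes from generateJunctionTableID);
// the field names mirror the definition above.
import { SQLiteTables, SQLiteType } from "@budibase/types"

const junction: SQLiteTables = {
  jt_ta_users_ta_jobs: {
    fields: {
      "doc1.rowId": SQLiteType.TEXT,
      "doc1.tableId": SQLiteType.TEXT,
      "doc1.fieldName": SQLiteType.TEXT,
      "doc2.rowId": SQLiteType.TEXT,
      "doc2.tableId": SQLiteType.TEXT,
      "doc2.fieldName": SQLiteType.TEXT,
      tableId: SQLiteType.TEXT,
    },
  },
}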
b/packages/server/src/tests/utilities/api/datasource.ts index 0296f58f7d..6ac624f0db 100644 --- a/packages/server/src/tests/utilities/api/datasource.ts +++ b/packages/server/src/tests/utilities/api/datasource.ts @@ -60,7 +60,10 @@ export class DatasourceAPI extends TestAPI { }) } - query = async (query: QueryJson, expectations?: Expectations) => { + query = async ( + query: Omit & Partial>, + expectations?: Expectations + ) => { return await this._post(`/api/datasources/query`, { body: query, expectations, diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 4e33fadce6..469d0845c9 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -374,11 +374,13 @@ class Orchestrator { for (let [innerObject, innerValue] of Object.entries( originalStepInput[key][innerKey] )) { - originalStepInput[key][innerKey][innerObject] = - automationUtils.substituteLoopStep( - innerValue as string, - `steps.${loopStepNumber}` - ) + if (typeof innerValue === "string") { + originalStepInput[key][innerKey][innerObject] = + automationUtils.substituteLoopStep( + innerValue, + `steps.${loopStepNumber}` + ) + } } } } @@ -458,7 +460,6 @@ class Orchestrator { inputs, step.schema.inputs ) - try { // appId is always passed const outputs = await stepFn({ diff --git a/packages/server/src/websockets/websocket.ts b/packages/server/src/websockets/websocket.ts index 871122678d..5dcf8c8f6f 100644 --- a/packages/server/src/websockets/websocket.ts +++ b/packages/server/src/websockets/websocket.ts @@ -262,13 +262,11 @@ export class BaseSocket { } } - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async onConnect(socket: Socket) { + async onConnect(_socket: Socket) { // Override } - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async onDisconnect(socket: Socket) { + async onDisconnect(_socket: Socket) { // Override } diff --git a/packages/shared-core/src/constants/fields.ts b/packages/shared-core/src/constants/fields.ts new file mode 100644 index 0000000000..5acf07d863 --- /dev/null +++ b/packages/shared-core/src/constants/fields.ts @@ -0,0 +1,33 @@ +import { FieldType } from "@budibase/types" + +type SwitchableTypes = Partial<{ + [K in FieldType]: [K, ...FieldType[]] +}> + +export const SWITCHABLE_TYPES: SwitchableTypes = { + [FieldType.STRING]: [ + FieldType.STRING, + FieldType.OPTIONS, + FieldType.LONGFORM, + FieldType.BARCODEQR, + ], + [FieldType.OPTIONS]: [ + FieldType.OPTIONS, + FieldType.STRING, + FieldType.LONGFORM, + FieldType.BARCODEQR, + ], + [FieldType.LONGFORM]: [ + FieldType.LONGFORM, + FieldType.STRING, + FieldType.OPTIONS, + FieldType.BARCODEQR, + ], + [FieldType.BARCODEQR]: [ + FieldType.BARCODEQR, + FieldType.STRING, + FieldType.OPTIONS, + FieldType.LONGFORM, + ], + [FieldType.NUMBER]: [FieldType.NUMBER, FieldType.BOOLEAN], +} diff --git a/packages/shared-core/src/constants/index.ts b/packages/shared-core/src/constants/index.ts index 922f0d4387..afb7e659e1 100644 --- a/packages/shared-core/src/constants/index.ts +++ b/packages/shared-core/src/constants/index.ts @@ -1,4 +1,5 @@ export * from "./api" +export * from "./fields" export const OperatorOptions = { Equals: { diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index d9fe533c88..0554e0c1e4 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -2,10 +2,11 @@ import { Datasource, FieldSubtype, FieldType, + FormulaType, SearchFilter, - SearchQuery, + 
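Note: SWITCHABLE_TYPES is what copyExistingPropsOver (earlier in this diff) consults before reusing a stored column definition: if the column's new type is not switchable with the existing one, the stored definition is dropped and the column is re-fetched. A small sketch of that compatibility check using the constant defined above:

// Sketch: can an existing column definition be kept when the fetched type differs?
import { FieldType } from "@budibase/types"
import { SWITCHABLE_TYPES } from "@budibase/shared-core"

function canKeepExistingColumn(existing: FieldType, fetched: FieldType): boolean {
  if (existing === fetched) {
    return true
  }
  return SWITCHABLE_TYPES[fetched]?.includes(existing) ?? false
}

// canKeepExistingColumn(FieldType.OPTIONS, FieldType.STRING) -> true
// canKeepExistingColumn(FieldType.STRING, FieldType.NUMBER)  -> false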
SearchFilters, SearchQueryFields, - SearchQueryOperators, + SearchFilterOperator, SortDirection, SortType, } from "@budibase/types" @@ -19,9 +20,13 @@ const HBS_REGEX = /{{([^{].*?)}}/g * Returns the valid operator options for a certain data type */ export const getValidOperatorsForType = ( - fieldType: { type: FieldType; subtype?: FieldSubtype }, + fieldType: { + type: FieldType + subtype?: FieldSubtype + formulaType?: FormulaType + }, field: string, - datasource: Datasource & { tableId: any } // TODO: is this table id ever populated? + datasource: Datasource & { tableId: any } ) => { const Op = OperatorOptions const stringOps = [ @@ -46,7 +51,7 @@ export const getValidOperatorsForType = ( value: string label: string }[] = [] - const { type, subtype } = fieldType + const { type, subtype, formulaType } = fieldType if (type === FieldType.STRING) { ops = stringOps } else if (type === FieldType.NUMBER || type === FieldType.BIGINT) { @@ -61,7 +66,7 @@ export const getValidOperatorsForType = ( ops = stringOps } else if (type === FieldType.DATETIME) { ops = numOps - } else if (type === FieldType.FORMULA) { + } else if (type === FieldType.FORMULA && formulaType === FormulaType.STATIC) { ops = stringOps.concat([Op.MoreThan, Op.LessThan]) } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) { ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In] @@ -94,18 +99,19 @@ export const NoEmptyFilterStrings = [ * Removes any fields that contain empty strings that would cause inconsistent * behaviour with how backend tables are filtered (no value means no filter). */ -const cleanupQuery = (query: SearchQuery) => { +const cleanupQuery = (query: SearchFilters) => { if (!query) { return query } for (let filterField of NoEmptyFilterStrings) { - if (!query[filterField]) { + const operator = filterField as SearchFilterOperator + if (!query[operator]) { continue } - for (let [key, value] of Object.entries(query[filterField]!)) { + for (let [key, value] of Object.entries(query[operator]!)) { if (value == null || value === "") { - delete query[filterField]![key] + delete query[operator]![key] } } } @@ -115,9 +121,10 @@ const cleanupQuery = (query: SearchQuery) => { /** * Removes a numeric prefix on field names designed to give fields uniqueness */ -const removeKeyNumbering = (key: string) => { +export const removeKeyNumbering = (key: string): string => { if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) { const parts = key.split(":") + // remove the number parts.shift() return parts.join(":") } else { @@ -130,7 +137,7 @@ const removeKeyNumbering = (key: string) => { * @param filter the builder filter structure */ export const buildLuceneQuery = (filter: SearchFilter[]) => { - let query: SearchQuery = { + let query: SearchFilters = { string: {}, fuzzy: {}, range: {}, @@ -151,6 +158,7 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => { filter.forEach(expression => { let { operator, field, type, value, externalType, onEmptyFilter } = expression + const queryOperator = operator as SearchFilterOperator const isHbs = typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0 // Parse all values into correct types @@ -165,8 +173,8 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => { if ( type === "datetime" && !isHbs && - operator !== "empty" && - operator !== "notEmpty" + queryOperator !== "empty" && + queryOperator !== "notEmpty" ) { // Ensure date value is a valid date and parse into correct format if (!value) { @@ -179,7 +187,7 @@ export 
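Note: removeKeyNumbering, now exported from here, strips the numeric prefix used to keep duplicate filter keys unique while leaving the rest of the key (including any further colons) intact. For example:

// Sketch: behaviour of removeKeyNumbering as defined above.
removeKeyNumbering("1:name")        // -> "name"
removeKeyNumbering("23:1:users.id") // -> "1:users.id"
removeKeyNumbering("name")          // -> "name" (no numeric prefix, unchanged)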
const buildLuceneQuery = (filter: SearchFilter[]) => { } } if (type === "number" && typeof value === "string" && !isHbs) { - if (operator === "oneOf") { + if (queryOperator === "oneOf") { value = value.split(",").map(item => parseFloat(item)) } else { value = parseFloat(value) @@ -210,33 +218,35 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => { high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z", } } - if ((operator as any) === "rangeLow" && value != null && value !== "") { - query.range[field].low = value - } else if ( - (operator as any) === "rangeHigh" && - value != null && - value !== "" - ) { - query.range[field].high = value + if (operator === "rangeLow" && value != null && value !== "") { + query.range[field] = { + ...query.range[field], + low: value, + } + } else if (operator === "rangeHigh" && value != null && value !== "") { + query.range[field] = { + ...query.range[field], + high: value, + } } - } else if (query[operator] && operator !== "onEmptyFilter") { + } else if (query[queryOperator] && operator !== "onEmptyFilter") { if (type === "boolean") { // Transform boolean filters to cope with null. // "equals false" needs to be "not equals true" // "not equals false" needs to be "equals true" - if (operator === "equal" && value === false) { + if (queryOperator === "equal" && value === false) { query.notEqual = query.notEqual || {} query.notEqual[field] = true - } else if (operator === "notEqual" && value === false) { + } else if (queryOperator === "notEqual" && value === false) { query.equal = query.equal || {} query.equal[field] = true } else { - query[operator] = query[operator] || {} - query[operator]![field] = value + query[queryOperator] = query[queryOperator] || {} + query[queryOperator]![field] = value } } else { - query[operator] = query[operator] || {} - query[operator]![field] = value + query[queryOperator] = query[queryOperator] || {} + query[queryOperator]![field] = value } } }) @@ -249,7 +259,7 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => { * @param docs the data * @param query the JSON lucene query */ -export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { +export const runLuceneQuery = (docs: any[], query?: SearchFilters) => { if (!docs || !Array.isArray(docs)) { return [] } @@ -263,7 +273,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Iterates over a set of filters and evaluates a fail function against a doc const match = ( - type: keyof SearchQueryFields, + type: SearchFilterOperator, failFn: (docValue: any, testValue: any) => boolean ) => (doc: any) => { @@ -280,7 +290,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process a string match (fails if the value does not start with the string) const stringMatch = match( - SearchQueryOperators.STRING, + SearchFilterOperator.STRING, (docValue: string, testValue: string) => { return ( !docValue || @@ -291,7 +301,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process a fuzzy match (treat the same as starts with when running locally) const fuzzyMatch = match( - SearchQueryOperators.FUZZY, + SearchFilterOperator.FUZZY, (docValue: string, testValue: string) => { return ( !docValue || @@ -302,7 +312,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process a range match const rangeMatch = match( - SearchQueryOperators.RANGE, + SearchFilterOperator.RANGE, ( docValue: string | number | null, testValue: { low: number; high: number } @@ -325,7 +335,7 @@ 
export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process an equal match (fails if the value is different) const equalMatch = match( - SearchQueryOperators.EQUAL, + SearchFilterOperator.EQUAL, (docValue: any, testValue: string | null) => { return testValue != null && testValue !== "" && docValue !== testValue } @@ -333,7 +343,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process a not-equal match (fails if the value is the same) const notEqualMatch = match( - SearchQueryOperators.NOT_EQUAL, + SearchFilterOperator.NOT_EQUAL, (docValue: any, testValue: string | null) => { return testValue != null && testValue !== "" && docValue === testValue } @@ -341,7 +351,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process an empty match (fails if the value is not empty) const emptyMatch = match( - SearchQueryOperators.EMPTY, + SearchFilterOperator.EMPTY, (docValue: string | null) => { return docValue != null && docValue !== "" } @@ -349,7 +359,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process a not-empty match (fails is the value is empty) const notEmptyMatch = match( - SearchQueryOperators.NOT_EMPTY, + SearchFilterOperator.NOT_EMPTY, (docValue: string | null) => { return docValue == null || docValue === "" } @@ -357,7 +367,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { // Process an includes match (fails if the value is not included) const oneOf = match( - SearchQueryOperators.ONE_OF, + SearchFilterOperator.ONE_OF, (docValue: any, testValue: any) => { if (typeof testValue === "string") { testValue = testValue.split(",") @@ -370,28 +380,28 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { ) const containsAny = match( - SearchQueryOperators.CONTAINS_ANY, + SearchFilterOperator.CONTAINS_ANY, (docValue: any, testValue: any) => { return !docValue?.includes(...testValue) } ) const contains = match( - SearchQueryOperators.CONTAINS, + SearchFilterOperator.CONTAINS, (docValue: string | any[], testValue: any[]) => { return !testValue?.every((item: any) => docValue?.includes(item)) } ) const notContains = match( - SearchQueryOperators.NOT_CONTAINS, + SearchFilterOperator.NOT_CONTAINS, (docValue: string | any[], testValue: any[]) => { return testValue?.every((item: any) => docValue?.includes(item)) } ) const docMatch = (doc: any) => { - const filterFunctions: Record boolean> = + const filterFunctions: Record boolean> = { string: stringMatch, fuzzy: fuzzyMatch, @@ -406,7 +416,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { notContains: notContains, } - const activeFilterKeys: SearchQueryOperators[] = Object.entries(query || {}) + const activeFilterKeys: SearchFilterOperator[] = Object.entries(query || {}) .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && @@ -474,7 +484,7 @@ export const luceneLimit = (docs: any[], limit: string) => { return docs.slice(0, numLimit) } -export const hasFilters = (query?: SearchQuery) => { +export const hasFilters = (query?: SearchFilters) => { if (!query) { return false } diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index e74e37d681..f188c5f951 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -1,6 +1,6 @@ import { - SearchQuery, - SearchQueryOperators, + SearchFilters, + SearchFilterOperator, FieldType, SearchFilter, } 
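Note: together, buildLuceneQuery and runLuceneQuery let a SearchFilter[] from the builder be converted into SearchFilters and applied to rows client-side with the operator semantics above. A usage sketch; it assumes these helpers are surfaced from the package as dataFilters (as imported elsewhere in this diff), and the row data is hypothetical:

// Usage sketch: build a SearchFilters object, then filter rows locally.
import { dataFilters } from "@budibase/shared-core"
import { FieldType, SearchFilter, SearchFilterOperator } from "@budibase/types"

const filter: SearchFilter[] = [
  {
    operator: SearchFilterOperator.EQUAL,
    field: "customer_id",
    type: FieldType.NUMBER,
    value: "1212", // numeric strings are run through parseFloat
  },
]

const query = dataFilters.buildLuceneQuery(filter)

const rows = [
  { name: "Large box", customer_id: 1212 },
  { name: "Small box", customer_id: 1000 },
]

const matched = dataFilters.runLuceneQuery(rows, query)
// matched -> [{ name: "Large box", customer_id: 1212 }]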
from "@budibase/types" @@ -46,8 +46,8 @@ describe("runLuceneQuery", () => { }, ] - function buildQuery(filters: { [filterKey: string]: any }): SearchQuery { - const query: SearchQuery = { + function buildQuery(filters: { [filterKey: string]: any }): SearchFilters { + const query: SearchFilters = { string: {}, fuzzy: {}, range: {}, @@ -63,7 +63,7 @@ describe("runLuceneQuery", () => { } for (const filterKey in filters) { - query[filterKey as SearchQueryOperators] = filters[filterKey] + query[filterKey as SearchFilterOperator] = filters[filterKey] } return query @@ -265,13 +265,13 @@ describe("buildLuceneQuery", () => { it("should parseFloat if the type is a number, but the value is a numeric string", () => { const filter: SearchFilter[] = [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "customer_id", type: FieldType.NUMBER, value: "1212", }, { - operator: SearchQueryOperators.ONE_OF, + operator: SearchFilterOperator.ONE_OF, field: "customer_id", type: FieldType.NUMBER, value: "1000,1212,3400", @@ -299,13 +299,13 @@ describe("buildLuceneQuery", () => { it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => { const filter: SearchFilter[] = [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "customer_id", type: FieldType.NUMBER, value: "{{ customer_id }}", }, { - operator: SearchQueryOperators.ONE_OF, + operator: SearchFilterOperator.ONE_OF, field: "customer_id", type: FieldType.NUMBER, value: "{{ list_of_customer_ids }}", @@ -333,19 +333,19 @@ describe("buildLuceneQuery", () => { it("should cast string to boolean if the type is boolean", () => { const filter: SearchFilter[] = [ { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "a", type: FieldType.BOOLEAN, value: "not_true", }, { - operator: SearchQueryOperators.NOT_EQUAL, + operator: SearchFilterOperator.NOT_EQUAL, field: "b", type: FieldType.BOOLEAN, value: "not_true", }, { - operator: SearchQueryOperators.EQUAL, + operator: SearchFilterOperator.EQUAL, field: "c", type: FieldType.BOOLEAN, value: "true", @@ -374,19 +374,19 @@ describe("buildLuceneQuery", () => { it("should split the string for contains operators", () => { const filter: SearchFilter[] = [ { - operator: SearchQueryOperators.CONTAINS, + operator: SearchFilterOperator.CONTAINS, field: "description", type: FieldType.ARRAY, value: "Large box,Heavy box,Small box", }, { - operator: SearchQueryOperators.NOT_CONTAINS, + operator: SearchFilterOperator.NOT_CONTAINS, field: "description", type: FieldType.ARRAY, value: "Large box,Heavy box,Small box", }, { - operator: SearchQueryOperators.CONTAINS_ANY, + operator: SearchFilterOperator.CONTAINS_ANY, field: "description", type: FieldType.ARRAY, value: "Large box,Heavy box,Small box", diff --git a/packages/string-templates/src/processors/postprocessor.ts b/packages/string-templates/src/processors/postprocessor.ts index 6f7260718b..b8d99682b1 100644 --- a/packages/string-templates/src/processors/postprocessor.ts +++ b/packages/string-templates/src/processors/postprocessor.ts @@ -4,7 +4,6 @@ export const PostProcessorNames = { CONVERT_LITERALS: "convert-literals", } -/* eslint-disable no-unused-vars */ class Postprocessor { name: string private fn: any diff --git a/packages/string-templates/src/processors/preprocessor.ts b/packages/string-templates/src/processors/preprocessor.ts index 141b2be3a9..010c259e12 100644 --- a/packages/string-templates/src/processors/preprocessor.ts 
+++ b/packages/string-templates/src/processors/preprocessor.ts @@ -9,7 +9,6 @@ export const PreprocessorNames = { FINALISE: "finalise", } -/* eslint-disable no-unused-vars */ class Preprocessor { name: string private fn: any diff --git a/packages/types/src/api/web/searchFilter.ts b/packages/types/src/api/web/searchFilter.ts index ac3c446e36..5223204a7f 100644 --- a/packages/types/src/api/web/searchFilter.ts +++ b/packages/types/src/api/web/searchFilter.ts @@ -1,68 +1,11 @@ import { FieldType } from "../../documents" -import { EmptyFilterOption } from "../../sdk" +import { EmptyFilterOption, SearchFilters } from "../../sdk" export type SearchFilter = { - operator: keyof SearchQuery + operator: keyof SearchFilters | "rangeLow" | "rangeHigh" onEmptyFilter?: EmptyFilterOption field: string type?: FieldType value: any externalType?: string } - -export enum SearchQueryOperators { - STRING = "string", - FUZZY = "fuzzy", - RANGE = "range", - EQUAL = "equal", - NOT_EQUAL = "notEqual", - EMPTY = "empty", - NOT_EMPTY = "notEmpty", - ONE_OF = "oneOf", - CONTAINS = "contains", - NOT_CONTAINS = "notContains", - CONTAINS_ANY = "containsAny", -} - -export type SearchQuery = { - allOr?: boolean - onEmptyFilter?: EmptyFilterOption - [SearchQueryOperators.STRING]?: { - [key: string]: string - } - [SearchQueryOperators.FUZZY]?: { - [key: string]: string - } - [SearchQueryOperators.RANGE]?: { - [key: string]: { - high: number | string - low: number | string - } - } - [SearchQueryOperators.EQUAL]?: { - [key: string]: any - } - [SearchQueryOperators.NOT_EQUAL]?: { - [key: string]: any - } - [SearchQueryOperators.EMPTY]?: { - [key: string]: any - } - [SearchQueryOperators.NOT_EMPTY]?: { - [key: string]: any - } - [SearchQueryOperators.ONE_OF]?: { - [key: string]: any[] - } - [SearchQueryOperators.CONTAINS]?: { - [key: string]: any[] - } - [SearchQueryOperators.NOT_CONTAINS]?: { - [key: string]: any[] - } - [SearchQueryOperators.CONTAINS_ANY]?: { - [key: string]: any[] - } -} - -export type SearchQueryFields = Omit diff --git a/packages/types/src/api/web/user.ts b/packages/types/src/api/web/user.ts index 0ef7493016..75f00760bf 100644 --- a/packages/types/src/api/web/user.ts +++ b/packages/types/src/api/web/user.ts @@ -1,5 +1,5 @@ import { User } from "../../documents" -import { SearchQuery } from "./searchFilter" +import { SearchFilters } from "../../sdk" export interface SaveUserResponse { _id: string @@ -55,7 +55,7 @@ export interface InviteUsersResponse { export interface SearchUsersRequest { bookmark?: string - query?: SearchQuery + query?: SearchFilters appId?: string limit?: number paginate?: boolean @@ -66,6 +66,8 @@ export interface CreateAdminUserRequest { password?: string tenantId: string ssoId?: string + familyName?: string + givenName?: string } export interface AddSSoUserRequest { diff --git a/packages/types/src/documents/app/sqlite.ts b/packages/types/src/documents/app/sqlite.ts index 76c47bbd74..e23a68b336 100644 --- a/packages/types/src/documents/app/sqlite.ts +++ b/packages/types/src/documents/app/sqlite.ts @@ -6,17 +6,23 @@ export enum SQLiteType { NUMERIC = "NUMERIC", } +export type SQLiteTable = Record< + string, + SQLiteType | { field: string; type: SQLiteType } +> + +export type SQLiteTables = Record< + string, + { + fields: SQLiteTable + } +> + export interface SQLiteDefinition { _id: string language: string sql: { - tables: { - [tableName: string]: { - fields: { - [key: string]: SQLiteType | { field: string; type: SQLiteType } - } - } - } + tables: SQLiteTables options: { table_name: 
diff --git a/packages/types/src/documents/app/table/table.ts b/packages/types/src/documents/app/table/table.ts
index b284e9a840..f0e6079aef 100644
--- a/packages/types/src/documents/app/table/table.ts
+++ b/packages/types/src/documents/app/table/table.ts
@@ -1,6 +1,6 @@
 import { Document } from "../../document"
 import { View, ViewV2 } from "../view"
-import { AddColumn, RenameColumn } from "../../../sdk"
+import { RenameColumn } from "../../../sdk"
 import { TableSchema } from "./schema"
 
 export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -30,6 +30,5 @@ export interface Table extends Document {
 
 export interface TableRequest extends Table {
   _rename?: RenameColumn
-  _add?: AddColumn
   created?: boolean
 }
diff --git a/packages/types/src/documents/global/user.ts b/packages/types/src/documents/global/user.ts
index a4e6b613c6..9c7dc80e49 100644
--- a/packages/types/src/documents/global/user.ts
+++ b/packages/types/src/documents/global/user.ts
@@ -22,6 +22,13 @@ export interface UserSSO {
   providerType: SSOProviderType
   oauth2?: OAuth2
   thirdPartyProfile?: SSOProfileJson
+  profile?: {
+    displayName?: string
+    name?: {
+      givenName?: string
+      familyName?: string
+    }
+  }
 }
 
 export type SSOUser = User & UserSSO
diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts
index e1a012d81e..77e4877dfa 100644
--- a/packages/types/src/sdk/datasources.ts
+++ b/packages/types/src/sdk/datasources.ts
@@ -14,6 +14,14 @@ export enum Operation {
   DELETE_TABLE = "DELETE_TABLE",
 }
 
+export const RowOperations = [
+  Operation.CREATE,
+  Operation.READ,
+  Operation.UPDATE,
+  Operation.DELETE,
+  Operation.BULK_CREATE,
+]
+
 export enum SortDirection {
   ASCENDING = "ASCENDING",
   DESCENDING = "DESCENDING",
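A minimal sketch of how the new RowOperations list can distinguish row-level CRUD from table-level operations; the isRowOperation helper is hypothetical, not part of the diff:

import { Operation, RowOperations } from "@budibase/types"

// Hypothetical helper: true for row CRUD, false for table-level DDL operations.
function isRowOperation(op: Operation): boolean {
  return RowOperations.includes(op)
}

isRowOperation(Operation.BULK_CREATE) // true
isRowOperation(Operation.DELETE_TABLE) // false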
diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts
index 9325f09eed..40f411f02a 100644
--- a/packages/types/src/sdk/search.ts
+++ b/packages/types/src/sdk/search.ts
@@ -3,47 +3,66 @@ import { Row, Table } from "../documents"
 import { SortType } from "../api"
 import { Knex } from "knex"
 
+export enum SearchFilterOperator {
+  STRING = "string",
+  FUZZY = "fuzzy",
+  RANGE = "range",
+  EQUAL = "equal",
+  NOT_EQUAL = "notEqual",
+  EMPTY = "empty",
+  NOT_EMPTY = "notEmpty",
+  ONE_OF = "oneOf",
+  CONTAINS = "contains",
+  NOT_CONTAINS = "notContains",
+  CONTAINS_ANY = "containsAny",
+}
+
 export interface SearchFilters {
   allOr?: boolean
   onEmptyFilter?: EmptyFilterOption
-  string?: {
+  [SearchFilterOperator.STRING]?: {
     [key: string]: string
   }
-  fuzzy?: {
+  [SearchFilterOperator.FUZZY]?: {
     [key: string]: string
   }
-  range?: {
-    [key: string]: {
-      high: number | string
-      low: number | string
-    }
+  [SearchFilterOperator.RANGE]?: {
+    [key: string]:
+      | {
+          high: number | string
+          low: number | string
+        }
+      | { high: number | string }
+      | { low: number | string }
   }
-  equal?: {
+  [SearchFilterOperator.EQUAL]?: {
     [key: string]: any
   }
-  notEqual?: {
+  [SearchFilterOperator.NOT_EQUAL]?: {
     [key: string]: any
   }
-  empty?: {
+  [SearchFilterOperator.EMPTY]?: {
     [key: string]: any
  }
-  notEmpty?: {
+  [SearchFilterOperator.NOT_EMPTY]?: {
     [key: string]: any
   }
-  oneOf?: {
+  [SearchFilterOperator.ONE_OF]?: {
     [key: string]: any[]
   }
-  contains?: {
-    [key: string]: any[] | any
-  }
-  notContains?: {
+  [SearchFilterOperator.CONTAINS]?: {
     [key: string]: any[]
   }
-  containsAny?: {
+  [SearchFilterOperator.NOT_CONTAINS]?: {
+    [key: string]: any[]
+  }
+  [SearchFilterOperator.CONTAINS_ANY]?: {
     [key: string]: any[]
   }
 }
 
+export type SearchQueryFields = Omit
+
 export interface SortJson {
   [key: string]: {
     direction: SortDirection
@@ -61,10 +80,6 @@ export interface RenameColumn {
   updated: string
 }
 
-export interface AddColumn {
-  name: string
-}
-
 export interface RelationshipsJson {
   through?: string
   from?: string
@@ -90,8 +105,8 @@ export interface QueryJson {
   paginate?: PaginationJson
   body?: Row | Row[]
   table?: Table
-  meta?: {
-    table?: Table
+  meta: {
+    table: Table
     tables?: Record
     renamed?: RenameColumn
   }
diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts
index 4c1af90d38..b610ecce1a 100644
--- a/packages/worker/src/api/controllers/global/users.ts
+++ b/packages/worker/src/api/controllers/global/users.ts
@@ -116,7 +116,8 @@ const parseBooleanParam = (param: any) => {
 export const adminUser = async (
   ctx: Ctx
 ) => {
-  const { email, password, tenantId, ssoId } = ctx.request.body
+  const { email, password, tenantId, ssoId, givenName, familyName } =
+    ctx.request.body
 
   if (await platform.tenants.exists(tenantId)) {
     ctx.throw(403, "Organisation already exists.")
@@ -151,6 +152,8 @@ export const adminUser = async (
     ssoId,
     hashPassword,
     requirePassword,
+    firstName: givenName,
+    lastName: familyName,
   })
 
   // events
diff --git a/packages/worker/src/api/routes/global/tests/scim.spec.ts b/packages/worker/src/api/routes/global/tests/scim.spec.ts
index 85c70b7b63..258702a3b3 100644
--- a/packages/worker/src/api/routes/global/tests/scim.spec.ts
+++ b/packages/worker/src/api/routes/global/tests/scim.spec.ts
@@ -704,7 +704,6 @@ describe("scim", () => {
         expect(response).toEqual({
           Resources: expect.arrayContaining(
             groups.map(g => {
-              // eslint-disable-next-line @typescript-eslint/no-unused-vars
               const { members, ...groupData } = g
               return groupData
             })
@@ -724,7 +723,6 @@ describe("scim", () => {
         expect(response).toEqual({
           Resources: expect.arrayContaining(
             groups.map(g => {
-              // eslint-disable-next-line @typescript-eslint/no-unused-vars
               const { members, displayName, ...groupData } = g
               return groupData
             })
@@ -874,7 +872,6 @@ describe("scim", () => {
           qs: "excludedAttributes=members",
         })
 
-        // eslint-disable-next-line @typescript-eslint/no-unused-vars
         const { members, ...expectedResponse } = group
 
         expect(response).toEqual(expectedResponse)
diff --git a/packages/worker/src/api/routes/global/users.ts b/packages/worker/src/api/routes/global/users.ts
index e7c77678fc..b40c491830 100644
--- a/packages/worker/src/api/routes/global/users.ts
+++ b/packages/worker/src/api/routes/global/users.ts
@@ -16,6 +16,8 @@ function buildAdminInitValidation() {
       password: OPTIONAL_STRING,
       tenantId: Joi.string().required(),
       ssoId: Joi.string(),
+      familyName: OPTIONAL_STRING,
+      givenName: OPTIONAL_STRING,
     })
       .required()
       .unknown(false)
diff --git a/packages/worker/src/features.ts b/packages/worker/src/features.ts
index 33fce3aebe..075b3b81ca 100644
--- a/packages/worker/src/features.ts
+++ b/packages/worker/src/features.ts
@@ -1,7 +1,6 @@
 import { features } from "@budibase/backend-core"
 import env from "./environment"
 
-// eslint-disable-next-line no-unused-vars
 enum WorkerFeature {}
 
 const featureList: WorkerFeature[] = features.processFeatureEnvVar(
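A sketch of the extended admin bootstrap request, assuming email remains required on CreateAdminUserRequest as before; the optional givenName/familyName are mapped to firstName/lastName by the adminUser controller above (all values are placeholders):

import { CreateAdminUserRequest } from "@budibase/types"

// givenName/familyName end up on the created user as firstName/lastName.
const body: CreateAdminUserRequest = {
  email: "admin@example.com",
  password: "a-long-placeholder-password",
  tenantId: "default",
  givenName: "Grace",
  familyName: "Hopper",
}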
diff --git a/packages/worker/src/tests/api/users.ts b/packages/worker/src/tests/api/users.ts
index d08a4ef8c7..541004391d 100644
--- a/packages/worker/src/tests/api/users.ts
+++ b/packages/worker/src/tests/api/users.ts
@@ -4,7 +4,7 @@ import {
   InviteUsersRequest,
   User,
   CreateAdminUserRequest,
-  SearchQuery,
+  SearchFilters,
   InviteUsersResponse,
 } from "@budibase/types"
 import structures from "../structures"
@@ -150,7 +150,7 @@ export class UserAPI extends TestAPI {
   }
 
   searchUsers = (
-    { query }: { query?: SearchQuery },
+    { query }: { query?: SearchFilters },
     opts?: { status?: number; noHeaders?: boolean }
   ) => {
     const req = this.request
diff --git a/scripts/devDocker.sh b/scripts/devDocker.sh
new file mode 100755
index 0000000000..5e01e5813a
--- /dev/null
+++ b/scripts/devDocker.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Check if the pro submodule is loaded
+if [ ! -d "./packages/pro/src" ]; then
+  echo "[ERROR] Submodule is not loaded. This is only allowed with loaded submodules."
+  exit 1
+fi
+
+yarn build --scope @budibase/server --scope @budibase/worker
+docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0
+
+
diff --git a/yarn.lock b/yarn.lock
index a36b54d3be..ce39c89075 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5174,7 +5174,7 @@
   dependencies:
     "@types/node" "*"
 
-"@types/archiver@^6.0.2":
+"@types/archiver@6.0.2":
   version "6.0.2"
   resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-6.0.2.tgz#0daf8c83359cbde69de1e4b33dcade6a48a929e2"
  integrity sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==
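A rough sketch of calling the updated searchUsers test helper with a SearchFilters query, mirroring the SearchUsersRequest change above; config.api.users is the usual worker test harness handle and is assumed here:

import { SearchFilters } from "@budibase/types"

// A SearchFilters-shaped query for the updated searchUsers test helper.
const query: SearchFilters = {
  string: { email: "admin" },
}

// e.g. inside a worker spec, assuming the usual test config object:
// const response = await config.api.users.searchUsers({ query })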