Merge branch 'master' into fix-client-loading-issues
commit c261ec10f4
@@ -24,5 +24,8 @@
   },
   "[svelte]": {
     "editor.defaultFormatter": "svelte.svelte-vscode"
+  },
+  "[handlebars]": {
+    "editor.formatOnSave": false
   }
 }
@@ -1,5 +1,5 @@
 {
-  "version": "2.23.6",
+  "version": "2.23.9",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
nx.json
@@ -9,10 +9,7 @@
   },
   "targetDefaults": {
     "build": {
-      "inputs": [
-        "{workspaceRoot}/scripts/build.js",
-        "{workspaceRoot}/lerna.json"
-      ]
+      "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
     }
   }
 }
@@ -59,7 +59,7 @@
     "dev:camunda": "./scripts/deploy-camunda.sh",
     "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "./scripts/devDocker.sh",
     "test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
@@ -50,6 +50,8 @@ type CreateAdminUserOpts = {
   hashPassword?: boolean
   requirePassword?: boolean
   skipPasswordValidation?: boolean
+  firstName?: string
+  lastName?: string
 }
 type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }

@@ -517,6 +519,8 @@ export class UserDB {
         global: true,
       },
       tenantId,
+      firstName: opts?.firstName,
+      lastName: opts?.lastName,
     }
     if (opts?.ssoId) {
       user.ssoId = opts.ssoId
@@ -13,6 +13,7 @@
     Layout,
     AbsTooltip,
   } from "@budibase/bbui"
+  import { SWITCHABLE_TYPES, ValidColumnNameRegex } from "@budibase/shared-core"
   import { createEventDispatcher, getContext, onMount } from "svelte"
   import { cloneDeep } from "lodash/fp"
   import { tables, datasources } from "stores/builder"

@@ -20,11 +21,6 @@
   import {
     FIELDS,
     RelationshipType,
-    ALLOWABLE_STRING_OPTIONS,
-    ALLOWABLE_NUMBER_OPTIONS,
-    ALLOWABLE_STRING_TYPES,
-    ALLOWABLE_NUMBER_TYPES,
-    SWITCHABLE_TYPES,
     PrettyRelationshipDefinitions,
     DB_TYPE_EXTERNAL,
   } from "constants/backend"

@@ -33,21 +29,20 @@
   import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
   import { getBindings } from "components/backend/DataTable/formula"
   import JSONSchemaModal from "./JSONSchemaModal.svelte"
-  import { ValidColumnNameRegex } from "@budibase/shared-core"
   import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
   import RelationshipSelector from "components/common/RelationshipSelector.svelte"
   import { RowUtils } from "@budibase/frontend-core"
   import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"

-  const AUTO_TYPE = FIELDS.AUTO.type
-  const FORMULA_TYPE = FIELDS.FORMULA.type
-  const LINK_TYPE = FIELDS.LINK.type
-  const STRING_TYPE = FIELDS.STRING.type
-  const NUMBER_TYPE = FIELDS.NUMBER.type
-  const JSON_TYPE = FIELDS.JSON.type
-  const DATE_TYPE = FIELDS.DATETIME.type
-  const USER_TYPE = FIELDS.USER.subtype
-  const USERS_TYPE = FIELDS.USERS.subtype
+  const AUTO_TYPE = FieldType.AUTO
+  const FORMULA_TYPE = FieldType.FORMULA
+  const LINK_TYPE = FieldType.LINK
+  const STRING_TYPE = FieldType.STRING
+  const NUMBER_TYPE = FieldType.NUMBER
+  const JSON_TYPE = FieldType.JSON
+  const DATE_TYPE = FieldType.DATETIME
+  const USER_TYPE = FieldSubtype.USER
+  const USERS_TYPE = FieldSubtype.USERS

   const dispatch = createEventDispatcher()
   const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@@ -61,8 +56,8 @@
   let primaryDisplay
   let indexes = [...($tables.selected.indexes || [])]
   let isCreating = undefined
-  let relationshipPart1 = PrettyRelationshipDefinitions.Many
-  let relationshipPart2 = PrettyRelationshipDefinitions.One
+  let relationshipPart1 = PrettyRelationshipDefinitions.MANY
+  let relationshipPart2 = PrettyRelationshipDefinitions.ONE
   let relationshipTableIdPrimary = null
   let relationshipTableIdSecondary = null
   let table = $tables.selected

@@ -175,7 +170,7 @@
   $: typeEnabled =
     !originalName ||
     (originalName &&
-      SWITCHABLE_TYPES.indexOf(editableColumn.type) !== -1 &&
+      SWITCHABLE_TYPES[field.type] &&
       !editableColumn?.autocolumn)

   const fieldDefinitions = Object.values(FIELDS).reduce(

@@ -367,16 +362,15 @@
   }

   function getAllowedTypes() {
-    if (
-      originalName &&
-      ALLOWABLE_STRING_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_STRING_OPTIONS
-    } else if (
-      originalName &&
-      ALLOWABLE_NUMBER_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_NUMBER_OPTIONS
+    if (originalName) {
+      const possibleTypes = (
+        SWITCHABLE_TYPES[field.type] || [editableColumn.type]
+      ).map(t => t.toLowerCase())
+      return Object.entries(FIELDS)
+        .filter(([fieldType]) =>
+          possibleTypes.includes(fieldType.toLowerCase())
+        )
+        .map(([_, fieldDefinition]) => fieldDefinition)
     }

     const isUsers =

@@ -632,7 +626,7 @@
     />
   </div>
 </div>
-{:else if editableColumn.type === FieldType.LINK}
+{:else if editableColumn.type === FieldType.LINK && !editableColumn.autocolumn}
   <RelationshipSelector
     bind:relationshipPart1
     bind:relationshipPart2
@@ -202,26 +202,6 @@ export const PrettyRelationshipDefinitions = {
   ONE: "One row",
 }

-export const ALLOWABLE_STRING_OPTIONS = [
-  FIELDS.STRING,
-  FIELDS.OPTIONS,
-  FIELDS.LONGFORM,
-  FIELDS.BARCODEQR,
-]
-export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map(
-  opt => opt.type
-)
-
-export const ALLOWABLE_NUMBER_OPTIONS = [FIELDS.NUMBER, FIELDS.BOOLEAN]
-export const ALLOWABLE_NUMBER_TYPES = ALLOWABLE_NUMBER_OPTIONS.map(
-  opt => opt.type
-)
-
-export const SWITCHABLE_TYPES = [
-  ...ALLOWABLE_STRING_TYPES,
-  ...ALLOWABLE_NUMBER_TYPES,
-]
-
 export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
 export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"
@@ -1,8 +1,8 @@
 import { FieldType } from "@budibase/types"
+import { SWITCHABLE_TYPES } from "@budibase/shared-core"
 import { get, writable, derived } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
-import { SWITCHABLE_TYPES } from "constants/backend"

 export function createTablesStore() {
   const store = writable({

@@ -64,7 +64,7 @@ export function createTablesStore() {
       if (
         oldField != null &&
         oldField?.type !== field.type &&
-        SWITCHABLE_TYPES.indexOf(oldField?.type) === -1
+        !SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)
       ) {
         updatedTable.schema[key] = oldField
       }

@@ -148,12 +148,6 @@ export function createTablesStore() {
     if (indexes) {
       draft.indexes = indexes
     }
-    // Add object to indicate if column is being added
-    if (draft.schema[field.name] === undefined) {
-      draft._add = {
-        name: field.name,
-      }
-    }
     draft.schema = {
       ...draft.schema,
       [field.name]: cloneDeep(field),
@@ -4,6 +4,7 @@ services:
   # user: sa
   # database: master
   mssql:
+    # platform: linux/amd64
     image: bb/mssql
     build:
       context: .
@@ -374,38 +374,44 @@ export class ExternalRequest<T extends Operation> {
       ) {
         continue
       }
-      let tableId: string | undefined,
+      let relatedTableId: string | undefined,
         lookupField: string | undefined,
         fieldName: string | undefined
       if (isManyToMany(field)) {
-        tableId = field.through
+        relatedTableId = field.through
         lookupField = primaryKey
         fieldName = field.throughTo || primaryKey
       } else if (isManyToOne(field)) {
-        tableId = field.tableId
+        relatedTableId = field.tableId
         lookupField = field.foreignKey
         fieldName = field.fieldName
       }
-      if (!tableId || !lookupField || !fieldName) {
+      if (!relatedTableId || !lookupField || !fieldName) {
         throw new Error(
           "Unable to lookup relationships - undefined column properties."
         )
       }
-      const { tableName: relatedTableName } = breakExternalTableId(tableId)
+      const { tableName: relatedTableName } =
+        breakExternalTableId(relatedTableId)
       // @ts-ignore
       const linkPrimaryKey = this.tables[relatedTableName].primary[0]
       if (!lookupField || !row[lookupField]) {
         continue
       }
+      const endpoint = getEndpoint(relatedTableId, Operation.READ)
+      const relatedTable = this.tables[endpoint.entityId]
+      if (!relatedTable) {
+        throw new Error("unable to find related table")
+      }
       const response = await getDatasourceAndQuery({
-        endpoint: getEndpoint(tableId, Operation.READ),
+        endpoint: endpoint,
         filters: {
           equal: {
             [fieldName]: row[lookupField],
           },
         },
         meta: {
-          table,
+          table: relatedTable,
         },
       })
       // this is the response from knex if no rows found
@@ -414,7 +420,11 @@ export class ExternalRequest<T extends Operation> {
       const storeTo = isManyToMany(field)
         ? field.throughFrom || linkPrimaryKey
         : fieldName
-      related[storeTo] = { rows, isMany: isManyToMany(field), tableId }
+      related[storeTo] = {
+        rows,
+        isMany: isManyToMany(field),
+        tableId: relatedTableId,
+      }
     }
     return related
   }

@@ -437,7 +447,6 @@ export class ExternalRequest<T extends Operation> {
     // if we're creating (in a through table) need to wipe the existing ones first
     const promises = []
     const related = await this.lookupRelations(mainTableId, row)
-    const table = this.getTable(mainTableId)!
     for (let relationship of relationships) {
       const { key, tableId, isUpdate, id, ...rest } = relationship
       const body: { [key: string]: any } = processObjectSync(rest, row, {})

@@ -484,7 +493,7 @@ export class ExternalRequest<T extends Operation> {
           body,
           filters: buildFilters(id, {}, linkTable),
           meta: {
-            table,
+            table: linkTable,
           },
         })
       )
@@ -31,7 +31,6 @@ export async function save(
   renaming?: RenameColumn
 ) {
   const inputs = ctx.request.body
-  const adding = inputs?._add
   // can't do this right now
   delete inputs.rows
   const tableId = ctx.request.body._id

@@ -44,7 +43,7 @@ export async function save(
   const { datasource, table } = await sdk.tables.external.save(
     datasourceId!,
     inputs,
-    { tableId, renaming, adding }
+    { tableId, renaming }
   )
   builderSocket?.emitDatasourceUpdate(ctx, datasource)
   return table

@@ -77,11 +77,6 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const renaming = ctx.request.body._rename

   const api = pickApi({ table })
-  // do not pass _rename or _add if saving to CouchDB
-  if (api === internal) {
-    delete ctx.request.body._add
-    delete ctx.request.body._rename
-  }
   let savedTable = await api.save(ctx, renaming)
   if (!table._id) {
     savedTable = sdk.tables.enrichViewSchemas(savedTable)
@@ -16,7 +16,7 @@ export async function save(
   ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
   renaming?: RenameColumn
 ) {
-  const { rows, ...rest } = ctx.request.body
+  const { _rename, rows, ...rest } = ctx.request.body
   let tableToSave: Table = {
     _id: generateTableID(),
     ...rest,
@@ -4,6 +4,7 @@ import {
   Query,
   QueryPreview,
   SourceName,
+  TableSourceType,
 } from "@budibase/types"
 import * as setup from "../utilities"
 import {

@@ -740,12 +741,25 @@ describe.each(
   })

   describe("query through datasource", () => {
-    it("should be able to query a pg datasource", async () => {
+    it("should be able to query the datasource", async () => {
+      const entityId = "test_table"
+      await config.api.datasource.update({
+        ...datasource,
+        entities: {
+          [entityId]: {
+            name: entityId,
+            schema: {},
+            type: "table",
+            sourceId: datasource._id!,
+            sourceType: TableSourceType.EXTERNAL,
+          },
+        },
+      })
       const res = await config.api.datasource.query({
         endpoint: {
           datasourceId: datasource._id!,
           operation: Operation.READ,
-          entityId: "test_table",
+          entityId,
         },
         resource: {
           fields: ["id", "name"],
@@ -26,6 +26,7 @@ describe.each([
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
 ])("/api/:sourceId/search (%s)", (name, dsProvider) => {
   const isSqs = name === "internal-sqs"
+  const isInternal = name === "internal"
   const config = setup.getConfig()

   let envCleanup: (() => void) | undefined

@@ -336,6 +337,20 @@ describe.each([
       expectQuery({
         range: { age: { low: 5, high: 9 } },
       }).toFindNothing())
+
+    // We never implemented half-open ranges in Lucene.
+    !isInternal &&
+      it("can search using just a low value", () =>
+        expectQuery({
+          range: { age: { low: 5 } },
+        }).toContainExactly([{ age: 10 }]))
+
+    // We never implemented half-open ranges in Lucene.
+    !isInternal &&
+      it("can search using just a high value", () =>
+        expectQuery({
+          range: { age: { high: 5 } },
+        }).toContainExactly([{ age: 1 }]))
   })

   describe("sort", () => {

@@ -440,6 +455,20 @@ describe.each([
       expectQuery({
         range: { dob: { low: JAN_5TH, high: JAN_9TH } },
       }).toFindNothing())
+
+    // We never implemented half-open ranges in Lucene.
+    !isInternal &&
+      it("can search using just a low value", () =>
+        expectQuery({
+          range: { dob: { low: JAN_5TH } },
+        }).toContainExactly([{ dob: JAN_10TH }]))
+
+    // We never implemented half-open ranges in Lucene.
+    !isInternal &&
+      it("can search using just a high value", () =>
+        expectQuery({
+          range: { dob: { high: JAN_5TH } },
+        }).toContainExactly([{ dob: JAN_1ST }]))
   })

   describe("sort", () => {
@@ -550,4 +579,100 @@ describe.each([
         ]))
     })
   })
+
+  describe("bigints", () => {
+    const SMALL = "1"
+    const MEDIUM = "10000000"
+
+    // Our bigints are int64s in most datasources.
+    const BIG = "9223372036854775807"
+
+    beforeAll(async () => {
+      await createTable({
+        num: { name: "num", type: FieldType.BIGINT },
+      })
+      await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
+    })
+
+    describe("equal", () => {
+      it("successfully finds a row", () =>
+        expectQuery({ equal: { num: SMALL } }).toContainExactly([
+          { num: SMALL },
+        ]))
+
+      it("successfully finds a big value", () =>
+        expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }]))
+
+      it("fails to find nonexistent row", () =>
+        expectQuery({ equal: { num: "2" } }).toFindNothing())
+    })
+
+    describe("notEqual", () => {
+      it("successfully finds a row", () =>
+        expectQuery({ notEqual: { num: SMALL } }).toContainExactly([
+          { num: MEDIUM },
+          { num: BIG },
+        ]))
+
+      it("fails to find nonexistent row", () =>
+        expectQuery({ notEqual: { num: 10 } }).toContainExactly([
+          { num: SMALL },
+          { num: MEDIUM },
+          { num: BIG },
+        ]))
+    })
+
+    describe("oneOf", () => {
+      it("successfully finds a row", () =>
+        expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([
+          { num: SMALL },
+        ]))
+
+      it("successfully finds all rows", () =>
+        expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([
+          { num: SMALL },
+          { num: MEDIUM },
+          { num: BIG },
+        ]))
+
+      it("fails to find nonexistent row", () =>
+        expectQuery({ oneOf: { num: [2] } }).toFindNothing())
+    })
+
+    // Range searches against bigints don't seem to work at all in Lucene, and I
+    // couldn't figure out why. Given that we're replacing Lucene with SQS,
+    // we've decided not to spend time on it.
+    !isInternal &&
+      describe("range", () => {
+        it("successfully finds a row", () =>
+          expectQuery({
+            range: { num: { low: SMALL, high: "5" } },
+          }).toContainExactly([{ num: SMALL }]))

+        it("successfully finds multiple rows", () =>
+          expectQuery({
+            range: { num: { low: SMALL, high: MEDIUM } },
+          }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
+
+        it("successfully finds a row with a high bound", () =>
+          expectQuery({
+            range: { num: { low: MEDIUM, high: BIG } },
+          }).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
+
+        it("successfully finds no rows", () =>
+          expectQuery({
+            range: { num: { low: "5", high: "5" } },
+          }).toFindNothing())
+
+        it("can search using just a low value", () =>
+          expectQuery({
+            range: { num: { low: MEDIUM } },
+          }).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
+
+        it("can search using just a high value", () =>
+          expectQuery({
+            range: { num: { high: MEDIUM } },
+          }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
+      })
+  })
 })
@@ -219,9 +219,6 @@ describe.each([

   it("should add a new column for an internal DB table", async () => {
     const saveTableRequest: SaveTableRequest = {
-      _add: {
-        name: "NEW_COLUMN",
-      },
       ...basicTable(),
     }

@@ -235,7 +232,6 @@ describe.each([
       updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
       views: {},
     }
-    delete expectedResponse._add
     expect(response).toEqual(expectedResponse)
   })
 })
@@ -16,7 +16,6 @@ import {
   getDatasource,
   rawQuery,
 } from "../integrations/tests/utils"
-import { builderSocket } from "../websockets"
 import { generator } from "@budibase/backend-core/tests"
 // @ts-ignore
 fetch.mockSearch()

@@ -233,72 +232,6 @@ describe("mysql integrations", () => {
   })

   describe("POST /api/tables/", () => {
-    const emitDatasourceUpdateMock = jest.fn()
-
-    it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
-      const addColumnToTable: TableRequest = {
-        type: "table",
-        sourceType: TableSourceType.EXTERNAL,
-        name: uniqueTableName(),
-        sourceId: datasource._id!,
-        primary: ["id"],
-        schema: {
-          id: {
-            type: FieldType.AUTO,
-            name: "id",
-            autocolumn: true,
-          },
-          new_column: {
-            type: FieldType.NUMBER,
-            name: "new_column",
-          },
-        },
-        _add: {
-          name: "new_column",
-        },
-      }
-
-      jest
-        .spyOn(builderSocket!, "emitDatasourceUpdate")
-        .mockImplementation(emitDatasourceUpdateMock)
-
-      await makeRequest("post", "/api/tables/", addColumnToTable)
-
-      const expectedTable: TableRequest = {
-        ...addColumnToTable,
-        schema: {
-          id: {
-            type: FieldType.NUMBER,
-            name: "id",
-            autocolumn: true,
-            constraints: {
-              presence: false,
-            },
-            externalType: "int unsigned",
-          },
-          new_column: {
-            type: FieldType.NUMBER,
-            name: "new_column",
-            autocolumn: false,
-            constraints: {
-              presence: false,
-            },
-            externalType: "float(8,2)",
-          },
-        },
-        created: true,
-        _id: `${datasource._id}__${addColumnToTable.name}`,
-      }
-      delete expectedTable._add
-
-      expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
-      const emittedDatasource: Datasource =
-        emitDatasourceUpdateMock.mock.calls[0][1]
-      expect(emittedDatasource.entities![expectedTable.name]).toEqual(
-        expectedTable
-      )
-    })
-
     it("will rename a column", async () => {
       await makeRequest("post", "/api/tables/", primaryMySqlTable)
@@ -2,6 +2,7 @@ import {
   QueryJson,
   Datasource,
   DatasourcePlusQueryResponse,
+  RowOperations,
 } from "@budibase/types"
 import { getIntegration } from "../index"
 import sdk from "../../sdk"

@@ -10,6 +11,17 @@ export async function makeExternalQuery(
   datasource: Datasource,
   json: QueryJson
 ): Promise<DatasourcePlusQueryResponse> {
+  const entityId = json.endpoint.entityId,
+    tableName = json.meta.table.name,
+    tableId = json.meta.table._id
+  // case found during testing - make sure this doesn't happen again
+  if (
+    RowOperations.includes(json.endpoint.operation) &&
+    entityId !== tableId &&
+    entityId !== tableName
+  ) {
+    throw new Error("Entity ID and table metadata do not align")
+  }
   datasource = await sdk.datasources.enrich(datasource)
   const Integration = await getIntegration(datasource.source)
   // query is the opinionated function
@@ -6,6 +6,7 @@ import {
   SqlClient,
   isValidFilter,
   getNativeSql,
+  SqlStatements,
 } from "../utils"
 import SqlTableQueryBuilder from "./sqlTable"
 import {

@@ -160,9 +161,19 @@ class InternalBuilder {
   addFilters(
     query: Knex.QueryBuilder,
     filters: SearchFilters | undefined,
-    tableName: string,
+    table: Table,
     opts: { aliases?: Record<string, string>; relationship?: boolean }
   ): Knex.QueryBuilder {
+    if (!filters) {
+      return query
+    }
+    filters = parseFilters(filters)
+    // if all or specified in filters, then everything is an or
+    const allOr = filters.allOr
+    const sqlStatements = new SqlStatements(this.client, table, { allOr })
+    const tableName =
+      this.client === SqlClient.SQL_LITE ? table._id! : table.name
+
     function getTableAlias(name: string) {
       const alias = opts.aliases?.[name]
       return alias || name

@@ -258,12 +269,6 @@ class InternalBuilder {
       }
     }

-    if (!filters) {
-      return query
-    }
-    filters = parseFilters(filters)
-    // if all or specified in filters, then everything is an or
-    const allOr = filters.allOr
     if (filters.oneOf) {
       iterate(filters.oneOf, (key, array) => {
         const fnc = allOr ? "orWhereIn" : "whereIn"

@@ -306,17 +311,11 @@ class InternalBuilder {
         const lowValid = isValidFilter(value.low),
           highValid = isValidFilter(value.high)
         if (lowValid && highValid) {
-          // Use a between operator if we have 2 valid range values
-          const fnc = allOr ? "orWhereBetween" : "whereBetween"
-          query = query[fnc](key, [value.low, value.high])
+          query = sqlStatements.between(query, key, value.low, value.high)
         } else if (lowValid) {
-          // Use just a single greater than operator if we only have a low
-          const fnc = allOr ? "orWhere" : "where"
-          query = query[fnc](key, ">", value.low)
+          query = sqlStatements.lte(query, key, value.low)
         } else if (highValid) {
-          // Use just a single less than operator if we only have a high
-          const fnc = allOr ? "orWhere" : "where"
-          query = query[fnc](key, "<", value.high)
+          query = sqlStatements.gte(query, key, value.high)
         }
       })
     }
@@ -359,7 +358,7 @@ class InternalBuilder {

   addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
     let { sort, paginate } = json
-    const table = json.meta?.table
+    const table = json.meta.table
     const tableName = getTableName(table)
     const aliases = json.tableAliases
     const aliased =

@@ -473,14 +472,13 @@ class InternalBuilder {
   ): Knex.QueryBuilder {
     const tableName = endpoint.entityId
     const tableAlias = aliases?.[tableName]
-    let table: string | Record<string, string> = tableName
-    if (tableAlias) {
-      table = { [tableAlias]: tableName }
-    }
-    let query = knex(table)
-    if (endpoint.schema) {
-      query = query.withSchema(endpoint.schema)
-    }
+    const query = knex(
+      this.tableNameWithSchema(tableName, {
+        alias: tableAlias,
+        schema: endpoint.schema,
+      })
+    )
     return query
   }

@@ -547,7 +545,7 @@ class InternalBuilder {
     if (foundOffset) {
       query = query.offset(foundOffset)
     }
-    query = this.addFilters(query, filters, tableName, {
+    query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
     // add sorting to pre-query

@@ -568,7 +566,7 @@ class InternalBuilder {
       endpoint.schema,
       tableAliases
     )
-    return this.addFilters(query, filters, tableName, {
+    return this.addFilters(query, filters, json.meta.table, {
       relationship: true,
       aliases: tableAliases,
     })

@@ -578,7 +576,7 @@ class InternalBuilder {
     const { endpoint, body, filters, tableAliases } = json
     let query = this.knexWithAlias(knex, endpoint, tableAliases)
     const parsedBody = parseBody(body)
-    query = this.addFilters(query, filters, endpoint.entityId, {
+    query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
     // mysql can't use returning

@@ -592,7 +590,7 @@ class InternalBuilder {
   delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, filters, tableAliases } = json
     let query = this.knexWithAlias(knex, endpoint, tableAliases)
-    query = this.addFilters(query, filters, endpoint.entityId, {
+    query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
     // mysql can't use returning

@@ -684,7 +682,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   // when creating if an ID has been inserted need to make sure
   // the id filter is enriched with it before trying to retrieve the row
   checkLookupKeys(id: any, json: QueryJson) {
-    if (!id || !json.meta?.table || !json.meta.table.primary) {
+    if (!id || !json.meta.table || !json.meta.table.primary) {
       return json
     }
     const primaryKey = json.meta.table.primary?.[0]
@@ -1,11 +1,11 @@
 import { SqlClient } from "../utils"
 import Sql from "../base/sql"
 import {
+  FieldType,
   Operation,
   QueryJson,
-  TableSourceType,
   Table,
-  FieldType,
+  TableSourceType,
 } from "@budibase/types"

 const TABLE_NAME = "test"

@@ -13,7 +13,12 @@ const TABLE: Table = {
   type: "table",
   sourceType: TableSourceType.EXTERNAL,
   sourceId: "SOURCE_ID",
-  schema: {},
+  schema: {
+    id: {
+      name: "id",
+      type: FieldType.NUMBER,
+    },
+  },
   name: TABLE_NAME,
   primary: ["id"],
 }

@@ -73,7 +78,7 @@ function generateUpdateJson({
   meta?: any
 }): QueryJson {
   if (!meta.table) {
-    meta.table = table
+    meta.table = TABLE
   }
   return {
     endpoint: endpoint(table, "UPDATE"),

@@ -158,6 +163,9 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) {
       },
     ],
     extra: { idFilter: {} },
+    meta: {
+      table: TABLE,
+    },
   }
 }
@@ -341,7 +349,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [date, limit],
-      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" > $1 limit $2) as "${TABLE_NAME}"`,
+      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`,
     })
   })

@@ -360,7 +368,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [date, limit],
-      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" < $1 limit $2) as "${TABLE_NAME}"`,
+      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`,
     })
   })

@@ -594,7 +602,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: ["2000-01-01 00:00:00", 500],
-      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" > $1 limit $2) as "${TABLE_NAME}"`,
+      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" >= $1 limit $2) as "${TABLE_NAME}"`,
     })
   })

@@ -613,7 +621,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
      bindings: ["2010-01-01 00:00:00", 500],
-      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" < $1 limit $2) as "${TABLE_NAME}"`,
+      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" <= $1 limit $2) as "${TABLE_NAME}"`,
     })
   })

@@ -117,7 +117,8 @@ describe("Captures of real examples", () => {
     let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
     const filters = queryJson.filters
     const notEqualsValue = Object.values(filters?.notEqual!)[0]
-    const rangeValue = Object.values(filters?.range!)[0]
+    const rangeValue: { high?: string | number; low?: string | number } =
+      Object.values(filters?.range!)[0]
     const equalValue = Object.values(filters?.equal!)[0]

     expect(query).toEqual({
@@ -0,0 +1,2 @@
+export * from "./utils"
+export { SqlStatements } from "./sqlStatements"

@@ -0,0 +1,80 @@
+import { FieldType, Table, FieldSchema } from "@budibase/types"
+import { SqlClient } from "./utils"
+import { Knex } from "knex"
+
+export class SqlStatements {
+  client: string
+  table: Table
+  allOr: boolean | undefined
+  constructor(
+    client: string,
+    table: Table,
+    { allOr }: { allOr?: boolean } = {}
+  ) {
+    this.client = client
+    this.table = table
+    this.allOr = allOr
+  }
+
+  getField(key: string): FieldSchema | undefined {
+    const fieldName = key.split(".")[1]
+    return this.table.schema[fieldName]
+  }
+
+  between(
+    query: Knex.QueryBuilder,
+    key: string,
+    low: number | string,
+    high: number | string
+  ) {
+    // Use a between operator if we have 2 valid range values
+    const field = this.getField(key)
+    if (
+      field?.type === FieldType.BIGINT &&
+      this.client === SqlClient.SQL_LITE
+    ) {
+      query = query.whereRaw(
+        `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
+        [low, high]
+      )
+    } else {
+      const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
+      query = query[fnc](key, [low, high])
+    }
+    return query
+  }
+
+  lte(query: Knex.QueryBuilder, key: string, low: number | string) {
+    // Use just a single greater than operator if we only have a low
+    const field = this.getField(key)
+    if (
+      field?.type === FieldType.BIGINT &&
+      this.client === SqlClient.SQL_LITE
+    ) {
+      query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
+        low,
+      ])
+    } else {
+      const fnc = this.allOr ? "orWhere" : "where"
+      query = query[fnc](key, ">=", low)
+    }
+    return query
+  }
+
+  gte(query: Knex.QueryBuilder, key: string, high: number | string) {
+    const field = this.getField(key)
+    // Use just a single less than operator if we only have a high
+    if (
+      field?.type === FieldType.BIGINT &&
+      this.client === SqlClient.SQL_LITE
+    ) {
+      query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
+        high,
+      ])
+    } else {
+      const fnc = this.allOr ? "orWhere" : "where"
+      query = query[fnc](key, "<=", high)
+    }
+    return query
+  }
+}
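Note: the new SqlStatements class centralises the range operators so that BIGINT columns, which this commit now maps to TEXT in SQLite, are compared numerically rather than lexicographically. A minimal sketch of the two SQL shapes involved, built with plain knex; the table and column names ("test", "test.num") and the bound values are illustrative, not taken from the diff:

// Sketch only: contrast a plain BETWEEN with the CAST-based variant the
// bigint-on-SQLite branch above emits.
import { knex } from "knex"

const db = knex({ client: "sqlite3", useNullAsDefault: true })

// Non-bigint (or non-SQLite) path: an ordinary whereBetween.
const plain = db("test").whereBetween("test.num", ["1", "10000000"])

// BIGINT stored as TEXT on SQLite: cast both sides so the comparison is numeric.
const casted = db("test").whereRaw(
  `CAST(test.num AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
  ["1", "10000000"]
)

console.log(plain.toString())
console.log(casted.toString())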
@@ -5,10 +5,10 @@
   FieldType,
   TableSourceType,
 } from "@budibase/types"
-import { DocumentType, SEPARATOR } from "../db/utils"
-import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants"
-import { helpers } from "@budibase/shared-core"
-import env from "../environment"
+import { DocumentType, SEPARATOR } from "../../db/utils"
+import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
+import { SWITCHABLE_TYPES, helpers } from "@budibase/shared-core"
+import env from "../../environment"
 import { Knex } from "knex"

 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`

@@ -284,8 +284,8 @@ export function isIsoDateString(str: string) {
  * @param column The column to check, to see if it is a valid relationship.
  * @param tableIds The IDs of the tables which currently exist.
  */
-export function shouldCopyRelationship(
-  column: { type: string; tableId?: string },
+function shouldCopyRelationship(
+  column: { type: FieldType.LINK; tableId?: string },
   tableIds: string[]
 ) {
   return (

@@ -303,28 +303,18 @@ export function shouldCopyRelationship(
  * @param column The column to check for options or boolean type.
  * @param fetchedColumn The fetched column to check for the type in the external database.
  */
-export function shouldCopySpecialColumn(
-  column: { type: string },
-  fetchedColumn: { type: string } | undefined
+function shouldCopySpecialColumn(
+  column: { type: FieldType },
+  fetchedColumn: { type: FieldType } | undefined
 ) {
   const isFormula = column.type === FieldType.FORMULA
-  const specialTypes = [
-    FieldType.OPTIONS,
-    FieldType.LONGFORM,
-    FieldType.ARRAY,
-    FieldType.FORMULA,
-    FieldType.BB_REFERENCE,
-  ]
   // column has been deleted, remove - formulas will never exist, always copy
   if (!isFormula && column && !fetchedColumn) {
     return false
   }
   const fetchedIsNumber =
     !fetchedColumn || fetchedColumn.type === FieldType.NUMBER
-  return (
-    specialTypes.indexOf(column.type as FieldType) !== -1 ||
-    (fetchedIsNumber && column.type === FieldType.BOOLEAN)
-  )
+  return fetchedIsNumber && column.type === FieldType.BOOLEAN
 }

 /**
@@ -357,11 +347,44 @@ function copyExistingPropsOver(
       continue
     }
     const column = existingTableSchema[key]
+
+    const existingColumnType = column?.type
+    const updatedColumnType = table.schema[key]?.type
+
+    // If the db column type changed to a non-compatible one, we want to re-fetch it
     if (
-      shouldCopyRelationship(column, tableIds) ||
-      shouldCopySpecialColumn(column, table.schema[key])
+      updatedColumnType !== existingColumnType &&
+      !SWITCHABLE_TYPES[updatedColumnType]?.includes(existingColumnType)
     ) {
-      table.schema[key] = existingTableSchema[key]
+      continue
+    }
+
+    if (
+      column.type === FieldType.LINK &&
+      !shouldCopyRelationship(column, tableIds)
+    ) {
+      continue
+    }
+
+    const specialTypes = [
+      FieldType.OPTIONS,
+      FieldType.LONGFORM,
+      FieldType.ARRAY,
+      FieldType.FORMULA,
+      FieldType.BB_REFERENCE,
+    ]
+    if (
+      specialTypes.includes(column.type) &&
+      !shouldCopySpecialColumn(column, table.schema[key])
+    ) {
+      continue
+    }
+
+    table.schema[key] = {
+      ...existingTableSchema[key],
+      externalType:
+        existingTableSchema[key].externalType ||
+        table.schema[key].externalType,
     }
   }
 }
|
@ -348,8 +348,7 @@ const preSaveAction: Partial<Record<SourceName, any>> = {
|
||||||
* Make sure all datasource entities have a display name selected
|
* Make sure all datasource entities have a display name selected
|
||||||
*/
|
*/
|
||||||
export function setDefaultDisplayColumns(datasource: Datasource) {
|
export function setDefaultDisplayColumns(datasource: Datasource) {
|
||||||
//
|
for (const entity of Object.values(datasource.entities || {})) {
|
||||||
for (let entity of Object.values(datasource.entities || {})) {
|
|
||||||
if (entity.primaryDisplay) {
|
if (entity.primaryDisplay) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
|
@ -200,6 +200,6 @@ export async function search(
|
||||||
}
|
}
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
const msg = typeof err === "string" ? err : err.message
|
const msg = typeof err === "string" ? err : err.message
|
||||||
throw new Error(`Unable to search by SQL - ${msg}`)
|
throw new Error(`Unable to search by SQL - ${msg}`, { cause: err })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -52,6 +52,12 @@ export async function getDatasourceAndQuery(
|
||||||
): Promise<DatasourcePlusQueryResponse> {
|
): Promise<DatasourcePlusQueryResponse> {
|
||||||
const datasourceId = json.endpoint.datasourceId
|
const datasourceId = json.endpoint.datasourceId
|
||||||
const datasource = await sdk.datasources.get(datasourceId)
|
const datasource = await sdk.datasources.get(datasourceId)
|
||||||
|
const table = datasource.entities?.[json.endpoint.entityId]
|
||||||
|
if (!json.meta && table) {
|
||||||
|
json.meta = {
|
||||||
|
table,
|
||||||
|
}
|
||||||
|
}
|
||||||
return makeExternalQuery(datasource, json)
|
return makeExternalQuery(datasource, json)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,6 @@ import {
|
||||||
Operation,
|
Operation,
|
||||||
RelationshipType,
|
RelationshipType,
|
||||||
RenameColumn,
|
RenameColumn,
|
||||||
AddColumn,
|
|
||||||
Table,
|
Table,
|
||||||
TableRequest,
|
TableRequest,
|
||||||
ViewV2,
|
ViewV2,
|
||||||
|
@ -33,7 +32,7 @@ import * as viewSdk from "../../views"
|
||||||
export async function save(
|
export async function save(
|
||||||
datasourceId: string,
|
datasourceId: string,
|
||||||
update: Table,
|
update: Table,
|
||||||
opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
|
opts?: { tableId?: string; renaming?: RenameColumn }
|
||||||
) {
|
) {
|
||||||
let tableToSave: TableRequest = {
|
let tableToSave: TableRequest = {
|
||||||
...update,
|
...update,
|
||||||
|
@ -52,6 +51,12 @@ export async function save(
|
||||||
!oldTable &&
|
!oldTable &&
|
||||||
(tableToSave.primary == null || tableToSave.primary.length === 0)
|
(tableToSave.primary == null || tableToSave.primary.length === 0)
|
||||||
) {
|
) {
|
||||||
|
if (tableToSave.schema.id) {
|
||||||
|
throw new Error(
|
||||||
|
"External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
tableToSave.primary = ["id"]
|
tableToSave.primary = ["id"]
|
||||||
tableToSave.schema.id = {
|
tableToSave.schema.id = {
|
||||||
type: FieldType.NUMBER,
|
type: FieldType.NUMBER,
|
||||||
|
@ -179,14 +184,7 @@ export async function save(
|
||||||
// remove the rename prop
|
// remove the rename prop
|
||||||
delete tableToSave._rename
|
delete tableToSave._rename
|
||||||
|
|
||||||
// if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
|
datasource.entities[tableToSave.name] = tableToSave
|
||||||
if (opts?.adding) {
|
|
||||||
datasource.entities[tableToSave.name] = (
|
|
||||||
await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
|
|
||||||
).tables[tableToSave.name]
|
|
||||||
} else {
|
|
||||||
datasource.entities[tableToSave.name] = tableToSave
|
|
||||||
}
|
|
||||||
|
|
||||||
// store it into couch now for budibase reference
|
// store it into couch now for budibase reference
|
||||||
await db.put(populateExternalTableSchemas(datasource))
|
await db.put(populateExternalTableSchemas(datasource))
|
||||||
|
|
|
@ -42,7 +42,7 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
|
||||||
[FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
|
[FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
|
||||||
[FieldType.ARRAY]: SQLiteType.BLOB,
|
[FieldType.ARRAY]: SQLiteType.BLOB,
|
||||||
[FieldType.LINK]: SQLiteType.BLOB,
|
[FieldType.LINK]: SQLiteType.BLOB,
|
||||||
[FieldType.BIGINT]: SQLiteType.REAL,
|
[FieldType.BIGINT]: SQLiteType.TEXT,
|
||||||
// TODO: consider the difference between multi-user and single user types (subtyping)
|
// TODO: consider the difference between multi-user and single user types (subtyping)
|
||||||
[FieldType.BB_REFERENCE]: SQLiteType.TEXT,
|
[FieldType.BB_REFERENCE]: SQLiteType.TEXT,
|
||||||
}
|
}
|
||||||
|
|
|
@ -61,7 +61,7 @@ export class DatasourceAPI extends TestAPI {
|
||||||
}
|
}
|
||||||
|
|
||||||
query = async (
|
query = async (
|
||||||
query: Omit<QueryJson, "meta">,
|
query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
|
||||||
expectations?: Expectations
|
expectations?: Expectations
|
||||||
) => {
|
) => {
|
||||||
return await this._post<any>(`/api/datasources/query`, {
|
return await this._post<any>(`/api/datasources/query`, {
|
||||||
|
|
|
@@ -0,0 +1,33 @@
+import { FieldType } from "@budibase/types"
+
+type SwitchableTypes = Partial<{
+  [K in FieldType]: [K, ...FieldType[]]
+}>
+
+export const SWITCHABLE_TYPES: SwitchableTypes = {
+  [FieldType.STRING]: [
+    FieldType.STRING,
+    FieldType.OPTIONS,
+    FieldType.LONGFORM,
+    FieldType.BARCODEQR,
+  ],
+  [FieldType.OPTIONS]: [
+    FieldType.OPTIONS,
+    FieldType.STRING,
+    FieldType.LONGFORM,
+    FieldType.BARCODEQR,
+  ],
+  [FieldType.LONGFORM]: [
+    FieldType.LONGFORM,
+    FieldType.STRING,
+    FieldType.OPTIONS,
+    FieldType.BARCODEQR,
+  ],
+  [FieldType.BARCODEQR]: [
+    FieldType.BARCODEQR,
+    FieldType.STRING,
+    FieldType.OPTIONS,
+    FieldType.LONGFORM,
+  ],
+  [FieldType.NUMBER]: [FieldType.NUMBER, FieldType.BOOLEAN],
+}
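A short usage sketch for the new map – the helper below is hypothetical, and the import path assumes the file is re-exported from @budibase/shared-core as shown in the next hunk:

import { FieldType } from "@budibase/types"
import { SWITCHABLE_TYPES } from "@budibase/shared-core"

// Hypothetical helper: can a column of type `from` be converted to type `to`?
function canSwitchType(from: FieldType, to: FieldType): boolean {
  const allowed: readonly FieldType[] = SWITCHABLE_TYPES[from] ?? []
  return allowed.includes(to)
}

canSwitchType(FieldType.STRING, FieldType.OPTIONS)  // true
canSwitchType(FieldType.NUMBER, FieldType.LONGFORM) // false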
@@ -1,4 +1,5 @@
 export * from "./api"
+export * from "./fields"
 
 export const OperatorOptions = {
   Equals: {
@@ -218,14 +218,16 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
           high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
         }
       }
-      if ((operator as any) === "rangeLow" && value != null && value !== "") {
-        query.range[field].low = value
-      } else if (
-        (operator as any) === "rangeHigh" &&
-        value != null &&
-        value !== ""
-      ) {
-        query.range[field].high = value
+      if (operator === "rangeLow" && value != null && value !== "") {
+        query.range[field] = {
+          ...query.range[field],
+          low: value,
+        }
+      } else if (operator === "rangeHigh" && value != null && value !== "") {
+        query.range[field] = {
+          ...query.range[field],
+          high: value,
+        }
       }
     } else if (query[queryOperator] && operator !== "onEmptyFilter") {
       if (type === "boolean") {
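The effect is that a one-sided range filter now builds a partial range object instead of mutating a low/high pair that may not exist yet – roughly, with illustrative field names and values:

// Sketch of the spread-merge behaviour above.
let range: Record<string, { low?: number | string; high?: number | string }> = {}
range["price"] = { ...range["price"], low: 100 }  // { low: 100 }
range["price"] = { ...range["price"], high: 500 } // { low: 100, high: 500 }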
@@ -66,6 +66,8 @@ export interface CreateAdminUserRequest {
   password?: string
   tenantId: string
   ssoId?: string
+  familyName?: string
+  givenName?: string
 }
 
 export interface AddSSoUserRequest {
@@ -1,6 +1,6 @@
 import { Document } from "../../document"
 import { View, ViewV2 } from "../view"
-import { AddColumn, RenameColumn } from "../../../sdk"
+import { RenameColumn } from "../../../sdk"
 import { TableSchema } from "./schema"
 
 export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -30,6 +30,5 @@ export interface Table extends Document {
 
 export interface TableRequest extends Table {
   _rename?: RenameColumn
-  _add?: AddColumn
   created?: boolean
 }
@@ -22,6 +22,13 @@ export interface UserSSO {
   providerType: SSOProviderType
   oauth2?: OAuth2
   thirdPartyProfile?: SSOProfileJson
+  profile?: {
+    displayName?: string
+    name?: {
+      givenName?: string
+      familyName?: string
+    }
+  }
 }
 
 export type SSOUser = User & UserSSO
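The optional profile mirrors the shape most SSO providers return; a hedged sketch of how the new name fields might be read, assuming `ssoUser` satisfies the SSOUser type above:

const givenName = ssoUser.profile?.name?.givenName
const familyName = ssoUser.profile?.name?.familyName
const displayName =
  ssoUser.profile?.displayName ?? `${givenName ?? ""} ${familyName ?? ""}`.trim()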
@@ -14,6 +14,14 @@ export enum Operation {
   DELETE_TABLE = "DELETE_TABLE",
 }
 
+export const RowOperations = [
+  Operation.CREATE,
+  Operation.READ,
+  Operation.UPDATE,
+  Operation.DELETE,
+  Operation.BULK_CREATE,
+]
+
 export enum SortDirection {
   ASCENDING = "ASCENDING",
   DESCENDING = "DESCENDING",
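A likely use for the new constant is a simple membership check; the guard below is a hypothetical example, and the import path assumes both symbols are exported from @budibase/types:

import { Operation, RowOperations } from "@budibase/types"

// Hypothetical guard: is this a row-level operation (as opposed to a table/DDL one)?
function isRowOperation(op: Operation): boolean {
  return RowOperations.includes(op)
}

isRowOperation(Operation.UPDATE)       // true
isRowOperation(Operation.DELETE_TABLE) // false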
@@ -27,10 +27,13 @@ export interface SearchFilters {
     [key: string]: string
   }
   [SearchFilterOperator.RANGE]?: {
-    [key: string]: {
-      high: number | string
-      low: number | string
-    }
+    [key: string]:
+      | {
+          high: number | string
+          low: number | string
+        }
+      | { high: number | string }
+      | { low: number | string }
   }
   [SearchFilterOperator.EQUAL]?: {
     [key: string]: any
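With the union in place, a range entry no longer has to carry both bounds; illustrative filter fragments (field names are made up):

const bothBounds = { range: { price: { low: 10, high: 100 } } }
const lowOnly = { range: { price: { low: 10 } } } // now type-checks
const highOnly = { range: { updatedAt: { high: "2024-01-01T00:00:00.000Z" } } }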
@@ -77,10 +80,6 @@ export interface RenameColumn {
   updated: string
 }
 
-export interface AddColumn {
-  name: string
-}
-
 export interface RelationshipsJson {
   through?: string
   from?: string
@@ -116,7 +116,8 @@ const parseBooleanParam = (param: any) => {
 export const adminUser = async (
   ctx: Ctx<CreateAdminUserRequest, CreateAdminUserResponse>
 ) => {
-  const { email, password, tenantId, ssoId } = ctx.request.body
+  const { email, password, tenantId, ssoId, givenName, familyName } =
+    ctx.request.body
 
   if (await platform.tenants.exists(tenantId)) {
     ctx.throw(403, "Organisation already exists.")
@@ -151,6 +152,8 @@ export const adminUser = async (
       ssoId,
       hashPassword,
       requirePassword,
+      firstName: givenName,
+      lastName: familyName,
     })
 
     // events
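Taken together with the request type above, the admin-creation payload can now optionally carry a name. An illustrative body, assuming email and tenantId are the only required fields and that the route itself is unchanged:

// Hypothetical request body – values are made up.
const body: CreateAdminUserRequest = {
  email: "admin@example.com",
  password: "a-long-password",
  tenantId: "default",
  givenName: "Ada",
  familyName: "Lovelace",
}
// Internally these map onto user.firstName / user.lastName via firstName: givenName, lastName: familyName.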
@@ -16,6 +16,8 @@ function buildAdminInitValidation() {
       password: OPTIONAL_STRING,
       tenantId: Joi.string().required(),
       ssoId: Joi.string(),
+      familyName: OPTIONAL_STRING,
+      givenName: OPTIONAL_STRING,
     })
       .required()
       .unknown(false)
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Check if the pro submodule is loaded
+if [ ! -d "./packages/pro/src" ]; then
+  echo "[ERROR] Submodule is not loaded. This is only allowed with loaded submodules."
+  exit 1
+fi
+
+yarn build --scope @budibase/server --scope @budibase/worker
+docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0