Merge branch 'master' of github.com:budibase/budibase into update-docker-compose-for-sqs

commit d83248e917
@@ -1,5 +1,5 @@
 {
-  "version": "2.29.1",
+  "version": "2.29.2",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -72,4 +72,4 @@ export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
 export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
 export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
 export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
-export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
+export { DEFAULT_BB_DATASOURCE_ID } from "@budibase/shared-core"
@@ -449,8 +449,12 @@ class InternalBuilder {
         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
       }
     }
-    // always add sorting by the primary key - make sure result is deterministic
-    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+
+    // add sorting by the primary key if the result isn't already sorted by it,
+    // to make sure result is deterministic
+    if (!sort || sort[primaryKey[0]] === undefined) {
+      query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+    }
     return query
   }
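The guard added above matters because knex will happily emit the same column twice in an ORDER BY clause, which some SQL engines reject. A minimal standalone sketch of the pattern, assuming a simplified `SortSpec` shape for the sort parameter (the real builder uses richer types):

```ts
import { Knex } from "knex"

// Assumed shape for the sort parameter, for illustration only.
type SortSpec = Record<string, "asc" | "desc">

// Append a deterministic tie-breaker on the primary key, but only when the
// caller hasn't already sorted by it - otherwise the same column would
// appear twice in the generated ORDER BY clause.
function applyDeterministicSort(
  query: Knex.QueryBuilder,
  aliased: string,
  primaryKey: string[],
  sort?: SortSpec
): Knex.QueryBuilder {
  for (const [key, direction] of Object.entries(sort || {})) {
    query = query.orderBy(`${aliased}.${key}`, direction)
  }
  if (!sort || sort[primaryKey[0]] === undefined) {
    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
  }
  return query
}
```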
@@ -604,7 +608,8 @@ class InternalBuilder {
       if (!primary) {
         throw new Error("Primary key is required for upsert")
       }
-      return query.insert(parsedBody).onConflict(primary).merge()
+      const ret = query.insert(parsedBody).onConflict(primary).merge()
+      return ret
     } else if (this.client === SqlClient.MS_SQL) {
       // No upsert or onConflict support in MSSQL yet, see:
       // https://github.com/knex/knex/pull/6050
@@ -109,8 +109,10 @@ function generateSchema(
       const { tableName } = breakExternalTableId(column.tableId)
       // @ts-ignore
       const relatedTable = tables[tableName]
-      if (!relatedTable) {
-        throw new Error("Referenced table doesn't exist")
+      if (!relatedTable || !relatedTable.primary) {
+        throw new Error(
+          "Referenced table doesn't exist or has no primary keys"
+        )
       }
       const relatedPrimary = relatedTable.primary[0]
       const externalType = relatedTable.schema[relatedPrimary].externalType
@@ -55,10 +55,7 @@ export function buildExternalTableId(datasourceId: string, tableName: string) {
   return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
 }
 
-export function breakExternalTableId(tableId: string | undefined) {
-  if (!tableId) {
-    return {}
-  }
+export function breakExternalTableId(tableId: string) {
   const parts = tableId.split(DOUBLE_SEPARATOR)
   let datasourceId = parts.shift()
   // if they need joined
@@ -67,6 +64,9 @@ export function breakExternalTableId(tableId: string | undefined) {
   if (tableName.includes(ENCODED_SPACE)) {
     tableName = decodeURIComponent(tableName)
   }
+  if (!datasourceId || !tableName) {
+    throw new Error("Unable to get datasource/table name from table ID")
+  }
   return { datasourceId, tableName }
 }
 
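Taken together, the two hunks above turn `breakExternalTableId` from a function that silently returned `{}` for a missing ID into one with a non-optional parameter and a guaranteed return shape; that is what lets later hunks drop their `!` non-null assertions on `datasourceId`/`tableName`. A sketch of the resulting function, with the `DOUBLE_SEPARATOR` and `ENCODED_SPACE` values assumed for illustration (the real constants live in the same module):

```ts
const DOUBLE_SEPARATOR = "__" // assumed value, for illustration
const ENCODED_SPACE = "%20"   // assumed value, for illustration

export function breakExternalTableId(tableId: string): {
  datasourceId: string
  tableName: string
} {
  const parts = tableId.split(DOUBLE_SEPARATOR)
  const datasourceId = parts.shift()
  // if the table name itself contained the separator, join the rest back up
  let tableName = parts.join(DOUBLE_SEPARATOR)
  if (tableName.includes(ENCODED_SPACE)) {
    tableName = decodeURIComponent(tableName)
  }
  if (!datasourceId || !tableName) {
    throw new Error("Unable to get datasource/table name from table ID")
  }
  return { datasourceId, tableName }
}
```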
@@ -24,7 +24,6 @@ export const account = (partial: Partial<Account> = {}): Account => {
     createdAt: Date.now(),
     verified: true,
     verificationSent: true,
-    tier: "FREE", // DEPRECATED
     authType: AuthType.PASSWORD,
     name: generator.name(),
     size: "10+",
@@ -1,9 +1,14 @@
 <script>
-  import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
+  import {
+    FieldType,
+    BBReferenceFieldSubType,
+    SourceName,
+  } from "@budibase/types"
   import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { DB_TYPE_INTERNAL } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
+  import { tables, datasources } from "stores/builder"
 
   let error = null
   let fileName = null
@@ -80,6 +85,9 @@
     schema = fetchSchema(tableId)
   }
 
+  $: table = $tables.list.find(table => table._id === tableId)
+  $: datasource = $datasources.list.find(ds => ds._id === table?.sourceId)
+
   async function fetchSchema(tableId) {
     try {
       const definition = await API.fetchTableDefinition(tableId)
@@ -185,20 +193,25 @@
     </div>
   {/each}
 </div>
-{#if tableType === DB_TYPE_INTERNAL}
 <br />
 <br />
+<!-- SQL Server doesn't yet support overwriting rows by existing keys -->
+{#if datasource?.source !== SourceName.SQL_SERVER}
   <Toggle
     bind:value={updateExistingRows}
     on:change={() => (identifierFields = [])}
     thin
     text="Update existing rows"
   />
-  {#if updateExistingRows}
+{/if}
+{#if updateExistingRows}
+  {#if tableType === DB_TYPE_INTERNAL}
     <Multiselect
       label="Identifier field(s)"
      options={Object.keys(validation)}
      bind:value={identifierFields}
    />
+  {:else}
+    <p>Rows will be updated based on the table's primary key.</p>
  {/if}
 {/if}
 {#if invalidColumns.length > 0}
@@ -233,9 +233,9 @@
       response.info = response.info || { code: 200 }
       // if existing schema, copy over what it is
       if (schema) {
-        for (let [name, field] of Object.entries(schema)) {
-          if (response.schema[name]) {
-            response.schema[name] = field
+        for (let [name, field] of Object.entries(response.schema)) {
+          if (!schema[name]) {
+            schema[name] = field
           }
         }
       }
@@ -18,7 +18,7 @@
   import FilterUsers from "./FilterUsers.svelte"
   import { getFields } from "../utils/searchFields"
 
-  const { OperatorOptions } = Constants
+  const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants
 
   export let schemaFields
   export let filters = []
@@ -28,6 +28,23 @@
   export let allowBindings = false
   export let filtersLabel = "Filters"
 
+  $: {
+    if (
+      tables.find(
+        table =>
+          table._id === datasource.tableId &&
+          table.sourceId === DEFAULT_BB_DATASOURCE_ID
+      ) &&
+      !schemaFields.some(field => field.name === "_id")
+    ) {
+      schemaFields = [
+        ...schemaFields,
+        { name: "_id", type: "string" },
+        { name: "_rev", type: "string" },
+      ]
+    }
+  }
+
   $: matchAny = filters?.find(filter => filter.operator === "allOr") != null
   $: onEmptyFilter =
     filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
@@ -35,7 +52,6 @@
   $: fieldFilters = filters.filter(
     filter => filter.operator !== "allOr" && !filter.onEmptyFilter
   )
-
   const behaviourOptions = [
     { value: "and", label: "Match all filters" },
     { value: "or", label: "Match any filter" },
@@ -44,7 +60,6 @@
     { value: "all", label: "Return all table rows" },
     { value: "none", label: "Return no rows" },
   ]
 
-  const context = getContext("context")
 
   $: fieldOptions = getFields(tables, schemaFields || [], {
@@ -1,7 +1,11 @@
 /**
  * Operator options for lucene queries
  */
-export { OperatorOptions, SqlNumberTypeRangeMap } from "@budibase/shared-core"
+export {
+  OperatorOptions,
+  SqlNumberTypeRangeMap,
+  DEFAULT_BB_DATASOURCE_ID,
+} from "@budibase/shared-core"
 export { Feature as Features } from "@budibase/types"
 import { BpmCorrelationKey } from "@budibase/shared-core"
 import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
@@ -1 +1 @@
-Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6
+Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2017-latest
+FROM mcr.microsoft.com/mssql/server:2022-latest
 
 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd
@@ -311,8 +311,8 @@ export async function preview(
 
   // if existing schema, update to include any previous schema keys
   if (existingSchema) {
-    for (let key of Object.keys(previewSchema)) {
-      if (existingSchema[key]) {
+    for (let key of Object.keys(existingSchema)) {
+      if (!previewSchema[key]) {
         previewSchema[key] = existingSchema[key]
       }
     }
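The loop direction flipped here: the old code copied stale types over freshly detected ones whenever a key existed in both schemas, while the new code only carries over keys the fresh preview no longer reports, so a column whose type changed (as the tests further down exercise) picks up its new type. A small plain-object illustration of the new behaviour, with names assumed for the sketch:

```ts
type FieldSchema = { type: string; name: string }

const existingSchema: Record<string, FieldSchema> = {
  data: { type: "number", name: "data" },     // stale type
  legacy: { type: "string", name: "legacy" }, // no longer returned
}
const previewSchema: Record<string, FieldSchema> = {
  data: { type: "string", name: "data" },     // freshly detected type
}

// keep newly detected types, but don't lose columns the preview missed
for (const key of Object.keys(existingSchema)) {
  if (!previewSchema[key]) {
    previewSchema[key] = existingSchema[key]
  }
}
// previewSchema.data.type === "string"; previewSchema.legacy is preserved
```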
@@ -72,92 +72,6 @@ export type ExternalRequestReturnType<T extends Operation> =
     ? number
     : { row: Row; table: Table }
 
-function buildFilters(
-  id: string | undefined | string[],
-  filters: SearchFilters,
-  table: Table
-) {
-  const primary = table.primary
-  // if passed in array need to copy for shifting etc
-  let idCopy: undefined | string | any[] = cloneDeep(id)
-  if (filters) {
-    // need to map over the filters and make sure the _id field isn't present
-    let prefix = 1
-    for (let operator of Object.values(filters)) {
-      for (let field of Object.keys(operator || {})) {
-        if (dbCore.removeKeyNumbering(field) === "_id") {
-          if (primary) {
-            const parts = breakRowIdField(operator[field])
-            for (let field of primary) {
-              operator[`${prefix}:${field}`] = parts.shift()
-            }
-            prefix++
-          }
-          // make sure this field doesn't exist on any filter
-          delete operator[field]
-        }
-      }
-    }
-  }
-  // there is no id, just use the user provided filters
-  if (!idCopy || !table) {
-    return filters
-  }
-  // if used as URL parameter it will have been joined
-  if (!Array.isArray(idCopy)) {
-    idCopy = breakRowIdField(idCopy)
-  }
-  const equal: any = {}
-  if (primary && idCopy) {
-    for (let field of primary) {
-      // work through the ID and get the parts
-      equal[field] = idCopy.shift()
-    }
-  }
-  return {
-    equal,
-  }
-}
-
-async function removeManyToManyRelationships(
-  rowId: string,
-  table: Table,
-  colName: string
-) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.DELETE),
-      body: { [colName]: null },
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
-
-async function removeOneToManyRelationships(rowId: string, table: Table) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.UPDATE),
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
-
 /**
  * This function checks the incoming parameters to make sure all the inputs are
  * valid based on on the table schema. The main thing this is looking for is when a
@@ -212,8 +126,8 @@ function getEndpoint(tableId: string | undefined, operation: string) {
   }
   const { datasourceId, tableName } = breakExternalTableId(tableId)
   return {
-    datasourceId: datasourceId!,
-    entityId: tableName!,
+    datasourceId: datasourceId,
+    entityId: tableName,
     operation: operation as Operation,
   }
 }
@@ -240,6 +154,7 @@ export class ExternalRequest<T extends Operation> {
   private readonly tableId: string
   private datasource?: Datasource
   private tables: { [key: string]: Table } = {}
+  private tableList: Table[]
 
   constructor(operation: T, tableId: string, datasource?: Datasource) {
     this.operation = operation
@@ -248,22 +163,134 @@ export class ExternalRequest<T extends Operation> {
     if (datasource && datasource.entities) {
       this.tables = datasource.entities
     }
+    this.tableList = Object.values(this.tables)
   }
 
+  private prepareFilters(
+    id: string | undefined | string[],
+    filters: SearchFilters,
+    table: Table
+  ): SearchFilters {
+    // replace any relationship columns initially, table names and relationship column names are acceptable
+    const relationshipColumns = sdk.rows.filters.getRelationshipColumns(table)
+    filters = sdk.rows.filters.updateFilterKeys(
+      filters,
+      relationshipColumns.map(({ name, definition }) => {
+        const { tableName } = breakExternalTableId(definition.tableId)
+        return {
+          original: name,
+          updated: tableName,
+        }
+      })
+    )
+    const primary = table.primary
+    // if passed in array need to copy for shifting etc
+    let idCopy: undefined | string | any[] = cloneDeep(id)
+    if (filters) {
+      // need to map over the filters and make sure the _id field isn't present
+      let prefix = 1
+      for (let operator of Object.values(filters)) {
+        for (let field of Object.keys(operator || {})) {
+          if (dbCore.removeKeyNumbering(field) === "_id") {
+            if (primary) {
+              const parts = breakRowIdField(operator[field])
+              for (let field of primary) {
+                operator[`${prefix}:${field}`] = parts.shift()
+              }
+              prefix++
+            }
+            // make sure this field doesn't exist on any filter
+            delete operator[field]
+          }
+        }
+      }
+    }
+    // there is no id, just use the user provided filters
+    if (!idCopy || !table) {
+      return filters
+    }
+    // if used as URL parameter it will have been joined
+    if (!Array.isArray(idCopy)) {
+      idCopy = breakRowIdField(idCopy)
+    }
+    const equal: SearchFilters["equal"] = {}
+    if (primary && idCopy) {
+      for (let field of primary) {
+        // work through the ID and get the parts
+        equal[field] = idCopy.shift()
+      }
+    }
+    return {
+      equal,
+    }
+  }
+
+  private async removeManyToManyRelationships(
+    rowId: string,
+    table: Table,
+    colName: string
+  ) {
+    const tableId = table._id!
+    const filters = this.prepareFilters(rowId, {}, table)
+    // safety check, if there are no filters on deletion bad things happen
+    if (Object.keys(filters).length !== 0) {
+      return getDatasourceAndQuery({
+        endpoint: getEndpoint(tableId, Operation.DELETE),
+        body: { [colName]: null },
+        filters,
+        meta: {
+          table,
+        },
+      })
+    } else {
+      return []
+    }
+  }
+
+  private async removeOneToManyRelationships(rowId: string, table: Table) {
+    const tableId = table._id!
+    const filters = this.prepareFilters(rowId, {}, table)
+    // safety check, if there are no filters on deletion bad things happen
+    if (Object.keys(filters).length !== 0) {
+      return getDatasourceAndQuery({
+        endpoint: getEndpoint(tableId, Operation.UPDATE),
+        filters,
+        meta: {
+          table,
+        },
+      })
+    } else {
+      return []
+    }
+  }
+
   getTable(tableId: string | undefined): Table | undefined {
     if (!tableId) {
-      throw "Table ID is unknown, cannot find table"
+      throw new Error("Table ID is unknown, cannot find table")
     }
     const { tableName } = breakExternalTableId(tableId)
-    if (tableName) {
-      return this.tables[tableName]
-    }
+    return this.tables[tableName]
   }
 
+  // seeds the object with table and datasource information
+  async retrieveMetadata(
+    datasourceId: string
+  ): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
+    if (!this.datasource) {
+      this.datasource = await sdk.datasources.get(datasourceId)
+      if (!this.datasource || !this.datasource.entities) {
+        throw "No tables found, fetch tables before query."
+      }
+      this.tables = this.datasource.entities
+      this.tableList = Object.values(this.tables)
+    }
+    return { tables: this.tables, datasource: this.datasource }
+  }
+
   async getRow(table: Table, rowId: string): Promise<Row> {
     const response = await getDatasourceAndQuery({
       endpoint: getEndpoint(table._id!, Operation.READ),
-      filters: buildFilters(rowId, {}, table),
+      filters: this.prepareFilters(rowId, {}, table),
       meta: {
         table,
       },
@@ -289,16 +316,20 @@ export class ExternalRequest<T extends Operation> {
     manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
+      if (row[key] === undefined || newRow[key]) {
         continue
       }
+      if (
+        !(this.operation === Operation.BULK_UPSERT) &&
+        !isEditableColumn(field)
+      ) {
+        continue
+      }
       // parse floats/numbers
       if (field.type === FieldType.NUMBER && !isNaN(parseFloat(row[key]))) {
         newRow[key] = parseFloat(row[key])
       } else if (field.type === FieldType.LINK) {
-        const { tableName: linkTableName } = breakExternalTableId(
-          field?.tableId
-        )
+        const { tableName: linkTableName } = breakExternalTableId(field.tableId)
         // table has to exist for many to many
         if (!linkTableName || !this.tables[linkTableName]) {
           continue
@@ -379,9 +410,6 @@ export class ExternalRequest<T extends Operation> {
       [key: string]: { rows: Row[]; isMany: boolean; tableId: string }
     } = {}
     const { tableName } = breakExternalTableId(tableId)
-    if (!tableName) {
-      return related
-    }
     const table = this.tables[tableName]
     // @ts-ignore
     const primaryKey = table.primary[0]
@@ -514,7 +542,7 @@ export class ExternalRequest<T extends Operation> {
       endpoint: getEndpoint(tableId, operation),
       // if we're doing many relationships then we're writing, only one response
       body,
-      filters: buildFilters(id, {}, linkTable),
+      filters: this.prepareFilters(id, {}, linkTable),
       meta: {
         table: linkTable,
       },
@@ -538,8 +566,8 @@ export class ExternalRequest<T extends Operation> {
     for (let row of rows) {
       const rowId = generateIdForRow(row, table)
       const promise: Promise<any> = isMany
-        ? removeManyToManyRelationships(rowId, table, colName)
-        : removeOneToManyRelationships(rowId, table)
+        ? this.removeManyToManyRelationships(rowId, table, colName)
+        : this.removeOneToManyRelationships(rowId, table)
       if (promise) {
         promises.push(promise)
       }
|
@ -562,12 +590,12 @@ export class ExternalRequest<T extends Operation> {
|
|||
rows.map(row => {
|
||||
const rowId = generateIdForRow(row, table)
|
||||
return isMany
|
||||
? removeManyToManyRelationships(
|
||||
? this.removeManyToManyRelationships(
|
||||
rowId,
|
||||
table,
|
||||
relationshipColumn.fieldName
|
||||
)
|
||||
: removeOneToManyRelationships(rowId, table)
|
||||
: this.removeOneToManyRelationships(rowId, table)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
@@ -575,21 +603,21 @@ export class ExternalRequest<T extends Operation> {
 
   async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
     const { operation, tableId } = this
-    let { datasourceId, tableName } = breakExternalTableId(tableId)
-    if (!tableName) {
-      throw "Unable to run without a table name"
+    if (!tableId) {
+      throw new Error("Unable to run without a table ID")
     }
-    if (!this.datasource) {
-      this.datasource = await sdk.datasources.get(datasourceId!)
-      if (!this.datasource || !this.datasource.entities) {
-        throw "No tables found, fetch tables before query."
-      }
-      this.tables = this.datasource.entities
+    let { datasourceId, tableName } = breakExternalTableId(tableId)
+    let datasource = this.datasource
+    if (!datasource) {
+      const { datasource: ds } = await this.retrieveMetadata(datasourceId)
+      datasource = ds
     }
     const table = this.tables[tableName]
-    let isSql = isSQL(this.datasource)
+    let isSql = isSQL(datasource)
     if (!table) {
-      throw `Unable to process query, table "${tableName}" not defined.`
+      throw new Error(
+        `Unable to process query, table "${tableName}" not defined.`
+      )
     }
     // look for specific components of config which may not be considered acceptable
     let { id, row, filters, sort, paginate, rows } = cleanupConfig(
@@ -612,7 +640,7 @@ export class ExternalRequest<T extends Operation> {
         break
       }
     }
-    filters = buildFilters(id, filters || {}, table)
+    filters = this.prepareFilters(id, filters || {}, table)
     const relationships = buildExternalRelationships(table, this.tables)
 
     const incRelationships =
@@ -660,10 +688,15 @@ export class ExternalRequest<T extends Operation> {
       body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
-        idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
+        idFilter: this.prepareFilters(
+          id || generateIdForRow(row, table),
+          {},
+          table
+        ),
       },
       meta: {
         table,
         id: config.id,
       },
     }
@@ -136,10 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
   const id = ctx.params.rowId
   const tableId = utils.getTableId(ctx)
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const datasource: Datasource = await sdk.datasources.get(datasourceId!)
-  if (!tableName) {
-    ctx.throw(400, "Unable to find table.")
-  }
+  const datasource: Datasource = await sdk.datasources.get(datasourceId)
   if (!datasource || !datasource.entities) {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
@@ -163,7 +160,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     }
     const links = row[fieldName]
     const linkedTableId = field.tableId
-    const linkedTableName = breakExternalTableId(linkedTableId).tableName!
+    const linkedTableName = breakExternalTableId(linkedTableId).tableName
     const linkedTable = tables[linkedTableName]
     // don't support composite keys right now
     const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
@@ -99,7 +99,7 @@ export function basicProcessing({
       row,
       tableName: table._id!,
       fieldName: internalColumn,
-      isLinked: false,
+      isLinked,
     })
   }
 }
@@ -2,6 +2,8 @@ import {
   DatasourcePlusQueryResponse,
   DSPlusOperation,
   FieldType,
+  isManyToOne,
+  isOneToMany,
   ManyToManyRelationshipFieldMetadata,
   RelationshipFieldMetadata,
   RelationshipsJson,
@@ -93,12 +95,12 @@ export function buildExternalRelationships(
 ): RelationshipsJson[] {
   const relationships = []
   for (let [fieldName, field] of Object.entries(table.schema)) {
-    if (field.type !== FieldType.LINK) {
+    if (field.type !== FieldType.LINK || !field.tableId) {
       continue
     }
     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
     // no table to link to, this is not a valid relationships
-    if (!linkTableName || !tables[linkTableName]) {
+    if (!tables[linkTableName]) {
       continue
     }
     const linkTable = tables[linkTableName]
@@ -110,7 +112,7 @@ export function buildExternalRelationships(
       // need to specify where to put this back into
       column: fieldName,
     }
-    if (isManyToMany(field)) {
+    if (isManyToMany(field) && field.through) {
       const { tableName: throughTableName } = breakExternalTableId(
         field.through
       )
@@ -120,7 +122,7 @@ export function buildExternalRelationships(
       definition.to = field.throughFrom || linkTable.primary[0]
       definition.fromPrimary = table.primary[0]
       definition.toPrimary = linkTable.primary[0]
-    } else {
+    } else if (isManyToOne(field) || isOneToMany(field)) {
       // if no foreign key specified then use the name of the field in other table
       definition.from = field.foreignKey || table.primary[0]
       definition.to = field.fieldName
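Replacing the bare `else` with explicit `isManyToOne`/`isOneToMany` guards lets TypeScript narrow `field` to the variants that actually carry the one-sided properties. The same pattern in miniature, with simplified metadata types assumed for illustration:

```ts
type ManyToMany = { kind: "many-to-many"; through: string }
type OneSided = { kind: "many-to-one" | "one-to-many"; foreignKey?: string }
type Relationship = ManyToMany | OneSided

const isManyToMany = (f: Relationship): f is ManyToMany =>
  f.kind === "many-to-many"
const isOneSided = (f: Relationship): f is OneSided =>
  f.kind === "many-to-one" || f.kind === "one-to-many"

function describeRelationship(field: Relationship): string {
  if (isManyToMany(field)) {
    // narrowed: `through` is known to exist here
    return `joined through ${field.through}`
  } else if (isOneSided(field)) {
    // narrowed: `foreignKey` is a legal (optional) property here
    return `keyed by ${field.foreignKey ?? "the primary key"}`
  }
  return "unknown relationship"
}
```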
@@ -180,16 +182,18 @@ export function buildSqlFieldList(
   }
   let fields = extractRealFields(table)
   for (let field of Object.values(table.schema)) {
-    if (field.type !== FieldType.LINK || !opts?.relationships) {
+    if (
+      field.type !== FieldType.LINK ||
+      !opts?.relationships ||
+      !field.tableId
+    ) {
       continue
     }
     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-    if (linkTableName) {
-      const linkTable = tables[linkTableName]
-      if (linkTable) {
-        const linkedFields = extractRealFields(linkTable, fields)
-        fields = fields.concat(linkedFields)
-      }
+    const linkTable = tables[linkTableName]
+    if (linkTable) {
+      const linkedFields = extractRealFields(linkTable, fields)
+      fields = fields.concat(linkedFields)
     }
   }
   return fields
@@ -16,14 +16,18 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"
 
 function getDatasourceId(table: Table) {
   if (!table) {
-    throw "No table supplied"
+    throw new Error("No table supplied")
   }
   if (table.sourceId) {
     return table.sourceId
   }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
   return breakExternalTableId(table._id).datasourceId
 }
 
@@ -82,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is becuse we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on atm in Budibase is the
+    // primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }
 
   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
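The ON CONFLICT mechanism the comment above refers to only fires against a unique index, which in practice means the primary key here. Roughly what the generated upsert looks like through knex, with table and column names purely illustrative:

```ts
import { knex } from "knex"

const db = knex({ client: "pg", connection: process.env.DATABASE_URL })

// Insert the row; if the primary key already exists, merge the new values
// into the existing row instead of failing. Identifier fields that aren't
// backed by a unique index can't trigger ON CONFLICT, hence the 400 above.
async function upsertRow(row: { id: number; name: string }) {
  return db("people").insert(row).onConflict("id").merge()
}
```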
@@ -178,7 +178,7 @@ export async function handleDataImport(
   }
 
   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)
 
   let finalData: any = await importToRows(data, table, user)
 
@@ -250,6 +250,67 @@ describe.each(
     expect(events.query.previewed).toHaveBeenCalledTimes(1)
   })
 
+  it("should update schema when column type changes from number to string", async () => {
+    const tableName = "schema_change_test"
+    await client.schema.dropTableIfExists(tableName)
+
+    await client.schema.createTable(tableName, table => {
+      table.increments("id").primary()
+      table.string("name")
+      table.integer("data")
+    })
+
+    await client(tableName).insert({
+      name: "test",
+      data: 123,
+    })
+
+    const firstPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: {},
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "number", name: "data" },
+      })
+    )
+
+    await client.schema.alterTable(tableName, table => {
+      table.string("data").alter()
+    })
+
+    await client(tableName).update({
+      data: "string value",
+    })
+
+    const secondPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: firstPreview.schema,
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "string", name: "data" },
+      })
+    )
+  })
+
   it("should work with static variables", async () => {
     await config.api.datasource.update({
       ...datasource,
@@ -137,6 +137,67 @@ describe("/queries", () => {
     })
   })
 
+  it("should update schema when structure changes from object to array", async () => {
+    const name = generator.guid()
+
+    await withCollection(async collection => {
+      await collection.insertOne({ name, field: { subfield: "value" } })
+    })
+
+    const firstPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: {},
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "json", name: "field" },
+      })
+    )
+
+    await withCollection(async collection => {
+      await collection.updateOne(
+        { name },
+        { $set: { field: ["value1", "value2"] } }
+      )
+    })
+
+    const secondPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: firstPreview.schema,
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "array", name: "field" },
+      })
+    )
+  })
+
   it("should generate a nested schema based on all of the nested items", async () => {
     const name = generator.guid()
     const item = {
@@ -92,6 +92,61 @@ describe("rest", () => {
     expect(cached.rows[0].name).toEqual("one")
   })
 
+  it("should update schema when structure changes from JSON to array", async () => {
+    const datasource = await config.api.datasource.create({
+      name: generator.guid(),
+      type: "test",
+      source: SourceName.REST,
+      config: {},
+    })
+
+    nock("http://www.example.com")
+      .get("/")
+      .reply(200, [{ obj: {}, id: "1" }])
+
+    const firstResponse = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "test query",
+      parameters: [],
+      queryVerb: "read",
+      transformer: "",
+      schema: {},
+      readable: true,
+      fields: {
+        path: "www.example.com",
+      },
+    })
+
+    expect(firstResponse.schema).toEqual({
+      obj: { type: "json", name: "obj" },
+      id: { type: "string", name: "id" },
+    })
+
+    nock.cleanAll()
+
+    nock("http://www.example.com")
+      .get("/")
+      .reply(200, [{ obj: [], id: "1" }])
+
+    const secondResponse = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "test query",
+      parameters: [],
+      queryVerb: "read",
+      transformer: "",
+      schema: firstResponse.schema,
+      readable: true,
+      fields: {
+        path: "www.example.com",
+      },
+    })
+
+    expect(secondResponse.schema).toEqual({
+      obj: { type: "array", name: "obj" },
+      id: { type: "string", name: "id" },
+    })
+  })
+
   it("should parse global and query level header mappings", async () => {
     const datasource = await config.api.datasource.create({
       name: generator.guid(),
@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 
 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@ import {
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"
 
 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([
 
   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined
 
   beforeAll(async () => {
     await config.init()
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })
 
@@ -307,13 +315,13 @@ describe.each([
       // as quickly as possible.
       await Promise.all(
         sequence.map(async () => {
-          const attempts = 20
+          const attempts = 30
           for (let attempt = 0; attempt < attempts; attempt++) {
             try {
               await config.api.row.save(table._id!, {})
               return
             } catch (e) {
-              await new Promise(r => setTimeout(r, Math.random() * 15))
+              await new Promise(r => setTimeout(r, Math.random() * 50))
             }
           }
           throw new Error(`Failed to create row after ${attempts} attempts`)
@@ -598,6 +606,35 @@ describe.each([
       expect(res.name).toEqual("Updated Name")
       await assertRowUsage(rowUsage)
     })
+
+    !isInternal &&
+      it("can update a row on an external table with a primary key", async () => {
+        const tableName = uuid.v4().substring(0, 10)
+        await client!.schema.createTable(tableName, table => {
+          table.increments("id").primary()
+          table.string("name")
+        })
+
+        const res = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = res.datasource.entities![tableName]
+
+        const row = await config.api.row.save(table._id!, {
+          id: 1,
+          name: "Row 1",
+        })
+
+        const updatedRow = await config.api.row.save(table._id!, {
+          _id: row._id!,
+          name: "Row 1 Updated",
+        })
+
+        expect(updatedRow.name).toEqual("Row 1 Updated")
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows).toHaveLength(1)
+      })
   })
 
   describe("patch", () => {
@@ -667,6 +704,7 @@ describe.each([
       expect(event.oldRow.description).toEqual(beforeRow.description)
       expect(event.row.description).toEqual(beforeRow.description)
     })
+
     it("should throw an error when given improper types", async () => {
       const existing = await config.api.row.save(table._id!, {})
       const rowUsage = await getRowUsage()
@@ -758,7 +796,8 @@ describe.each([
     })
 
     !isInternal &&
-      // TODO: SQL is having issues creating composite keys
+      // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+      // to identity columns. This is not something Budibase does currently.
+      providerType !== DatabaseName.SQL_SERVER &&
       it("should support updating fields that are part of a composite key", async () => {
         const tableRequest = saveTableRequest({
@@ -911,32 +950,21 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })
 
-    it("Should ignore malformed/invalid delete requests", async () => {
-      const rowUsage = await getRowUsage()
+    it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
+      "Should ignore malformed/invalid delete request: %s",
+      async (request: any) => {
+        const rowUsage = await getRowUsage()
 
-      await config.api.row.delete(table._id!, { not: "valid" } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
+        await config.api.row.delete(table._id!, request, {
+          status: 400,
+          body: {
+            message: "Invalid delete rows request",
+          },
+        })
 
-      await config.api.row.delete(table._id!, { rows: 123 } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, "invalid" as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await assertRowUsage(rowUsage)
-    })
+        await assertRowUsage(rowUsage)
+      }
+    )
   })
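`it.each` runs the same assertion once per listed payload and interpolates the payload into the test name via `%s`, which is what replaces the three copy-pasted blocks. In isolation, with a hypothetical `deleteRows` helper standing in for the real API call:

```ts
// hypothetical helper standing in for config.api.row.delete
declare function deleteRows(request: unknown): Promise<void>

it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
  "rejects malformed delete request: %s",
  async (request: any) => {
    await expect(deleteRows(request)).rejects.toThrow(
      "Invalid delete rows request"
    )
  }
)
```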
@@ -1085,6 +1113,121 @@ describe.each([
       expect(rows[2].name).toEqual("Row 3")
       expect(rows[2].description).toEqual("Row 3 description")
     })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows an autoID primary key", async () => {
+        const tableName = uuid.v4()
+        await client!.schema.createTable(tableName, table => {
+          table.increments("userId").primary()
+          table.string("name")
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          name: "Clare",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          name: "Jeff",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["userId"],
+          rows: [
+            {
+              userId: row1.userId,
+              name: "Clare updated",
+            },
+            {
+              userId: row2.userId,
+              name: "Jeff updated",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(2)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Clare updated")
+        expect(rows[1].name).toEqual("Jeff updated")
+      })
   })
 
   describe("enrich", () => {
@@ -1,5 +1,9 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 import { db as dbCore, utils } from "@budibase/backend-core"
 
 import * as setup from "./utilities"
@@ -24,6 +28,8 @@ import _ from "lodash"
 import tk from "timekeeper"
 import { encodeJSBinding } from "@budibase/string-templates"
 import { dataFilters } from "@budibase/shared-core"
+import { Knex } from "knex"
+import { structures } from "@budibase/backend-core/tests"
 
 describe.each([
   ["in-memory", undefined],
@@ -42,6 +48,7 @@ describe.each([
 
   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
+  let client: Knex | undefined
   let table: Table
   let rows: Row[]
 
@@ -63,8 +70,10 @@ describe.each([
   }
 
   if (dsProvider) {
+    const rawDatasource = await dsProvider
+    client = await knexClient(rawDatasource)
     datasource = await config.createDatasource({
-      datasource: await dsProvider,
+      datasource: rawDatasource,
     })
   }
 })
@@ -76,9 +85,9 @@ describe.each([
     }
   })
 
-  async function createTable(schema: TableSchema) {
+  async function createTable(schema: TableSchema, name?: string) {
     return await config.api.table.save(
-      tableForDatasource(datasource, { schema })
+      tableForDatasource(datasource, { schema, name })
     )
   }
 
|
@ -909,6 +918,44 @@ describe.each([
|
|||
}).toMatchExactly([{ name: "foo" }, { name: "bar" }])
|
||||
})
|
||||
})
|
||||
|
||||
!isInternal &&
|
||||
!isInMemory &&
|
||||
// This test was added because we automatically add in a sort by the
|
||||
// primary key, and we used to do this unconditionally which caused
|
||||
// problems because it was possible for the primary key to appear twice
|
||||
// in the resulting SQL ORDER BY clause, resulting in an SQL error.
|
||||
// We now check first to make sure that the primary key isn't already
|
||||
// in the sort before adding it.
|
||||
describe("sort on primary key", () => {
|
||||
beforeAll(async () => {
|
||||
const tableName = structures.uuid().substring(0, 10)
|
||||
await client!.schema.createTable(tableName, t => {
|
||||
t.string("name").primary()
|
||||
})
|
||||
const resp = await config.api.datasource.fetchSchema({
|
||||
datasourceId: datasource!._id!,
|
||||
})
|
||||
|
||||
table = resp.datasource.entities![tableName]
|
||||
|
||||
await createRows([{ name: "foo" }, { name: "bar" }])
|
||||
})
|
||||
|
||||
it("should be able to sort by a primary key column ascending", async () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
|
||||
|
||||
it("should be able to sort by a primary key column descending", async () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
@@ -1956,53 +2003,74 @@ describe.each([
   // isn't available.
   !isInMemory &&
     describe("relations", () => {
-      let otherTable: Table
-      let otherRows: Row[]
+      let productCategoryTable: Table, productCatRows: Row[]
 
       beforeAll(async () => {
-        otherTable = await createTable({
-          one: { name: "one", type: FieldType.STRING },
-        })
-        table = await createTable({
-          two: { name: "two", type: FieldType.STRING },
-          other: {
-            type: FieldType.LINK,
-            relationshipType: RelationshipType.ONE_TO_MANY,
-            name: "other",
-            fieldName: "other",
-            tableId: otherTable._id!,
-            constraints: {
-              type: "array",
+        productCategoryTable = await createTable(
+          {
+            name: { name: "name", type: FieldType.STRING },
+          },
+          "productCategory"
+        )
+        table = await createTable(
+          {
+            name: { name: "name", type: FieldType.STRING },
+            productCat: {
+              type: FieldType.LINK,
+              relationshipType: RelationshipType.ONE_TO_MANY,
+              name: "productCat",
+              fieldName: "product",
+              tableId: productCategoryTable._id!,
+              constraints: {
+                type: "array",
+              },
             },
           },
-        })
+          "product"
+        )
 
-        otherRows = await Promise.all([
-          config.api.row.save(otherTable._id!, { one: "foo" }),
-          config.api.row.save(otherTable._id!, { one: "bar" }),
+        productCatRows = await Promise.all([
+          config.api.row.save(productCategoryTable._id!, { name: "foo" }),
+          config.api.row.save(productCategoryTable._id!, { name: "bar" }),
        ])
 
        await Promise.all([
          config.api.row.save(table._id!, {
-            two: "foo",
-            other: [otherRows[0]._id],
+            name: "foo",
+            productCat: [productCatRows[0]._id],
          }),
          config.api.row.save(table._id!, {
-            two: "bar",
-            other: [otherRows[1]._id],
+            name: "bar",
+            productCat: [productCatRows[1]._id],
          }),
+          config.api.row.save(table._id!, {
+            name: "baz",
+            productCat: [],
+          }),
        ])
 
        rows = await config.api.row.fetch(table._id!)
      })
 
-      it("can search through relations", async () => {
+      it("should be able to filter by relationship using column name", async () => {
        await expectQuery({
-          equal: { [`${otherTable.name}.one`]: "foo" },
+          equal: { ["productCat.name"]: "foo" },
        }).toContainExactly([
-          { two: "foo", other: [{ _id: otherRows[0]._id }] },
+          { name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
        ])
      })
+
+      it("should be able to filter by relationship using table name", async () => {
+        await expectQuery({
+          equal: { ["productCategory.name"]: "foo" },
+        }).toContainExactly([
+          { name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
+        ])
+      })
+
+      it("shouldn't return any relationship for last row", async () => {
+        await expectQuery({
+          equal: { ["name"]: "baz" },
+        }).toContainExactly([{ name: "baz", productCat: undefined }])
+      })
    })
 
  // lucene can't count the total rows
@@ -3,12 +3,14 @@ import * as rows from "./rows"
 import * as search from "./search"
 import * as utils from "./utils"
 import * as external from "./external"
+import * as filters from "./search/filters"
 import AliasTables from "./sqlAlias"
 
 export default {
   ...attachments,
   ...rows,
   ...search,
+  filters,
   utils,
   external,
   AliasTables,
@@ -145,6 +145,10 @@ export async function exportRows(
     delimiter,
     customHeaders,
   } = options
+
+  if (!tableId) {
+    throw new HTTPError("No table ID for search provided.", 400)
+  }
   const { datasourceId, tableName } = breakExternalTableId(tableId)
 
   let requestQuery: SearchFilters = {}
@@ -167,7 +171,7 @@ export async function exportRows(
     requestQuery = query || {}
   }
 
-  const datasource = await sdk.datasources.get(datasourceId!)
+  const datasource = await sdk.datasources.get(datasourceId)
   const table = await sdk.tables.getTable(tableId)
   if (!datasource || !datasource.entities) {
     throw new HTTPError("Datasource has not been configured for plus API.", 400)
@@ -180,10 +184,6 @@ export async function exportRows(
   let rows: Row[] = []
   let headers
 
-  if (!tableName) {
-    throw new HTTPError("Could not find table name.", 400)
-  }
-
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.rows.length; i++) {
@@ -0,0 +1,62 @@
+import {
+  FieldType,
+  RelationshipFieldMetadata,
+  SearchFilters,
+  Table,
+} from "@budibase/types"
+import { isPlainObject } from "lodash"
+
+export function getRelationshipColumns(table: Table): {
+  name: string
+  definition: RelationshipFieldMetadata
+}[] {
+  // performing this with a for loop rather than an array filter improves
+  // type guarding, as no casts are required
+  const linkEntries: [string, RelationshipFieldMetadata][] = []
+  for (let entry of Object.entries(table.schema)) {
+    if (entry[1].type === FieldType.LINK) {
+      const linkColumn: RelationshipFieldMetadata = entry[1]
+      linkEntries.push([entry[0], linkColumn])
+    }
+  }
+  return linkEntries.map(entry => ({
+    name: entry[0],
+    definition: entry[1],
+  }))
+}
+
+export function getTableIDList(
+  tables: Table[]
+): { name: string; id: string }[] {
+  return tables
+    .filter(table => table.originalName && table._id)
+    .map(table => ({ id: table._id!, name: table.originalName! }))
+}
+
+export function updateFilterKeys(
+  filters: SearchFilters,
+  updates: { original: string; updated: string }[]
+): SearchFilters {
+  const makeFilterKeyRegex = (str: string) =>
+    new RegExp(`^${str}\\.|:${str}\\.`)
+  for (let filter of Object.values(filters)) {
+    if (!isPlainObject(filter)) {
+      continue
+    }
+    for (let [key, keyFilter] of Object.entries(filter)) {
+      if (keyFilter === "") {
+        delete filter[key]
+      }
+      const possibleKey = updates.find(({ original }) =>
+        key.match(makeFilterKeyRegex(original))
+      )
+      if (possibleKey && possibleKey.original !== possibleKey.updated) {
+        // only replace the first, not replaceAll
+        filter[key.replace(possibleKey.original, possibleKey.updated)] =
+          filter[key]
+        delete filter[key]
+      }
+    }
+  }
+  return filters
+}
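A usage sketch for the new `updateFilterKeys` helper (filter contents and the replacement table ID are illustrative): a filter keyed on a relationship column name gets rewritten to the identifier the SQL layer expects, while prefixed keys like `1:name` that match no mapping are left alone.

```ts
const filters: SearchFilters = {
  equal: { "productCat.name": "foo", "1:name": "bar" },
}

const updated = updateFilterKeys(filters, [
  { original: "productCat", updated: "ta_categories" }, // assumed table ID
])

// => { equal: { "ta_categories.name": "foo", "1:name": "bar" } }
```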
@@ -1,4 +1,5 @@
 import {
+  Datasource,
   DocumentType,
   FieldType,
   Operation,
@@ -12,7 +13,6 @@ import {
   SortType,
   SqlClient,
   Table,
-  Datasource,
 } from "@budibase/types"
 import {
   buildInternalRelationships,
@@ -30,6 +30,11 @@ import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
 import { processRowCountResponse } from "../utils"
+import {
+  updateFilterKeys,
+  getRelationshipColumns,
+  getTableIDList,
+} from "./filters"
 
 const builder = new sql.Sql(SqlClient.SQL_LITE)
 
@@ -60,34 +65,31 @@ function buildInternalFieldList(
   return fieldList
 }
 
-function tableNameInFieldRegex(tableName: string) {
-  return new RegExp(`^${tableName}.|:${tableName}.`, "g")
-}
-
-function cleanupFilters(filters: SearchFilters, tables: Table[]) {
-  for (let filter of Object.values(filters)) {
-    if (typeof filter !== "object") {
-      continue
-    }
-    for (let [key, keyFilter] of Object.entries(filter)) {
-      if (keyFilter === "") {
-        delete filter[key]
-      }
-
-      // relationship, switch to table ID
-      const tableRelated = tables.find(
-        table =>
-          table.originalName &&
-          key.match(tableNameInFieldRegex(table.originalName))
-      )
-      if (tableRelated && tableRelated.originalName) {
-        // only replace the first, not replaceAll
-        filter[key.replace(tableRelated.originalName, tableRelated._id!)] =
-          filter[key]
-        delete filter[key]
-      }
-    }
-  }
+function cleanupFilters(
+  filters: SearchFilters,
+  table: Table,
+  allTables: Table[]
+) {
+  // get a list of all relationship columns in the table for updating
+  const relationshipColumns = getRelationshipColumns(table)
+  // get table names to ID map for relationships
+  const tableNameToID = getTableIDList(allTables)
+  // all should be applied at once
+  filters = updateFilterKeys(
+    filters,
+    relationshipColumns
+      .map(({ name, definition }) => ({
+        original: name,
+        updated: definition.tableId,
+      }))
+      .concat(
+        tableNameToID.map(({ name, id }) => ({
+          original: name,
+          updated: id,
+        }))
+      )
+  )
 
   return filters
 }
@@ -176,7 +178,7 @@ export async function search(
       operation: Operation.READ,
     },
     filters: {
-      ...cleanupFilters(query, allTables),
+      ...cleanupFilters(query, table, allTables),
       documentType: DocumentType.ROW,
     },
     table,
@@ -90,10 +90,10 @@ export async function getExternalTable(
 export async function getTable(tableId: string): Promise<Table> {
   const db = context.getAppDB()
   let output: Table
-  if (isExternalTableID(tableId)) {
+  if (tableId && isExternalTableID(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
-    const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId!, tableName!)
+    const datasource = await datasources.get(datasourceId)
+    const table = await getExternalTable(datasourceId, tableName)
     output = { ...table, sql: isSQL(datasource) }
   } else {
     output = await db.get<Table>(tableId)
@@ -10,9 +10,9 @@ export async function get(viewId: string): Promise<ViewV2> {
   const { tableId } = utils.extractViewInfoFromID(viewId)
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -25,9 +25,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
   const { tableId } = utils.extractViewInfoFromID(viewId)
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -49,9 +49,9 @@ export async function create(
   const db = context.getAppDB()
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  ds.entities![tableName!].views![view.name] = view
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  ds.entities![tableName].views![view.name] = view
   await db.put(ds)
   return view
 }
@@ -60,9 +60,9 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
   const db = context.getAppDB()
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  const views = ds.entities![tableName!].views!
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  const views = ds.entities![tableName].views!
 
   const existingView = Object.values(views).find(
     v => isV2(v) && v.id === view.id
@@ -87,9 +87,9 @@ export async function remove(viewId: string): Promise<ViewV2> {
   }
 
   const { datasourceId, tableName } = breakExternalTableId(view.tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  delete ds.entities![tableName!].views![view?.name]
+  delete ds.entities![tableName].views![view?.name]
   await db.put(ds)
   return view
 }
@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }
 
-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}
 
     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
+
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
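The practical effect of the extra guard: autocolumn values are still discarded on import, except when the autocolumn is part of the primary key, which the bulkImport upsert path needs in order to address existing rows. A trimmed-down illustration, with a simplified `Table` shape assumed for the sketch:

```ts
type SimpleTable = {
  primary?: string[]
  schema: Record<string, { autocolumn?: boolean }>
}

const table: SimpleTable = {
  primary: ["userId"],
  schema: {
    userId: { autocolumn: true }, // auto-generated, but identifies the row
    name: {},
  },
}

function keepColumn(table: SimpleTable, columnName: string): boolean {
  const schema = table.schema
  if (!(columnName in schema)) {
    return false // not part of the table at all
  }
  if (schema[columnName].autocolumn && !table.primary?.includes(columnName)) {
    return false // users shouldn't supply values for ordinary autocolumns
  }
  return true
}

// keepColumn(table, "userId")    === true  (primary-key autocolumn, kept)
// keepColumn(table, "name")      === true
// keepColumn(table, "createdAt") === false (unknown column)
```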
@@ -180,3 +180,5 @@ export enum BpmStatusValue {
   VERIFYING_EMAIL = "verifying_email",
   COMPLETED = "completed",
 }
+
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
@@ -42,10 +42,7 @@ export interface Account extends CreateAccount {
   verified: boolean
   verificationSent: boolean
   // licensing
-  tier: string // deprecated
   planType?: PlanType
-  /** @deprecated */
-  planTier?: number
   license?: License
   installId?: string
   installTenantId?: string