Fixing issues with SQL many relationship updates.

This commit is contained in:
mike12345567 2021-09-01 19:40:47 +01:00
parent 90f6dffc44
commit a9e2336036
4 changed files with 83 additions and 71 deletions

View File

@ -1,34 +1,29 @@
import { import {
IncludeRelationships,
Operation, Operation,
SearchFilters,
SortJson,
PaginationJson, PaginationJson,
RelationshipsJson, RelationshipsJson,
SearchFilters,
SortJson,
} from "../../../definitions/datasource" } from "../../../definitions/datasource"
import { import {Datasource, FieldSchema, Row, Table} from "../../../definitions/common"
Row, import {breakRowIdField, generateRowIdField} from "../../../integrations/utils"
Table, import { RelationshipTypes } from "../../../constants"
FieldSchema,
Datasource,
} from "../../../definitions/common"
import {
breakRowIdField,
generateRowIdField,
} from "../../../integrations/utils"
interface ManyRelationship { interface ManyRelationship {
tableId?: string tableId?: string
id?: string id?: string
isUpdate?: boolean isUpdate?: boolean
key: string
[key: string]: any [key: string]: any
} }
interface RunConfig { interface RunConfig {
id: string id?: string
filters: SearchFilters filters?: SearchFilters
sort: SortJson sort?: SortJson
paginate: PaginationJson paginate?: PaginationJson
row: Row row?: Row
} }
module External { module External {
@ -37,7 +32,6 @@ module External {
const { breakExternalTableId, isSQL } = require("../../../integrations/utils") const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
const { processObjectSync } = require("@budibase/string-templates") const { processObjectSync } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp") const { cloneDeep } = require("lodash/fp")
const { isEqual } = require("lodash")
const CouchDB = require("../../../db") const CouchDB = require("../../../db")
function buildFilters( function buildFilters(
@ -81,7 +75,7 @@ module External {
} }
} }
function generateIdForRow(row: Row, table: Table): string { function generateIdForRow(row: Row | undefined, table: Table): string {
const primary = table.primary const primary = table.primary
if (!row || !primary) { if (!row || !primary) {
return "" return ""
@ -89,7 +83,8 @@ module External {
// build id array // build id array
let idParts = [] let idParts = []
for (let field of primary) { for (let field of primary) {
const fieldValue = row[`${table.name}.${field}`] // need to handle table name + field or just field, depending on if relationships used
const fieldValue = row[`${table.name}.${field}`] || row[field]
if (fieldValue) { if (fieldValue) {
idParts.push(fieldValue) idParts.push(fieldValue)
} }
@ -156,7 +151,15 @@ module External {
} }
} }
inputProcessing(row: Row, table: Table) { getTable(tableId: string | undefined): Table {
if (!tableId) {
throw "Table ID is unknown, cannot find table"
}
const { tableName } = breakExternalTableId(tableId)
return this.tables[tableName]
}
inputProcessing(row: Row | undefined, table: Table) {
if (!row) { if (!row) {
return { row, manyRelationships: [] } return { row, manyRelationships: [] }
} }
@ -202,6 +205,7 @@ module External {
manyRelationships.push({ manyRelationships.push({
tableId: field.through || field.tableId, tableId: field.through || field.tableId,
isUpdate, isUpdate,
key: otherKey,
[thisKey]: breakRowIdField(relationship)[0], [thisKey]: breakRowIdField(relationship)[0],
// leave the ID for enrichment later // leave the ID for enrichment later
[otherKey]: `{{ literal ${tablePrimary} }}`, [otherKey]: `{{ literal ${tablePrimary} }}`,
@ -343,41 +347,34 @@ module External {
* This is a cached lookup of relationship records, mainly for creating/deleting junction * This is a cached lookup of relationship records, mainly for creating/deleting junction
* information. * information.
*/ */
async lookup( async lookupRelations(tableId: string, row: Row) {
row: Row, const related: {[key: string]: any} = {}
relationship: ManyRelationship,
cache: { [key: string]: Row[] } = {}
) {
const { tableId, isUpdate, id, ...rest } = relationship
const { tableName } = breakExternalTableId(tableId) const { tableName } = breakExternalTableId(tableId)
const table = this.tables[tableName] const table = this.tables[tableName]
if (isUpdate) { // @ts-ignore
return { rows: [], table } const primaryKey = table.primary[0]
} // make a new request to get the row with all its relationships
// if not updating need to make sure we have a list of all possible options // we need this to work out if any relationships need to be removed
let fullKey: string = tableId + "/", for (let field of Object.values(table.schema)) {
rowKey: string = "" if (field.type !== FieldTypes.LINK || !field.fieldName) {
for (let key of Object.keys(rest)) { continue
if (row[key]) {
fullKey += key
rowKey = key
} }
} const isMany = field.relationshipType === RelationshipTypes.MANY_TO_MANY
if (cache[fullKey] == null) { const tableId = isMany ? field.through : field.tableId
const fieldName = isMany ? primaryKey : field.fieldName
const response = await makeExternalQuery(this.appId, { const response = await makeExternalQuery(this.appId, {
endpoint: getEndpoint(tableId, DataSourceOperation.READ), endpoint: getEndpoint(tableId, DataSourceOperation.READ),
filters: { filters: {
equal: { equal: {
[rowKey]: row[rowKey], [fieldName]: row[primaryKey],
}, },
}, },
}) })
// this is the response from knex if no rows found // this is the response from knex if no rows found
if (!response[0].read) { const rows = !response[0].read ? response : []
cache[fullKey] = response related[fieldName] = { rows, isMany, tableId }
}
} }
return { rows: cache[fullKey] || [], table } return related
} }
/** /**
@ -390,19 +387,21 @@ module External {
* isn't supposed to exist anymore and delete those. This is better than the usual method of deleting them * isn't supposed to exist anymore and delete those. This is better than the usual method of deleting them
* all and then re-creating, as there's no chance of losing data (e.g. delete succeeds, but write fails). * all and then re-creating, as there's no chance of losing data (e.g. delete succeeds, but write fails).
*/ */
async handleManyRelationships(row: Row, relationships: ManyRelationship[]) { async handleManyRelationships(mainTableId: string, row: Row, relationships: ManyRelationship[]) {
const { appId } = this const { appId } = this
if (relationships.length === 0) {
return
}
// if we're creating (in a through table) need to wipe the existing ones first // if we're creating (in a through table) need to wipe the existing ones first
const promises = [] const promises = []
const cache: { [key: string]: Row[] } = {} const related = await this.lookupRelations(mainTableId, row)
for (let relationship of relationships) { for (let relationship of relationships) {
const { tableId, isUpdate, id, ...rest } = relationship const { key, tableId, isUpdate, id, ...rest } = relationship
const body = processObjectSync(rest, row) const body = processObjectSync(rest, row)
const { table, rows } = await this.lookup(row, relationship, cache) const linkTable = this.getTable(tableId)
const found = rows.find(row => isEqual(body, row)) // @ts-ignore
const linkPrimary = linkTable.primary[0]
const rows = related[key].rows || []
const found = rows.find((row: { [key: string]: any }) =>
row[linkPrimary] === relationship.id || row[linkPrimary] === body[linkPrimary]
)
const operation = isUpdate const operation = isUpdate
? DataSourceOperation.UPDATE ? DataSourceOperation.UPDATE
: DataSourceOperation.CREATE : DataSourceOperation.CREATE
@ -412,27 +411,27 @@ module External {
endpoint: getEndpoint(tableId, operation), endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing, only one response // if we're doing many relationships then we're writing, only one response
body, body,
filters: buildFilters(id, {}, table), filters: buildFilters(id, {}, linkTable),
}) })
) )
} else { } else {
// remove the relationship from the rows // remove the relationship from cache so it isn't adjusted again
rows.splice(rows.indexOf(found), 1) rows.splice(rows.indexOf(found), 1)
} }
} }
// finally if creating, cleanup any rows that aren't supposed to be here // finally cleanup anything that needs to be removed
for (let [key, rows] of Object.entries(cache)) { for (let [colName, {isMany, rows, tableId}] of Object.entries(related)) {
// @ts-ignore const table = this.getTable(tableId)
const tableId: string = key.split("/").shift()
const { tableName } = breakExternalTableId(tableId)
const table = this.tables[tableName]
for (let row of rows) { for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table) const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen // safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) { if (Object.keys(filters).length !== 0) {
const op = isMany ? DataSourceOperation.DELETE : DataSourceOperation.UPDATE
const body = isMany ? null : { [colName]: null }
promises.push( promises.push(
makeExternalQuery(this.appId, { makeExternalQuery(this.appId, {
endpoint: getEndpoint(tableId, DataSourceOperation.DELETE), endpoint: getEndpoint(tableId, op),
body,
filters, filters,
}) })
) )
@ -449,7 +448,7 @@ module External {
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario. * is more performant and has the added benefit of protecting against this scenario.
*/ */
buildFields(table: Table) { buildFields(table: Table, includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE) {
function extractNonLinkFieldNames(table: Table, existing: string[] = []) { function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
return Object.entries(table.schema) return Object.entries(table.schema)
.filter( .filter(
@ -461,7 +460,7 @@ module External {
} }
let fields = extractNonLinkFieldNames(table) let fields = extractNonLinkFieldNames(table)
for (let field of Object.values(table.schema)) { for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK) { if (field.type !== FieldTypes.LINK || !includeRelations) {
continue continue
} }
const { tableName: linkTableName } = breakExternalTableId(field.tableId) const { tableName: linkTableName } = breakExternalTableId(field.tableId)
@ -491,7 +490,7 @@ module External {
throw `Unable to process query, table "${tableName}" not defined.` throw `Unable to process query, table "${tableName}" not defined.`
} }
// clean up row on ingress using schema // clean up row on ingress using schema
filters = buildFilters(id, filters, table) filters = buildFilters(id, filters || {}, table)
const relationships = this.buildRelationships(table) const relationships = this.buildRelationships(table)
const processed = this.inputProcessing(row, table) const processed = this.inputProcessing(row, table)
row = processed.row row = processed.row
@ -524,8 +523,9 @@ module External {
// can't really use response right now // can't really use response right now
const response = await makeExternalQuery(appId, json) const response = await makeExternalQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment) // handle many to many relationships now if we know the ID (could be auto increment)
if (processed.manyRelationships) { if (operation !== DataSourceOperation.READ && processed.manyRelationships) {
await this.handleManyRelationships( await this.handleManyRelationships(
table._id || "",
response[0], response[0],
processed.manyRelationships processed.manyRelationships
) )

View File

@ -40,6 +40,11 @@ export enum SourceNames {
REST = "REST", REST = "REST",
} }
export enum IncludeRelationships {
INCLUDE = 1,
EXCLUDE = 0
}
export interface QueryDefinition { export interface QueryDefinition {
type: QueryTypes type: QueryTypes
displayName?: string displayName?: string

View File

@ -1,5 +1,7 @@
import { Knex, knex } from "knex" import { Knex, knex } from "knex"
const BASE_LIMIT = 5000 const BASE_LIMIT = 5000
// if requesting a single row then need to up the limit for the sake of joins
const SINGLE_ROW_LIMIT = 100
import { import {
QueryJson, QueryJson,
SearchFilters, SearchFilters,
@ -168,17 +170,21 @@ function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
query = query.orderBy(key, direction) query = query.orderBy(key, direction)
} }
} }
let foundLimit = BASE_LIMIT || limit
// handle pagination // handle pagination
if (paginate && paginate.page && paginate.limit) { if (paginate && paginate.page && paginate.limit) {
// @ts-ignore // @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1 const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit const offset = page * paginate.limit
query = query.offset(offset).limit(paginate.limit) foundLimit = paginate.limit
query = query.offset(offset)
} else if (paginate && paginate.limit) { } else if (paginate && paginate.limit) {
query = query.limit(paginate.limit) foundLimit = paginate.limit
} else {
query.limit(limit)
} }
if (foundLimit === 1) {
foundLimit = SINGLE_ROW_LIMIT
}
query = query.limit(foundLimit)
return query return query
} }

View File

@ -33,13 +33,14 @@ export function generateRowIdField(keyProps: any[] = []) {
} }
// should always return an array // should always return an array
export function breakRowIdField(_id: string): any[] { export function breakRowIdField(_id: string | { _id: string }): any[] {
if (!_id) { if (!_id) {
return [] return []
} }
// have to replace on the way back as we swapped out the double quotes // have to replace on the way back as we swapped out the double quotes
// when encoding, but JSON can't handle the single quotes // when encoding, but JSON can't handle the single quotes
const decoded: string = decodeURIComponent(_id).replace(/'/g, '"') const id = typeof _id === "string" ? _id : _id._id
const decoded: string = decodeURIComponent(id).replace(/'/g, '"')
try { try {
const parsed = JSON.parse(decoded) const parsed = JSON.parse(decoded)
return Array.isArray(parsed) ? parsed : [parsed] return Array.isArray(parsed) ? parsed : [parsed]