Re-writing external layer in typescript.

mike12345567 2021-07-01 19:20:41 +01:00
parent 5e819faa58
commit 3cfbced94d
6 changed files with 463 additions and 398 deletions

View File

@@ -0,0 +1,424 @@
import {
Operation,
SearchFilters,
SortJson,
PaginationJson,
RelationshipsJson,
} from "../../../definitions/datasource"
import { Row, Table, FieldSchema } from "../../../definitions/common"
import {
breakRowIdField,
generateRowIdField,
} from "../../../integrations/utils"
interface ManyRelationship {
tableId?: string
id?: string
isUpdate?: boolean
[key: string]: any
}
interface RunConfig {
id: string
row: Row
filters: SearchFilters
sort: SortJson
paginate: PaginationJson
}
module External {
const { makeExternalQuery } = require("./utils")
const { DataSourceOperation, FieldTypes } = require("../../../constants")
const { getAllExternalTables } = require("../table/utils")
const { breakExternalTableId } = require("../../../integrations/utils")
const { processObjectSync } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp")
function buildFilters(
id: string | undefined,
filters: SearchFilters,
table: Table
) {
const primary = table.primary
// if passed in as an array we need a copy, as parts are shifted off it below
let idCopy = cloneDeep(id)
if (filters) {
// need to map over the filters and make sure the _id field isn't present
for (let filter of Object.values(filters)) {
if (filter._id && primary) {
const parts = breakRowIdField(filter._id)
for (let field of primary) {
filter[field] = parts.shift()
}
}
// make sure this field doesn't exist on any filter
delete filter._id
}
}
// there is no id, just use the user provided filters
if (!idCopy || !table) {
return filters
}
// if used as URL parameter it will have been joined
if (!Array.isArray(idCopy)) {
idCopy = breakRowIdField(idCopy)
}
const equal: any = {}
if (primary && idCopy) {
for (let field of primary) {
// work through the ID and get the parts
equal[field] = idCopy.shift()
}
}
return {
equal,
}
}
function generateIdForRow(row: Row, table: Table): string {
const primary = table.primary
if (!row || !primary) {
return ""
}
// build id array
let idParts = []
for (let field of primary) {
if (row[field]) {
idParts.push(row[field])
}
}
if (idParts.length === 0) {
return ""
}
return generateRowIdField(idParts)
}
function basicProcessing(row: Row, table: Table) {
const thisRow: { [key: string]: any } = {}
// filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) {
thisRow[fieldName] = row[fieldName]
}
thisRow._id = generateIdForRow(row, table)
thisRow.tableId = table._id
thisRow._rev = "rev"
return thisRow
}
function isMany(field: FieldSchema) {
return (
field.relationshipType && field.relationshipType.split("-")[0] === "many"
)
}
class ExternalRequest {
private appId: string
private operation: Operation
private tableId: string
private tables: { [key: string]: Table }
constructor(
appId: string,
operation: Operation,
tableId: string,
tables: { [key: string]: Table }
) {
this.appId = appId
this.operation = operation
this.tableId = tableId
this.tables = tables
}
inputProcessing(row: Row, table: Table) {
if (!row) {
return { row, manyRelationships: [] }
}
// we don't really support composite keys for relationships, this is why [0] is used
// @ts-ignore
const tablePrimary: string = table.primary[0]
let newRow: Row = {},
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if not set on the incoming row, or already set on the new row, just skip it
if (!row[key] || newRow[key]) {
continue
}
// if it's not a link then just copy it over
if (field.type !== FieldTypes.LINK) {
newRow[key] = row[key]
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
// table has to exist for many to many
if (!this.tables[linkTableName]) {
continue
}
const linkTable = this.tables[linkTableName]
// @ts-ignore
const linkTablePrimary = linkTable.primary[0]
if (!isMany(field)) {
newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(
row[key][0]
)[0]
} else {
// we're not inserting a doc here, this will be a series of update calls instead
const isUpdate = !field.through
const thisKey: string = isUpdate ? "id" : linkTablePrimary
// @ts-ignore
const otherKey: string = isUpdate
? field.foreignKey
: tablePrimary
row[key].map((relationship: any) => {
// we don't really support composite keys for relationships, this is why [0] is used
manyRelationships.push({
tableId: field.through || field.tableId,
isUpdate,
[thisKey]: breakRowIdField(relationship)[0],
// leave the ID for enrichment later
[otherKey]: `{{ ${tablePrimary} }}`,
})
})
}
}
// we return the relationships that may need to be created in the through table
// we do this so that if the ID is generated by the DB it can be inserted
// after the fact
return { row: newRow, manyRelationships }
}
updateRelationshipColumns(
row: Row,
rows: { [key: string]: Row },
relationships: RelationshipsJson[]
) {
const columns: { [key: string]: any } = {}
for (let relationship of relationships) {
const linkedTable = this.tables[relationship.tableName]
if (!linkedTable) {
continue
}
let linked = basicProcessing(row, linkedTable)
if (!linked._id) {
continue
}
// if not returning full docs then get the minimal links out
const display = linkedTable.primaryDisplay
linked = {
primaryDisplay: display ? linked[display] : undefined,
_id: linked._id,
}
columns[relationship.column] = linked
}
for (let [column, related] of Object.entries(columns)) {
if (!row._id) {
continue
}
const rowId: string = row._id
if (!Array.isArray(rows[rowId][column])) {
rows[rowId][column] = []
}
// make sure relationship hasn't been found already
if (
!rows[rowId][column].find(
(relation: Row) => relation._id === related._id
)
) {
rows[rowId][column].push(related)
}
}
return rows
}
outputProcessing(
rows: Row[],
table: Table,
relationships: RelationshipsJson[]
) {
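// if there are no rows this is what gets returned - this might be PG only (comment carried over from the previous JS implementation)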
if (rows[0].read === true) {
return []
}
let finalRows: { [key: string]: Row } = {}
for (let row of rows) {
const rowId = generateIdForRow(row, table)
row._id = rowId
// this is a relationship of some sort
if (finalRows[rowId]) {
finalRows = this.updateRelationshipColumns(
row,
finalRows,
relationships
)
continue
}
const thisRow = basicProcessing(row, table)
finalRows[thisRow._id] = thisRow
// do this at the end, once it's been added to the final rows
finalRows = this.updateRelationshipColumns(
row,
finalRows,
relationships
)
}
return Object.values(finalRows)
}
buildRelationships(table: Table): RelationshipsJson[] {
const relationships = []
for (let [fieldName, field] of Object.entries(table.schema)) {
if (field.type !== FieldTypes.LINK) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
// no table to link to, this is not a valid relationship
if (!this.tables[linkTableName]) {
continue
}
const linkTable = this.tables[linkTableName]
if (!table.primary || !linkTable.primary) {
continue
}
const definition = {
// if no foreign key is specified then use the name of the field in the other table
from: field.foreignKey || table.primary[0],
to: field.fieldName,
tableName: linkTableName,
through: undefined,
// need to specify which column to put this back into
column: fieldName,
}
if (field.through) {
const { tableName: throughTableName } = breakExternalTableId(
field.through
)
definition.through = throughTableName
// don't support composite keys for relationships
definition.from = table.primary[0]
definition.to = linkTable.primary[0]
}
relationships.push(definition)
}
return relationships
}
async handleManyRelationships(row: Row, relationships: ManyRelationship[]) {
const { appId, tables } = this
const promises = []
for (let relationship of relationships) {
const { tableId, isUpdate, id, ...rest } = relationship
const { datasourceId, tableName } = breakExternalTableId(tableId)
const linkedTable = tables[tableName]
if (!linkedTable) {
continue
}
const endpoint = {
datasourceId,
entityId: tableName,
operation: isUpdate
? DataSourceOperation.UPDATE
: DataSourceOperation.CREATE,
}
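// processObjectSync fills the "{{ <primary key> }}" placeholder left by inputProcessing with the created row's ID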
promises.push(
makeExternalQuery(appId, {
endpoint,
// if we're doing many relationships then we're writing, so there will only be one response
body: processObjectSync(rest, row),
filters: buildFilters(id, {}, linkedTable),
})
)
}
await Promise.all(promises)
}
/**
* This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
* you have column overlap in relationships, e.g. we join a few different tables and they all have the
* concept of an ID, but for some of them it will be null (if, say, they don't have a relationship).
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario.
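* The result is a list of fields like ["products.productid"] which can be used for an SQL select.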
*/
buildFields(table: Table) {
function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
!existing.find((field: string) => field.includes(column[0]))
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractNonLinkFieldNames(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = this.tables[linkTableName]
if (linkTable) {
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
fields = fields.concat(linkedFields)
}
}
return fields
}
async run({ id, row, filters, sort, paginate }: RunConfig) {
const { appId, operation, tableId } = this
let { datasourceId, tableName } = breakExternalTableId(tableId)
if (!this.tables) {
this.tables = await getAllExternalTables(appId, datasourceId)
}
const table = this.tables[tableName]
if (!table) {
throw `Unable to process query, table "${tableName}" not defined.`
}
// clean up row on ingress using schema
filters = buildFilters(id, filters, table)
const relationships = this.buildRelationships(table)
const processed = this.inputProcessing(row, table)
row = processed.row
if (
operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let json = {
endpoint: {
datasourceId,
entityId: tableName,
operation,
},
resource: {
// have to specify the fields to avoid column overlap
fields: this.buildFields(table),
},
filters,
sort,
paginate,
relationships,
body: row,
// pass an id filter into extra, purely for mysql/returning
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
// handle many-to-many relationships now that the ID is known (it could be auto-increment)
if (processed.manyRelationships) {
await this.handleManyRelationships(
response[0],
processed.manyRelationships
)
}
const output = this.outputProcessing(response, table, relationships)
// if reading, the response will just be an array of rows, so return the whole thing
return operation === DataSourceOperation.READ && Array.isArray(response)
? output
: { row: output[0], table }
}
}
module.exports = ExternalRequest
}

View File

@@ -1,4 +1,3 @@
const { makeExternalQuery } = require("./utils")
const {
DataSourceOperation,
SortDirection,
@@ -9,112 +8,9 @@ const {
breakExternalTableId,
breakRowIdField,
} = require("../../../integrations/utils")
const {
buildRelationships,
buildFilters,
inputProcessing,
outputProcessing,
generateIdForRow,
buildFields,
} = require("./externalUtils")
const { processObjectSync } = require("@budibase/string-templates")
const ExternalRequest = require("./ExternalRequest")
class ExternalRequest {
constructor(appId, operation, tableId, tables) {
this.appId = appId
this.operation = operation
this.tableId = tableId
this.tables = tables
}
async handleManyRelationships(row, relationships) {
const { appId, tables } = this
const promises = []
for (let relationship of relationships) {
const { tableId, isUpdate, id, ...rest } = relationship
const { datasourceId, tableName } = breakExternalTableId(tableId)
const linkedTable = tables[tableName]
if (!linkedTable) {
continue
}
const endpoint = {
datasourceId,
entityId: tableName,
operation: isUpdate ? DataSourceOperation.UPDATE : DataSourceOperation.CREATE,
}
promises.push(
makeExternalQuery(appId, {
endpoint,
// if we're doing many relationships then we're writing, only one response
body: processObjectSync(rest, row),
filters: buildFilters(id, {}, linkedTable)
})
)
}
await Promise.all(promises)
}
async run({ id, row, filters, sort, paginate }) {
const { appId, operation, tableId } = this
let { datasourceId, tableName } = breakExternalTableId(tableId)
if (!this.tables) {
this.tables = await getAllExternalTables(appId, datasourceId)
}
const table = this.tables[tableName]
if (!table) {
throw `Unable to process query, table "${tableName}" not defined.`
}
// clean up row on ingress using schema
filters = buildFilters(id, filters, table)
const relationships = buildRelationships(table, this.tables)
const processed = inputProcessing(row, table, this.tables)
row = processed.row
if (
operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let json = {
endpoint: {
datasourceId,
entityId: tableName,
operation,
},
resource: {
// have to specify the fields to avoid column overlap
fields: buildFields(table, this.tables),
},
filters,
sort,
paginate,
relationships,
body: row,
// pass an id filter into extra, purely for mysql/returning
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (processed.manyRelationships) {
await this.handleManyRelationships(response[0], processed.manyRelationships)
}
const output = outputProcessing(response, table, relationships, this.tables)
// if reading it'll just be an array of rows, return whole thing
return operation === DataSourceOperation.READ && Array.isArray(response)
? output
: { row: output[0], table }
}
}
async function handleRequest(
appId,
operation,
tableId,
opts = {}
) {
async function handleRequest(appId, operation, tableId, opts = {}) {
return new ExternalRequest(appId, operation, tableId, opts.tables).run(opts)
}
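// hypothetical call site example: handleRequest(appId, DataSourceOperation.READ, tableId, { filters, sort, paginate })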

View File

@@ -1,259 +0,0 @@
const {
breakExternalTableId,
generateRowIdField,
breakRowIdField,
} = require("../../../integrations/utils")
const { FieldTypes } = require("../../../constants")
const { cloneDeep } = require("lodash/fp")
function basicProcessing(row, table) {
const thisRow = {}
// filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) {
thisRow[fieldName] = row[fieldName]
}
thisRow._id = exports.generateIdForRow(row, table)
thisRow.tableId = table._id
thisRow._rev = "rev"
return thisRow
}
function isMany(field) {
return field.relationshipType.split("-")[0] === "many"
}
exports.inputProcessing = (row, table, allTables) => {
if (!row) {
return { row, manyRelationships: [] }
}
let newRow = {},
manyRelationships = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if (!row[key] || newRow[key]) {
continue
}
// if it's not a link then just copy it over
if (field.type !== FieldTypes.LINK) {
newRow[key] = row[key]
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
// table has to exist for many to many
if (!allTables[linkTableName]) {
continue
}
const linkTable = allTables[linkTableName]
if (!isMany(field)) {
// we don't really support composite keys for relationships, this is why [0] is used
newRow[field.foreignKey || linkTable.primary] = breakRowIdField(
row[key][0]
)[0]
} else {
// we're not inserting a doc, will be a bunch of update calls
const isUpdate = !field.through
const thisKey = isUpdate ? "id" : linkTable.primary
const otherKey = isUpdate ? field.foreignKey : table.primary
row[key].map(relationship => {
// we don't really support composite keys for relationships, this is why [0] is used
manyRelationships.push({
tableId: field.through || field.tableId,
isUpdate,
[thisKey]: breakRowIdField(relationship)[0],
// leave the ID for enrichment later
[otherKey]: `{{ ${table.primary} }}`,
})
})
}
}
// we return the relationships that may need to be created in the through table
// we do this so that if the ID is generated by the DB it can be inserted
// after the fact
return { row: newRow, manyRelationships }
}
exports.generateIdForRow = (row, table) => {
if (!row) {
return null
}
const primary = table.primary
// build id array
let idParts = []
for (let field of primary) {
if (row[field]) {
idParts.push(row[field])
}
}
if (idParts.length === 0) {
return null
}
return generateRowIdField(idParts)
}
exports.updateRelationshipColumns = (row, rows, relationships, allTables) => {
const columns = {}
for (let relationship of relationships) {
const linkedTable = allTables[relationship.tableName]
if (!linkedTable) {
continue
}
let linked = basicProcessing(row, linkedTable)
if (!linked._id) {
continue
}
// if not returning full docs then get the minimal links out
const display = linkedTable.primaryDisplay
linked = {
primaryDisplay: display ? linked[display] : undefined,
_id: linked._id,
}
columns[relationship.column] = linked
}
for (let [column, related] of Object.entries(columns)) {
if (!Array.isArray(rows[row._id][column])) {
rows[row._id][column] = []
}
// make sure relationship hasn't been found already
if (!rows[row._id][column].find(relation => relation._id === related._id)) {
rows[row._id][column].push(related)
}
}
return rows
}
exports.outputProcessing = (rows, table, relationships, allTables) => {
// if no rows this is what is returned? Might be PG only
if (rows[0].read === true) {
return []
}
let finalRows = {}
for (let row of rows) {
row._id = exports.generateIdForRow(row, table)
// this is a relationship of some sort
if (finalRows[row._id]) {
finalRows = exports.updateRelationshipColumns(
row,
finalRows,
relationships,
allTables
)
continue
}
const thisRow = basicProcessing(row, table)
finalRows[thisRow._id] = thisRow
// do this at the end, once it's been added to the final rows
finalRows = exports.updateRelationshipColumns(
row,
finalRows,
relationships,
allTables
)
}
return Object.values(finalRows)
}
/**
* This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
* you have column overlap in relationships, e.g. we join a few different tables and they all have the
* concept of an ID, but for some of them it will be null (if, say, they don't have a relationship).
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario.
* @param {Object} table The table we are retrieving fields for.
* @param {Object[]} allTables All of the tables that exist in the external data source, this is
* needed to work out what is needed from other tables based on relationships.
* @return {string[]} A list of fields like ["products.productid"] which can be used for an SQL select.
*/
exports.buildFields = (table, allTables) => {
function extractNonLinkFieldNames(table, existing = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
!existing.find(field => field.includes(column[0]))
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractNonLinkFieldNames(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = allTables[linkTableName]
if (linkTable) {
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
fields = fields.concat(linkedFields)
}
}
return fields
}
exports.buildFilters = (id, filters, table) => {
const primary = table.primary
// if passed in array need to copy for shifting etc
let idCopy = cloneDeep(id)
if (filters) {
// need to map over the filters and make sure the _id field isn't present
for (let filter of Object.values(filters)) {
if (filter._id) {
const parts = breakRowIdField(filter._id)
for (let field of primary) {
filter[field] = parts.shift()
}
}
// make sure this field doesn't exist on any filter
delete filter._id
}
}
// there is no id, just use the user provided filters
if (!idCopy || !table) {
return filters
}
// if used as URL parameter it will have been joined
if (!Array.isArray(idCopy)) {
idCopy = breakRowIdField(idCopy)
}
const equal = {}
for (let field of primary) {
// work through the ID and get the parts
equal[field] = idCopy.shift()
}
return {
equal,
}
}
exports.buildRelationships = (table, allTables) => {
const relationships = []
for (let [fieldName, field] of Object.entries(table.schema)) {
if (field.type !== FieldTypes.LINK) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
// no table to link to, this is not a valid relationship
if (!allTables[linkTableName]) {
continue
}
const linkTable = allTables[linkTableName]
const definition = {
// if no foreign key is specified then use the name of the field in the other table
from: field.foreignKey || table.primary[0],
to: field.fieldName,
tableName: linkTableName,
through: undefined,
// need to specify where to put this back into
column: fieldName,
}
if (field.through) {
const { tableName: throughTableName } = breakExternalTableId(
field.through
)
definition.through = throughTableName
// don't support composite keys for relationships
definition.from = table.primary[0]
definition.to = linkTable.primary[0]
}
relationships.push(definition)
}
return relationships
}

View File

@@ -3,29 +3,31 @@ interface Base {
_rev?: string
}
export interface TableSchema {
[key: string]: {
// TODO: replace with field types enum when done
type: string
fieldName?: string
name: string
tableId?: string
relationshipType?: string
through?: string
foreignKey?: string
constraints?: {
type?: string
email?: boolean
inclusion?: string[]
length?: {
minimum?: string | number
maximum?: string | number
}
presence?: boolean
export interface FieldSchema {
// TODO: replace with field types enum when done
type: string
fieldName?: string
name: string
tableId?: string
relationshipType?: string
through?: string
foreignKey?: string
constraints?: {
type?: string
email?: boolean
inclusion?: string[]
length?: {
minimum?: string | number
maximum?: string | number
}
presence?: boolean
}
}
export interface TableSchema {
[key: string]: FieldSchema
}
export interface Table extends Base {
type?: string
views?: {}
@@ -38,7 +40,7 @@ export interface Table extends Base {
export interface Row extends Base {
type?: string
tableId: string
tableId?: string
[key: string]: any
}

View File

@@ -47,7 +47,7 @@ export interface Integration {
}
export interface SearchFilters {
allOr: boolean
allOr?: boolean
string?: {
[key: string]: string
}
@@ -77,11 +77,21 @@ export interface SearchFilters {
}
}
export interface SortJson {
[key: string]: SortDirection
}
export interface PaginationJson {
limit: number
page: string | number
}
export interface RelationshipsJson {
through?: string
from: string
to: string
from?: string
to?: string
tableName: string
column: string
}
export interface QueryJson {
@@ -94,13 +104,8 @@ export interface QueryJson {
fields: string[]
}
filters?: SearchFilters
sort?: {
[key: string]: SortDirection
}
paginate?: {
limit: number
page: string | number
}
sort?: SortJson
paginate?: PaginationJson
body?: object
extra?: {
idFilter?: SearchFilters

View File

@@ -30,9 +30,6 @@ export function generateRowIdField(keyProps: any[] = []) {
// should always return an array
export function breakRowIdField(_id: string) {
if (!_id) {
return null
}
const decoded = decodeURIComponent(_id)
const parsed = JSON.parse(decoded)
return Array.isArray(parsed) ? parsed : [parsed]