Merge branch 'master' into BUDI-7641/push_v2_images_to_qa

commit 281348295d
Adria Navarro, 2023-10-20 12:40:56 +02:00, committed by GitHub
18 changed files with 950 additions and 767 deletions

View File

@ -4,6 +4,8 @@ on:
    types: [created]
  pull_request_target:
    types: [opened,closed,synchronize]
+    branches:
+      - master
jobs:
  CLAssistant:

View File

@ -4,7 +4,6 @@ import {
  getQueryParams,
  getTableParams,
} from "../../db/utils"
-import { destroy as tableDestroy } from "./table/internal"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@ -325,11 +324,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
  // Destroy the tables.
  for (const table of datasourceTableDocs) {
-    await tableDestroy({
-      params: {
-        tableId: table._id,
-      },
-    })
+    await sdk.tables.internal.destroy(table)
  }
}

View File

@ -0,0 +1,36 @@
import {
Datasource,
Operation,
QueryJson,
RenameColumn,
Table,
} from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query"
export async function makeTableRequest(
datasource: Datasource,
operation: Operation,
table: Table,
tables: Record<string, Table>,
oldTable?: Table,
renamed?: RenameColumn
) {
const json: QueryJson = {
endpoint: {
datasourceId: datasource._id!,
entityId: table._id!,
operation,
},
meta: {
tables,
},
table,
}
if (oldTable) {
json.meta!.table = oldTable
}
if (renamed) {
json.meta!.renamed = renamed
}
return makeExternalQuery(datasource, json)
}
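
A minimal usage sketch of the extracted helper (not part of this commit); the datasource lookup and choice of operation below are assumptions for illustration:

// Illustrative only: issuing a CREATE_TABLE against an external datasource.
// Assumes the datasource and table have already been loaded elsewhere.
import { Operation, Table } from "@budibase/types"
import { makeTableRequest } from "./ExternalRequest"
import sdk from "../../../sdk"

async function createExternalTable(datasourceId: string, table: Table) {
  const datasource = await sdk.datasources.get(datasourceId)
  // the full entity map is passed so relationships can be resolved
  const tables: Record<string, Table> = datasource.entities || {}
  return makeTableRequest(datasource, Operation.CREATE_TABLE, table, tables)
}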

View File

@ -1,108 +1,20 @@
-import {
-  breakExternalTableId,
-  buildExternalTableId,
-} from "../../../integrations/utils"
+import { breakExternalTableId } from "../../../integrations/utils"
-import {
-  foreignKeyStructure,
-  generateForeignKey,
-  generateJunctionTableName,
-  hasTypeChanged,
-  setStaticSchemas,
-} from "./utils"
-import { FieldTypes } from "../../../constants"
-import { makeExternalQuery } from "../../../integrations/base/query"
import { handleRequest } from "../row/external"
-import { context, events } from "@budibase/backend-core"
+import { events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
  BulkImportRequest,
  BulkImportResponse,
-  Datasource,
-  FieldSchema,
-  ManyToManyRelationshipFieldMetadata,
-  ManyToOneRelationshipFieldMetadata,
-  OneToManyRelationshipFieldMetadata,
  Operation,
-  QueryJson,
-  RelationshipFieldMetadata,
-  RelationshipType,
-  RenameColumn,
  SaveTableRequest,
  SaveTableResponse,
  Table,
  TableRequest,
  UserCtx,
-  ViewV2,
} from "@budibase/types"
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"
-const { cloneDeep } = require("lodash/fp")
-async function makeTableRequest(
-  datasource: Datasource,
-  operation: Operation,
-  table: Table,
-  tables: Record<string, Table>,
-  oldTable?: Table,
-  renamed?: RenameColumn
-) {
-  const json: QueryJson = {
-    endpoint: {
-      datasourceId: datasource._id!,
-      entityId: table._id!,
-      operation,
-    },
-    meta: {
-      tables,
-    },
-    table,
-  }
-  if (oldTable) {
-    json.meta!.table = oldTable
-  }
-  if (renamed) {
-    json.meta!.renamed = renamed
-  }
-  return makeExternalQuery(datasource, json)
-}
-function cleanupRelationships(
-  table: Table,
-  tables: Record<string, Table>,
-  oldTable?: Table
-) {
-  const tableToIterate = oldTable ? oldTable : table
-  // clean up relationships in couch table schemas
-  for (let [key, schema] of Object.entries(tableToIterate.schema)) {
-    if (
-      schema.type === FieldTypes.LINK &&
-      (!oldTable || table.schema[key] == null)
-    ) {
-      const schemaTableId = schema.tableId
-      const relatedTable = Object.values(tables).find(
-        table => table._id === schemaTableId
-      )
-      const foreignKey =
-        schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
-        schema.foreignKey
-      if (!relatedTable || !foreignKey) {
-        continue
-      }
-      for (let [relatedKey, relatedSchema] of Object.entries(
-        relatedTable.schema
-      )) {
-        if (
-          relatedSchema.type === FieldTypes.LINK &&
-          relatedSchema.fieldName === foreignKey
-        ) {
-          delete relatedTable.schema[relatedKey]
-        }
-      }
-    }
-  }
-}

function getDatasourceId(table: Table) {
  if (!table) {
    throw "No table supplied"
@ -113,247 +25,32 @@ function getDatasourceId(table: Table) {
  return breakExternalTableId(table._id).datasourceId
}

-function otherRelationshipType(type?: string) {
-  if (type === RelationshipType.MANY_TO_MANY) {
-    return RelationshipType.MANY_TO_MANY
-  }
-  return type === RelationshipType.ONE_TO_MANY
-    ? RelationshipType.MANY_TO_ONE
-    : RelationshipType.ONE_TO_MANY
-}
-function generateManyLinkSchema(
-  datasource: Datasource,
-  column: ManyToManyRelationshipFieldMetadata,
-  table: Table,
-  relatedTable: Table
-): Table {
-  if (!table.primary || !relatedTable.primary) {
-    throw new Error("Unable to generate many link schema, no primary keys")
-  }
-  const primary = table.name + table.primary[0]
-  const relatedPrimary = relatedTable.name + relatedTable.primary[0]
-  const jcTblName = generateJunctionTableName(column, table, relatedTable)
-  // first create the new table
-  const junctionTable = {
-    _id: buildExternalTableId(datasource._id!, jcTblName),
-    name: jcTblName,
-    primary: [primary, relatedPrimary],
-    constrained: [primary, relatedPrimary],
-    schema: {
-      [primary]: foreignKeyStructure(primary, {
-        toTable: table.name,
-        toKey: table.primary[0],
-      }),
-      [relatedPrimary]: foreignKeyStructure(relatedPrimary, {
-        toTable: relatedTable.name,
-        toKey: relatedTable.primary[0],
-      }),
-    },
-  }
-  column.through = junctionTable._id
-  column.throughFrom = relatedPrimary
-  column.throughTo = primary
-  column.fieldName = relatedPrimary
-  return junctionTable
-}
-function generateLinkSchema(
-  column:
-    | OneToManyRelationshipFieldMetadata
-    | ManyToOneRelationshipFieldMetadata,
-  table: Table,
-  relatedTable: Table,
-  type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
-) {
-  if (!table.primary || !relatedTable.primary) {
-    throw new Error("Unable to generate link schema, no primary keys")
-  }
-  const isOneSide = type === RelationshipType.ONE_TO_MANY
-  const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
-  // generate a foreign key
-  const foreignKey = generateForeignKey(column, relatedTable)
-  column.relationshipType = type
-  column.foreignKey = isOneSide ? foreignKey : primary
-  column.fieldName = isOneSide ? primary : foreignKey
-  return foreignKey
-}
-function generateRelatedSchema(
-  linkColumn: RelationshipFieldMetadata,
-  table: Table,
-  relatedTable: Table,
-  columnName: string
-) {
-  // generate column for other table
-  const relatedSchema = cloneDeep(linkColumn)
-  const isMany2Many =
-    linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
-  // swap them from the main link
-  if (!isMany2Many && linkColumn.foreignKey) {
-    relatedSchema.fieldName = linkColumn.foreignKey
-    relatedSchema.foreignKey = linkColumn.fieldName
-  }
-  // is many to many
-  else if (isMany2Many) {
-    // don't need to copy through, already got it
-    relatedSchema.fieldName = linkColumn.throughTo
-    relatedSchema.throughTo = linkColumn.throughFrom
-    relatedSchema.throughFrom = linkColumn.throughTo
-  }
-  relatedSchema.relationshipType = otherRelationshipType(
-    linkColumn.relationshipType
-  )
-  relatedSchema.tableId = relatedTable._id
-  relatedSchema.name = columnName
-  table.schema[columnName] = relatedSchema
-}
-function isRelationshipSetup(column: RelationshipFieldMetadata) {
-  return (column as any).foreignKey || (column as any).through
-}

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  const inputs = ctx.request.body
-  const renamed = inputs?._rename
+  const renaming = inputs?._rename
  // can't do this right now
  delete inputs.rows
-  const datasourceId = getDatasourceId(ctx.request.body)!
+  const tableId = ctx.request.body._id
+  const datasourceId = getDatasourceId(ctx.request.body)
  // table doesn't exist already, note that it is created
  if (!inputs._id) {
    inputs.created = true
  }
-  let tableToSave: TableRequest = {
-    type: "table",
-    _id: buildExternalTableId(datasourceId, inputs.name),
-    sourceId: datasourceId,
-    ...inputs,
-  }
-  let oldTable: Table | undefined
-  if (ctx.request.body && ctx.request.body._id) {
-    oldTable = await sdk.tables.getTable(ctx.request.body._id)
-  }
-  if (hasTypeChanged(tableToSave, oldTable)) {
-    ctx.throw(400, "A column type has changed.")
-  }
-  for (let view in tableToSave.views) {
-    const tableView = tableToSave.views[view]
-    if (!tableView || !sdk.views.isV2(tableView)) continue
-    tableToSave.views[view] = sdk.views.syncSchema(
-      oldTable!.views![view] as ViewV2,
-      tableToSave.schema,
-      renamed
-    )
-  }
-  const db = context.getAppDB()
-  const datasource = await sdk.datasources.get(datasourceId)
-  if (!datasource.entities) {
-    datasource.entities = {}
-  }
-  // GSheets is a specific case - only ever has a static primary key
-  tableToSave = setStaticSchemas(datasource, tableToSave)
-  const oldTables = cloneDeep(datasource.entities)
-  const tables: Record<string, Table> = datasource.entities
-  const extraTablesToUpdate = []
-  // check if relations need setup
-  for (let schema of Object.values(tableToSave.schema)) {
-    if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
-      continue
-    }
-    const schemaTableId = schema.tableId
-    const relatedTable = Object.values(tables).find(
-      table => table._id === schemaTableId
-    )
-    if (!relatedTable) {
-      continue
-    }
-    const relatedColumnName = schema.fieldName!
-    const relationType = schema.relationshipType
-    if (relationType === RelationshipType.MANY_TO_MANY) {
-      const junctionTable = generateManyLinkSchema(
-        datasource,
-        schema,
-        tableToSave,
-        relatedTable
-      )
-      if (tables[junctionTable.name]) {
-        throw "Junction table already exists, cannot create another relationship."
-      }
-      tables[junctionTable.name] = junctionTable
-      extraTablesToUpdate.push(junctionTable)
-    } else {
-      const fkTable =
-        relationType === RelationshipType.ONE_TO_MANY
-          ? tableToSave
-          : relatedTable
-      const foreignKey = generateLinkSchema(
-        schema,
-        tableToSave,
-        relatedTable,
-        relationType
-      )
-      fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
-      if (fkTable.constrained == null) {
-        fkTable.constrained = []
-      }
-      if (fkTable.constrained.indexOf(foreignKey) === -1) {
-        fkTable.constrained.push(foreignKey)
-      }
-      // foreign key is in other table, need to save it to external
-      if (fkTable._id !== tableToSave._id) {
-        extraTablesToUpdate.push(fkTable)
-      }
-    }
-    generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
-    schema.main = true
-  }
-  cleanupRelationships(tableToSave, tables, oldTable)
-  const operation = oldTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
-  await makeTableRequest(
-    datasource,
-    operation,
-    tableToSave,
-    tables,
-    oldTable,
-    renamed
-  )
-  // update any extra tables (like foreign keys in other tables)
-  for (let extraTable of extraTablesToUpdate) {
-    const oldExtraTable = oldTables[extraTable.name]
-    let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
-    await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
-  }
-  // make sure the constrained list, all still exist
-  if (Array.isArray(tableToSave.constrained)) {
-    tableToSave.constrained = tableToSave.constrained.filter(constraint =>
-      Object.keys(tableToSave.schema).includes(constraint)
-    )
-  }
-  // remove the rename prop
-  delete tableToSave._rename
-  // store it into couch now for budibase reference
-  datasource.entities[tableToSave.name] = tableToSave
-  await db.put(sdk.tables.populateExternalTableSchemas(datasource))
-  // Since tables are stored inside datasources, we need to notify clients
-  // that the datasource definition changed
-  const updatedDatasource = await sdk.datasources.get(datasource._id!)
-  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
-  return tableToSave
+  try {
+    const { datasource, table } = await sdk.tables.external.save(
+      datasourceId!,
+      inputs,
+      { tableId, renaming }
+    )
+    builderSocket?.emitDatasourceUpdate(ctx, datasource)
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      ctx.throw(400, err.message)
+    } else {
+      ctx.throw(err.status || 500, err?.message || err)
+    }
+  }
}

export async function destroy(ctx: UserCtx) {
@ -364,27 +61,20 @@ export async function destroy(ctx: UserCtx) {
    ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
  }
  const datasourceId = getDatasourceId(tableToDelete)
-  const db = context.getAppDB()
-  const datasource = await sdk.datasources.get(datasourceId!)
-  const tables = datasource.entities
-  const operation = Operation.DELETE_TABLE
-  if (tables) {
-    await makeTableRequest(datasource, operation, tableToDelete, tables)
-    cleanupRelationships(tableToDelete, tables)
-    delete tables[tableToDelete.name]
-    datasource.entities = tables
-  }
-  await db.put(sdk.tables.populateExternalTableSchemas(datasource))
-  // Since tables are stored inside datasources, we need to notify clients
-  // that the datasource definition changed
-  const updatedDatasource = await sdk.datasources.get(datasource._id!)
-  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
-  return tableToDelete
+  try {
+    const { datasource, table } = await sdk.tables.external.destroy(
+      datasourceId!,
+      tableToDelete
+    )
+    builderSocket?.emitDatasourceUpdate(ctx, datasource)
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      ctx.throw(400, err.message)
+    } else {
+      ctx.throw(err.status || 500, err.message || err)
+    }
+  }
}

export async function bulkImport(

View File

@ -1,14 +1,5 @@
-import { updateLinks, EventType } from "../../../db/linkedRows"
-import { getRowParams, generateTableID } from "../../../db/utils"
-import { FieldTypes } from "../../../constants"
-import { TableSaveFunctions, hasTypeChanged, handleDataImport } from "./utils"
-import { context } from "@budibase/backend-core"
-import env from "../../../environment"
-import {
-  cleanupAttachments,
-  fixAutoColumnSubType,
-} from "../../../utilities/rowProcessor"
-import { runStaticFormulaChecks } from "./bulkFormula"
+import { generateTableID } from "../../../db/utils"
+import { handleDataImport } from "./utils"
import {
  BulkImportRequest,
  BulkImportResponse,
@ -17,195 +8,52 @@ import {
  SaveTableResponse,
  Table,
  UserCtx,
-  ViewStatisticsSchema,
-  ViewV2,
} from "@budibase/types"
-import { quotas } from "@budibase/pro"
-import isEqual from "lodash/isEqual"
-import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"

-function checkAutoColumns(table: Table, oldTable?: Table) {
-  if (!table.schema) {
-    return table
-  }
-  for (let [key, schema] of Object.entries(table.schema)) {
-    if (!schema.autocolumn || schema.subtype) {
-      continue
-    }
-    const oldSchema = oldTable && oldTable.schema[key]
-    if (oldSchema && oldSchema.subtype) {
-      table.schema[key].subtype = oldSchema.subtype
-    } else {
-      table.schema[key] = fixAutoColumnSubType(schema)
-    }
-  }
-  return table
-}

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
-  const db = context.getAppDB()
  const { rows, ...rest } = ctx.request.body
  let tableToSave: Table & {
-    _rename?: { old: string; updated: string } | undefined
+    _rename?: RenameColumn
  } = {
    type: "table",
    _id: generateTableID(),
    views: {},
    ...rest,
  }
+  const renaming = tableToSave._rename
+  delete tableToSave._rename
-  // if the table obj had an _id then it will have been retrieved
-  let oldTable: Table | undefined
-  if (ctx.request.body && ctx.request.body._id) {
-    oldTable = await sdk.tables.getTable(ctx.request.body._id)
-  }
-  // check all types are correct
-  if (hasTypeChanged(tableToSave, oldTable)) {
-    ctx.throw(400, "A column type has changed.")
-  }
-  // check that subtypes have been maintained
-  tableToSave = checkAutoColumns(tableToSave, oldTable)
-  // saving a table is a complex operation, involving many different steps, this
-  // has been broken out into a utility to make it more obvious/easier to manipulate
-  const tableSaveFunctions = new TableSaveFunctions({
-    user: ctx.user,
-    oldTable,
-    importRows: rows,
-  })
-  tableToSave = await tableSaveFunctions.before(tableToSave)
-  // make sure that types don't change of a column, have to remove
-  // the column if you want to change the type
-  if (oldTable && oldTable.schema) {
-    for (const propKey of Object.keys(tableToSave.schema)) {
-      let oldColumn = oldTable.schema[propKey]
-      if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
-        oldTable.schema[propKey].type = FieldTypes.AUTO
-      }
-    }
-  }
-  // Don't rename if the name is the same
-  let _rename: RenameColumn | undefined = tableToSave._rename
-  /* istanbul ignore next */
-  if (_rename && _rename.old === _rename.updated) {
-    _rename = undefined
-    delete tableToSave._rename
-  }
-  // rename row fields when table column is renamed
-  /* istanbul ignore next */
-  if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
-    ctx.throw(400, "Cannot rename a linked column.")
-  }
-  tableToSave = await tableSaveFunctions.mid(tableToSave)
-  // update schema of non-statistics views when new columns are added
-  for (let view in tableToSave.views) {
-    const tableView = tableToSave.views[view]
-    if (!tableView) continue
-    if (sdk.views.isV2(tableView)) {
-      tableToSave.views[view] = sdk.views.syncSchema(
-        oldTable!.views![view] as ViewV2,
-        tableToSave.schema,
-        _rename
-      )
-      continue
-    }
-    if (
-      (tableView.schema as ViewStatisticsSchema).group ||
-      tableView.schema.field
-    )
-      continue
-    tableView.schema = tableToSave.schema
-  }
-  // update linked rows
-  try {
-    const linkResp: any = await updateLinks({
-      eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
-      table: tableToSave,
-      oldTable: oldTable,
-    })
-    if (linkResp != null && linkResp._rev) {
-      tableToSave._rev = linkResp._rev
-    }
-  } catch (err) {
-    ctx.throw(400, err as string)
-  }
-  // don't perform any updates until relationships have been
-  // checked by the updateLinks function
-  const updatedRows = tableSaveFunctions.getUpdatedRows()
-  if (updatedRows && updatedRows.length !== 0) {
-    await db.bulkDocs(updatedRows)
-  }
-  let result = await db.put(tableToSave)
-  tableToSave._rev = result.rev
-  const savedTable = cloneDeep(tableToSave)
-  tableToSave = await tableSaveFunctions.after(tableToSave)
-  // the table may be updated as part of the table save after functionality - need to write it
-  if (!isEqual(savedTable, tableToSave)) {
-    result = await db.put(tableToSave)
-    tableToSave._rev = result.rev
-  }
-  // has to run after, make sure it has _id
-  await runStaticFormulaChecks(tableToSave, { oldTable, deletion: false })
-  return tableToSave
+  try {
+    const { table } = await sdk.tables.internal.save(tableToSave, {
+      user: ctx.user,
+      rowsToImport: rows,
+      tableId: ctx.request.body._id,
+      renaming: renaming,
+    })
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      ctx.throw(400, err.message)
+    } else {
+      ctx.throw(err.status || 500, err.message || err)
+    }
+  }
}

-export async function destroy(ctx: any) {
+export async function destroy(ctx: UserCtx) {
-  const db = context.getAppDB()
  const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
-  // Delete all rows for that table
-  const rowsData = await db.allDocs(
-    getRowParams(ctx.params.tableId, null, {
-      include_docs: true,
-    })
-  )
-  await db.bulkDocs(
-    rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
-  )
-  await quotas.removeRows(rowsData.rows.length, {
-    tableId: ctx.params.tableId,
-  })
-  // update linked rows
-  await updateLinks({
-    eventType: EventType.TABLE_DELETE,
-    table: tableToDelete,
-  })
-  // don't remove the table itself until very end
-  await db.remove(tableToDelete._id!, tableToDelete._rev)
-  // remove table search index
-  if (!env.isTest() || env.COUCH_DB_URL) {
-    const currentIndexes = await db.getIndexes()
-    const existingIndex = currentIndexes.indexes.find(
-      (existing: any) => existing.name === `search:${ctx.params.tableId}`
-    )
-    if (existingIndex) {
-      await db.deleteIndex(existingIndex)
-    }
-  }
-  // has to run after, make sure it has _id
-  await runStaticFormulaChecks(tableToDelete, {
-    deletion: true,
-  })
-  await cleanupAttachments(tableToDelete, {
-    rows: rowsData.rows.map((row: any) => row.doc),
-  })
-  return tableToDelete
+  try {
+    const { table } = await sdk.tables.internal.destroy(tableToDelete)
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      ctx.throw(400, err.message)
+    } else {
+      ctx.throw(err.status || 500, err.message || err)
+    }
+  }
}

export async function bulkImport(
@ -213,6 +61,10 @@ export async function bulkImport(
) {
  const table = await sdk.tables.getTable(ctx.params.tableId)
  const { rows, identifierFields } = ctx.request.body
-  await handleDataImport(ctx.user, table, rows, identifierFields)
+  await handleDataImport(table, {
+    importRows: rows,
+    identifierFields,
+    user: ctx.user,
+  })
  return table
}

View File

@ -26,9 +26,16 @@ import {
  Row,
  SourceName,
  Table,
+  Database,
+  RenameColumn,
+  NumberFieldMetadata,
+  FieldSchema,
+  View,
+  RelationshipFieldMetadata,
+  FieldType,
} from "@budibase/types"

-export async function clearColumns(table: any, columnNames: any) {
+export async function clearColumns(table: Table, columnNames: string[]) {
  const db = context.getAppDB()
  const rows = await db.allDocs(
    getRowParams(table._id, null, {
@ -43,10 +50,13 @@ export async function clearColumns(table: any, columnNames: any) {
  )) as { id: string; _rev?: string }[]
}

-export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
+export async function checkForColumnUpdates(
+  updatedTable: Table,
+  oldTable?: Table,
+  columnRename?: RenameColumn
+) {
  const db = context.getAppDB()
  let updatedRows = []
-  const rename = updatedTable._rename
  let deletedColumns: any = []
  if (oldTable && oldTable.schema && updatedTable.schema) {
    deletedColumns = Object.keys(oldTable.schema).filter(
@ -54,7 +64,7 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    )
  }
  // check for renaming of columns or deleted columns
-  if (rename || deletedColumns.length !== 0) {
+  if (columnRename || deletedColumns.length !== 0) {
    // Update all rows
    const rows = await db.allDocs(
      getRowParams(updatedTable._id, null, {
@ -64,9 +74,9 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    const rawRows = rows.rows.map(({ doc }: any) => doc)
    updatedRows = rawRows.map((row: any) => {
      row = cloneDeep(row)
-      if (rename) {
-        row[rename.updated] = row[rename.old]
-        delete row[rename.old]
+      if (columnRename) {
+        row[columnRename.updated] = row[columnRename.old]
+        delete row[columnRename.old]
      } else if (deletedColumns.length !== 0) {
        deletedColumns.forEach((colName: any) => delete row[colName])
      }
@ -76,14 +86,13 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    // cleanup any attachments from object storage for deleted attachment columns
    await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
    // Update views
-    await checkForViewUpdates(updatedTable, rename, deletedColumns)
-    delete updatedTable._rename
+    await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
  }
  return { rows: updatedRows, table: updatedTable }
}

// makes sure the passed in table isn't going to reset the auto ID
-export function makeSureTableUpToDate(table: any, tableToSave: any) {
+export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
  if (!table) {
    return tableToSave
  }
@ -99,16 +108,17 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
      column.subtype === AutoFieldSubTypes.AUTO_ID &&
      tableToSave.schema[field]
    ) {
-      tableToSave.schema[field].lastID = column.lastID
+      const tableCol = tableToSave.schema[field] as NumberFieldMetadata
+      tableCol.lastID = column.lastID
    }
  }
  return tableToSave
}

export async function importToRows(
-  data: any[],
+  data: Row[],
  table: Table,
-  user: ContextUser | null = null
+  user?: ContextUser
) {
  let originalTable = table
  let finalData: any = []
@ -150,19 +160,20 @@ export async function importToRows(
}

export async function handleDataImport(
-  user: ContextUser,
  table: Table,
-  rows: Row[],
-  identifierFields: Array<string> = []
+  opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
) {
  const schema = table.schema
+  const identifierFields = opts?.identifierFields || []
+  const user = opts?.user
+  const importRows = opts?.importRows
-  if (!rows || !isRows(rows) || !isSchema(schema)) {
+  if (!importRows || !isRows(importRows) || !isSchema(schema)) {
    return table
  }
  const db = context.getAppDB()
-  const data = parse(rows, schema)
+  const data = parse(importRows, schema)
  let finalData: any = await importToRows(data, table, user)
@ -200,7 +211,7 @@ export async function handleDataImport(
  return table
}

-export async function handleSearchIndexes(table: any) {
+export async function handleSearchIndexes(table: Table) {
  const db = context.getAppDB()
  // create relevant search indexes
  if (table.indexes && table.indexes.length > 0) {
@ -244,13 +255,13 @@ export async function handleSearchIndexes(table: any) {
  return table
}

-export function checkStaticTables(table: any) {
+export function checkStaticTables(table: Table) {
  // check user schema has all required elements
  if (table._id === InternalTables.USER_METADATA) {
    for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
      // check if the schema exists on the table to be created/updated
      if (table.schema[key] == null) {
-        table.schema[key] = schema
+        table.schema[key] = schema as FieldSchema
      }
    }
  }
@ -258,13 +269,21 @@ export function checkStaticTables(table: any) {
}

class TableSaveFunctions {
-  db: any
-  user: any
-  oldTable: any
-  importRows: any
-  rows: any
+  db: Database
+  user?: ContextUser
+  oldTable?: Table
+  importRows?: Row[]
+  rows: Row[]

-  constructor({ user, oldTable, importRows }: any) {
+  constructor({
+    user,
+    oldTable,
+    importRows,
+  }: {
+    user?: ContextUser
+    oldTable?: Table
+    importRows?: Row[]
+  }) {
    this.db = context.getAppDB()
    this.user = user
    this.oldTable = oldTable
@ -274,7 +293,7 @@ class TableSaveFunctions {
  }

  // before anything is done
-  async before(table: any) {
+  async before(table: Table) {
    if (this.oldTable) {
      table = makeSureTableUpToDate(this.oldTable, table)
    }
@ -283,16 +302,23 @@ class TableSaveFunctions {
  }

  // when confirmed valid
-  async mid(table: any) {
-    let response = await checkForColumnUpdates(this.oldTable, table)
+  async mid(table: Table, columnRename?: RenameColumn) {
+    let response = await checkForColumnUpdates(
+      table,
+      this.oldTable,
+      columnRename
+    )
    this.rows = this.rows.concat(response.rows)
    return table
  }

  // after saving
-  async after(table: any) {
+  async after(table: Table) {
    table = await handleSearchIndexes(table)
-    table = await handleDataImport(this.user, table, this.importRows)
+    table = await handleDataImport(table, {
+      importRows: this.importRows,
+      user: this.user,
+    })
    return table
  }
@ -302,9 +328,9 @@ class TableSaveFunctions {
}

export async function checkForViewUpdates(
-  table: any,
-  rename: any,
-  deletedColumns: any
+  table: Table,
+  deletedColumns: string[],
+  columnRename?: RenameColumn
) {
  const views = await getViews()
  const tableViews = views.filter(view => view.meta.tableId === table._id)
@ -314,30 +340,30 @@ export async function checkForViewUpdates(
    let needsUpdated = false

    // First check for renames, otherwise check for deletions
-    if (rename) {
+    if (columnRename) {
      // Update calculation field if required
-      if (view.meta.field === rename.old) {
-        view.meta.field = rename.updated
+      if (view.meta.field === columnRename.old) {
+        view.meta.field = columnRename.updated
        needsUpdated = true
      }

      // Update group by field if required
-      if (view.meta.groupBy === rename.old) {
-        view.meta.groupBy = rename.updated
+      if (view.meta.groupBy === columnRename.old) {
+        view.meta.groupBy = columnRename.updated
        needsUpdated = true
      }

      // Update filters if required
      if (view.meta.filters) {
        view.meta.filters.forEach((filter: any) => {
-          if (filter.key === rename.old) {
-            filter.key = rename.updated
+          if (filter.key === columnRename.old) {
+            filter.key = columnRename.updated
            needsUpdated = true
          }
        })
      }
    } else if (deletedColumns) {
-      deletedColumns.forEach((column: any) => {
+      deletedColumns.forEach((column: string) => {
        // Remove calculation statement if required
        if (view.meta.field === column) {
          delete view.meta.field
@ -378,24 +404,29 @@ export async function checkForViewUpdates(
      if (!newViewTemplate.meta.schema) {
        newViewTemplate.meta.schema = table.schema
      }
-      table.views[view.name] = newViewTemplate.meta
+      if (table.views?.[view.name]) {
+        table.views[view.name] = newViewTemplate.meta as View
+      }
    }
  }
}

-export function generateForeignKey(column: any, relatedTable: any) {
+export function generateForeignKey(
+  column: RelationshipFieldMetadata,
+  relatedTable: Table
+) {
  return `fk_${relatedTable.name}_${column.fieldName}`
}

export function generateJunctionTableName(
-  column: any,
-  table: any,
-  relatedTable: any
+  column: RelationshipFieldMetadata,
+  table: Table,
+  relatedTable: Table
) {
  return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}

-export function foreignKeyStructure(keyName: any, meta?: any) {
+export function foreignKeyStructure(keyName: string, meta?: any) {
  const structure: any = {
    type: FieldTypes.NUMBER,
    constraints: {},
@ -407,7 +438,7 @@ export function foreignKeyStructure(keyName: any, meta?: any) {
  return structure
}

-export function areSwitchableTypes(type1: any, type2: any) {
+export function areSwitchableTypes(type1: FieldType, type2: FieldType) {
  if (
    SwitchableTypes.indexOf(type1) === -1 &&
    SwitchableTypes.indexOf(type2) === -1

View File

@ -12,14 +12,14 @@ describe("run misc tests", () => {
  })

  describe("/bbtel", () => {
    it("check if analytics enabled", async () => {
      const res = await request
        .get(`/api/bbtel`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(typeof res.body.enabled).toEqual("boolean")
    })
  })

  describe("/health", () => {
@ -37,7 +37,6 @@ describe("run misc tests", () => {
      } else {
        expect(text.split(".").length).toEqual(3)
      }
-      })
    })
  })
@ -93,62 +92,64 @@ describe("run misc tests", () => {
          constraints: {
            type: "array",
            presence: {
-              "allowEmpty": true
+              allowEmpty: true,
            },
-            inclusion: [
-              "One",
-              "Two",
-              "Three",
-            ]
+            inclusion: ["One", "Two", "Three"],
          },
          name: "Sample Tags",
-          sortable: false
+          sortable: false,
        },
        g: {
          type: "options",
          constraints: {
            type: "string",
            presence: false,
-            inclusion: [
-              "Alpha",
-              "Beta",
-              "Gamma"
-            ]
+            inclusion: ["Alpha", "Beta", "Gamma"],
          },
-          name: "Sample Opts"
-        }
+          name: "Sample Opts",
+        },
      },
    })

+    const importRows = [
+      { a: "1", b: "2", c: "3", d: "4", f: "['One']", g: "Alpha" },
+      { a: "5", b: "6", c: "7", d: "8", f: "[]", g: undefined },
+      { a: "9", b: "10", c: "11", d: "12", f: "['Two','Four']", g: "" },
+      { a: "13", b: "14", c: "15", d: "16", g: "Omega" },
+    ]
    // Shift specific row tests to the row spec
-    await tableUtils.handleDataImport(
-      { userId: "test" },
-      table,
-      [
-        { a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" },
-        { a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined},
-        { a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""},
-        { a: '13', b: '14', c: '15', d: '16', g: "Omega"}
-      ]
-    )
+    await tableUtils.handleDataImport(table, {
+      importRows,
+      user: { userId: "test" },
+    })

    // 4 rows imported, the auto ID starts at 1
    // We expect the handleDataImport function to update the lastID
-    expect(table.schema.e.lastID).toEqual(4);
+    expect(table.schema.e.lastID).toEqual(4)

    // Array/Multi - should have added a new value to the inclusion.
-    expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']);
+    expect(table.schema.f.constraints.inclusion).toEqual([
+      "Four",
+      "One",
+      "Three",
+      "Two",
+    ])

    // Options - should have a new value in the inclusion
-    expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']);
+    expect(table.schema.g.constraints.inclusion).toEqual([
+      "Alpha",
+      "Beta",
+      "Gamma",
+      "Omega",
+    ])

    const rows = await config.getRows()
-    expect(rows.length).toEqual(4);
+    expect(rows.length).toEqual(4)

    const rowOne = rows.find(row => row.e === 1)
    expect(rowOne.a).toEqual("1")
-    expect(rowOne.f).toEqual(['One'])
-    expect(rowOne.g).toEqual('Alpha')
+    expect(rowOne.f).toEqual(["One"])
+    expect(rowOne.g).toEqual("Alpha")

    const rowTwo = rows.find(row => row.e === 2)
    expect(rowTwo.a).toEqual("5")
@ -157,13 +158,13 @@ describe("run misc tests", () => {
    const rowThree = rows.find(row => row.e === 3)
    expect(rowThree.a).toEqual("9")
-    expect(rowThree.f).toEqual(['Two','Four'])
+    expect(rowThree.f).toEqual(["Two", "Four"])
    expect(rowThree.g).toEqual(null)

    const rowFour = rows.find(row => row.e === 4)
    expect(rowFour.a).toEqual("13")
    expect(rowFour.f).toEqual(undefined)
-    expect(rowFour.g).toEqual('Omega')
+    expect(rowFour.g).toEqual("Omega")
  })
})
})

View File

@ -0,0 +1,196 @@
import {
Operation,
RelationshipType,
RenameColumn,
Table,
TableRequest,
ViewV2,
} from "@budibase/types"
import { context } from "@budibase/backend-core"
import { buildExternalTableId } from "../../../../integrations/utils"
import {
foreignKeyStructure,
hasTypeChanged,
setStaticSchemas,
} from "../../../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import { FieldTypes } from "../../../../constants"
import { makeTableRequest } from "../../../../api/controllers/table/ExternalRequest"
import {
isRelationshipSetup,
cleanupRelationships,
generateLinkSchema,
generateManyLinkSchema,
generateRelatedSchema,
} from "./utils"
import { getTable } from "../getters"
import { populateExternalTableSchemas } from "../validation"
import datasourceSdk from "../../datasources"
import * as viewSdk from "../../views"
export async function save(
datasourceId: string,
update: Table,
opts?: { tableId?: string; renaming?: RenameColumn }
) {
let tableToSave: TableRequest = {
type: "table",
_id: buildExternalTableId(datasourceId, update.name),
sourceId: datasourceId,
...update,
}
const tableId = opts?.tableId || update._id
let oldTable: Table | undefined
if (tableId) {
oldTable = await getTable(tableId)
}
if (hasTypeChanged(tableToSave, oldTable)) {
throw new Error("A column type has changed.")
}
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView || !viewSdk.isV2(tableView)) continue
tableToSave.views[view] = viewSdk.syncSchema(
oldTable!.views![view] as ViewV2,
tableToSave.schema,
opts?.renaming
)
}
const db = context.getAppDB()
const datasource = await datasourceSdk.get(datasourceId)
if (!datasource.entities) {
datasource.entities = {}
}
// GSheets is a specific case - only ever has a static primary key
tableToSave = setStaticSchemas(datasource, tableToSave)
const oldTables = cloneDeep(datasource.entities)
const tables: Record<string, Table> = datasource.entities
const extraTablesToUpdate = []
// check if relations need setup
for (let schema of Object.values(tableToSave.schema)) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue
}
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schemaTableId
)
if (!relatedTable) {
continue
}
const relatedColumnName = schema.fieldName!
const relationType = schema.relationshipType
if (relationType === RelationshipType.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
schema,
tableToSave,
relatedTable
)
if (tables[junctionTable.name]) {
throw new Error(
"Junction table already exists, cannot create another relationship."
)
}
tables[junctionTable.name] = junctionTable
extraTablesToUpdate.push(junctionTable)
} else {
const fkTable =
relationType === RelationshipType.ONE_TO_MANY
? tableToSave
: relatedTable
const foreignKey = generateLinkSchema(
schema,
tableToSave,
relatedTable,
relationType
)
if (fkTable.schema[foreignKey] != null) {
throw new Error(
`Unable to generate foreign key - column ${foreignKey} already in use.`
)
}
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
if (fkTable.constrained == null) {
fkTable.constrained = []
}
if (fkTable.constrained.indexOf(foreignKey) === -1) {
fkTable.constrained.push(foreignKey)
}
// foreign key is in other table, need to save it to external
if (fkTable._id !== tableToSave._id) {
extraTablesToUpdate.push(fkTable)
}
}
generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
schema.main = true
}
cleanupRelationships(tableToSave, tables, oldTable)
const operation = tableId ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(
datasource,
operation,
tableToSave,
tables,
oldTable,
opts?.renaming
)
// update any extra tables (like foreign keys in other tables)
for (let extraTable of extraTablesToUpdate) {
const oldExtraTable = oldTables[extraTable.name]
let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
}
// make sure the constrained list, all still exist
if (Array.isArray(tableToSave.constrained)) {
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
Object.keys(tableToSave.schema).includes(constraint)
)
}
// remove the rename prop
delete tableToSave._rename
// store it into couch now for budibase reference
datasource.entities[tableToSave.name] = tableToSave
await db.put(populateExternalTableSchemas(datasource))
// Since tables are stored inside datasources, we need to notify clients
// that the datasource definition changed
const updatedDatasource = await datasourceSdk.get(datasource._id!)
return { datasource: updatedDatasource, table: tableToSave }
}
export async function destroy(datasourceId: string, table: Table) {
const db = context.getAppDB()
const datasource = await datasourceSdk.get(datasourceId)
const tables = datasource.entities
const operation = Operation.DELETE_TABLE
if (tables) {
await makeTableRequest(datasource, operation, table, tables)
cleanupRelationships(table, tables)
delete tables[table.name]
datasource.entities = tables
}
await db.put(populateExternalTableSchemas(datasource))
// Since tables are stored inside datasources, we need to notify clients
// that the datasource definition changed
const updatedDatasource = await datasourceSdk.get(datasource._id!)
return { datasource: updatedDatasource, table }
}
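
A hypothetical call into the new external table SDK, showing the opts shape; the relative import path and surrounding wiring below are assumptions for illustration only:

// Illustrative only: renaming a column on an external table via the SDK.
import { RenameColumn, Table } from "@budibase/types"
import * as externalTables from "./update" // assumed relative path

async function renameExternalColumn(
  datasourceId: string,
  table: Table,
  renaming: RenameColumn
) {
  // passing tableId tells save() this is an update of an existing table
  const { datasource, table: saved } = await externalTables.save(
    datasourceId,
    table,
    { tableId: table._id, renaming }
  )
  return { datasource, saved }
}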

View File

@ -0,0 +1,161 @@
import {
Datasource,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
RelationshipType,
Table,
} from "@budibase/types"
import { FieldTypes } from "../../../../constants"
import {
foreignKeyStructure,
generateForeignKey,
generateJunctionTableName,
} from "../../../../api/controllers/table/utils"
import { buildExternalTableId } from "../../../../integrations/utils"
import { cloneDeep } from "lodash/fp"
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
oldTable?: Table
) {
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
if (
schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null)
) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schemaTableId
)
const foreignKey =
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
schema.foreignKey
if (!relatedTable || !foreignKey) {
continue
}
for (let [relatedKey, relatedSchema] of Object.entries(
relatedTable.schema
)) {
if (
relatedSchema.type === FieldTypes.LINK &&
relatedSchema.fieldName === foreignKey
) {
delete relatedTable.schema[relatedKey]
}
}
}
}
}
export function otherRelationshipType(type: RelationshipType) {
if (type === RelationshipType.MANY_TO_MANY) {
return RelationshipType.MANY_TO_MANY
}
return type === RelationshipType.ONE_TO_MANY
? RelationshipType.MANY_TO_ONE
: RelationshipType.ONE_TO_MANY
}
export function generateManyLinkSchema(
datasource: Datasource,
column: ManyToManyRelationshipFieldMetadata,
table: Table,
relatedTable: Table
): Table {
if (!table.primary || !relatedTable.primary) {
const noPrimaryName = !table.primary ? table.name : relatedTable.name
throw new Error(
`Unable to generate many link schema, "${noPrimaryName}" does not have a primary key`
)
}
const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable)
// first create the new table
const junctionTable = {
_id: buildExternalTableId(datasource._id!, jcTblName),
name: jcTblName,
primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary],
schema: {
[primary]: foreignKeyStructure(primary, {
toTable: table.name,
toKey: table.primary[0],
}),
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
toTable: relatedTable.name,
toKey: relatedTable.primary[0],
}),
},
}
column.through = junctionTable._id
column.throughFrom = relatedPrimary
column.throughTo = primary
column.fieldName = relatedPrimary
return junctionTable
}
export function generateLinkSchema(
column:
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata,
table: Table,
relatedTable: Table,
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys")
}
const isOneSide = type === RelationshipType.ONE_TO_MANY
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
// generate a foreign key
const foreignKey = generateForeignKey(column, relatedTable)
column.relationshipType = type
column.foreignKey = isOneSide ? foreignKey : primary
column.fieldName = isOneSide ? primary : foreignKey
return foreignKey
}
export function generateRelatedSchema(
linkColumn: RelationshipFieldMetadata,
table: Table,
relatedTable: Table,
columnName: string
) {
// generate column for other table
let relatedSchema: RelationshipFieldMetadata
const isMany2Many =
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
// swap them from the main link
if (!isMany2Many && linkColumn.foreignKey) {
relatedSchema = cloneDeep(linkColumn) as
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata
relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName
}
// is many to many
else {
const manyToManyCol = linkColumn as ManyToManyRelationshipFieldMetadata
relatedSchema = cloneDeep(linkColumn) as ManyToManyRelationshipFieldMetadata
// don't need to copy through, already got it
relatedSchema.fieldName = manyToManyCol.throughTo!
relatedSchema.throughTo = manyToManyCol.throughFrom
relatedSchema.throughFrom = manyToManyCol.throughTo
}
relatedSchema.relationshipType = otherRelationshipType(
linkColumn.relationshipType
)
relatedSchema.tableId = relatedTable._id!
relatedSchema.name = columnName
table.schema[columnName] = relatedSchema
}
export function isRelationshipSetup(column: RelationshipFieldMetadata) {
return (column as any).foreignKey || (column as any).through
}

View File

@ -0,0 +1,124 @@
import { context } from "@budibase/backend-core"
import {
BudibaseInternalDB,
getMultiIDParams,
getTableParams,
} from "../../../db/utils"
import {
breakExternalTableId,
isExternalTable,
isSQL,
} from "../../../integrations/utils"
import {
AllDocsResponse,
Database,
Table,
TableResponse,
TableViewsResponse,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"
function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
return docs.rows.map((tableDoc: any) => ({
...tableDoc.doc,
type: "internal",
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
}))
}
export async function getAllInternalTables(db?: Database): Promise<Table[]> {
if (!db) {
db = context.getAppDB()
}
const internalTables = await db.allDocs<Table[]>(
getTableParams(null, {
include_docs: true,
})
)
return processInternalTables(internalTables)
}
async function getAllExternalTables(): Promise<Table[]> {
const datasources = await sdk.datasources.fetch({ enriched: true })
const allEntities = datasources.map(datasource => datasource.entities)
let final: Table[] = []
for (let entities of allEntities) {
if (entities) {
final = final.concat(Object.values(entities))
}
}
return final
}
export async function getExternalTable(
datasourceId: string,
tableName: string
): Promise<Table> {
const entities = await getExternalTablesInDatasource(datasourceId)
return entities[tableName]
}
export async function getTable(tableId: string): Promise<Table> {
const db = context.getAppDB()
if (isExternalTable(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await datasources.get(datasourceId!)
const table = await getExternalTable(datasourceId!, tableName!)
return { ...table, sql: isSQL(datasource) }
} else {
return db.get(tableId)
}
}
export async function getAllTables() {
const [internal, external] = await Promise.all([
getAllInternalTables(),
getAllExternalTables(),
])
return [...internal, ...external]
}
export async function getExternalTablesInDatasource(
datasourceId: string
): Promise<Record<string, Table>> {
const datasource = await datasources.get(datasourceId, { enriched: true })
if (!datasource || !datasource.entities) {
throw new Error("Datasource is not configured fully.")
}
return datasource.entities
}
export async function getTables(tableIds: string[]): Promise<Table[]> {
const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
let tables: Table[] = []
if (externalTableIds.length) {
const externalTables = await getAllExternalTables()
tables = tables.concat(
externalTables.filter(
table => externalTableIds.indexOf(table._id!) !== -1
)
)
}
if (internalTableIds.length) {
const db = context.getAppDB()
const internalTableDocs = await db.allDocs<Table[]>(
getMultiIDParams(internalTableIds)
)
tables = tables.concat(processInternalTables(internalTableDocs))
}
return tables
}
export function enrichViewSchemas(table: Table): TableResponse {
return {
...table,
views: Object.values(table.views ?? [])
.map(v => sdk.views.enrichSchema(v, table.schema))
.reduce((p, v) => {
p[v.name!] = v
return p
}, {} as TableViewsResponse),
}
}
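
A minimal sketch of how these getters might be consumed elsewhere in the server (illustrative only; the import path is assumed):

// Illustrative only: resolving tables and enriching view schemas for an API response.
import { getTable, getTables, enrichViewSchemas } from "./getters"

async function fetchTableForApi(tableId: string) {
  const table = await getTable(tableId) // resolves internal or external by ID
  return enrichViewSchemas(table) // attaches enriched view schemas for the response
}

async function fetchMany(tableIds: string[]) {
  return getTables(tableIds) // batches internal and external lookups
}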

View File

@ -1,148 +1,11 @@
-import { context } from "@budibase/backend-core"
-import {
-  BudibaseInternalDB,
-  getMultiIDParams,
-  getTableParams,
-} from "../../../db/utils"
-import {
-  breakExternalTableId,
-  isExternalTable,
-  isSQL,
-} from "../../../integrations/utils"
-import {
-  AllDocsResponse,
-  Database,
-  Table,
-  TableResponse,
-  TableViewsResponse,
-} from "@budibase/types"
-import datasources from "../datasources"
import { populateExternalTableSchemas } from "./validation"
-import sdk from "../../../sdk"
+import * as getters from "./getters"
+import * as updates from "./update"
+import * as utils from "./utils"

-function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
-  return docs.rows.map((tableDoc: any) => ({
-    ...tableDoc.doc,
-    type: "internal",
-    sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
-  }))
-}
-async function getAllInternalTables(db?: Database): Promise<Table[]> {
-  if (!db) {
-    db = context.getAppDB()
-  }
-  const internalTableDocs = await db.allDocs<Table[]>(
-    getTableParams(null, {
-      include_docs: true,
-    })
-  )
-  return processInternalTables(internalTableDocs)
-}
-async function getAllExternalTables(): Promise<Table[]> {
-  const datasources = await sdk.datasources.fetch({ enriched: true })
-  const allEntities = datasources.map(datasource => datasource.entities)
-  let final: Table[] = []
-  for (let entities of allEntities) {
-    if (entities) {
-      final = final.concat(Object.values(entities))
-    }
-  }
-  return final
-}
-async function getAllTables() {
-  const [internal, external] = await Promise.all([
-    getAllInternalTables(),
-    getAllExternalTables(),
-  ])
-  return [...internal, external]
-}
-async function getTables(tableIds: string[]): Promise<Table[]> {
-  const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
-    internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
-  let tables: Table[] = []
-  if (externalTableIds.length) {
-    const externalTables = await getAllExternalTables()
-    tables = tables.concat(
-      externalTables.filter(
-        table => externalTableIds.indexOf(table._id!) !== -1
-      )
-    )
-  }
-  if (internalTableIds.length) {
-    const db = context.getAppDB()
-    const internalTableDocs = await db.allDocs<Table[]>(
-      getMultiIDParams(internalTableIds)
-    )
-    tables = tables.concat(processInternalTables(internalTableDocs))
-  }
-  return tables
-}
-async function getExternalTablesInDatasource(
-  datasourceId: string
-): Promise<Record<string, Table>> {
-  const datasource = await datasources.get(datasourceId, { enriched: true })
-  if (!datasource || !datasource.entities) {
-    throw "Datasource is not configured fully."
-  }
-  return datasource.entities
-}
-async function getExternalTable(
-  datasourceId: string,
-  tableName: string
-): Promise<Table> {
-  const entities = await getExternalTablesInDatasource(datasourceId)
-  return entities[tableName]
-}
-async function getTable(tableId: string): Promise<Table> {
-  const db = context.getAppDB()
-  if (isExternalTable(tableId)) {
-    let { datasourceId, tableName } = breakExternalTableId(tableId)
-    const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId!, tableName!)
-    return { ...table, sql: isSQL(datasource) }
-  } else {
-    return db.get<Table>(tableId)
-  }
-}
-function enrichViewSchemas(table: Table): TableResponse {
-  return {
-    ...table,
-    views: Object.values(table.views ?? [])
-      .map(v => sdk.views.enrichSchema(v, table.schema))
-      .reduce((p, v) => {
-        p[v.name] = v
-        return p
-      }, {} as TableViewsResponse),
-  }
-}
-async function saveTable(table: Table) {
-  const db = context.getAppDB()
-  if (isExternalTable(table._id!)) {
-    const datasource = await sdk.datasources.get(table.sourceId!)
-    datasource.entities![table.name] = table
-    await db.put(datasource)
-  } else {
-    await db.put(table)
-  }
-}

export default {
-  getAllInternalTables,
-  getExternalTablesInDatasource,
-  getExternalTable,
-  getTable,
-  getAllTables,
-  getTables,
  populateExternalTableSchemas,
-  enrichViewSchemas,
-  saveTable,
+  ...updates,
+  ...getters,
+  ...utils,
}

View File

@ -0,0 +1,172 @@
import {
RenameColumn,
Table,
ViewStatisticsSchema,
ViewV2,
Row,
ContextUser,
} from "@budibase/types"
import {
hasTypeChanged,
TableSaveFunctions,
} from "../../../../api/controllers/table/utils"
import { FieldTypes } from "../../../../constants"
import { EventType, updateLinks } from "../../../../db/linkedRows"
import { cloneDeep } from "lodash/fp"
import isEqual from "lodash/isEqual"
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
import { context } from "@budibase/backend-core"
import { getTable } from "../getters"
import { checkAutoColumns } from "./utils"
import * as viewsSdk from "../../views"
import sdk from "../../../index"
import { getRowParams } from "../../../../db/utils"
import { quotas } from "@budibase/pro"
import env from "../../../../environment"
import { cleanupAttachments } from "../../../../utilities/rowProcessor"
export async function save(
table: Table,
opts?: {
user?: ContextUser
tableId?: string
rowsToImport?: Row[]
renaming?: RenameColumn
}
) {
const db = context.getAppDB()
// if the table obj had an _id then it will have been retrieved
let oldTable: Table | undefined
if (opts?.tableId) {
oldTable = await getTable(opts.tableId)
}
// check all types are correct
if (hasTypeChanged(table, oldTable)) {
throw new Error("A column type has changed.")
}
// check that subtypes have been maintained
table = checkAutoColumns(table, oldTable)
// saving a table is a complex operation, involving many different steps, this
// has been broken out into a utility to make it more obvious/easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
user: opts?.user,
oldTable,
importRows: opts?.rowsToImport,
})
table = await tableSaveFunctions.before(table)
let renaming = opts?.renaming
if (renaming && renaming.old === renaming.updated) {
renaming = undefined
}
// rename row fields when table column is renamed
if (renaming && table.schema[renaming.updated].type === FieldTypes.LINK) {
throw new Error("Cannot rename a linked column.")
}
table = await tableSaveFunctions.mid(table, renaming)
// update schema of non-statistics views when new columns are added
for (let view in table.views) {
const tableView = table.views[view]
if (!tableView) continue
if (viewsSdk.isV2(tableView)) {
table.views[view] = viewsSdk.syncSchema(
oldTable!.views![view] as ViewV2,
table.schema,
renaming
)
continue
}
if (
(tableView.schema as ViewStatisticsSchema).group ||
tableView.schema.field
)
continue
tableView.schema = table.schema
}
// update linked rows
const linkResp: any = await updateLinks({
eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
table: table,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
table._rev = linkResp._rev
}
// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
let result = await db.put(table)
table._rev = result.rev
const savedTable = cloneDeep(table)
table = await tableSaveFunctions.after(table)
// the table may be updated as part of the table save after functionality - need to write it
if (!isEqual(savedTable, table)) {
result = await db.put(table)
table._rev = result.rev
}
// has to run after, make sure it has _id
await runStaticFormulaChecks(table, { oldTable, deletion: false })
return { table }
}
export async function destroy(table: Table) {
const db = context.getAppDB()
const tableId = table._id!
// Delete all rows for that table
const rowsData = await db.allDocs(
getRowParams(tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(
rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
)
await quotas.removeRows(rowsData.rows.length, {
tableId,
})
// update linked rows
await updateLinks({
eventType: EventType.TABLE_DELETE,
table: table,
})
// don't remove the table itself until very end
await db.remove(tableId, table._rev)
// remove table search index
if (!env.isTest() || env.COUCH_DB_URL) {
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
(existing: any) => existing.name === `search:${tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}
}
// has to run after, make sure it has _id
await runStaticFormulaChecks(table, {
deletion: true,
})
await cleanupAttachments(table, {
rows: rowsData.rows.map((row: any) => row.doc),
})
return { table }
}

View File

@ -0,0 +1,20 @@
import { Table } from "@budibase/types"
import { fixAutoColumnSubType } from "../../../../utilities/rowProcessor"
export function checkAutoColumns(table: Table, oldTable?: Table) {
if (!table.schema) {
return table
}
for (let [key, schema] of Object.entries(table.schema)) {
if (!schema.autocolumn || schema.subtype) {
continue
}
const oldSchema = oldTable && oldTable.schema[key]
if (oldSchema && oldSchema.subtype) {
table.schema[key].subtype = oldSchema.subtype
} else {
table.schema[key] = fixAutoColumnSubType(schema)
}
}
return table
}

View File

@ -0,0 +1,31 @@
import { Table, RenameColumn } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import sdk from "../../index"
import { context } from "@budibase/backend-core"
import { isExternal } from "./utils"
import * as external from "./external"
import * as internal from "./internal"
export * as external from "./external"
export * as internal from "./internal"
export async function saveTable(table: Table) {
const db = context.getAppDB()
if (isExternalTable(table._id!)) {
const datasource = await sdk.datasources.get(table.sourceId!)
datasource.entities![table.name] = table
await db.put(datasource)
} else {
await db.put(table)
}
}
export async function update(table: Table, renaming?: RenameColumn) {
const tableId = table._id
if (isExternal({ table })) {
const datasourceId = table.sourceId!
await external.save(datasourceId, table, { tableId, renaming })
} else {
await internal.save(table, { tableId, renaming })
}
}
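
A minimal sketch of calling the new dispatcher (illustrative only; the import site and column names are assumptions):

// Illustrative only: update() routes to the external or internal implementation,
// so callers don't need to branch on table type themselves.
import { Table } from "@budibase/types"
import { update } from "./index" // assumed import site

async function renameTitleColumn(table: Table) {
  // RenameColumn is { old, updated } per the types used above
  await update(table, { old: "name", updated: "title" })
}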

View File

@ -0,0 +1,11 @@
import { Table } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.type === "external") {
return true
} else if (opts.tableId && isExternalTable(opts.tableId)) {
return true
}
return false
}
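
A small usage sketch of the helper (illustrative only; the example table ID format is an assumption):

// Illustrative only: isExternal() accepts either a table object or a table ID.
import { Table } from "@budibase/types"
import { isExternal } from "./utils"

function describeTable(table: Table) {
  // routes on the table's declared type first, then falls back to the ID format
  return isExternal({ table }) ? "external" : "internal"
}

const byId = isExternal({ tableId: "ta_abc123" }) // assumed internal-style ID => false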

View File

@ -59,11 +59,10 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
  const existingView = Object.values(views).find(
    v => isV2(v) && v.id === view.id
  )
-  if (!existingView) {
+  if (!existingView || !existingView.name) {
    throw new HTTPError(`View ${view.id} not found in table ${tableId}`, 404)
  }

-  console.log("set to", view)
  delete views[existingView.name]
  views[view.name] = view

  await db.put(ds)

View File

@ -51,11 +51,10 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
  const existingView = Object.values(table.views).find(
    v => isV2(v) && v.id === view.id
  )
-  if (!existingView) {
+  if (!existingView || !existingView.name) {
    throw new HTTPError(`View ${view.id} not found in table ${tableId}`, 404)
  }

-  console.log("set to", view)
  delete table.views[existingView.name]
  table.views[view.name] = view

  await db.put(table)

View File

@ -2,7 +2,7 @@ import { SearchFilter, SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table"

export interface View {
-  name: string
+  name?: string
  tableId: string
  field?: string
  filters: ViewFilter[]