import { parse, isSchema, isRows } from "../../../utilities/schema"
import { getRowParams, generateRowID, InternalTables } from "../../../db/utils"
import isEqual from "lodash/isEqual"
import {
  AutoFieldSubTypes,
  FieldTypes,
  GOOGLE_SHEETS_PRIMARY_KEY,
  USERS_TABLE_SCHEMA,
  SwitchableTypes,
  CanSwitchTypes,
} from "../../../constants"
import {
  inputProcessing,
  cleanupAttachments,
} from "../../../utilities/rowProcessor"
import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core"
import { ContextUser, Datasource, SourceName, Table } from "@budibase/types"

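// Deletes the named columns from every row of the given internal table and
// bulk-writes the updated documents back to the app database.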
export async function clearColumns(table: any, columnNames: any) {
  const db = context.getAppDB()
  const rows = await db.allDocs(
    getRowParams(table._id, null, {
      include_docs: true,
    })
  )
  return (await db.bulkDocs(
    rows.rows.map(({ doc }: any) => {
      columnNames.forEach((colName: any) => delete doc[colName])
      return doc
    })
  )) as { id: string; _rev?: string }[]
}

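// Applies column renames and deletions from a table update to all existing
// rows, cleans up attachments for removed attachment columns and keeps any
// dependent views in sync. Returns the rewritten rows and the updated table.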
export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
  const db = context.getAppDB()
  let updatedRows = []
  const rename = updatedTable._rename
  let deletedColumns: any = []
  if (oldTable && oldTable.schema && updatedTable.schema) {
    deletedColumns = Object.keys(oldTable.schema).filter(
      colName => updatedTable.schema[colName] == null
    )
  }
  // check for renamed or deleted columns
  if (rename || deletedColumns.length !== 0) {
    // Update all rows
    const rows = await db.allDocs(
      getRowParams(updatedTable._id, null, {
        include_docs: true,
      })
    )
    const rawRows = rows.rows.map(({ doc }: any) => doc)
    updatedRows = rawRows.map((row: any) => {
      row = cloneDeep(row)
      if (rename) {
        row[rename.updated] = row[rename.old]
        delete row[rename.old]
      } else if (deletedColumns.length !== 0) {
        deletedColumns.forEach((colName: any) => delete row[colName])
      }
      return row
    })

    // cleanup any attachments from object storage for deleted attachment columns
    await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
    // Update views
    await checkForViewUpdates(updatedTable, rename, deletedColumns)
    delete updatedTable._rename
  }
  return { rows: updatedRows, table: updatedTable }
}

// makes sure the passed-in table isn't going to reset the auto ID
export function makeSureTableUpToDate(table: any, tableToSave: any) {
  if (!table) {
    return tableToSave
  }
  // make sure the rev is up to date
  tableToSave._rev = table._rev
  // make sure auto IDs are always updated - these are internal
  // so the client may not know they have changed
  let field: any
  let column: any
  for ([field, column] of Object.entries(table.schema)) {
    if (
      column.autocolumn &&
      column.subtype === AutoFieldSubTypes.AUTO_ID &&
      tableToSave.schema[field]
    ) {
      tableToSave.schema[field].lastID = column.lastID
    }
  }
  return tableToSave
}

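// Converts parsed import data into row documents for the given table,
// generating row IDs, running input processing and expanding the inclusion
// constraints of any options/array columns to cover the imported values.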
export function importToRows(
  data: any[],
  table: Table,
  user: ContextUser | null = null
) {
  let originalTable = table
  let finalData: any = []
  for (let i = 0; i < data.length; i++) {
    let row = data[i]
    row._id = generateRowID(table._id!)
    row.tableId = table._id

    // We use a reference to the table here and update it after input processing,
    // so that we can auto-increment auto IDs in imported data properly
    const processed = inputProcessing(user, table, row, {
      noAutoRelationships: true,
    })
    row = processed.row
    table = processed.table

    // However, here we must reference the original table, as we want to mutate
    // the real schema of the table passed in, not the clone used for
    // incrementing auto IDs
    for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
      const rowVal = Array.isArray(row[fieldName])
        ? row[fieldName]
        : [row[fieldName]]
      if (
        (schema.type === FieldTypes.OPTIONS ||
          schema.type === FieldTypes.ARRAY) &&
        row[fieldName]
      ) {
        let merged = [...schema.constraints!.inclusion!, ...rowVal]
        let superSet = new Set(merged)
        schema.constraints!.inclusion = Array.from(superSet)
        schema.constraints!.inclusion.sort()
      }
    }

    finalData.push(row)
  }
  return finalData
}

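// Validates the supplied rows against the table schema, converts them into
// row documents and bulk-imports them within the row quota. If identifier
// fields are provided, matching existing rows are updated rather than duplicated.
//
// Illustrative call (parameter values here are examples, not taken from this file):
//   await handleDataImport(ctx.user, table, importRows, ["email"])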
export async function handleDataImport(
  user: any,
  table: any,
  rows: any,
  identifierFields: Array<string> = []
) {
  const schema: unknown = table.schema

  if (!rows || !isRows(rows) || !isSchema(schema)) {
    return table
  }

  const db = context.getAppDB()
  const data = parse(rows, schema)

  let finalData: any = importToRows(data, table, user)

  // set the IDs of finalData to match existing rows if an update is expected
  if (identifierFields.length > 0) {
    const allDocs = await db.allDocs(
      getRowParams(table._id, null, {
        include_docs: true,
      })
    )
    allDocs.rows
      .map(existingRow => existingRow.doc)
      .forEach((doc: any) => {
        finalData.forEach((finalItem: any) => {
          let match = true
          for (const field of identifierFields) {
            if (finalItem[field] !== doc[field]) {
              match = false
              break
            }
          }
          if (match) {
            finalItem._id = doc._id
            finalItem._rev = doc._rev
          }
        })
      })
  }

  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
    tableId: table._id,
  })

  await events.rows.imported(table, finalData.length)
  return table
}

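// Creates or refreshes the search index for a table's configured index fields
// in the app database, deleting and recreating the index when the indexed
// fields have changed.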
export async function handleSearchIndexes(table: any) {
  const db = context.getAppDB()
  // create relevant search indexes
  if (table.indexes && table.indexes.length > 0) {
    const currentIndexes = await db.getIndexes()
    const indexName = `search:${table._id}`

    const existingIndex = currentIndexes.indexes.find(
      (existing: any) => existing.name === indexName
    )

    if (existingIndex) {
      const currentFields = existingIndex.def.fields.map(
        (field: any) => Object.keys(field)[0]
      )

      // if the indexed fields have changed, delete the original index
      if (!isEqual(currentFields, table.indexes)) {
        await db.deleteIndex(existingIndex)
        // recreate the index with the new fields
        await db.createIndex({
          index: {
            fields: table.indexes,
            name: indexName,
            ddoc: "search_ddoc",
            type: "json",
          },
        })
      }
    } else {
      // create the index with the configured fields
      await db.createIndex({
        index: {
          fields: table.indexes,
          name: indexName,
          ddoc: "search_ddoc",
          type: "json",
        },
      })
    }
  }
  return table
}

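// Ensures built-in tables keep their required columns: currently this adds any
// missing fields from USERS_TABLE_SCHEMA back onto the user metadata table.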
export function checkStaticTables(table: any) {
  // check user schema has all required elements
  if (table._id === InternalTables.USER_METADATA) {
    for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
      // check if the schema exists on the table to be created/updated
      if (table.schema[key] == null) {
        table.schema[key] = schema
      }
    }
  }
  return table
}

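// Groups the hooks that run around saving a table: `before` normalises the
// incoming table against the stored copy, `mid` applies column renames and
// deletions to existing rows, and `after` rebuilds search indexes and runs any
// row import. Rows touched along the way are collected for the caller.
//
// A minimal usage sketch (illustrative names, not taken from this file):
//   const tableSaveFunctions = new TableSaveFunctions({ user, oldTable, importRows })
//   let table = await tableSaveFunctions.before(incomingTable)
//   table = await tableSaveFunctions.mid(table)
//   // ...persist the table document...
//   table = await tableSaveFunctions.after(table)
//   const updatedRows = tableSaveFunctions.getUpdatedRows()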
class TableSaveFunctions {
  db: any
  user: any
  oldTable: any
  importRows: any
  rows: any

  constructor({ user, oldTable, importRows }: any) {
    this.db = context.getAppDB()
    this.user = user
    this.oldTable = oldTable
    this.importRows = importRows
    // any rows that need to be updated
    this.rows = []
  }

  // before anything is done
  async before(table: any) {
    if (this.oldTable) {
      table = makeSureTableUpToDate(this.oldTable, table)
    }
    table = checkStaticTables(table)
    return table
  }

  // when confirmed valid
  async mid(table: any) {
    let response = await checkForColumnUpdates(this.oldTable, table)
    this.rows = this.rows.concat(response.rows)
    return table
  }

  // after saving
  async after(table: any) {
    table = await handleSearchIndexes(table)
    table = await handleDataImport(this.user, table, this.importRows)
    return table
  }

  getUpdatedRows() {
    return this.rows
  }
}

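// Keeps views that belong to the table consistent after a schema change:
// renamed columns are propagated to calculation, group-by and filter
// definitions, deleted columns are removed from them, and any affected view is
// regenerated and saved back onto the table.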
export async function checkForViewUpdates(
  table: any,
  rename: any,
  deletedColumns: any
) {
  const views = await getViews()
  const tableViews = views.filter(view => view.meta.tableId === table._id)

  // Check each table view to see if it is impacted by this table action
  for (let view of tableViews) {
    let needsUpdated = false

    // First check for renames, otherwise check for deletions
    if (rename) {
      // Update calculation field if required
      if (view.meta.field === rename.old) {
        view.meta.field = rename.updated
        needsUpdated = true
      }

      // Update group by field if required
      if (view.meta.groupBy === rename.old) {
        view.meta.groupBy = rename.updated
        needsUpdated = true
      }

      // Update filters if required
      if (view.meta.filters) {
        view.meta.filters.forEach((filter: any) => {
          if (filter.key === rename.old) {
            filter.key = rename.updated
            needsUpdated = true
          }
        })
      }
    } else if (deletedColumns) {
      deletedColumns.forEach((column: any) => {
        // Remove calculation statement if required
        if (view.meta.field === column) {
          delete view.meta.field
          delete view.meta.calculation
          delete view.meta.groupBy
          needsUpdated = true
        }

        // Remove group by field if required
        if (view.meta.groupBy === column) {
          delete view.meta.groupBy
          needsUpdated = true
        }

        // Remove filters referencing the deleted field if required
        if (view.meta.filters && view.meta.filters.length) {
          const initialLength = view.meta.filters.length
          view.meta.filters = view.meta.filters.filter((filter: any) => {
            return filter.key !== column
          })
          if (initialLength !== view.meta.filters.length) {
            needsUpdated = true
          }
        }
      })
    }

    // Update the view if required
    if (needsUpdated) {
      const groupByField: any = Object.values(table.schema).find(
        (field: any) => field.name == view.groupBy
      )
      const newViewTemplate = viewTemplate(
        view.meta,
        groupByField?.type === FieldTypes.ARRAY
      )
      await saveView(null, view.name, newViewTemplate)
      if (!newViewTemplate.meta.schema) {
        newViewTemplate.meta.schema = table.schema
      }
      table.views[view.name] = newViewTemplate.meta
    }
  }
}

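// Builds the foreign key column name used for relationships. For example
// (illustrative names), a related table named "users" and a relationship
// column with fieldName "userId" produce "fk_users_userId".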
export function generateForeignKey(column: any, relatedTable: any) {
  return `fk_${relatedTable.name}_${column.fieldName}`
}

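// Builds the name of the junction table backing a many-to-many relationship.
// For example (illustrative names), linking "tasks" to "users" through a
// column named "assignees" with fieldName "tasks" produces
// "jt_tasks_users_assignees_tasks".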
export function generateJunctionTableName(
  column: any,
  table: any,
  relatedTable: any
) {
  return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}

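// Builds the schema entry for a numeric foreign key column, attaching any
// relationship metadata that is supplied.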
export function foreignKeyStructure(keyName: any, meta?: any) {
  const structure: any = {
    type: FieldTypes.NUMBER,
    constraints: {},
    name: keyName,
  }
  if (meta) {
    structure.meta = meta
  }
  return structure
}

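// Determines whether a column can safely change from one field type to the
// other: at least one of the types must be switchable, and both must appear as
// distinct entries in the same CanSwitchTypes group.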
export function areSwitchableTypes(type1: any, type2: any) {
  if (
    SwitchableTypes.indexOf(type1) === -1 &&
    SwitchableTypes.indexOf(type2) === -1
  ) {
    return false
  }
  for (let option of CanSwitchTypes) {
    const index1 = option.indexOf(type1),
      index2 = option.indexOf(type2)
    if (index1 !== -1 && index2 !== -1 && index1 !== index2) {
      return true
    }
  }
  return false
}

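// Checks whether any column shared by the old and new table definitions has
// changed to an incompatible type, ignoring switches permitted by
// areSwitchableTypes.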
export function hasTypeChanged(table: Table, oldTable: Table | undefined) {
  if (!oldTable) {
    return false
  }
  let key: any
  let field: any
  for ([key, field] of Object.entries(oldTable.schema)) {
    const oldType = field.type
    if (!table.schema[key]) {
      continue
    }
    const newType = table.schema[key].type
    if (oldType !== newType && !areSwitchableTypes(oldType, newType)) {
      return true
    }
  }
  return false
}

// used for external tables - some of them have static schemas that need
// to be hard set
export function setStaticSchemas(datasource: Datasource, table: Table) {
  // GSheets is a specific case - it only ever has a static primary key
  if (table && datasource.source === SourceName.GOOGLE_SHEETS) {
    table.primary = [GOOGLE_SHEETS_PRIMARY_KEY]
    // if there is an id column, remove it - it should never exist in GSheets
    delete table.schema?.id
  }
  return table
}

const _TableSaveFunctions = TableSaveFunctions
export { _TableSaveFunctions as TableSaveFunctions }