Typing fixes - unsure why all of these came up suddenly.
parent 3e2f9dfc4e
commit ee4a042204

@@ -26,6 +26,7 @@ import { fixRow } from "../public/rows"
 import sdk from "../../../sdk"
 import * as exporters from "../view/exporters"
 import { apiFileReturn } from "../../../utilities/fileSystem"
+import { Format } from "../view/exporters"
 export * as views from "./views"
 
 function pickApi(tableId: any) {

@@ -267,7 +268,7 @@ export const exportRows = async (
     async () => {
       const { fileName, content } = await sdk.rows.exportRows({
         tableId,
-        format,
+        format: format as Format,
         rowIds: rows,
         columns,
         query,

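Side note (not part of the diff): the exportRows change above works because the incoming format is only typed as a string off the request, while sdk.rows.exportRows wants the narrower Format type, so the call site asserts the narrower type. A minimal TypeScript sketch of that pattern, assuming Format is a string-based union of export formats (all names here are illustrative):

    // Assumed stand-ins for the real exporter types and SDK call.
    type Format = "csv" | "json"

    function exportRows(opts: { tableId: string; format: Format }): string {
      return `exporting ${opts.tableId} as ${opts.format}`
    }

    const tableId = "tbl_example"
    const format: string = "csv" // e.g. parsed from the query string, so only `string`

    // exportRows({ tableId, format })                // error: string is not assignable to Format
    exportRows({ tableId, format: format as Format }) // compiles; the cast vouches for the value
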
@@ -157,14 +157,14 @@ export async function destroy(ctx: UserCtx) {
   if (row.tableId !== tableId) {
     throw "Supplied tableId doesn't match the row's tableId"
   }
-  const table = await sdk.tables.getTable(row.tableId)
+  const table = await sdk.tables.getTable(tableId)
   // update the row to include full relationships before deleting them
   row = await outputProcessing(table, row, { squash: false })
   // now remove the relationships
   await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_DELETE,
     row,
-    tableId: row.tableId,
+    tableId,
   })
   // remove any attachments that were on the row from object storage
   await cleanupAttachments(table, { row })

@@ -246,7 +246,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
   const linkTables = await sdk.tables.getTables(linkTableIds)
 
   // perform output processing
-  let final = []
+  let final: Promise<Row[]>[] = []
   for (let linkTable of linkTables) {
     const relatedRows = linkedRows.filter(row => row.tableId === linkTable._id)
     // include the row being enriched for performance reasons, don't need to fetch it to include

@@ -149,7 +149,7 @@ export async function finaliseRow(
     await db.put(table)
   } catch (err: any) {
     if (err.status === 409) {
-      const updatedTable = await sdk.tables.getTable(table._id)
+      const updatedTable = await sdk.tables.getTable(table._id!)
       let response = processAutoColumn(null, updatedTable, row, {
         reprocessing: true,
       })

@@ -1,26 +1,9 @@
 import { InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import { context } from "@budibase/backend-core"
-import {
-  Ctx,
-  FieldType,
-  Row,
-  SearchFilters,
-  Table,
-  UserCtx,
-} from "@budibase/types"
-import { FieldTypes, NoEmptyFilterStrings } from "../../../constants"
 import sdk from "../../../sdk"
+import { Ctx, Row, UserCtx } from "@budibase/types"
 
 import validateJs from "validate.js"
-import { cloneDeep } from "lodash/fp"
-
-function isForeignKey(key: string, table: Table) {
-  const relationships = Object.values(table.schema).filter(
-    column => column.type === FieldType.LINK
-  )
-  return relationships.some(relationship => relationship.foreignKey === key)
-}
-
 validateJs.extend(validateJs.validators.datetime, {
   parse: function (value: string) {

@@ -51,7 +34,7 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
   return row
 }
 
-export function getTableId(ctx: Ctx) {
+export function getTableId(ctx: Ctx): string {
   // top priority, use the URL first
   if (ctx.params?.sourceId) {
     return ctx.params.sourceId

@@ -68,112 +51,7 @@ export function getTableId(ctx: Ctx) {
   if (ctx.params?.viewName) {
     return ctx.params.viewName
   }
-}
-
-export async function validate({
-  tableId,
-  row,
-  table,
-}: {
-  tableId?: string
-  row: Row
-  table?: Table
-}) {
-  let fetchedTable: Table
-  if (!table) {
-    fetchedTable = await sdk.tables.getTable(tableId)
-  } else {
-    fetchedTable = table
-  }
-  const errors: any = {}
-  for (let fieldName of Object.keys(fetchedTable.schema)) {
-    const column = fetchedTable.schema[fieldName]
-    const constraints = cloneDeep(column.constraints)
-    const type = column.type
-    // foreign keys are likely to be enriched
-    if (isForeignKey(fieldName, fetchedTable)) {
-      continue
-    }
-    // formulas shouldn't validated, data will be deleted anyway
-    if (type === FieldTypes.FORMULA || column.autocolumn) {
-      continue
-    }
-    // special case for options, need to always allow unselected (empty)
-    if (type === FieldTypes.OPTIONS && constraints?.inclusion) {
-      constraints.inclusion.push(null as any, "")
-    }
-    let res
-
-    // Validate.js doesn't seem to handle array
-    if (type === FieldTypes.ARRAY && row[fieldName]) {
-      if (row[fieldName].length) {
-        if (!Array.isArray(row[fieldName])) {
-          row[fieldName] = row[fieldName].split(",")
-        }
-        row[fieldName].map((val: any) => {
-          if (
-            !constraints?.inclusion?.includes(val) &&
-            constraints?.inclusion?.length !== 0
-          ) {
-            errors[fieldName] = "Field not in list"
-          }
-        })
-      } else if (constraints?.presence && row[fieldName].length === 0) {
-        // non required MultiSelect creates an empty array, which should not throw errors
-        errors[fieldName] = [`${fieldName} is required`]
-      }
-    } else if (
-      (type === FieldTypes.ATTACHMENT || type === FieldTypes.JSON) &&
-      typeof row[fieldName] === "string"
-    ) {
-      // this should only happen if there is an error
-      try {
-        const json = JSON.parse(row[fieldName])
-        if (type === FieldTypes.ATTACHMENT) {
-          if (Array.isArray(json)) {
-            row[fieldName] = json
-          } else {
-            errors[fieldName] = [`Must be an array`]
-          }
-        }
-      } catch (err) {
-        errors[fieldName] = [`Contains invalid JSON`]
-      }
-    } else {
-      res = validateJs.single(row[fieldName], constraints)
-    }
-    if (res) errors[fieldName] = res
-  }
-  return { valid: Object.keys(errors).length === 0, errors }
-}
-
-// don't do a pure falsy check, as 0 is included
-// https://github.com/Budibase/budibase/issues/10118
-export function removeEmptyFilters(filters: SearchFilters) {
-  for (let filterField of NoEmptyFilterStrings) {
-    if (!filters[filterField]) {
-      continue
-    }
-
-    for (let filterType of Object.keys(filters)) {
-      if (filterType !== filterField) {
-        continue
-      }
-      // don't know which one we're checking, type could be anything
-      const value = filters[filterType] as unknown
-      if (typeof value === "object") {
-        for (let [key, value] of Object.entries(
-          filters[filterType] as object
-        )) {
-          if (value == null || value === "") {
-            // @ts-ignore
-            delete filters[filterField][key]
-          }
-        }
-      }
-    }
-  }
-  return filters
+  throw new Error("Unable to find table ID in request")
 }
 
 export function isUserMetadataTable(tableId: string) {

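Aside (not part of the diff): the `: string` return type added to getTableId only type-checks because the function now ends in a throw instead of falling off the end, which would implicitly return undefined. A minimal sketch of that behaviour, using an illustrative parameter shape rather than the real Ctx:

    // Every path either returns a string or throws, so `: string` is satisfied.
    function getTableIdSketch(params: { tableId?: string; viewName?: string }): string {
      if (params.tableId) {
        return params.tableId
      }
      if (params.viewName) {
        return params.viewName
      }
      // without this line the function could fall through, so the inferred
      // return type would be string | undefined and the annotation an error
      throw new Error("Unable to find table ID in request")
    }
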
@@ -14,6 +14,7 @@ import {
   Table,
   TableResponse,
   UserCtx,
+  Row,
 } from "@budibase/types"
 import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"

@@ -129,8 +130,7 @@ export async function validateNewTableImport(ctx: UserCtx) {
 }
 
 export async function validateExistingTableImport(ctx: UserCtx) {
-  const { rows, tableId }: { rows: unknown; tableId: unknown } =
-    ctx.request.body
+  const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body
 
   let schema = null
   if (tableId) {

@@ -72,7 +72,10 @@ export async function getLinkDocuments(args: {
 
   // filter down to just the required field name
   if (fieldName) {
-    linkRows = linkRows.filter(link => link.value.fieldName === fieldName)
+    linkRows = linkRows.filter(link => {
+      const value = link.value as LinkDocumentValue
+      return value.fieldName === fieldName
+    })
   }
   // return docs if docs requested, otherwise just the value information
   if (includeDocs) {

@@ -68,12 +68,15 @@ export async function validate({
   valid: boolean
   errors: Record<string, any>
 }> {
-  let fetchedTable: Table
-  if (!table) {
+  let fetchedTable: Table | undefined
+  if (!table && tableId) {
     fetchedTable = await sdk.tables.getTable(tableId)
-  } else {
+  } else if (table) {
     fetchedTable = table
   }
+  if (fetchedTable === undefined) {
+    throw new Error("Unable to fetch table for validation")
+  }
   const errors: Record<string, any> = {}
   for (let fieldName of Object.keys(fetchedTable.schema)) {
     const column = fetchedTable.schema[fieldName]

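For context (illustration only): under strictNullChecks the optional `tableId?: string` cannot be passed straight to getTable, and a local that is only assigned inside conditional branches stays possibly undefined afterwards; the hunk resolves both by typing the local as `Table | undefined` and throwing if it was never assigned. A simplified sketch of the same narrowing, with stand-in types:

    interface Table {
      schema: Record<string, unknown>
    }

    // Stand-in for sdk.tables.getTable, which requires a definite id.
    async function getTable(tableId: string): Promise<Table> {
      return { schema: {} }
    }

    async function resolveTable(tableId?: string, table?: Table): Promise<Table> {
      let fetched: Table | undefined
      if (!table && tableId) {
        fetched = await getTable(tableId) // ok: tableId is narrowed to string here
      } else if (table) {
        fetched = table
      }
      if (fetched === undefined) {
        // without this guard, every later use of `fetched` is "possibly undefined"
        throw new Error("Unable to fetch table for validation")
      }
      return fetched
    }
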
@@ -32,21 +32,23 @@ async function getAllInternalTables(db?: Database): Promise<Table[]> {
   if (!db) {
     db = context.getAppDB()
   }
-  const internalTables = await db.allDocs(
+  const internalTableDocs = await db.allDocs<Table[]>(
     getTableParams(null, {
       include_docs: true,
     })
   )
-  return processInternalTables(internalTables)
+  return processInternalTables(internalTableDocs)
 }
 
 async function getAllExternalTables(): Promise<Table[]> {
   const datasources = await sdk.datasources.fetch({ enriched: true })
   const allEntities = datasources.map(datasource => datasource.entities)
-  let final = []
+  let final: Table[] = []
   for (let entities of allEntities) {
     if (entities) {
       final = final.concat(Object.values(entities))
     }
   }
   return final
 }
 

@@ -61,7 +63,7 @@ async function getAllTables() {
 async function getTables(tableIds: string[]): Promise<Table[]> {
   const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
     internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
-  let tables = []
+  let tables: Table[] = []
   if (externalTableIds.length) {
     const externalTables = await getAllExternalTables()
     tables = tables.concat(

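A note on the pattern here (not part of the diff): the `let tables = []` and `let final = []` annotations added in these hunks, like `let final: Promise<Row[]>[] = []` earlier, follow from noImplicitAny: an empty array literal carries no element type of its own, so reading the variable before the compiler can pin one down can report an implicit any[]. Annotating the declaration is the usual fix, for example:

    interface Table {
      name: string
    }

    function collect(groups: (Table[] | undefined)[]): Table[] {
      // let final = []          // under noImplicitAny the reads below can error:
      //                         // "Variable 'final' implicitly has an 'any[]' type"
      let final: Table[] = []    // explicit element type, nothing left to infer
      for (const group of groups) {
        if (group) {
          final = final.concat(group)
        }
      }
      return final
    }
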
@@ -72,8 +74,10 @@ async function getTables(tableIds: string[]): Promise<Table[]> {
   }
   if (internalTableIds.length) {
     const db = context.getAppDB()
-    const internalTables = await db.allDocs(getMultiIDParams(internalTableIds))
-    tables = tables.concat(processInternalTables(internalTables))
+    const internalTableDocs = await db.allDocs<Table[]>(
+      getMultiIDParams(internalTableIds)
+    )
+    tables = tables.concat(processInternalTables(internalTableDocs))
   }
   return tables
 }

@@ -101,7 +105,7 @@ async function getTable(tableId: string): Promise<Table> {
   if (isExternalTable(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
     const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId, tableName)
+    const table = await getExternalTable(datasourceId!, tableName!)
     return { ...table, sql: isSQL(datasource) }
   } else {
     return db.get<Table>(tableId)

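One last aside (not part of the diff): the non-null assertions above (`datasourceId!`, `tableName!`, and `table._id!` earlier) are the pattern for passing values typed as possibly undefined into parameters that require a definite string; `!` removes undefined from the type without adding a runtime check. A small sketch with simplified, assumed stand-ins for the real helpers:

    // Assumed simplified shape; the real breakExternalTableId may differ.
    function breakExternalTableId(tableId: string): {
      datasourceId?: string
      tableName?: string
    } {
      const [datasourceId, tableName] = tableId.split("__")
      return { datasourceId, tableName }
    }

    function getExternalTable(datasourceId: string, tableName: string): string {
      return `${datasourceId}/${tableName}`
    }

    const { datasourceId, tableName } = breakExternalTableId("ds_example__users")
    // getExternalTable(datasourceId, tableName) // error: string | undefined is not assignable to string
    getExternalTable(datasourceId!, tableName!)  // `!` asserts presence; no runtime check is performed
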