Merge branch 'master' into node-fetch-mockectomy-2

commit 5c490dbe70
Sam Rose 2024-08-02 11:07:48 +01:00, committed by GitHub
17 changed files with 505 additions and 53 deletions

==== File 1 of 17 ====

@@ -1,9 +1,9 @@
 <script>
   import { Select, Icon } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
+  import { canBeDisplayColumn, utils } from "@budibase/shared-core"
   import { API } from "api"
   import { parseFile } from "./utils"
-  import { canBeDisplayColumn } from "@budibase/shared-core"

   export let rows = []
   export let schema = {}
@@ -97,6 +97,8 @@
   let errors = {}
   let selectedColumnTypes = {}
+
+  let rawRows = []

   $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
     return validation[column] && canBeDisplayColumn(schema[column].type)
   })
@@ -106,6 +108,8 @@
   }

   $: {
+    rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
+
     // binding in consumer is causing double renders here
     const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
     if (newValidateHash !== validateHash) {
@@ -122,7 +126,7 @@
     try {
       const response = await parseFile(e)
-      rows = response.rows
+      rawRows = response.rows
       schema = response.schema
       fileName = response.fileName
       selectedColumnTypes = Object.entries(response.schema).reduce(
@@ -188,7 +192,7 @@
     type="file"
     on:change={handleFile}
   />
-  <label for="file-upload" class:uploaded={rows.length > 0}>
+  <label for="file-upload" class:uploaded={rawRows.length > 0}>
    {#if error}
      Error: {error}
    {:else if fileName}
@@ -198,7 +202,7 @@
    {/if}
  </label>
</div>
-{#if rows.length > 0 && !error}
+{#if rawRows.length > 0 && !error}
  <div class="schema-fields">
    {#each Object.entries(schema) as [name, column]}
      <div class="field">

==== File 2 of 17 ====

@@ -78,7 +78,7 @@
       await datasources.fetch()
       await afterSave(table)
     } catch (e) {
-      notifications.error(e)
+      notifications.error(e.message || e)
       // reload in case the table was created
       await tables.fetch()
     }

==== File 3 of 17 ====

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2022-latest
+FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8

 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd

==== File 4 of 17 ====

@@ -17,6 +17,7 @@ import {
   CsvToJsonRequest,
   CsvToJsonResponse,
   FetchTablesResponse,
+  FieldType,
   MigrateRequest,
   MigrateResponse,
   SaveTableRequest,
@@ -33,7 +34,11 @@ import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
 import { cloneDeep, isEqual } from "lodash"
-import { helpers } from "@budibase/shared-core"
+import {
+  helpers,
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"

 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && isExternalTable(table)) {
@@ -166,7 +171,7 @@ export async function validateNewTableImport(

   if (isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
   } else {
     ctx.status = 422
   }
@@ -178,9 +183,21 @@ export async function validateExistingTableImport(
   const { rows, tableId } = ctx.request.body

   let schema = null
+  let protectedColumnNames
   if (tableId) {
     const table = await sdk.tables.getTable(tableId)
     schema = table.schema
+
+    if (!isExternalTable(table)) {
+      schema._id = {
+        name: "_id",
+        type: FieldType.STRING,
+      }
+      protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
+    } else {
+      protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
+    }
   } else {
     ctx.status = 422
     return
@@ -188,7 +205,7 @@ export async function validateExistingTableImport(

   if (tableId && isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, protectedColumnNames)
   } else {
     ctx.status = 422
   }
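
One subtlety in validateExistingTableImport above: for internal tables, _id is injected into the schema and removed from the protected list, so rows exported from Budibase (which carry their CouchDB _id) can be validated for re-import. A sketch of that exemption, using an illustrative subset of the real constants:

    // Illustrative subset; the real list lives in @budibase/shared-core.
    const PROTECTED_INTERNAL_COLUMNS = ["_id", "_rev", "type", "tableId"]

    // Internal tables exempt _id so exported rows can round-trip:
    const protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
    // => ["_rev", "type", "tableId"]; a CSV column named "_rev" still fails validation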

==== File 5 of 17 ====

@@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
 import {
   BulkImportRequest,
   BulkImportResponse,
+  FieldType,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
@@ -69,10 +70,22 @@ export async function bulkImport(
 ) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
-  await handleDataImport(table, {
-    importRows: rows,
-    identifierFields,
-    user: ctx.user,
-  })
+  await handleDataImport(
+    {
+      ...table,
+      schema: {
+        _id: {
+          name: "_id",
+          type: FieldType.STRING,
+        },
+        ...table.schema,
+      },
+    },
+    {
+      importRows: rows,
+      identifierFields,
+      user: ctx.user,
+    }
+  )
   return table
 }
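
Worth noting about the spread order in the bulkImport change: ...table.schema is spread after the injected _id field, so JavaScript's later-spread-wins rule means a real schema column named _id would take precedence; the protected-column checks elsewhere in this commit are what guarantee no such column exists. A tiny demonstration of the precedence rule:

    // Later spreads overwrite earlier keys in object literals.
    const injected = { _id: { name: "_id", type: "string" } }
    const tableSchema = { name: { name: "name", type: "string" } }

    const merged = { ...injected, ...tableSchema }
    // merged keeps the injected _id plus every real column; if tableSchema also
    // defined _id (which the protected-name checks forbid), it would win here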

==== File 6 of 17 ====

@@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser
+  user?: ContextUser,
+  opts?: { keepCouchId: boolean }
 ) {
-  let originalTable = table
-  let finalData: any = []
+  const originalTable = table
+  const finalData: Row[] = []
+  const keepCouchId = !!opts?.keepCouchId

   for (let i = 0; i < data.length; i++) {
     let row = data[i]
-    row._id = generateRowID(table._id!)
+    row._id = (keepCouchId && row._id) || generateRowID(table._id!)
     row.type = "row"
     row.tableId = table._id
@@ -180,7 +182,11 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)

-  let finalData: any = await importToRows(data, table, user)
+  const finalData = await importToRows(data, table, user, {
+    keepCouchId: identifierFields.includes("_id"),
+  })
+
+  let newRowCount = finalData.length

   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {
@@ -203,12 +209,14 @@ export async function handleDataImport(
         if (match) {
           finalItem._id = doc._id
           finalItem._rev = doc._rev
+          newRowCount--
         }
       })
     })
   }

-  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
+  await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
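
The quota accounting above can be summarised: every imported row is written via bulkDocs, but rows matched through identifierFields reuse an existing _id/_rev and are deducted from the count charged against the row quota. A worked example with made-up numbers:

    // Import of 3 rows where 1 matches an existing document by its identifier fields.
    let newRowCount = 3 // finalData.length
    const matched = 1 // rows given an existing _id/_rev in the loop above
    newRowCount -= matched

    // quotas.addRows(newRowCount, ...) charges 2 new rows; bulkDocs still writes
    // all 3, one of them as an update because it carries the existing _rev.
    console.log(newRowCount) // 2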

==== File 7 of 17 ====

@@ -1298,6 +1298,113 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
     })

+    isInternal &&
+      it("should be able to update existing rows on bulkImport", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+          identifierFields: ["_id"],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1")
+        expect(rows[0].description).toEqual("Row 1 description")
+        expect(rows[1].name).toEqual("Row 2")
+        expect(rows[1].description).toEqual("Row 2 description")
+        expect(rows[2].name).toEqual("Updated existing row")
+        expect(rows[2].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 2)
+      })
+
+    isInternal &&
+      it("should create new rows if no identifierFields are provided", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(4)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Existing row")
+        expect(rows[0].description).toEqual("Existing description")
+        expect(rows[1].name).toEqual("Row 1")
+        expect(rows[1].description).toEqual("Row 1 description")
+        expect(rows[2].name).toEqual("Row 2")
+        expect(rows[2].description).toEqual("Row 2 description")
+        expect(rows[3].name).toEqual("Updated existing row")
+        expect(rows[3].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 3)
+      })
+
     // Upserting isn't yet supported in MSSQL, see:
     // https://github.com/knex/knex/pull/6050
     !isMSSQL &&
@@ -1640,6 +1747,23 @@ describe.each([
       table = await config.api.table.save(defaultTable())
     })

+    isInternal &&
+      it("should not export internal couchdb fields", async () => {
+        const existing = await config.api.row.save(table._id!, {
+          name: generator.guid(),
+          description: generator.paragraph(),
+        })
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
+
+        expect(Object.keys(row)).toEqual(["_id", "name", "description"])
+      })
+
+    !isInternal &&
       it("should allow exporting all columns", async () => {
         const existing = await config.api.row.save(table._id!, {})
         const res = await config.api.row.exportRows(table._id!, {
@@ -1650,9 +1774,7 @@ describe.each([
         const results = JSON.parse(res)
         const row = results[0]
         // Ensure all original columns were exported
-        expect(Object.keys(row).length).toBeGreaterThanOrEqual(
-          Object.keys(existing).length
-        )
+        expect(Object.keys(row).length).toBe(Object.keys(existing).length)
         Object.keys(existing).forEach(key => {
           expect(row[key]).toEqual(existing[key])
         })

==== File 8 of 17 ====

@@ -1,4 +1,8 @@
-import { context, events } from "@budibase/backend-core"
+import { context, docIds, events } from "@budibase/backend-core"
+import {
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"
 import {
   AutoFieldSubType,
   BBReferenceFieldSubType,
@@ -10,6 +14,7 @@ import {
   Row,
   SaveTableRequest,
   Table,
+  TableSchema,
   TableSourceType,
   User,
   ViewCalculation,
@@ -118,6 +123,64 @@
         body: basicTable(),
       })
     })

+    it("does not persist the row fields that are not on the table schema", async () => {
+      const table: SaveTableRequest = basicTable()
+      table.rows = [
+        {
+          name: "test-name",
+          description: "test-desc",
+          nonValid: "test-non-valid",
+        },
+      ]
+
+      const res = await config.api.table.save(table)
+      const persistedRows = await config.api.row.search(res._id!)
+
+      expect(persistedRows.rows).toEqual([
+        expect.objectContaining({
+          name: "test-name",
+          description: "test-desc",
+        }),
+      ])
+      expect(persistedRows.rows[0].nonValid).toBeUndefined()
+    })
+
+    it.each(
+      isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+    )(
+      "cannot use protected column names (%s) while importing a table",
+      async columnName => {
+        const table: SaveTableRequest = basicTable()
+        table.rows = [
+          {
+            name: "test-name",
+            description: "test-desc",
+          },
+        ]
+
+        await config.api.table.save(
+          {
+            ...table,
+            schema: {
+              ...table.schema,
+              [columnName]: {
+                name: columnName,
+                type: FieldType.STRING,
+              },
+            },
+          },
+          {
+            status: 400,
+            body: {
+              message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
+              status: 400,
+            },
+          }
+        )
+      }
+    )
   })

   describe("update", () => {
@@ -1022,4 +1085,156 @@
       })
     })
   })

+  describe("import validation", () => {
+    const basicSchema: TableSchema = {
+      id: {
+        type: FieldType.NUMBER,
+        name: "id",
+      },
+      name: {
+        type: FieldType.STRING,
+        name: "name",
+      },
+    }
+
+    describe("validateNewTableImport", () => {
+      it("can validate basic imports", async () => {
+        const result = await config.api.table.validateNewTableImport(
+          [{ id: generator.natural(), name: generator.first() }],
+          basicSchema
+        )
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
+      it.each(
+        isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+      )("don't allow protected names in schema (%s)", async columnName => {
+        const result = await config.api.table.validateNewTableImport(
+          [
+            {
+              id: generator.natural(),
+              name: generator.first(),
+              [columnName]: generator.word(),
+            },
+          ],
+          {
+            ...basicSchema,
+          }
+        )
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {
+            [columnName]: `${columnName} is a protected column name`,
+          },
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+            [columnName]: false,
+          },
+        })
+      })
+
+      isInternal &&
+        it.each(
+          isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+        )("don't allow protected names in the rows (%s)", async columnName => {
+          const result = await config.api.table.validateNewTableImport(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+              },
+            ],
+            {
+              ...basicSchema,
+              [columnName]: {
+                name: columnName,
+                type: FieldType.STRING,
+              },
+            }
+          )
+
+          expect(result).toEqual({
+            allValid: false,
+            errors: {
+              [columnName]: `${columnName} is a protected column name`,
+            },
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              [columnName]: false,
+            },
+          })
+        })
+    })
+
+    describe("validateExistingTableImport", () => {
+      it("can validate basic imports", async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            primary: ["id"],
+            schema: basicSchema,
+          })
+        )
+        const result = await config.api.table.validateExistingTableImport({
+          tableId: table._id,
+          rows: [{ id: generator.natural(), name: generator.first() }],
+        })
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
+      isInternal &&
+        it("can reimport _id fields for internal tables", async () => {
+          const table = await config.api.table.save(
+            tableForDatasource(datasource, {
+              primary: ["id"],
+              schema: basicSchema,
+            })
+          )
+          const result = await config.api.table.validateExistingTableImport({
+            tableId: table._id,
+            rows: [
+              {
+                _id: docIds.generateRowID(table._id!),
+                id: generator.natural(),
+                name: generator.first(),
+              },
+            ],
+          })
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              _id: true,
+              id: true,
+              name: true,
+            },
+          })
+        })
+    })
+  })
 })

==== File 9 of 17 ====

@@ -651,10 +651,10 @@ export async function buildDefaultDocs() {
       return new LinkDocument(
         employeeData.table._id!,
         "Jobs",
-        employeeData.rows[index]._id,
+        employeeData.rows[index]._id!,
         jobData.table._id!,
         "Assigned",
-        jobData.rows[index]._id
+        jobData.rows[index]._id!
       )
     }
   )

==== File 10 of 17 ====

@@ -29,6 +29,7 @@ import { getReadableErrorMessage } from "./base/errorMapping"
 import sqlServer from "mssql"
 import { sql } from "@budibase/backend-core"
 import { ConfidentialClientApplication } from "@azure/msal-node"
+import env from "../environment"

 import { utils } from "@budibase/shared-core"
@@ -246,6 +247,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       options: {
         encrypt,
         enableArithAbort: true,
+        requestTimeout: env.QUERY_THREAD_TIMEOUT,
       },
     }
     if (encrypt) {
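
For context on the requestTimeout line: the underlying tedious driver takes this option in milliseconds (its documented default is 15000), so QUERY_THREAD_TIMEOUT is assumed here to already be a millisecond value. A reduced sketch of the resulting connection config, with placeholder credentials:

    import sqlServer from "mssql"

    // Placeholder values; only the options block mirrors the change above.
    const config: sqlServer.config = {
      user: "sa",
      password: "Passw0rd",
      server: "localhost",
      options: {
        encrypt: false,
        enableArithAbort: true,
        requestTimeout: Number(process.env.QUERY_THREAD_TIMEOUT) || 15000, // ms
      },
    }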

==== File 11 of 17 ====

@@ -11,6 +11,7 @@ import {
   SearchResponse,
   SortType,
   Table,
+  TableSchema,
   User,
 } from "@budibase/types"
 import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
@@ -137,6 +138,9 @@ export async function exportRows(
     let rows: Row[] = []
     let schema = table.schema
     let headers
+
+    result = trimFields(result, schema)
+
     // Filter data to only specified columns if required
     if (columns && columns.length) {
       for (let i = 0; i < result.length; i++) {
@@ -299,3 +303,13 @@ async function getView(db: Database, viewName: string) {
   }
   return viewInfo
 }
+
+function trimFields(rows: Row[], schema: TableSchema) {
+  const allowedFields = ["_id", ...Object.keys(schema)]
+  const result = rows.map(row =>
+    Object.keys(row)
+      .filter(key => allowedFields.includes(key))
+      .reduce((acc, key) => ({ ...acc, [key]: row[key] }), {} as Row)
+  )
+  return result
+}
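
The effect of trimFields on a typical internal row, for reference; the stored document shape below is a simplified assumption of what an internal CouchDB row carries:

    // A stored internal row includes bookkeeping fields alongside the schema columns.
    const stored = {
      _id: "ro_ta_users_abc123",
      _rev: "1-def456",
      type: "row",
      tableId: "ta_users",
      name: "Existing row",
      description: "Existing description",
    }

    // With a schema of { name, description }, trimFields keeps "_id" plus those keys:
    // { _id: "ro_ta_users_abc123", name: "Existing row", description: "Existing description" }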

==== File 12 of 17 ====

@@ -76,7 +76,7 @@ export async function getDatasourceAndQuery(
 }

 export function cleanExportRows(
-  rows: any[],
+  rows: Row[],
   schema: TableSchema,
   format: string,
   columns?: string[],

==== File 13 of 17 ====

@@ -48,9 +48,7 @@ export async function save(
   }

   // check for case sensitivity - we don't want to allow duplicated columns
-  const duplicateColumn = findDuplicateInternalColumns(table, {
-    ignoreProtectedColumnNames: !oldTable && !!opts?.isImport,
-  })
+  const duplicateColumn = findDuplicateInternalColumns(table)
   if (duplicateColumn.length) {
     throw new Error(
       `Column(s) "${duplicateColumn.join(

==== File 14 of 17 ====

@@ -3,9 +3,13 @@ import {
   BulkImportResponse,
   MigrateRequest,
   MigrateResponse,
+  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
+  TableSchema,
+  ValidateTableImportRequest,
+  ValidateTableImportResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
@@ -61,8 +65,38 @@ export class TableAPI extends TestAPI {
     revId: string,
     expectations?: Expectations
   ): Promise<void> => {
-    return await this._delete<void>(`/api/tables/${tableId}/${revId}`, {
+    return await this._delete(`/api/tables/${tableId}/${revId}`, {
       expectations,
     })
   }
+
+  validateNewTableImport = async (
+    rows: Row[],
+    schema: TableSchema,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateNewTableImport`,
+      {
+        body: {
+          rows,
+          schema,
+        },
+        expectations,
+      }
+    )
+  }
+
+  validateExistingTableImport = async (
+    body: ValidateTableImportRequest,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateExistingTableImport`,
+      {
+        body,
+        expectations,
+      }
+    )
+  }
 }

==== File 15 of 17 ====

@@ -41,7 +41,11 @@ export function isRows(rows: any): rows is Rows {
   return Array.isArray(rows) && rows.every(row => typeof row === "object")
 }

-export function validate(rows: Rows, schema: TableSchema): ValidationResults {
+export function validate(
+  rows: Rows,
+  schema: TableSchema,
+  protectedColumnNames: readonly string[]
+): ValidationResults {
   const results: ValidationResults = {
     schemaValidation: {},
     allValid: false,
@@ -49,6 +53,8 @@ export function validate(
     errors: {},
   }

+  protectedColumnNames = protectedColumnNames.map(x => x.toLowerCase())
+
   rows.forEach(row => {
     Object.entries(row).forEach(([columnName, columnData]) => {
       const {
@@ -63,6 +69,12 @@ export function validate(
         return
       }

+      if (protectedColumnNames.includes(columnName.toLowerCase())) {
+        results.schemaValidation[columnName] = false
+        results.errors[columnName] = `${columnName} is a protected column name`
+        return
+      }
+
       // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
       if (typeof columnType !== "string") {
         results.invalidColumns.push(columnName)
@@ -109,6 +121,13 @@ export function validate(
     })
   })

+  for (const schemaField of Object.keys(schema)) {
+    if (protectedColumnNames.includes(schemaField.toLowerCase())) {
+      results.schemaValidation[schemaField] = false
+      results.errors[schemaField] = `${schemaField} is a protected column name`
+    }
+  }
+
   results.allValid =
     Object.values(results.schemaValidation).length > 0 &&
     Object.values(results.schemaValidation).every(column => column)
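
A sketch of how the extended validate behaves when a protected name sneaks into a row, matched case-insensitively; the protected list and expected shape below are illustrative, not exhaustive:

    const results = validate(
      [{ id: 1, name: "Ada", _REV: "oops" }],
      {
        id: { name: "id", type: FieldType.NUMBER },
        name: { name: "name", type: FieldType.STRING },
      },
      ["_id", "_rev"] // illustrative subset of the protected list
    )
    // "_REV" lower-cases to a protected name, so the expected result is roughly:
    // {
    //   allValid: false,
    //   schemaValidation: { id: true, name: true, _REV: false },
    //   errors: { _REV: "_REV is a protected column name" },
    //   invalidColumns: [],
    // }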

==== File 16 of 17 ====

@@ -53,10 +53,7 @@ export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }

-export function findDuplicateInternalColumns(
-  table: Table,
-  opts?: { ignoreProtectedColumnNames: boolean }
-): string[] {
+export function findDuplicateInternalColumns(table: Table): string[] {
   // maintains the case of keys
   const casedKeys = Object.keys(table.schema)
   // get the column names
@@ -72,12 +69,11 @@ export function findDuplicateInternalColumns(table: Table): string[] {
       }
     }
   }
-  if (!opts?.ignoreProtectedColumnNames) {
-    for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
-      if (casedKeys.find(key => key === internalColumn)) {
-        duplicates.push(internalColumn)
-      }
-    }
-  }
+  for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
+    if (casedKeys.find(key => key === internalColumn)) {
+      duplicates.push(internalColumn)
+    }
+  }
   return duplicates
 }

==== File 17 of 17 ====

@@ -67,3 +67,13 @@ export function hasSchema(test: any) {
     Object.keys(test).length > 0
   )
 }
+
+export function trimOtherProps(object: any, allowedProps: string[]) {
+  const result = Object.keys(object)
+    .filter(key => allowedProps.includes(key))
+    .reduce<Record<string, any>>(
+      (acc, key) => ({ ...acc, [key]: object[key] }),
+      {}
+    )
+  return result
+}