Merge branch 'master' into node-fetch-mockectomy-2
commit 5c490dbe70
@@ -1,9 +1,9 @@
 <script>
   import { Select, Icon } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
+  import { canBeDisplayColumn, utils } from "@budibase/shared-core"
   import { API } from "api"
   import { parseFile } from "./utils"
-  import { canBeDisplayColumn } from "@budibase/shared-core"

   export let rows = []
   export let schema = {}
@@ -97,6 +97,8 @@
   let errors = {}
   let selectedColumnTypes = {}

+  let rawRows = []
+
   $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
     return validation[column] && canBeDisplayColumn(schema[column].type)
   })
@@ -106,6 +108,8 @@
   }

   $: {
+    rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
+
     // binding in consumer is causing double renders here
     const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
     if (newValidateHash !== validateHash) {
@@ -122,7 +126,7 @@

     try {
       const response = await parseFile(e)
-      rows = response.rows
+      rawRows = response.rows
       schema = response.schema
       fileName = response.fileName
       selectedColumnTypes = Object.entries(response.schema).reduce(
@@ -188,7 +192,7 @@
     type="file"
     on:change={handleFile}
   />
-  <label for="file-upload" class:uploaded={rows.length > 0}>
+  <label for="file-upload" class:uploaded={rawRows.length > 0}>
    {#if error}
      Error: {error}
    {:else if fileName}
@@ -198,7 +202,7 @@
    {/if}
  </label>
 </div>
-{#if rows.length > 0 && !error}
+{#if rawRows.length > 0 && !error}
  <div class="schema-fields">
    {#each Object.entries(schema) as [name, column]}
      <div class="field">

@@ -78,7 +78,7 @@
       await datasources.fetch()
       await afterSave(table)
     } catch (e) {
-      notifications.error(e)
+      notifications.error(e.message || e)
       // reload in case the table was created
       await tables.fetch()
     }

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2022-latest
+FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8

 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd

@@ -17,6 +17,7 @@ import {
   CsvToJsonRequest,
   CsvToJsonResponse,
   FetchTablesResponse,
+  FieldType,
   MigrateRequest,
   MigrateResponse,
   SaveTableRequest,
@@ -33,7 +34,11 @@ import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
 import { cloneDeep, isEqual } from "lodash"
-import { helpers } from "@budibase/shared-core"
+import {
+  helpers,
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"

 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && isExternalTable(table)) {
@@ -166,7 +171,7 @@ export async function validateNewTableImport(

   if (isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
   } else {
     ctx.status = 422
   }
@@ -178,9 +183,21 @@ export async function validateExistingTableImport(
   const { rows, tableId } = ctx.request.body

   let schema = null
+
+  let protectedColumnNames
   if (tableId) {
     const table = await sdk.tables.getTable(tableId)
     schema = table.schema
+
+    if (!isExternalTable(table)) {
+      schema._id = {
+        name: "_id",
+        type: FieldType.STRING,
+      }
+      protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
+    } else {
+      protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
+    }
   } else {
     ctx.status = 422
     return
@@ -188,7 +205,7 @@ export async function validateExistingTableImport(

   if (tableId && isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, protectedColumnNames)
   } else {
     ctx.status = 422
   }

@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
|
|||
import {
|
||||
BulkImportRequest,
|
||||
BulkImportResponse,
|
||||
FieldType,
|
||||
RenameColumn,
|
||||
SaveTableRequest,
|
||||
SaveTableResponse,
|
||||
|
@ -69,10 +70,22 @@ export async function bulkImport(
|
|||
) {
|
||||
const table = await sdk.tables.getTable(ctx.params.tableId)
|
||||
const { rows, identifierFields } = ctx.request.body
|
||||
await handleDataImport(table, {
|
||||
importRows: rows,
|
||||
identifierFields,
|
||||
user: ctx.user,
|
||||
})
|
||||
await handleDataImport(
|
||||
{
|
||||
...table,
|
||||
schema: {
|
||||
_id: {
|
||||
name: "_id",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
...table.schema,
|
||||
},
|
||||
},
|
||||
{
|
||||
importRows: rows,
|
||||
identifierFields,
|
||||
user: ctx.user,
|
||||
}
|
||||
)
|
||||
return table
|
||||
}
|
||||
|
|
|
@@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser
+  user?: ContextUser,
+  opts?: { keepCouchId: boolean }
 ) {
-  let originalTable = table
-  let finalData: any = []
+  const originalTable = table
+  const finalData: Row[] = []
+  const keepCouchId = !!opts?.keepCouchId
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
-    row._id = generateRowID(table._id!)
+    row._id = (keepCouchId && row._id) || generateRowID(table._id!)
     row.type = "row"
     row.tableId = table._id

@@ -180,7 +182,11 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)

-  let finalData: any = await importToRows(data, table, user)
+  const finalData = await importToRows(data, table, user, {
+    keepCouchId: identifierFields.includes("_id"),
+  })
+
+  let newRowCount = finalData.length

   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {
@@ -203,12 +209,14 @@ export async function handleDataImport(
         if (match) {
           finalItem._id = doc._id
           finalItem._rev = doc._rev
+
+          newRowCount--
         }
       })
     })
   }

-  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
+  await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })

@@ -1298,6 +1298,113 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
     })

+    isInternal &&
+      it("should be able to update existing rows on bulkImport", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+          identifierFields: ["_id"],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1")
+        expect(rows[0].description).toEqual("Row 1 description")
+        expect(rows[1].name).toEqual("Row 2")
+        expect(rows[1].description).toEqual("Row 2 description")
+        expect(rows[2].name).toEqual("Updated existing row")
+        expect(rows[2].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 2)
+      })
+
+    isInternal &&
+      it("should create new rows if not identifierFields are provided", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(4)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Existing row")
+        expect(rows[0].description).toEqual("Existing description")
+        expect(rows[1].name).toEqual("Row 1")
+        expect(rows[1].description).toEqual("Row 1 description")
+        expect(rows[2].name).toEqual("Row 2")
+        expect(rows[2].description).toEqual("Row 2 description")
+        expect(rows[3].name).toEqual("Updated existing row")
+        expect(rows[3].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 3)
+      })
+
     // Upserting isn't yet supported in MSSQL, see:
     // https://github.com/knex/knex/pull/6050
     !isMSSQL &&
@@ -1640,23 +1747,38 @@ describe.each([
       table = await config.api.table.save(defaultTable())
     })

-    it("should allow exporting all columns", async () => {
-      const existing = await config.api.row.save(table._id!, {})
-      const res = await config.api.row.exportRows(table._id!, {
-        rows: [existing._id!],
-      })
-      const results = JSON.parse(res)
-      expect(results.length).toEqual(1)
-      const row = results[0]
+    isInternal &&
+      it("should not export internal couchdb fields", async () => {
+        const existing = await config.api.row.save(table._id!, {
+          name: generator.guid(),
+          description: generator.paragraph(),
+        })
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]

-      // Ensure all original columns were exported
-      expect(Object.keys(row).length).toBeGreaterThanOrEqual(
-        Object.keys(existing).length
-      )
-      Object.keys(existing).forEach(key => {
-        expect(row[key]).toEqual(existing[key])
-      })
-    })
+        expect(Object.keys(row)).toEqual(["_id", "name", "description"])
+      })
+
+    !isInternal &&
+      it("should allow exporting all columns", async () => {
+        const existing = await config.api.row.save(table._id!, {})
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
+
+        // Ensure all original columns were exported
+        expect(Object.keys(row).length).toBe(Object.keys(existing).length)
+        Object.keys(existing).forEach(key => {
+          expect(row[key]).toEqual(existing[key])
+        })
+      })

     it("should allow exporting only certain columns", async () => {
       const existing = await config.api.row.save(table._id!, {})

@@ -1,4 +1,8 @@
-import { context, events } from "@budibase/backend-core"
+import { context, docIds, events } from "@budibase/backend-core"
+import {
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"
 import {
   AutoFieldSubType,
   BBReferenceFieldSubType,
@@ -10,6 +14,7 @@ import {
   Row,
   SaveTableRequest,
   Table,
+  TableSchema,
   TableSourceType,
   User,
   ViewCalculation,
@@ -118,6 +123,64 @@ describe.each([
         body: basicTable(),
       })
     })
+
+    it("does not persist the row fields that are not on the table schema", async () => {
+      const table: SaveTableRequest = basicTable()
+      table.rows = [
+        {
+          name: "test-name",
+          description: "test-desc",
+          nonValid: "test-non-valid",
+        },
+      ]
+
+      const res = await config.api.table.save(table)
+
+      const persistedRows = await config.api.row.search(res._id!)
+
+      expect(persistedRows.rows).toEqual([
+        expect.objectContaining({
+          name: "test-name",
+          description: "test-desc",
+        }),
+      ])
+      expect(persistedRows.rows[0].nonValid).toBeUndefined()
+    })
+
+    it.each(
+      isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+    )(
+      "cannot use protected column names (%s) while importing a table",
+      async columnName => {
+        const table: SaveTableRequest = basicTable()
+        table.rows = [
+          {
+            name: "test-name",
+            description: "test-desc",
+          },
+        ]
+
+        await config.api.table.save(
+          {
+            ...table,
+            schema: {
+              ...table.schema,
+              [columnName]: {
+                name: columnName,
+                type: FieldType.STRING,
+              },
+            },
+          },
+          {
+            status: 400,
+            body: {
+              message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
+              status: 400,
+            },
+          }
+        )
+      }
+    )
   })

   describe("update", () => {
@@ -1022,4 +1085,156 @@ describe.each([
       })
     })
   })
+
+  describe("import validation", () => {
+    const basicSchema: TableSchema = {
+      id: {
+        type: FieldType.NUMBER,
+        name: "id",
+      },
+      name: {
+        type: FieldType.STRING,
+        name: "name",
+      },
+    }
+
+    describe("validateNewTableImport", () => {
+      it("can validate basic imports", async () => {
+        const result = await config.api.table.validateNewTableImport(
+          [{ id: generator.natural(), name: generator.first() }],
+          basicSchema
+        )
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
+      it.each(
+        isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+      )("don't allow protected names in schema (%s)", async columnName => {
+        const result = await config.api.table.validateNewTableImport(
+          [
+            {
+              id: generator.natural(),
+              name: generator.first(),
+              [columnName]: generator.word(),
+            },
+          ],
+          {
+            ...basicSchema,
+          }
+        )
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {
+            [columnName]: `${columnName} is a protected column name`,
+          },
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+            [columnName]: false,
+          },
+        })
+      })
+
+      isInternal &&
+        it.each(
+          isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
+        )("don't allow protected names in the rows (%s)", async columnName => {
+          const result = await config.api.table.validateNewTableImport(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+              },
+            ],
+            {
+              ...basicSchema,
+              [columnName]: {
+                name: columnName,
+                type: FieldType.STRING,
+              },
+            }
+          )
+
+          expect(result).toEqual({
+            allValid: false,
+            errors: {
+              [columnName]: `${columnName} is a protected column name`,
+            },
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              [columnName]: false,
+            },
+          })
+        })
+    })
+
+    describe("validateExistingTableImport", () => {
+      it("can validate basic imports", async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            primary: ["id"],
+            schema: basicSchema,
+          })
+        )
+        const result = await config.api.table.validateExistingTableImport({
+          tableId: table._id,
+          rows: [{ id: generator.natural(), name: generator.first() }],
+        })
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
+      isInternal &&
+        it("can reimport _id fields for internal tables", async () => {
+          const table = await config.api.table.save(
+            tableForDatasource(datasource, {
+              primary: ["id"],
+              schema: basicSchema,
+            })
+          )
+          const result = await config.api.table.validateExistingTableImport({
+            tableId: table._id,
+            rows: [
+              {
+                _id: docIds.generateRowID(table._id!),
+                id: generator.natural(),
+                name: generator.first(),
+              },
+            ],
+          })
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              _id: true,
+              id: true,
+              name: true,
+            },
+          })
+        })
+    })
+  })
 })

@@ -651,10 +651,10 @@ export async function buildDefaultDocs() {
       return new LinkDocument(
         employeeData.table._id!,
         "Jobs",
-        employeeData.rows[index]._id,
+        employeeData.rows[index]._id!,
         jobData.table._id!,
         "Assigned",
-        jobData.rows[index]._id
+        jobData.rows[index]._id!
       )
     }
   )

@ -29,6 +29,7 @@ import { getReadableErrorMessage } from "./base/errorMapping"
|
|||
import sqlServer from "mssql"
|
||||
import { sql } from "@budibase/backend-core"
|
||||
import { ConfidentialClientApplication } from "@azure/msal-node"
|
||||
import env from "../environment"
|
||||
|
||||
import { utils } from "@budibase/shared-core"
|
||||
|
||||
|
@ -246,6 +247,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
|
|||
options: {
|
||||
encrypt,
|
||||
enableArithAbort: true,
|
||||
requestTimeout: env.QUERY_THREAD_TIMEOUT,
|
||||
},
|
||||
}
|
||||
if (encrypt) {
|
||||
|
|
|
@@ -11,6 +11,7 @@ import {
   SearchResponse,
   SortType,
   Table,
+  TableSchema,
   User,
 } from "@budibase/types"
 import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
@@ -137,6 +138,9 @@ export async function exportRows(
   let rows: Row[] = []
   let schema = table.schema
   let headers
+
+  result = trimFields(result, schema)
+
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.length; i++) {
@@ -299,3 +303,13 @@ async function getView(db: Database, viewName: string) {
   }
   return viewInfo
 }
+
+function trimFields(rows: Row[], schema: TableSchema) {
+  const allowedFields = ["_id", ...Object.keys(schema)]
+  const result = rows.map(row =>
+    Object.keys(row)
+      .filter(key => allowedFields.includes(key))
+      .reduce((acc, key) => ({ ...acc, [key]: row[key] }), {} as Row)
+  )
+  return result
+}

@@ -76,7 +76,7 @@ export async function getDatasourceAndQuery(
 }

 export function cleanExportRows(
-  rows: any[],
+  rows: Row[],
   schema: TableSchema,
   format: string,
   columns?: string[],

@@ -48,9 +48,7 @@ export async function save(
   }

   // check for case sensitivity - we don't want to allow duplicated columns
-  const duplicateColumn = findDuplicateInternalColumns(table, {
-    ignoreProtectedColumnNames: !oldTable && !!opts?.isImport,
-  })
+  const duplicateColumn = findDuplicateInternalColumns(table)
   if (duplicateColumn.length) {
     throw new Error(
       `Column(s) "${duplicateColumn.join(

@@ -3,9 +3,13 @@
   BulkImportResponse,
   MigrateRequest,
   MigrateResponse,
+  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
+  TableSchema,
+  ValidateTableImportRequest,
+  ValidateTableImportResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"

@@ -61,8 +65,38 @@
     revId: string,
     expectations?: Expectations
   ): Promise<void> => {
-    return await this._delete<void>(`/api/tables/${tableId}/${revId}`, {
+    return await this._delete(`/api/tables/${tableId}/${revId}`, {
       expectations,
     })
   }
+
+  validateNewTableImport = async (
+    rows: Row[],
+    schema: TableSchema,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateNewTableImport`,
+      {
+        body: {
+          rows,
+          schema,
+        },
+        expectations,
+      }
+    )
+  }
+
+  validateExistingTableImport = async (
+    body: ValidateTableImportRequest,
+    expectations?: Expectations
+  ): Promise<ValidateTableImportResponse> => {
+    return await this._post<ValidateTableImportResponse>(
+      `/api/tables/validateExistingTableImport`,
+      {
+        body,
+        expectations,
+      }
+    )
+  }
 }

@@ -41,7 +41,11 @@ export function isRows(rows: any): rows is Rows {
   return Array.isArray(rows) && rows.every(row => typeof row === "object")
 }

-export function validate(rows: Rows, schema: TableSchema): ValidationResults {
+export function validate(
+  rows: Rows,
+  schema: TableSchema,
+  protectedColumnNames: readonly string[]
+): ValidationResults {
   const results: ValidationResults = {
     schemaValidation: {},
     allValid: false,
@@ -49,6 +53,8 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
     errors: {},
   }

+  protectedColumnNames = protectedColumnNames.map(x => x.toLowerCase())
+
   rows.forEach(row => {
     Object.entries(row).forEach(([columnName, columnData]) => {
       const {
@@ -63,6 +69,12 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
         return
       }

+      if (protectedColumnNames.includes(columnName.toLowerCase())) {
+        results.schemaValidation[columnName] = false
+        results.errors[columnName] = `${columnName} is a protected column name`
+        return
+      }
+
       // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
       if (typeof columnType !== "string") {
         results.invalidColumns.push(columnName)
@@ -109,6 +121,13 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
     })
   })

+  for (const schemaField of Object.keys(schema)) {
+    if (protectedColumnNames.includes(schemaField.toLowerCase())) {
+      results.schemaValidation[schemaField] = false
+      results.errors[schemaField] = `${schemaField} is a protected column name`
+    }
+  }
+
   results.allValid =
     Object.values(results.schemaValidation).length > 0 &&
     Object.values(results.schemaValidation).every(column => column)

@@ -53,10 +53,7 @@ export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }

-export function findDuplicateInternalColumns(
-  table: Table,
-  opts?: { ignoreProtectedColumnNames: boolean }
-): string[] {
+export function findDuplicateInternalColumns(table: Table): string[] {
   // maintains the case of keys
   const casedKeys = Object.keys(table.schema)
   // get the column names
@@ -72,11 +69,10 @@
       }
     }
   }
-  if (!opts?.ignoreProtectedColumnNames) {
-    for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
-      if (casedKeys.find(key => key === internalColumn)) {
-        duplicates.push(internalColumn)
-      }
-    }
-  }
+
+  for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
+    if (casedKeys.find(key => key === internalColumn)) {
+      duplicates.push(internalColumn)
+    }
+  }
   return duplicates

@@ -67,3 +67,13 @@ export function hasSchema(test: any) {
     Object.keys(test).length > 0
   )
 }
+
+export function trimOtherProps(object: any, allowedProps: string[]) {
+  const result = Object.keys(object)
+    .filter(key => allowedProps.includes(key))
+    .reduce<Record<string, any>>(
+      (acc, key) => ({ ...acc, [key]: object[key] }),
+      {}
+    )
+  return result
+}