More validation around datetime columns and bulk importing.
parent d06b22d4b8
commit e55874a698
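In short, bulk import now rejects malformed date strings for DATETIME columns instead of silently coercing them: date-only values must be "YYYY-MM-DD", full datetimes must be ISO strings, and time-only values must be "HH:MM:SS"; anything else fails with a 400 (malformed time-only values currently surface as a 500 from an earlier check in the stack). Below is a minimal sketch of the accepted formats; the regexes are illustrative stand-ins for the real sql.utils.isValidISODateString / sql.utils.isValidTime helpers from @budibase/backend-core, whose implementations are not part of this diff.

// Sketch only: approximates the formats the new validation accepts. The actual
// checks are sql.utils.isValidISODateString / sql.utils.isValidTime (see the
// parse() hunk below).
type DateTimeSchema = { dateOnly?: boolean; timeOnly?: boolean }

function isAcceptedDateValue(value: string, schema: DateTimeSchema): boolean {
  if (schema.timeOnly) {
    return /^\d{2}:\d{2}:\d{2}$/.test(value) // "HH:MM:SS"
  }
  if (schema.dateOnly) {
    return /^\d{4}-\d{2}-\d{2}$/.test(value) // "YYYY-MM-DD"
  }
  // ISO datetime, e.g. "YYYY-MM-DDTHH:MM:SSZ"
  return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,3})?Z$/.test(value)
}

isAcceptedDateValue("01.02.2024", { dateOnly: true }) // false → bulkImport responds 400
isAcceptedDateValue("2024-02-01", { dateOnly: true }) // true
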
@@ -2043,6 +2043,104 @@ if (descriptions.length) {
       expect(rows[0].name).toEqual("Clare updated")
       expect(rows[1].name).toEqual("Jeff updated")
     })

+    it("should reject bulkImport date only fields with wrong format", async () => {
+      const table = await config.api.table.save(
+        saveTableRequest({
+          schema: {
+            date: {
+              type: FieldType.DATETIME,
+              dateOnly: true,
+              name: "date",
+            },
+          },
+        })
+      )
+
+      await config.api.row.bulkImport(
+        table._id!,
+        {
+          rows: [
+            {
+              date: "01.02.2024",
+            },
+          ],
+        },
+        {
+          status: 400,
+          body: {
+            message:
+              'Invalid format for field "date": "01.02.2024". Date-only fields must be in the format "YYYY-MM-DD".',
+          },
+        }
+      )
+    })
+
+    it("should reject bulkImport date time fields with wrong format", async () => {
+      const table = await config.api.table.save(
+        saveTableRequest({
+          schema: {
+            date: {
+              type: FieldType.DATETIME,
+              name: "date",
+            },
+          },
+        })
+      )
+
+      await config.api.row.bulkImport(
+        table._id!,
+        {
+          rows: [
+            {
+              date: "01.02.2024",
+            },
+          ],
+        },
+        {
+          status: 400,
+          body: {
+            message:
+              'Invalid format for field "date": "01.02.2024". Datetime fields must be in ISO format, e.g. "YYYY-MM-DDTHH:MM:SSZ".',
+          },
+        }
+      )
+    })
+
+    it("should reject bulkImport time fields with wrong format", async () => {
+      const table = await config.api.table.save(
+        saveTableRequest({
+          schema: {
+            time: {
+              type: FieldType.DATETIME,
+              timeOnly: true,
+              name: "time",
+            },
+          },
+        })
+      )
+
+      await config.api.row.bulkImport(
+        table._id!,
+        {
+          rows: [
+            {
+              time: "3pm",
+            },
+          ],
+        },
+        {
+          // This isn't ideal atm because it doesn't line up with datetime
+          // and date only error messages, but there's a check earlier in
+          // the stack than when those errors happen that produces this one,
+          // and it's not easy to bypass. The key is that this fails.
+          status: 500,
+          body: {
+            message: 'Invalid date value: "3pm"',
+          },
+        }
+      )
+    })
   })

   describe("enrich", () => {

@@ -1690,9 +1690,6 @@ if (descriptions.length) {
     describe.each([true, false])(
       "search with timestamp: %s",
       searchWithTimestamp => {
-        describe.each(["/", "-"])(
-          "date separator: %s",
-          separator => {
            const SAVE_SUFFIX = saveWithTimestamp
              ? "T00:00:00.000Z"
              : ""

@@ -1833,7 +1830,7 @@ if (descriptions.length) {
         })
       })

-      describe.only("sort", () => {
+      describe("sort", () => {
         it("sorts ascending", async () => {
           await expectSearch({
             query: {},

@@ -1890,8 +1887,6 @@ if (descriptions.length) {
           )
         }
       )
-      }
-    )
     })

     isInternal &&

@@ -7,7 +7,7 @@ import {
   Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
-import { db } from "@budibase/backend-core"
+import { db, HTTPError, sql } from "@budibase/backend-core"

 type Rows = Array<Row>

@@ -180,10 +180,26 @@ export function parse(rows: Rows, table: Table): Rows {
       !columnSchema.timeOnly &&
       !columnSchema.dateOnly
     ) {
-      // If provided must be a valid date
+      if (columnData && !columnSchema.timeOnly) {
+        if (!sql.utils.isValidISODateString(columnData)) {
+          let message = `Invalid format for field "${columnName}": "${columnData}".`
+          if (columnSchema.dateOnly) {
+            message += ` Date-only fields must be in the format "YYYY-MM-DD".`
+          } else {
+            message += ` Datetime fields must be in ISO format, e.g. "YYYY-MM-DDTHH:MM:SSZ".`
+          }
+          throw new HTTPError(message, 400)
+        }
+      }
+      if (columnData && columnSchema.timeOnly) {
+        if (!sql.utils.isValidTime(columnData)) {
+          throw new HTTPError(
+            `Invalid format for field "${columnName}": "${columnData}". Time-only fields must be in the format "HH:MM:SS".`,
+            400
+          )
+        }
+      }
       parsedRow[columnName] = columnData
-        ? new Date(columnData).toISOString()
-        : columnData
     } else if (
       columnType === FieldType.JSON &&
       typeof columnData === "string"