More validation around datetime columns and bulk importing.
parent d06b22d4b8
commit e55874a698
@@ -2043,6 +2043,104 @@ if (descriptions.length) {
      expect(rows[0].name).toEqual("Clare updated")
      expect(rows[1].name).toEqual("Jeff updated")
    })

    it("should reject bulkImport date only fields with wrong format", async () => {
      const table = await config.api.table.save(
        saveTableRequest({
          schema: {
            date: {
              type: FieldType.DATETIME,
              dateOnly: true,
              name: "date",
            },
          },
        })
      )

      await config.api.row.bulkImport(
        table._id!,
        {
          rows: [
            {
              date: "01.02.2024",
            },
          ],
        },
        {
          status: 400,
          body: {
            message:
              'Invalid format for field "date": "01.02.2024". Date-only fields must be in the format "YYYY-MM-DD".',
          },
        }
      )
    })

    it("should reject bulkImport date time fields with wrong format", async () => {
      const table = await config.api.table.save(
        saveTableRequest({
          schema: {
            date: {
              type: FieldType.DATETIME,
              name: "date",
            },
          },
        })
      )

      await config.api.row.bulkImport(
        table._id!,
        {
          rows: [
            {
              date: "01.02.2024",
            },
          ],
        },
        {
          status: 400,
          body: {
            message:
              'Invalid format for field "date": "01.02.2024". Datetime fields must be in ISO format, e.g. "YYYY-MM-DDTHH:MM:SSZ".',
          },
        }
      )
    })

    it("should reject bulkImport time fields with wrong format", async () => {
      const table = await config.api.table.save(
        saveTableRequest({
          schema: {
            time: {
              type: FieldType.DATETIME,
              timeOnly: true,
              name: "time",
            },
          },
        })
      )

      await config.api.row.bulkImport(
        table._id!,
        {
          rows: [
            {
              time: "3pm",
            },
          ],
        },
        {
          // This isn't ideal at the moment because it doesn't line up with the
          // datetime and date-only error messages, but the check that produces
          // this error happens earlier in the stack than the ones that produce
          // those, and it's not easy to bypass. The key point is that this
          // import fails.
          status: 500,
          body: {
            message: 'Invalid date value: "3pm"',
          },
        }
      )
    })
  })

  describe("enrich", () => {

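For contrast, a minimal happy-path sketch (not part of the commit) of an import the new date-only check should accept, reusing the table and API helpers from the tests above; it assumes the expectations argument of config.api.row.bulkImport can simply be omitted:

// Hypothetical happy-path import for the date-only table defined above:
// a "YYYY-MM-DD" string matches the format named in the 400 error message,
// so no rejection is expected.
await config.api.row.bulkImport(table._id!, {
  rows: [{ date: "2024-02-01" }],
})
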
@@ -1690,204 +1690,199 @@ if (descriptions.length) {
  describe.each([true, false])(
    "search with timestamp: %s",
    searchWithTimestamp => {
      describe.each(["/", "-"])(
        "date separator: %s",
        separator => {
          const SAVE_SUFFIX = saveWithTimestamp
            ? "T00:00:00.000Z"
            : ""
          const SEARCH_SUFFIX = searchWithTimestamp
            ? "T00:00:00.000Z"
            : ""
          const SAVE_SUFFIX = saveWithTimestamp
            ? "T00:00:00.000Z"
            : ""
          const SEARCH_SUFFIX = searchWithTimestamp
            ? "T00:00:00.000Z"
            : ""

          const JAN_1ST = `2020-01-01`
          const JAN_10TH = `2020-01-10`
          const JAN_30TH = `2020-01-30`
          const UNEXISTING_DATE = `2020-01-03`
          const NULL_DATE__ID = `null_date__id`
          const JAN_1ST = `2020-01-01`
          const JAN_10TH = `2020-01-10`
          const JAN_30TH = `2020-01-30`
          const UNEXISTING_DATE = `2020-01-03`
          const NULL_DATE__ID = `null_date__id`

          beforeAll(async () => {
            tableOrViewId = await createTableOrView({
              dateid: {
                name: "dateid",
                type: FieldType.STRING,
              },
          beforeAll(async () => {
            tableOrViewId = await createTableOrView({
              dateid: {
                name: "dateid",
                type: FieldType.STRING,
              },
              date: {
                name: "date",
                type: FieldType.DATETIME,
                dateOnly: true,
              },
            })

            await createRows([
              { dateid: NULL_DATE__ID, date: null },
              { date: `${JAN_1ST}${SAVE_SUFFIX}` },
              { date: `${JAN_10TH}${SAVE_SUFFIX}` },
            ])
          })

describe("equal", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("successfully finds an ISO8601 row", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_10TH}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_10TH }])
|
||||
})
|
||||
|
||||
it("finds a row with ISO8601 timestamp", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
equal: {
|
||||
date: `${UNEXISTING_DATE}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
})
|
||||
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
notEqual: {
|
||||
date: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
notEqual: {
|
||||
date: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
oneOf: { date: [`${JAN_1ST}${SEARCH_SUFFIX}`] },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
date: [`${UNEXISTING_DATE}${SEARCH_SUFFIX}`],
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
})
|
||||
|
||||
describe("range", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
name: "date",
|
||||
type: FieldType.DATETIME,
|
||||
dateOnly: true,
|
||||
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
})
|
||||
},
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
await createRows([
|
||||
{ dateid: NULL_DATE__ID, date: null },
|
||||
{ date: `${JAN_1ST}${SAVE_SUFFIX}` },
|
||||
{ date: `${JAN_10TH}${SAVE_SUFFIX}` },
|
||||
it("successfully finds multiple rows", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_10TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
it("successfully finds no rows", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
low: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
})
|
||||
|
||||
describe("sort", () => {
|
||||
it("sorts ascending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([
|
||||
{ dateid: NULL_DATE__ID },
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
it("sorts descending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ date: JAN_1ST },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
|
||||
describe("sortType STRING", () => {
|
||||
it("sorts ascending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([
|
||||
{ dateid: NULL_DATE__ID },
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
describe("equal", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("successfully finds an ISO8601 row", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_10TH}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_10TH }])
|
||||
})
|
||||
|
||||
it("finds a row with ISO8601 timestamp", async () => {
|
||||
await expectQuery({
|
||||
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
equal: {
|
||||
date: `${UNEXISTING_DATE}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
it("sorts descending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ date: JAN_1ST },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
notEqual: {
|
||||
date: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
notEqual: {
|
||||
date: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
oneOf: { date: [`${JAN_1ST}${SEARCH_SUFFIX}`] },
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("fails to find nonexistent row", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
date: [`${UNEXISTING_DATE}${SEARCH_SUFFIX}`],
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
})
|
||||
|
||||
describe("range", () => {
|
||||
it("successfully finds a row", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
},
|
||||
}).toContainExactly([{ date: JAN_1ST }])
|
||||
})
|
||||
|
||||
it("successfully finds multiple rows", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_10TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
it("successfully finds no rows", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
date: {
|
||||
low: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
high: `${JAN_30TH}${SEARCH_SUFFIX}`,
|
||||
},
|
||||
},
|
||||
}).toFindNothing()
|
||||
})
|
||||
})
|
||||
|
||||
describe.only("sort", () => {
|
||||
it("sorts ascending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([
|
||||
{ dateid: NULL_DATE__ID },
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
it("sorts descending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ date: JAN_1ST },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
|
||||
describe("sortType STRING", () => {
|
||||
it("sorts ascending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([
|
||||
{ dateid: NULL_DATE__ID },
|
||||
{ date: JAN_1ST },
|
||||
{ date: JAN_10TH },
|
||||
])
|
||||
})
|
||||
|
||||
it("sorts descending", async () => {
|
||||
await expectSearch({
|
||||
query: {},
|
||||
sort: "date",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([
|
||||
{ date: JAN_10TH },
|
||||
{ date: JAN_1ST },
|
||||
{ dateid: NULL_DATE__ID },
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
|
|
@@ -7,7 +7,7 @@ import {
  Table,
} from "@budibase/types"
import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
import { db, HTTPError, sql } from "@budibase/backend-core"

type Rows = Array<Row>

@@ -180,10 +180,26 @@ export function parse(rows: Rows, table: Table): Rows {
        !columnSchema.timeOnly &&
        !columnSchema.dateOnly
      ) {
        // If provided must be a valid date
        if (columnData && !columnSchema.timeOnly) {
          if (!sql.utils.isValidISODateString(columnData)) {
            let message = `Invalid format for field "${columnName}": "${columnData}".`
            if (columnSchema.dateOnly) {
              message += ` Date-only fields must be in the format "YYYY-MM-DD".`
            } else {
              message += ` Datetime fields must be in ISO format, e.g. "YYYY-MM-DDTHH:MM:SSZ".`
            }
            throw new HTTPError(message, 400)
          }
        }
        if (columnData && columnSchema.timeOnly) {
          if (!sql.utils.isValidTime(columnData)) {
            throw new HTTPError(
              `Invalid format for field "${columnName}": "${columnData}". Time-only fields must be in the format "HH:MM:SS".`,
              400
            )
          }
        }
        parsedRow[columnName] = columnData
          ? new Date(columnData).toISOString()
          : columnData
      } else if (
        columnType === FieldType.JSON &&
        typeof columnData === "string"

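As a rough illustration of what the checks above accept: the sketch below is not the implementation of sql.utils.isValidISODateString or sql.utils.isValidTime (those come from @budibase/backend-core); the regexes simply approximate the formats named in the error messages.

// Approximation only; the real validators live in @budibase/backend-core.
const looksLikeISODate = (value: string): boolean =>
  // "YYYY-MM-DD", optionally followed by a "THH:MM:SS[.mmm]" time part and zone
  /^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d{3})?(Z|[+-]\d{2}:\d{2})?)?$/.test(value)

const looksLikeTime = (value: string): boolean =>
  // "HH:MM" or "HH:MM:SS"
  /^\d{2}:\d{2}(:\d{2})?$/.test(value)

looksLikeISODate("2024-02-01")           // true: satisfies the date-only branch
looksLikeISODate("2024-02-01T15:00:00Z") // true: satisfies the datetime branch
looksLikeISODate("01.02.2024")           // false: would trigger the HTTPError(400) above
looksLikeTime("3pm")                     // false: invalid for a time-only field

In the tests above, "3pm" is in practice rejected by an earlier check with a 500, as the comment in the time-field test notes.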