Merge branch 'master' into feature/filter-bindings
commit 32cce5f5c6
@@ -32,8 +32,6 @@ import * as uuid from "uuid"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)

jest.unmock("mssql")

describe.each([
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
@@ -131,7 +129,13 @@ describe.each([
const assertRowUsage = async (expected: number) => {
const usage = await getRowUsage()
expect(usage).toBe(expected)

// Because our quota tracking is not perfect, we allow a 10% margin of
// error. This is to account for the fact that parallel writes can result
// in some quota updates getting lost. We don't have any need to solve this
// right now, so we just allow for some error.
expect(usage).toBeGreaterThan(expected * 0.9)
expect(usage).toBeLessThan(expected * 1.1)
}

const defaultRowFields = isInternal
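The assertRowUsage change above swaps an exact quota check for a 10% tolerance band, since parallel writes can drop some quota updates. A generic version of that tolerance check, shown only as a sketch for a Jest spec (the helper name and default margin are illustrative, not part of this diff), might look like:

```ts
// Hypothetical helper: assert a measured value sits within ±margin of the
// expected value, mirroring the 10% tolerance used by assertRowUsage above.
function expectWithinMargin(actual: number, expected: number, margin = 0.1) {
  expect(actual).toBeGreaterThan(expected * (1 - margin))
  expect(actual).toBeLessThan(expected * (1 + margin))
}

// Usage in a test: expectWithinMargin(await getRowUsage(), rowUsage + 10)
```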
@@ -194,39 +198,99 @@ describe.each([
await assertRowUsage(rowUsage)
})

it("increment row autoId per create row request", async () => {
const rowUsage = await getRowUsage()
isInternal &&
it("increment row autoId per create row request", async () => {
const rowUsage = await getRowUsage()

const newTable = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
const newTable = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
},
})
)
})
)

let previousId = 0
for (let i = 0; i < 10; i++) {
const row = await config.api.row.save(newTable._id!, {})
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
await assertRowUsage(rowUsage + 10)
})
let previousId = 0
for (let i = 0; i < 10; i++) {
const row = await config.api.row.save(newTable._id!, {})
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
await assertRowUsage(rowUsage + 10)
})

isInternal &&
it("should increment auto ID correctly when creating rows in parallel", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
})
)

const sequence = Array(50)
.fill(0)
.map((_, i) => i + 1)

// This block of code is simulating users creating auto ID rows at the
// same time. It's expected that this operation will sometimes return
// a document conflict error (409), but the idea is to retry in those
// situations. The code below does this a large number of times with
// small, random delays between them to try and get through the list
// as quickly as possible.
await Promise.all(
sequence.map(async () => {
const attempts = 20
for (let attempt = 0; attempt < attempts; attempt++) {
try {
await config.api.row.save(table._id!, {})
return
} catch (e) {
await new Promise(r => setTimeout(r, Math.random() * 15))
}
}
throw new Error(`Failed to create row after ${attempts} attempts`)
})
)

const rows = await config.api.row.fetch(table._id!)
expect(rows).toHaveLength(50)

// The main purpose of this test is to ensure that even under pressure,
// we maintain data integrity. An auto ID column should hand out
// monotonically increasing unique integers no matter what.
const ids = rows.map(r => r["Row ID"])
expect(ids).toEqual(expect.arrayContaining(sequence))
})

isInternal &&
it("row values are coerced", async () => {
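The parallel auto ID test above retries row creation with short random delays because concurrent saves can surface 409 document conflicts. A self-contained sketch of that retry shape, with an illustrative helper name, attempt count, and delay values (not Budibase API), could be:

```ts
// Minimal sketch of the retry pattern used in the parallel auto ID test above.
async function retryOnConflict<T>(
  fn: () => Promise<T>,
  attempts = 20,
  maxDelayMs = 15
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await fn()
    } catch (e) {
      // Assume the failure is a transient conflict and retry after a short,
      // random delay so concurrent callers are less likely to collide again.
      lastError = e
      await new Promise(r => setTimeout(r, Math.random() * maxDelayMs))
    }
  }
  throw new Error(`Failed after ${attempts} attempts: ${String(lastError)}`)
}

// In the spirit of the test:
// await Promise.all(sequence.map(() => retryOnConflict(() => config.api.row.save(table._id!, {}))))
```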
@@ -24,8 +24,6 @@ import { encodeJSBinding } from "@budibase/string-templates"
const serverTime = new Date("2024-05-06T00:00:00.000Z")
tk.freeze(serverTime)

jest.unmock("mssql")

describe.each([
["lucene", undefined],
["sqs", undefined],
@@ -1,8 +1,8 @@
import { context, events } from "@budibase/backend-core"
import {
AutoFieldSubType,
Datasource,
BBReferenceFieldSubType,
Datasource,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalTable,
@@ -149,58 +149,59 @@ describe.each([
expect(res.name).toBeUndefined()
})

it("updates only the passed fields", async () => {
await timekeeper.withFreeze(new Date(2021, 1, 1), async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
schema: {
autoId: {
name: "id",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
type: "number",
presence: false,
isInternal &&
it("updates only the passed fields", async () => {
await timekeeper.withFreeze(new Date(2021, 1, 1), async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
schema: {
autoId: {
name: "id",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
type: "number",
presence: false,
},
},
},
},
})
)

const newName = generator.guid()

const updatedTable = await config.api.table.save({
...table,
name: newName,
})
)

const newName = generator.guid()
let expected: Table = {
...table,
name: newName,
_id: expect.any(String),
}
if (isInternal) {
expected._rev = expect.stringMatching(/^2-.+/)
}

const updatedTable = await config.api.table.save({
...table,
name: newName,
expect(updatedTable).toEqual(expected)

const persistedTable = await config.api.table.get(updatedTable._id!)
expected = {
...table,
name: newName,
_id: updatedTable._id,
}
if (datasource?.isSQL) {
expected.sql = true
}
if (isInternal) {
expected._rev = expect.stringMatching(/^2-.+/)
}
expect(persistedTable).toEqual(expected)
})

let expected: Table = {
...table,
name: newName,
_id: expect.any(String),
}
if (isInternal) {
expected._rev = expect.stringMatching(/^2-.+/)
}

expect(updatedTable).toEqual(expected)

const persistedTable = await config.api.table.get(updatedTable._id!)
expected = {
...table,
name: newName,
_id: updatedTable._id,
}
if (datasource?.isSQL) {
expected.sql = true
}
if (isInternal) {
expected._rev = expect.stringMatching(/^2-.+/)
}
expect(persistedTable).toEqual(expected)
})
})

describe("user table", () => {
isInternal &&
@@ -214,6 +215,57 @@ describe.each([
})
})

describe("external table validation", () => {
!isInternal &&
it("should error if column is of type auto", async () => {
const table = basicTable(datasource)
await config.api.table.save(
{
...table,
schema: {
...table.schema,
auto: {
name: "auto",
autocolumn: true,
type: FieldType.AUTO,
},
},
},
{
status: 400,
body: {
message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`,
},
}
)
})

!isInternal &&
it("should error if column has auto subtype", async () => {
const table = basicTable(datasource)
await config.api.table.save(
{
...table,
schema: {
...table.schema,
auto: {
name: "auto",
autocolumn: true,
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
},
},
},
{
status: 400,
body: {
message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`,
},
}
)
})
})

it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
...basicTable(),
@@ -24,8 +24,6 @@ import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core"

jest.unmock("mssql")

describe.each([
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
@@ -1,230 +0,0 @@
import tk from "timekeeper"
import * as internalSdk from "../internal"

import { generator } from "@budibase/backend-core/tests"
import {
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
FieldType,
Table,
AutoFieldSubType,
AutoColumnFieldMetadata,
} from "@budibase/types"

import TestConfiguration from "../../../../tests/utilities/TestConfiguration"

tk.freeze(Date.now())

describe("sdk >> rows >> internal", () => {
const config = new TestConfiguration()

beforeAll(async () => {
await config.init()
})

function makeRow() {
return {
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
}
}

describe("save", () => {
const tableData: Table = {
name: generator.word(),
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
surname: {
name: "surname",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
constraints: {
type: FieldType.NUMBER,
},
},
address: {
name: "address",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
},
}

beforeEach(() => {
jest.clearAllMocks()
})

it("save will persist the row properly", async () => {
const table = await config.createTable(tableData)
const row = makeRow()

await config.doInContext(config.appId, async () => {
const response = await internalSdk.save(
table._id!,
row,
config.getUser()._id
)

expect(response).toEqual({
table,
row: {
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
squashed: {
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
})

const persistedRow = await config.api.row.get(
table._id!,
response.row._id!
)
expect(persistedRow).toEqual({
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
})
})

it("auto ids will update when creating new rows", async () => {
const table = await config.createTable({
...tableData,
schema: {
...tableData.schema,
id: {
name: "id",
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
lastID: 0,
},
},
})
const row = makeRow()

await config.doInContext(config.appId, async () => {
const response = await internalSdk.save(
table._id!,
row,
config.getUser()._id
)

expect(response).toEqual({
table: {
...table,
schema: {
...table.schema,
id: {
...table.schema.id,
lastID: 1,
},
},
_rev: expect.stringMatching("2-.*"),
},
row: {
...row,
id: 1,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
squashed: {
...row,
id: 1,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
})

const persistedRow = await config.api.row.get(
table._id!,
response.row._id!
)
expect(persistedRow).toEqual({
...row,
type: "row",
id: 1,
_rev: expect.stringMatching("1-.*"),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
})
})

it("auto ids will update when creating new rows in parallel", async () => {
function makeRows(count: number) {
return Array.from({ length: count }, () => makeRow())
}

const table = await config.createTable({
...tableData,
schema: {
...tableData.schema,
id: {
name: "id",
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
},
},
})

await config.doInContext(config.appId, async () => {
for (const row of makeRows(5)) {
await internalSdk.save(table._id!, row, config.getUser()._id)
}
await Promise.all(
makeRows(20).map(row =>
internalSdk.save(table._id!, row, config.getUser()._id)
)
)
for (const row of makeRows(5)) {
await internalSdk.save(table._id!, row, config.getUser()._id)
}
})

const persistedRows = await config.getRows(table._id!)
expect(persistedRows).toHaveLength(30)
expect(persistedRows).toEqual(
expect.arrayContaining(
Array.from({ length: 30 }).map((_, i) =>
expect.objectContaining({ id: i + 1 })
)
)
)

const persistedTable = await config.getTable(table._id)
expect(
(table.schema.id as AutoColumnFieldMetadata).lastID
).toBeUndefined()
expect((persistedTable.schema.id as AutoColumnFieldMetadata).lastID).toBe(
30
)
})
})
})
@@ -6,6 +6,7 @@ import {
Table,
TableRequest,
ViewV2,
AutoFieldSubType,
} from "@budibase/types"
import { context } from "@budibase/backend-core"
import { buildExternalTableId } from "../../../../integrations/utils"
@@ -29,6 +30,52 @@ import { populateExternalTableSchemas } from "../validation"
import datasourceSdk from "../../datasources"
import * as viewSdk from "../../views"

const DEFAULT_PRIMARY_COLUMN = "id"

function noPrimaryKey(table: Table) {
return table.primary == null || table.primary.length === 0
}

function validate(table: Table, oldTable?: Table) {
if (
!oldTable &&
table.schema[DEFAULT_PRIMARY_COLUMN] &&
noPrimaryKey(table)
) {
throw new Error(
"External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
)
}

if (hasTypeChanged(table, oldTable)) {
throw new Error("A column type has changed.")
}

const autoSubTypes = Object.values(AutoFieldSubType)
// check for auto columns, they are not allowed
for (let [key, column] of Object.entries(table.schema)) {
// this column is a special case, do not validate it
if (key === DEFAULT_PRIMARY_COLUMN) {
continue
}
// the auto-column type should never be used
if (column.type === FieldType.AUTO) {
throw new Error(
`Column "${key}" has type "${FieldType.AUTO}" - this is not supported.`
)
}

if (
column.subtype &&
autoSubTypes.includes(column.subtype as AutoFieldSubType)
) {
throw new Error(
`Column "${key}" has subtype "${column.subtype}" - this is not supported.`
)
}
}
}

export async function save(
datasourceId: string,
update: Table,
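The validate() helper above blocks auto columns on external tables before save() touches the datasource. A condensed sketch of those same checks, with a made-up sample schema to show the failure mode (the helper name and example columns are illustrative, not part of this diff), could be:

```ts
// Minimal sketch of the auto-column checks performed by validate() above.
import { AutoFieldSubType, FieldType } from "@budibase/types"

const autoSubTypes = Object.values(AutoFieldSubType)

function assertNoAutoColumns(
  schema: Record<string, { type: FieldType; subtype?: string }>
) {
  for (const [key, column] of Object.entries(schema)) {
    if (column.type === FieldType.AUTO) {
      throw new Error(`Column "${key}" has type "${FieldType.AUTO}" - this is not supported.`)
    }
    if (column.subtype && autoSubTypes.includes(column.subtype as AutoFieldSubType)) {
      throw new Error(`Column "${key}" has subtype "${column.subtype}" - this is not supported.`)
    }
  }
}

// Example: this made-up schema is rejected because of the AUTO_ID subtype.
try {
  assertNoAutoColumns({
    amount: { type: FieldType.NUMBER },
    auto: { type: FieldType.NUMBER, subtype: AutoFieldSubType.AUTO_ID },
  })
} catch (err) {
  console.log(err) // rejected: auto subtypes are not supported on external tables
}
```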
@@ -47,28 +94,18 @@ export async function save(
oldTable = await getTable(tableId)
}

if (
!oldTable &&
(tableToSave.primary == null || tableToSave.primary.length === 0)
) {
if (tableToSave.schema.id) {
throw new Error(
"External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
)
}
// this will throw an error if something is wrong
validate(tableToSave, oldTable)

tableToSave.primary = ["id"]
tableToSave.schema.id = {
if (!oldTable && noPrimaryKey(tableToSave)) {
tableToSave.primary = [DEFAULT_PRIMARY_COLUMN]
tableToSave.schema[DEFAULT_PRIMARY_COLUMN] = {
type: FieldType.NUMBER,
autocolumn: true,
name: "id",
name: DEFAULT_PRIMARY_COLUMN,
}
}

if (hasTypeChanged(tableToSave, oldTable)) {
throw new Error("A column type has changed.")
}

for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView || !viewSdk.isV2(tableView)) continue
@@ -1,6 +1,6 @@
import * as linkRows from "../../db/linkedRows"
import { processFormulas, fixAutoColumnSubType } from "./utils"
import { context, objectStore, utils } from "@budibase/backend-core"
import { objectStore, utils } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import {
@@ -25,45 +25,6 @@ type AutoColumnProcessingOpts = {
noAutoRelationships?: boolean
}

// Returns the next auto ID for a column in a table. On success, the table will
// be updated which is why it gets returned. The nextID returned is guaranteed
// to be given only to you, and if you don't use it it's gone forever (a gap
// will be left in the auto ID sequence).
//
// This function can throw if it fails to generate an auto ID after so many
// attempts.
async function getNextAutoId(
table: Table,
column: string
): Promise<{ table: Table; nextID: number }> {
const db = context.getAppDB()
for (let attempt = 0; attempt < 5; attempt++) {
const schema = table.schema[column]
if (schema.type !== FieldType.NUMBER && schema.type !== FieldType.AUTO) {
throw new Error(`Column ${column} is not an auto column`)
}
schema.lastID = (schema.lastID || 0) + 1
try {
const resp = await db.put(table)
table._rev = resp.rev
return { table, nextID: schema.lastID }
} catch (e: any) {
if (e.status !== 409) {
throw e
}
// We wait for a random amount of time before retrying. The randomness
// makes it less likely for multiple requests modifying this table to
// collide.
await new Promise(resolve =>
setTimeout(resolve, Math.random() * 1.2 ** attempt * 1000)
)
table = await db.get(table._id)
}
}

throw new Error("Failed to generate an auto ID")
}

/**
* This will update any auto columns that are found on the row/table with the correct information based on
* time now and the current logged in user making the request.
@@ -116,9 +77,10 @@ export async function processAutoColumn(
break
case AutoFieldSubType.AUTO_ID:
if (creating) {
const { table: newTable, nextID } = await getNextAutoId(table, key)
table = newTable
row[key] = nextID
schema.lastID = schema.lastID || 0
row[key] = schema.lastID + 1
schema.lastID++
table.schema[key] = schema
}
break
}
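The getNextAutoId function in the diff above reserves an auto ID by bumping lastID on the table document and retrying with a randomized, exponentially growing delay whenever the database reports a 409 document conflict. A stripped-down sketch of that pattern, using stand-in Doc/Store types rather than the real @budibase/backend-core context API, might look like:

```ts
// Generic sketch of retry-on-document-conflict with randomized exponential
// backoff, mirroring the getNextAutoId loop above. Doc and Store are
// illustrative stand-ins, not Budibase types.
type Doc = { _id: string; _rev?: string; lastID?: number }

interface Store {
  get(id: string): Promise<Doc>
  put(doc: Doc): Promise<{ rev: string }> // assumed to reject with { status: 409 } on conflict
}

async function nextId(store: Store, doc: Doc, attempts = 5): Promise<number> {
  for (let attempt = 0; attempt < attempts; attempt++) {
    doc.lastID = (doc.lastID || 0) + 1
    try {
      const resp = await store.put(doc)
      doc._rev = resp.rev
      return doc.lastID
    } catch (e: any) {
      if (e.status !== 409) {
        throw e
      }
      // Randomized exponential backoff, as in the diff: random() * 1.2^attempt seconds.
      await new Promise(r => setTimeout(r, Math.random() * 1.2 ** attempt * 1000))
      // Reload the latest revision before retrying so the next put can succeed.
      doc = await store.get(doc._id)
    }
  }
  throw new Error("Failed to generate an auto ID")
}
```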
@@ -1,5 +1,4 @@
jest.unmock("node-fetch")
jest.unmock("aws-sdk")
import { TestConfiguration } from "../../../../tests"
import { EmailTemplatePurpose } from "../../../../constants"
import { objectStore } from "@budibase/backend-core"