Rebase onto chore/sqs-always-on
parent 06a0ad7910
commit d1e83ce4f7

@@ -1 +1 @@
-Subproject commit f01e9e7bb7276bace6a4ae2813d7b2d4c2f9a376
+Subproject commit 2ab8536b6005576684810d774f1ac22239218546
@@ -1,10 +1,4 @@
-import * as setup from "./utilities"
-
-import {
-DatabaseName,
-getDatasource,
-knexClient,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"

import tk from "timekeeper"
import emitter from "../../../../src/events"
@@ -47,6 +41,7 @@ import { Knex } from "knex"
import { InternalTables } from "../../../db/utils"
import { withEnv } from "../../../environment"
import { JsTimeoutError } from "@budibase/string-templates"
+import { structures } from "./utilities"

jest.mock("@budibase/pro", () => ({
...jest.requireActual("@budibase/pro"),
@@ -85,37 +80,18 @@ async function waitForEvent(
return await p
}

-describe.each([
-["internal", undefined],
-[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
-])("/rows (%s)", (providerType, dsProvider) => {
-const isInternal = dsProvider === undefined
-const isMSSQL = providerType === DatabaseName.SQL_SERVER
-const isOracle = providerType === DatabaseName.ORACLE
-const config = setup.getConfig()
-
-let table: Table
+datasourceDescribe(
+{ name: "/rows (%s)" },
+({ config, dsProvider, isInternal, isMSSQL, isOracle }) => {
let datasource: Datasource | undefined
let client: Knex | undefined

+let table: Table
+
beforeAll(async () => {
-await config.init()
-
-if (dsProvider) {
-const rawDatasource = await dsProvider
-datasource = await config.createDatasource({
-datasource: rawDatasource,
-})
-client = await knexClient(rawDatasource)
-}
-})
-
-afterAll(async () => {
-setup.afterAll()
+const ds = await dsProvider
+datasource = ds.datasource
+client = ds.client
})

function saveTableRequest(
@@ -191,7 +167,10 @@ describe.each([

const getRowUsage = async () => {
const { total } = await config.doInContext(undefined, () =>
-quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
+quotas.getCurrentUsageValues(
+QuotaUsageType.STATIC,
+StaticQuotaName.ROWS
+)
)
return total
}
@@ -1344,7 +1323,7 @@ describe.each([
!isInternal &&
// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
-providerType !== DatabaseName.SQL_SERVER &&
+!isMSSQL &&
it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
@@ -1375,7 +1354,9 @@ describe.each([
number: naturalValue,
})

-expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`)
+expect(existing._id).toEqual(
+`%5B${naturalValue}%2C'${stringValue}'%5D`
+)

const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
@@ -2235,7 +2216,7 @@ describe.each([

// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
-providerType !== DatabaseName.SQL_SERVER &&
+!isMSSQL &&
it("should handle filtering by composite primary keys", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
@@ -2273,13 +2254,13 @@ describe.each([
let expectedRowData: Row

beforeAll(async () => {
-const fullSchema = setup.structures.fullSchemaWithoutLinks({
+const fullSchema = structures.fullSchemaWithoutLinks({
allRequired: true,
})

const table = await config.api.table.save(
saveTableRequest({
-...setup.structures.basicTable(),
+...structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
@@ -2294,15 +2275,15 @@ describe.each([
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
-[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
-[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
+[FieldType.ATTACHMENTS]: [structures.basicAttachment()],
+[FieldType.ATTACHMENT_SINGLE]: structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.AI]: "LLM Output",
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
-[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
+[FieldType.SIGNATURE_SINGLE]: structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
@@ -2362,10 +2343,16 @@ describe.each([
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")

-const matchingObject = (key: string, value: any, isArray: boolean) => {
+const matchingObject = (
+key: string,
+value: any,
+isArray: boolean
+) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
-return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
+return expect.stringMatching(
+new RegExp(`^\\[${objectMatcher}\\]$`)
+)
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
@@ -2638,7 +2625,9 @@ describe.each([
name: "foo",
description: "bar",
tableId,
-users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))),
+users: expect.arrayContaining(
+selectedUsers.map(u => resultMapper(u))
+),
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
@@ -2683,7 +2672,9 @@ describe.each([
description: "bar",
tableId,
user: expect.arrayContaining([user1].map(u => resultMapper(u))),
-users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))),
+users: expect.arrayContaining(
+[user2, user3].map(u => resultMapper(u))
+),
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
@@ -3473,6 +3464,5 @@ describe.each([
)
})
})
-})
-
-// todo: remove me
+}
+)
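The hunks above migrate the row spec off its hand-rolled describe.each matrix and onto a datasourceDescribe helper; the diff that follows is the datasource test-utilities module (evidently the "../../../integrations/tests/utils" file the spec imports) where that helper is introduced. For orientation, a minimal spec written against the new helper would look roughly like the sketch below. It is illustrative only: the suite name, the exclude choice and the @budibase/types import are assumptions, not part of this commit.

// Hypothetical spec skeleton; names and assertions are invented for illustration.
import {
  DatabaseName,
  datasourceDescribe,
} from "../../../integrations/tests/utils"
import { Datasource } from "@budibase/types"
import { Knex } from "knex"

datasourceDescribe(
  // `exclude` filters the generated matrix; SQS is the "internal" case.
  { name: "example (%s)", exclude: [DatabaseName.SQS] },
  ({ config, dsProvider, isMSSQL }) => {
    let datasource: Datasource | undefined
    let client: Knex | undefined

    beforeAll(async () => {
      // dsProvider resolves to { rawDatasource, datasource, client };
      // all three can be undefined (they are for the internal/SQS case).
      const ds = await dsProvider
      datasource = ds.datasource
      client = ds.client
    })

    // Same guard style the row spec uses for MSSQL-specific limitations.
    !isMSSQL &&
      it("runs once per datasource in the matrix", async () => {
        // exercise config.api, datasource and client here
      })
  }
)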
@@ -9,8 +9,10 @@ import * as oracle from "./oracle"
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import cloneDeep from "lodash/cloneDeep"
+import { Knex } from "knex"
+import TestConfiguration from "src/tests/utilities/TestConfiguration"

-export type DatasourceProvider = () => Promise<Datasource>
+export type DatasourceProvider = () => Promise<Datasource | undefined>

export enum DatabaseName {
POSTGRES = "postgres",
@@ -19,6 +21,7 @@ export enum DatabaseName {
SQL_SERVER = "mssql",
MARIADB = "mariadb",
ORACLE = "oracle",
+SQS = "sqs",
}

const providers: Record<DatabaseName, DatasourceProvider> = {
@@ -28,30 +31,113 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
[DatabaseName.SQL_SERVER]: mssql.getDatasource,
[DatabaseName.MARIADB]: mariadb.getDatasource,
[DatabaseName.ORACLE]: oracle.getDatasource,
+[DatabaseName.SQS]: async () => undefined,
}

-export function getDatasourceProviders(
-...sourceNames: DatabaseName[]
-): Promise<Datasource>[] {
-return sourceNames.map(sourceName => providers[sourceName]())
+export interface DatasourceDescribeOpts {
+name: string
+only?: DatabaseName[]
+exclude?: DatabaseName[]
}

-export function getDatasourceProvider(
+export interface DatasourceDescribeReturnPromise {
+rawDatasource: Datasource | undefined
+datasource: Datasource | undefined
+client: Knex | undefined
+}
+
+export interface DatasourceDescribeReturn {
+name: DatabaseName
+config: TestConfiguration
+dsProvider: Promise<DatasourceDescribeReturnPromise>
+isInternal: boolean
+isExternal: boolean
+isMySQL: boolean
+isPostgres: boolean
+isMongodb: boolean
+isMSSQL: boolean
+isOracle: boolean
+}
+
+async function createDatasources(
+config: TestConfiguration,
+name: DatabaseName
+): Promise<DatasourceDescribeReturnPromise> {
+const rawDatasource = await getDatasource(name)
+
+let datasource: Datasource | undefined
+if (rawDatasource) {
+datasource = await config.api.datasource.create(rawDatasource)
+}
+
+let client: Knex | undefined
+if (rawDatasource) {
+try {
+client = await knexClient(rawDatasource)
+} catch (e) {
+// ignore
+}
+}
+
+return {
+rawDatasource,
+datasource,
+client,
+}
+}
+
+export function datasourceDescribe(
+opts: DatasourceDescribeOpts,
+cb: (args: DatasourceDescribeReturn) => void
+) {
+const { name, only, exclude } = opts
+
+if (only && exclude) {
+throw new Error("you can only supply one of 'only' or 'exclude'")
+}
+
+let databases = Object.values(DatabaseName)
+if (only) {
+databases = only
+} else if (exclude) {
+databases = databases.filter(db => !exclude.includes(db))
+}
+
+const config = new TestConfiguration()
+const prepped = databases.map(name => {
+return {
+name,
+config,
+dsProvider: createDatasources(config, name),
+isInternal: name === DatabaseName.SQS,
+isExternal: name !== DatabaseName.SQS,
+isMySQL: name === DatabaseName.MYSQL,
+isPostgres: name === DatabaseName.POSTGRES,
+isMongodb: name === DatabaseName.MONGODB,
+isMSSQL: name === DatabaseName.SQL_SERVER,
+isOracle: name === DatabaseName.ORACLE,
+}
+})
+
+describe.each(prepped)(name, args => {
+beforeAll(async () => {
+await args.config.init()
+})
+
+afterAll(() => {
+args.config.end()
+})
+
+cb(args)
+})
+}
+
+export function getDatasource(
sourceName: DatabaseName
-): DatasourceProvider {
-return providers[sourceName]
-}
-
-export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
+): Promise<Datasource | undefined> {
return providers[sourceName]()
}

-export async function getDatasources(
-...sourceNames: DatabaseName[]
-): Promise<Datasource[]> {
-return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
-}
-
export async function knexClient(ds: Datasource) {
switch (ds.source) {
case SourceName.POSTGRES: {
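A closing note on the utilities diff above, useful when rebasing other work onto this branch: getDatasourceProviders, getDatasourceProvider and getDatasources are removed, and getDatasource now resolves to Datasource | undefined because the new SQS entry's provider returns undefined. A hedged sketch of what a remaining direct caller has to do (the function and import path below are illustrative, not from this commit):

import {
  DatabaseName,
  getDatasource,
  knexClient,
} from "../../integrations/tests/utils" // relative path depends on the calling spec

async function queryPostgresDirectly() {
  const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
  if (!rawDatasource) {
    // e.g. DatabaseName.SQS always resolves to undefined
    throw new Error("no raw datasource available for this provider")
  }
  const client = await knexClient(rawDatasource)
  return client.raw("select 1")
}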