Integrate Elasticsearch as a test datasource.
parent f484673667
commit 1c23509839

@@ -155,7 +155,18 @@ jobs:
     strategy:
       matrix:
         datasource:
-          [mssql, mysql, postgres, postgres_legacy, mongodb, mariadb, oracle, sqs, none]
+          [
+            mssql,
+            mysql,
+            postgres,
+            postgres_legacy,
+            mongodb,
+            mariadb,
+            oracle,
+            sqs,
+            elasticsearch,
+            none,
+          ]
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4

@@ -192,6 +203,8 @@ jobs:
             docker pull budibase/oracle-database:23.2-slim-faststart
           elif [ "${{ matrix.datasource }}" == "postgres_legacy" ]; then
             docker pull postgres:9.5.25
+          elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
+            docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
           fi
           docker pull minio/minio &
           docker pull redis &

@@ -165,7 +165,8 @@ describe("/datasources", () => {
 })
 
 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })
 
 if (descriptions.length) {

@@ -590,7 +591,8 @@ if (descriptions.length) {
 }
 
 const datasources = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
+  plus: true,
+  exclude: [DatabaseName.SQS, DatabaseName.ORACLE],
 })
 
 if (datasources.length) {

@@ -9,7 +9,8 @@ import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"
 
 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })
 
 if (descriptions.length) {

@@ -1,9 +1,6 @@
 import * as setup from "./utilities"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 
 import tk from "timekeeper"
 import emitter from "../../../../src/events"

@@ -80,7 +77,7 @@ function encodeJS(binding: string) {
   return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
 }
 
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })
 
 if (descriptions.length) {
   describe.each(descriptions)(

@@ -1,8 +1,5 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import {
   context,
   db as dbCore,

@@ -60,7 +57,7 @@ jest.mock("@budibase/pro", () => ({
   },
 }))
 
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })
 
 if (descriptions.length) {
   describe.each(descriptions)(

@@ -28,17 +28,14 @@ import * as setup from "./utilities"
 import * as uuid from "uuid"
 
 import { generator } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
 import timekeeper from "timekeeper"
 
 const { basicTable } = setup.structures
 const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
 
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })
 
 if (descriptions.length) {
   describe.each(descriptions)(

@@ -37,17 +37,14 @@ import {
   ViewV2Type,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
 import { context, db, events, roles, setEnv } from "@budibase/backend-core"
 import { mockChatGPTResponse } from "../../../tests/utilities/mocks/openai"
 import nock from "nock"
 
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })
 
 if (descriptions.length) {
   describe.each(descriptions)(

@@ -9,7 +9,8 @@ import { generator } from "@budibase/backend-core/tests"
 import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
 
 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })
 
 if (descriptions.length) {

@@ -2,75 +2,80 @@ import { Datasource } from "@budibase/types"
 import { ElasticsearchConfig, ElasticSearchIntegration } from "../elasticsearch"
 import * as elasticsearch from "../tests/utils/elasticsearch"
 import { generator } from "@budibase/backend-core/tests"
+import { DatabaseName, datasourceDescribe } from "./utils"
 
-describe("Elasticsearch Integration", () => {
-  let datasource: Datasource
-  let integration: ElasticSearchIntegration
+const describes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })
 
-  let index: string
+if (describes.length) {
+  describe.each(describes)("Elasticsearch Integration", () => {
+    let datasource: Datasource
+    let integration: ElasticSearchIntegration
 
-  beforeAll(async () => {
-    datasource = await elasticsearch.getDatasource()
-  })
+    let index: string
 
-  beforeEach(() => {
-    index = generator.guid()
-    integration = new ElasticSearchIntegration(
-      datasource.config! as ElasticsearchConfig
-    )
-  })
+    beforeAll(async () => {
+      datasource = await elasticsearch.getDatasource()
+    })
 
-  it("can create a record", async () => {
-    await integration.create({
-      index,
-      json: { name: "Hello" },
-      extra: { refresh: "true" },
-    })
-    const records = await integration.read({
-      index,
-      json: { query: { match_all: {} } },
-    })
-    expect(records).toEqual([{ name: "Hello" }])
-  })
+    beforeEach(() => {
+      index = generator.guid()
+      integration = new ElasticSearchIntegration(
+        datasource.config! as ElasticsearchConfig
+      )
+    })
 
-  it("can update a record", async () => {
-    const create = await integration.create({
-      index,
-      json: { name: "Hello" },
-      extra: { refresh: "true" },
-    })
+    it("can create a record", async () => {
+      await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "Hello" }])
+    })
 
-    await integration.update({
-      id: create._id,
-      index,
-      json: { doc: { name: "World" } },
-      extra: { refresh: "true" },
-    })
+    it("can update a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
 
-    const records = await integration.read({
-      index,
-      json: { query: { match_all: {} } },
-    })
-    expect(records).toEqual([{ name: "World" }])
-  })
+      await integration.update({
+        id: create._id,
+        index,
+        json: { doc: { name: "World" } },
+        extra: { refresh: "true" },
+      })
 
-  it("can delete a record", async () => {
-    const create = await integration.create({
-      index,
-      json: { name: "Hello" },
-      extra: { refresh: "true" },
-    })
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "World" }])
+    })
 
-    await integration.delete({
-      id: create._id,
-      index,
-      extra: { refresh: "true" },
-    })
+    it("can delete a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
 
-    const records = await integration.read({
-      index,
-      json: { query: { match_all: {} } },
-    })
-    expect(records).toEqual([])
-  })
-})
+      await integration.delete({
+        id: create._id,
+        index,
+        extra: { refresh: "true" },
+      })
+
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([])
+    })
+  })
+}

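A note on the extra: { refresh: "true" } option used throughout these tests: it asks Elasticsearch to refresh the index as part of the write, so the read that follows sees the document immediately. As a rough, hypothetical illustration (not part of this commit), the integration's create() is assumed to wrap a v7-style @elastic/elasticsearch index request along these lines; the client setup and index name below are placeholders:

    import { Client } from "@elastic/elasticsearch"

    async function createHello(client: Client, index: string) {
      // refresh: "true" makes the write visible to the immediately following search
      await client.index({ index, body: { name: "Hello" }, refresh: "true" })
    }
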
@@ -6,6 +6,7 @@ import * as mysql from "./mysql"
 import * as mssql from "./mssql"
 import * as mariadb from "./mariadb"
 import * as oracle from "./oracle"
+import * as elasticsearch from "./elasticsearch"
 import { testContainerUtils } from "@budibase/backend-core/tests"
 import { Knex } from "knex"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"

@@ -23,22 +24,32 @@ export enum DatabaseName {
   MARIADB = "mariadb",
   ORACLE = "oracle",
   SQS = "sqs",
+  ELASTICSEARCH = "elasticsearch",
 }
 
+const DATASOURCE_PLUS = [
+  DatabaseName.POSTGRES,
+  DatabaseName.POSTGRES_LEGACY,
+  DatabaseName.MYSQL,
+  DatabaseName.SQL_SERVER,
+  DatabaseName.MARIADB,
+  DatabaseName.ORACLE,
+  DatabaseName.SQS,
+]
+
 const providers: Record<DatabaseName, DatasourceProvider> = {
+  // datasource_plus entries
   [DatabaseName.POSTGRES]: postgres.getDatasource,
   [DatabaseName.POSTGRES_LEGACY]: postgres.getLegacyDatasource,
-  [DatabaseName.MONGODB]: mongodb.getDatasource,
   [DatabaseName.MYSQL]: mysql.getDatasource,
   [DatabaseName.SQL_SERVER]: mssql.getDatasource,
   [DatabaseName.MARIADB]: mariadb.getDatasource,
   [DatabaseName.ORACLE]: oracle.getDatasource,
   [DatabaseName.SQS]: async () => undefined,
-}
-
-export interface DatasourceDescribeOpts {
-  only?: DatabaseName[]
-  exclude?: DatabaseName[]
+
+  // rest
+  [DatabaseName.ELASTICSEARCH]: elasticsearch.getDatasource,
+  [DatabaseName.MONGODB]: mongodb.getDatasource,
 }
 
 export interface DatasourceDescribeReturnPromise {

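The new ./elasticsearch provider module registered above is added by this commit but not shown in this excerpt. As a hedged sketch only, a getDatasource helper of this shape could be built on plain testcontainers; the image tag, port, env keys and config shape below are illustrative assumptions, not the actual implementation:

    import { GenericContainer, Wait } from "testcontainers"
    import { Datasource, SourceName } from "@budibase/types"

    export async function getDatasource(): Promise<Datasource> {
      // Start a throwaway single-node Elasticsearch container for the test run.
      const container = await new GenericContainer("elasticsearch:8.14.0")
        .withExposedPorts(9200)
        .withEnvironment({
          "discovery.type": "single-node",
          "xpack.security.enabled": "false",
        })
        .withWaitStrategy(Wait.forListeningPorts())
        .start()

      // Point the integration at the mapped port; the config shape is assumed.
      const url = `http://${container.getHost()}:${container.getMappedPort(9200)}`
      return { type: "datasource", source: SourceName.ELASTICSEARCH, config: { url } }
    }
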
@@ -103,6 +114,20 @@ function createDummyTest() {
   })
 }
 
+interface OnlyOpts {
+  only: DatabaseName[]
+}
+
+interface PlusOpts {
+  plus: true
+  exclude?: DatabaseName[]
+}
+
+export type DatasourceDescribeOpts = OnlyOpts | PlusOpts
+
 // If you ever want to rename this function, be mindful that you will also need
 // to modify src/tests/filters/index.js to make sure that we're correctly
 // filtering datasource/non-datasource tests in CI.
 export function datasourceDescribe(opts: DatasourceDescribeOpts) {
   // tests that call this need a lot longer timeouts
   jest.setTimeout(120000)

@@ -111,17 +136,15 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
     createDummyTest()
   }
 
-  const { only, exclude } = opts
-
-  if (only && exclude) {
-    throw new Error("you can only supply one of 'only' or 'exclude'")
-  }
-
-  let databases = Object.values(DatabaseName)
-  if (only) {
-    databases = only
-  } else if (exclude) {
-    databases = databases.filter(db => !exclude.includes(db))
+  let databases: DatabaseName[] = []
+  if ("only" in opts) {
+    databases = opts.only
+  } else if ("plus" in opts) {
+    databases = Object.values(DatabaseName)
+      .filter(db => DATASOURCE_PLUS.includes(db))
+      .filter(db => !opts.exclude?.includes(db))
+  } else {
+    throw new Error("invalid options")
   }
 
   if (process.env.DATASOURCE) {

@@ -156,6 +179,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
     isMSSQL: dbName === DatabaseName.SQL_SERVER,
     isOracle: dbName === DatabaseName.ORACLE,
     isMariaDB: dbName === DatabaseName.MARIADB,
+    isElasticsearch: dbName === DatabaseName.ELASTICSEARCH,
   }))
 }

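Taken together, datasourceDescribe now accepts exactly one of two shapes: an explicit only list, or plus: true with an optional exclude. A brief usage sketch mirroring the spec changes above (variable names are illustrative; entry fields other than the is* flags are not shown in this excerpt):

    import { DatabaseName, datasourceDescribe } from "../../../integrations/tests/utils"

    // Every datasource_plus source except SQS:
    const descriptions = datasourceDescribe({ plus: true, exclude: [DatabaseName.SQS] })

    // A single datasource, as elasticsearch.spec.ts does:
    const esDescribes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })

    // Both calls return [] when process.env.DATASOURCE filters everything out,
    // so specs guard before registering tests:
    if (descriptions.length) {
      describe.each(descriptions)("rows", ({ isElasticsearch, isOracle }) => {
        // per-datasource tests, branching on the is* flags where needed
      })
    }
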
@@ -10,16 +10,13 @@ import {
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"
 
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"
 
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })
 
 if (descriptions.length) {
   describe.each(descriptions)(