Merge branch 'master' into ci/clean-unused-scripts

commit 7408dbc199

@@ -6,6 +6,26 @@ import {
 import { ContainerInfo } from "dockerode"
 import path from "path"
 import lockfile from "proper-lockfile"
+import { execSync } from "child_process"
 
+interface DockerContext {
+  Name: string
+  Description: string
+  DockerEndpoint: string
+  ContextType: string
+  Error: string
+}
+
+function getCurrentDockerContext(): DockerContext {
+  const out = execSync("docker context ls --format json")
+  for (const line of out.toString().split("\n")) {
+    const parsed = JSON.parse(line)
+    if (parsed.Current) {
+      return parsed as DockerContext
+    }
+  }
+  throw new Error("No current Docker context")
+}
+
 async function getBudibaseContainers() {
   const client = await getContainerRuntimeClient()
@@ -27,6 +47,14 @@ async function killContainers(containers: ContainerInfo[]) {
 }
 
 export default async function setup() {
+  // For whatever reason, testcontainers doesn't always use the correct current
+  // docker context. This bit of code forces the issue by finding the current
+  // context and setting it as the DOCKER_HOST environment
+  if (!process.env.DOCKER_HOST) {
+    const dockerContext = getCurrentDockerContext()
+    process.env.DOCKER_HOST = dockerContext.DockerEndpoint
+  }
+
   const lockPath = path.resolve(__dirname, "globalSetup.ts")
   // If you run multiple tests at the same time, it's possible for the CouchDB
   // shared container to get started multiple times despite having an
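
Note: the new getCurrentDockerContext helper assumes that `docker context ls --format json` prints one JSON object per line, with the active context flagged by a Current field. Below is a minimal standalone sketch of that parsing, for illustration only; the context names and socket paths in sampleOutput are invented, not taken from the commit.

```ts
// Shape of one line of `docker context ls --format json` output (only the fields used here).
interface DockerContextEntry {
  Name: string
  Description: string
  DockerEndpoint: string
  Current?: boolean
}

// Example output: one JSON object per line (values are made up for the sketch).
const sampleOutput = [
  `{"Current":false,"Description":"","DockerEndpoint":"unix:///var/run/docker.sock","Name":"default"}`,
  `{"Current":true,"Description":"Docker Desktop","DockerEndpoint":"unix:///home/user/.docker/desktop/docker.sock","Name":"desktop-linux"}`,
].join("\n")

// Pick the context marked Current, mirroring the logic added in the diff.
function pickCurrentContext(output: string): DockerContextEntry {
  for (const line of output.split("\n")) {
    if (!line.trim()) continue
    const parsed = JSON.parse(line)
    if (parsed.Current) {
      return parsed as DockerContextEntry
    }
  }
  throw new Error("No current Docker context")
}

// The setup code then points DOCKER_HOST at this endpoint so testcontainers uses it.
console.log(pickCurrentContext(sampleOutput).DockerEndpoint)
```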

@@ -25,7 +25,7 @@ function getTestcontainers(): ContainerInfo[] {
   // We use --format json to make sure the output is nice and machine-readable,
   // and we use --no-trunc so that the command returns full container IDs so we
   // can filter on them correctly.
-  return execSync("docker ps --format json --no-trunc")
+  return execSync("docker ps --all --format json --no-trunc")
     .toString()
     .split("\n")
     .filter(x => x.length > 0)
@@ -37,6 +37,10 @@ function getTestcontainers(): ContainerInfo[] {
   )
 }
 
+function removeContainer(container: ContainerInfo) {
+  execSync(`docker rm ${container.ID}`)
+}
+
 export function getContainerByImage(image: string) {
   const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
   if (containers.length > 1) {
@@ -49,6 +53,10 @@ export function getContainerByImage(image: string) {
   return containers[0]
 }
 
+function getContainerByName(name: string) {
+  return getTestcontainers().find(x => x.Names === name)
+}
+
 export function getContainerById(id: string) {
   return getTestcontainers().find(x => x.ID === id)
 }
@@ -70,7 +78,34 @@ export function getExposedV4Port(container: ContainerInfo, port: number) {
   return getExposedV4Ports(container).find(x => x.container === port)?.host
 }
 
+interface DockerContext {
+  Name: string
+  Description: string
+  DockerEndpoint: string
+  ContextType: string
+  Error: string
+}
+
+function getCurrentDockerContext(): DockerContext {
+  const out = execSync("docker context ls --format json")
+  for (const line of out.toString().split("\n")) {
+    const parsed = JSON.parse(line)
+    if (parsed.Current) {
+      return parsed as DockerContext
+    }
+  }
+  throw new Error("No current Docker context")
+}
+
 export function setupEnv(...envs: any[]) {
+  // For whatever reason, testcontainers doesn't always use the correct current
+  // docker context. This bit of code forces the issue by finding the current
+  // context and setting it as the DOCKER_HOST environment
+  if (!process.env.DOCKER_HOST) {
+    const dockerContext = getCurrentDockerContext()
+    process.env.DOCKER_HOST = dockerContext.DockerEndpoint
+  }
+
   // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
   // should be relatively safe to look for it by its image name.
   const couch = getContainerByImage("budibase/couchdb")
@@ -116,6 +151,16 @@ export async function startContainer(container: GenericContainer) {
     key = imageName.split("@")[0]
   }
   key = key.replace(/\//g, "-").replace(/:/g, "-")
+  const name = `${key}_testcontainer`
+
+  // If a container has died it hangs around and future attempts to start a
+  // container with the same name will fail. What we do here is if we find a
+  // matching container and it has exited, we remove it before carrying on. This
+  // removes the need to do this removal manually.
+  const existingContainer = getContainerByName(name)
+  if (existingContainer?.State === "exited") {
+    removeContainer(existingContainer)
+  }
 
   container = container
     .withReuse()
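
Note: the container-utils changes above work together: listing with `docker ps --all` now includes exited containers, so a stale container still holding the `${key}_testcontainer` name can be found and removed before the next run reuses that name. Below is a rough standalone sketch of that flow using the same shell commands as the diff; the PsEntry shape is trimmed to the fields used here, and the container name at the bottom is only an example value.

```ts
import { execSync } from "child_process"

// Minimal subset of the JSON that `docker ps --format json` prints per line.
interface PsEntry {
  ID: string
  Names: string
  State: string
}

function listAllContainers(): PsEntry[] {
  // --all includes exited containers, --no-trunc keeps full container IDs.
  return execSync("docker ps --all --format json --no-trunc")
    .toString()
    .split("\n")
    .filter(line => line.length > 0)
    .map(line => JSON.parse(line) as PsEntry)
}

function removeIfExited(name: string) {
  const existing = listAllContainers().find(c => c.Names === name)
  if (existing?.State === "exited") {
    // A dead container keeps its name reserved, so drop it before reuse.
    execSync(`docker rm ${existing.ID}`)
  }
}

// Example usage (the name is illustrative, not taken from the commit):
removeIfExited("budibase-couchdb_testcontainer")
```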

@@ -1 +1 @@
-Subproject commit 6f38253253ee364aea636add990083ca5cda3bde
+Subproject commit e2252498ddfade3c2592b1ec78f7bee4e3cf0d2f

@@ -23,13 +23,11 @@ import {
   Table,
   RowValue,
   DynamicVariable,
-  QueryJsonRequest,
 } from "@budibase/types"
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
 import { isEqual } from "lodash"
 import { processTable } from "../../sdk/app/tables/getters"
-import { makeExternalQuery } from "../../integrations/base/query"
 
 export async function fetch(ctx: UserCtx) {
   ctx.body = await sdk.datasources.fetch()
@@ -298,16 +296,6 @@ export async function find(ctx: UserCtx) {
   ctx.body = await sdk.datasources.removeSecretSingle(datasource)
 }
 
-// dynamic query functionality
-export async function query(ctx: UserCtx<QueryJsonRequest>) {
-  const queryJson = ctx.request.body
-  try {
-    ctx.body = await makeExternalQuery(queryJson)
-  } catch (err: any) {
-    ctx.throw(400, err)
-  }
-}
-
 export async function getExternalSchema(ctx: UserCtx) {
   const datasource = await sdk.datasources.get(ctx.params.datasourceId)
   const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(

@@ -2,10 +2,7 @@ import Router from "@koa/router"
 import * as datasourceController from "../controllers/datasource"
 import authorized from "../../middleware/authorized"
 import { permissions } from "@budibase/backend-core"
-import {
-  datasourceValidator,
-  datasourceQueryValidator,
-} from "./utils/validators"
+import { datasourceValidator } from "./utils/validators"
 
 const router: Router = new Router()
 
@@ -41,15 +38,6 @@ router
     ),
     datasourceController.update
   )
-  .post(
-    "/api/datasources/query",
-    authorized(
-      permissions.PermissionType.TABLE,
-      permissions.PermissionLevel.READ
-    ),
-    datasourceQueryValidator(),
-    datasourceController.query
-  )
   .post(
     "/api/datasources/:datasourceId/schema",
     authorized(permissions.BUILDER),

@@ -1,10 +1,4 @@
-import {
-  Datasource,
-  Operation,
-  Query,
-  QueryPreview,
-  TableSourceType,
-} from "@budibase/types"
+import { Datasource, Query, QueryPreview } from "@budibase/types"
 import {
   DatabaseName,
   datasourceDescribe,
@@ -817,49 +811,6 @@ if (descriptions.length) {
       })
 
       describe("query through datasource", () => {
-        it("should be able to query the datasource", async () => {
-          const datasource = await config.api.datasource.create(rawDatasource)
-
-          const entityId = tableName
-          await config.api.datasource.update({
-            ...datasource,
-            entities: {
-              [entityId]: {
-                name: entityId,
-                schema: {},
-                type: "table",
-                primary: ["id"],
-                sourceId: datasource._id!,
-                sourceType: TableSourceType.EXTERNAL,
-              },
-            },
-          })
-
-          const res = await config.api.datasource.query({
-            endpoint: {
-              datasourceId: datasource._id!,
-              operation: Operation.READ,
-              entityId,
-            },
-            resource: {
-              fields: ["id", "name"],
-            },
-            filters: {
-              string: {
-                name: "two",
-              },
-            },
-          })
-          expect(res).toHaveLength(1)
-          expect(res[0]).toEqual({
-            id: 2,
-            name: "two",
-            // the use of table.* introduces the possibility of nulls being returned
-            birthday: null,
-            number: null,
-          })
-        })
-
       // this parameter really only impacts SQL queries
       describe("confirm nullDefaultSupport", () => {
         let queryParams: Partial<Query>

@@ -1,5 +1,4 @@
 import { auth, permissions } from "@budibase/backend-core"
-import { DataSourceOperation } from "../../../constants"
 import {
   AutomationActionStepId,
   AutomationStep,
@@ -231,30 +230,6 @@ export function externalSearchValidator() {
   )
 }
 
-export function datasourceQueryValidator() {
-  return auth.joiValidator.body(
-    Joi.object({
-      endpoint: Joi.object({
-        datasourceId: Joi.string().required(),
-        operation: Joi.string()
-          .required()
-          .valid(...Object.values(DataSourceOperation)),
-        entityId: Joi.string().required(),
-      }).required(),
-      resource: Joi.object({
-        fields: Joi.array().items(Joi.string()).optional(),
-      }).optional(),
-      body: Joi.object().optional(),
-      sort: Joi.object().optional(),
-      filters: filterObject().optional(),
-      paginate: Joi.object({
-        page: Joi.string().alphanum().optional(),
-        limit: Joi.number().optional(),
-      }).optional(),
-    })
-  )
-}
-
 export function webhookValidator() {
   return auth.joiValidator.body(
     Joi.object({
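
Note: for anyone still calling the removed endpoint, the shape of the request body it accepted can be read off the deleted test and validator above. A rough reconstruction follows; the datasource ID and table name are placeholders, not values from the commit.

```ts
// Reconstructed from the removed datasourceQueryValidator and the removed test case.
const exampleQueryRequest = {
  endpoint: {
    datasourceId: "datasource_plus_1234", // placeholder ID
    operation: "READ",                    // one of the removed DataSourceOperation values
    entityId: "persons",                  // placeholder table name
  },
  resource: {
    fields: ["id", "name"],
  },
  filters: {
    string: {
      name: "two",
    },
  },
}

// Bodies like this were POSTed to the now-removed /api/datasources/query route.
console.log(JSON.stringify(exampleQueryRequest, null, 2))
```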

@@ -45,17 +45,6 @@ export enum AuthTypes {
   EXTERNAL = "external",
 }
 
-export enum DataSourceOperation {
-  CREATE = "CREATE",
-  READ = "READ",
-  UPDATE = "UPDATE",
-  DELETE = "DELETE",
-  BULK_CREATE = "BULK_CREATE",
-  CREATE_TABLE = "CREATE_TABLE",
-  UPDATE_TABLE = "UPDATE_TABLE",
-  DELETE_TABLE = "DELETE_TABLE",
-}
-
 export enum DatasourceAuthTypes {
   GOOGLE = "google",
 }

@@ -4,7 +4,6 @@ import {
   Datasource,
   FetchDatasourceInfoResponse,
   FieldType,
-  QueryJsonRequest,
   RelationshipType,
   UpdateDatasourceRequest,
   UpdateDatasourceResponse,
@@ -69,13 +68,6 @@ export class DatasourceAPI extends TestAPI {
     return await this._get<Datasource[]>(`/api/datasources`, { expectations })
   }
 
-  query = async (query: QueryJsonRequest, expectations?: Expectations) => {
-    return await this._post<any>(`/api/datasources/query`, {
-      body: query,
-      expectations,
-    })
-  }
-
   fetchSchema = async (
     {
       datasourceId,

@@ -190,10 +190,6 @@ export interface EnrichedQueryJson extends QueryJson {
   datasource?: Datasource
 }
 
-export interface QueryJsonRequest extends Omit<QueryJson, "endpoint"> {
-  endpoint: QueryJson["endpoint"] & { datasourceId: string; entityId: string }
-}
-
 export interface QueryOptions {
   disableReturning?: boolean
   disableBindings?: boolean