Merge pull request #9368 from Budibase/test/9339-sqlpostgres-row-api-test-suite
Test - #9339 sqlpostgres row api test suite
commit a353848152

@@ -1,4 +1,7 @@
function getTestContainerSettings(serverName: string, key: string) {
function getTestContainerSettings(
serverName: string,
key: string
): string | null {
const entry = Object.entries(global).find(
([k]) =>
k.includes(`_${serverName.toUpperCase()}`) &&

@@ -10,20 +13,25 @@ function getTestContainerSettings(serverName: string, key: string) {
return entry[1]
}

function getCouchConfig() {
const port = getTestContainerSettings("COUCHDB-SERVICE", "PORT_5984")
function getContainerInfo(containerName: string, port: number) {
const assignedPort = getTestContainerSettings(
containerName.toUpperCase(),
`PORT_${port}`
)
const host = getTestContainerSettings(containerName.toUpperCase(), "IP")
return {
port,
url: `http://${getTestContainerSettings("COUCHDB-SERVICE", "IP")}:${port}`,
port: assignedPort,
host,
url: host && assignedPort && `http://${host}:${assignedPort}`,
}
}

function getCouchConfig() {
return getContainerInfo("couchdb-service", 5984)
}

function getMinioConfig() {
const port = getTestContainerSettings("MINIO-SERVICE", "PORT_9000")
return {
port,
url: `http://${getTestContainerSettings("MINIO-SERVICE", "IP")}:${port}`,
}
return getContainerInfo("minio-service", 9000)
}

export function setupEnv(...envs: any[]) {

@@ -34,7 +42,7 @@ export function setupEnv(...envs: any[]) {
{ key: "MINIO_URL", value: getMinioConfig().url },
]

for (const config of configs.filter(x => x.value !== null)) {
for (const config of configs.filter(x => !!x.value)) {
for (const env of envs) {
env._set(config.key, config.value)
}
@@ -10,6 +10,7 @@ import {
FieldSchema,
Row,
Table,
RelationshipTypes,
} from "@budibase/types"
import {
breakRowIdField,

@@ -18,7 +19,7 @@ import {
convertRowId,
} from "../../../integrations/utils"
import { getDatasourceAndQuery } from "./utils"
import { FieldTypes, RelationshipTypes } from "../../../constants"
import { FieldTypes } from "../../../constants"
import { breakExternalTableId, isSQL } from "../../../integrations/utils"
import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"

@@ -44,6 +45,7 @@ export interface RunConfig {
row?: Row
rows?: Row[]
tables?: Record<string, Table>
includeSqlRelationships?: IncludeRelationship
}

function buildFilters(

@@ -706,7 +708,9 @@ export class ExternalRequest {
},
resource: {
// have to specify the fields to avoid column overlap (for SQL)
fields: isSql ? this.buildFields(table) : [],
fields: isSql
? this.buildFields(table, config.includeSqlRelationships)
: [],
},
filters,
sort,
@@ -18,6 +18,7 @@ import {
PaginationJson,
Table,
Datasource,
IncludeRelationship,
} from "@budibase/types"
import sdk from "../../../sdk"

@@ -57,6 +58,7 @@ export async function patch(ctx: BBContext) {
return handleRequest(Operation.UPDATE, tableId, {
id: breakRowIdField(id),
row: inputs,
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
}

@@ -65,6 +67,7 @@ export async function save(ctx: BBContext) {
const tableId = ctx.params.tableId
return handleRequest(Operation.CREATE, tableId, {
row: inputs,
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
}

@@ -78,7 +81,9 @@ export async function fetchView(ctx: BBContext) {

export async function fetch(ctx: BBContext) {
const tableId = ctx.params.tableId
return handleRequest(Operation.READ, tableId)
return handleRequest(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}

export async function find(ctx: BBContext) {

@@ -86,6 +91,7 @@ export async function find(ctx: BBContext) {
const tableId = ctx.params.tableId
const response = (await handleRequest(Operation.READ, tableId, {
id: breakRowIdField(id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})) as Row[]
return response ? response[0] : response
}

@@ -95,6 +101,7 @@ export async function destroy(ctx: BBContext) {
const id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})) as { row: Row }
return { response: { ok: true }, row }
}

@@ -107,6 +114,7 @@ export async function bulkDestroy(ctx: BBContext) {
promises.push(
handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(row._id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
)
}

@@ -149,6 +157,7 @@ export async function search(ctx: BBContext) {
filters: query,
sort,
paginate: paginateObj as PaginationJson,
includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[]
let hasNextPage = false
if (paginate && rows.length === limit) {

@@ -159,6 +168,7 @@ export async function search(ctx: BBContext) {
limit: 1,
page: bookmark * limit + 1,
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[]
hasNextPage = nextRows.length > 0
}

@@ -247,6 +257,7 @@ export async function fetchEnrichedRow(ctx: BBContext) {
const response = (await handleRequest(Operation.READ, tableId, {
id,
datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[]
const table: Table = tables[tableName]
const row = response[0]

@@ -274,6 +285,7 @@ export async function fetchEnrichedRow(ctx: BBContext) {
[primaryLink]: linkedIds,
},
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}
return row
@@ -8,7 +8,7 @@ import {
foreignKeyStructure,
hasTypeChanged,
} from "./utils"
import { FieldTypes, RelationshipTypes } from "../../../constants"
import { FieldTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query"
import { handleRequest } from "../row/external"
import { events, context } from "@budibase/backend-core"

@@ -22,6 +22,7 @@ import {
FieldSchema,
BBContext,
TableRequest,
RelationshipTypes,
} from "@budibase/types"
import sdk from "../../../sdk"
const { cloneDeep } = require("lodash/fp")

@@ -146,7 +147,7 @@ function generateLinkSchema(
column: FieldSchema,
table: Table,
relatedTable: Table,
type: string
type: RelationshipTypes
) {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys")
@@ -13,7 +13,7 @@ beforeAll(async () => {
app = await config.init()
table = await config.updateTable()
apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, setup)
makeRequest = generateMakeRequest(apiKey)
})

afterAll(setup.afterAll)
@@ -10,7 +10,7 @@ beforeAll(async () => {
await config.init()
globalUser = await config.globalUser()
apiKey = await config.generateApiKey(globalUser._id)
makeRequest = generateMakeRequest(apiKey, setup)
makeRequest = generateMakeRequest(apiKey)
workerRequests.readGlobalUser.mockReturnValue(globalUser)
})
@@ -1,13 +1,27 @@
import * as setup from "../../tests/utilities"
import { checkSlashesInUrl } from "../../../../utilities"
import supertest from "supertest"

export function generateMakeRequest(apiKey: string, setup: any) {
const request = setup.getRequest()
const config = setup.getConfig()
export type HttpMethod = "post" | "get" | "put" | "delete" | "patch"

export type MakeRequestResponse = (
method: HttpMethod,
endpoint: string,
body?: any,
intAppId?: string
) => Promise<supertest.Response>

export function generateMakeRequest(
apiKey: string,
isInternal = false
): MakeRequestResponse {
const request = setup.getRequest()!
const config = setup.getConfig()!
return async (
method: string,
method: HttpMethod,
endpoint: string,
body?: any,
intAppId: string = config.getAppId()
intAppId: string | null = config.getAppId()
) => {
const extraHeaders: any = {
"x-budibase-api-key": apiKey,

@@ -15,9 +29,12 @@ export function generateMakeRequest(apiKey: string, setup: any) {
if (intAppId) {
extraHeaders["x-budibase-app-id"] = intAppId
}
const req = request[method](
checkSlashesInUrl(`/api/public/v1/${endpoint}`)
).set(config.defaultHeaders(extraHeaders))

const url = isInternal
? endpoint
: checkSlashesInUrl(`/api/public/v1/${endpoint}`)

const req = request[method](url).set(config.defaultHeaders(extraHeaders))
if (body) {
req.send(body)
}
@@ -7,7 +7,7 @@ import * as setup from "./utilities"
import { wipeDb } from "./utilities/TestFunctions"

describe("/cloud", () => {
let request = setup.getRequest()
let request = setup.getRequest()!
let config = setup.getConfig()

afterAll(setup.afterAll)
@@ -1,4 +1,5 @@
import { objectStore, roles, constants } from "@budibase/backend-core"
export { FieldType as FieldTypes, RelationshipTypes } from "@budibase/types"

export enum FilterTypes {
STRING = "string",

@@ -22,23 +23,6 @@ export const NoEmptyFilterStrings = [
FilterTypes.NOT_CONTAINS,
]

export enum FieldTypes {
STRING = "string",
BARCODEQR = "barcodeqr",
LONGFORM = "longform",
OPTIONS = "options",
NUMBER = "number",
BOOLEAN = "boolean",
ARRAY = "array",
DATETIME = "datetime",
ATTACHMENT = "attachment",
LINK = "link",
FORMULA = "formula",
AUTO = "auto",
JSON = "json",
INTERNAL = "internal",
}

export const CanSwitchTypes = [
[exports.FieldTypes.JSON, exports.FieldTypes.ARRAY],
[

@@ -54,12 +38,6 @@ export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current
)

export enum RelationshipTypes {
ONE_TO_MANY = "one-to-many",
MANY_TO_ONE = "many-to-one",
MANY_TO_MANY = "many-to-many",
}

export enum FormulaTypes {
STATIC = "static",
DYNAMIC = "dynamic",
@@ -1,8 +1,4 @@
import {
FieldTypes,
AutoFieldSubTypes,
RelationshipTypes,
} from "../../constants"
import { FieldTypes, AutoFieldSubTypes } from "../../constants"
import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument"

@@ -11,7 +7,7 @@ import { employeeImport } from "./employeeImport"
import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core"
import { Table, Row } from "@budibase/types"
import { Table, Row, RelationshipTypes } from "@budibase/types"

export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"

@@ -190,7 +186,7 @@ export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
},
}

export const DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = {
_id: DEFAULT_EMPLOYEE_TABLE_ID,
type: "internal",
views: {},

@@ -287,7 +283,7 @@ export const DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
sortable: false,
},
"Badge Photo": {
type: "attachment",
type: FieldTypes.ATTACHMENT,
constraints: {
type: FieldTypes.ARRAY,
presence: false,

@@ -466,7 +462,7 @@ export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
// sortable: true,
},
"Works End": {
type: "datetime",
type: FieldTypes.DATETIME,
constraints: {
type: "string",
length: {},

@@ -480,7 +476,7 @@ export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
ignoreTimezones: true,
},
"Updated Price": {
type: "number",
type: FieldTypes.NUMBER,
constraints: {
type: "number",
presence: false,
@@ -1,13 +1,14 @@
import { IncludeDocs, getLinkDocuments } from "./linkUtils"
import { InternalTables, getUserMetadataParams } from "../utils"
import Sentry from "@sentry/node"
import { FieldTypes, RelationshipTypes } from "../../constants"
import { FieldTypes } from "../../constants"
import { context } from "@budibase/backend-core"
import LinkDocument from "./LinkDocument"
import {
Database,
FieldSchema,
LinkDocumentValue,
RelationshipTypes,
Row,
Table,
} from "@budibase/types"
@@ -0,0 +1,752 @@
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"

import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
FieldType,
RelationshipTypes,
Row,
SourceName,
Table,
} from "@budibase/types"
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
import { GenericContainer } from "testcontainers"

const config = setup.getConfig()!

jest.setTimeout(30000)

jest.unmock("pg")

describe("row api - postgres", () => {
let makeRequest: MakeRequestResponse,
postgresDatasource: Datasource,
primaryPostgresTable: Table,
auxPostgresTable: Table

let host: string
let port: number

beforeAll(async () => {
const container = await new GenericContainer("postgres")
.withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password")
.start()

host = container.getContainerIpAddress()
port = container.getMappedPort(5432)

await config.init()
const apiKey = await config.generateApiKey()

makeRequest = generateMakeRequest(apiKey, true)
})

beforeEach(async () => {
postgresDatasource = await config.createDatasource({
datasource: {
type: "datasource",
source: SourceName.POSTGRES,
plus: true,
config: {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
ca: false,
},
},
})

auxPostgresTable = await config.createTable({
name: generator.word({ length: 10 }),
type: "external",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
constraints: {
presence: true,
},
},
title: {
name: "title",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
},
sourceId: postgresDatasource._id,
})

primaryPostgresTable = await config.createTable({
name: generator.word({ length: 10 }),
type: "external",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
constraints: {
presence: true,
},
},
name: {
name: "name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
description: {
name: "description",
type: FieldType.STRING,
},
value: {
name: "value",
type: FieldType.NUMBER,
},
linkedField: {
type: FieldType.LINK,
constraints: {
type: "array",
presence: false,
},
fieldName: "foreignField",
name: "linkedField",
relationshipType: RelationshipTypes.ONE_TO_MANY,
tableId: auxPostgresTable._id,
},
},
sourceId: postgresDatasource._id,
})
})

afterAll(async () => {
await config.end()
})

function generateRandomPrimaryRowData() {
return {
name: generator.name(),
description: generator.paragraph(),
value: generator.age(),
}
}

type PrimaryRowData = {
name: string
description: string
value: number
}

async function createPrimaryRow(opts: {
rowData: PrimaryRowData
createForeignRow?: boolean
}) {
let { rowData } = opts
let foreignRow: Row | undefined
if (opts?.createForeignRow) {
foreignRow = await config.createRow({
tableId: auxPostgresTable._id,
title: generator.name(),
})

rowData = {
...rowData,
[`fk_${auxPostgresTable.name}_foreignField`]: foreignRow.id,
}
}

const row = await config.createRow({
tableId: primaryPostgresTable._id,
...rowData,
})

return { row, foreignRow }
}

async function createDefaultPgTable() {
return await config.createTable({
name: generator.word({ length: 10 }),
type: "external",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
constraints: {
presence: true,
},
},
},
sourceId: postgresDatasource._id,
})
}

async function populatePrimaryRows(
count: number,
opts?: {
createForeignRow?: boolean
}
) {
return await Promise.all(
Array(count)
.fill({})
.map(async () => {
const rowData = generateRandomPrimaryRowData()
return {
rowData,
...(await createPrimaryRow({
rowData,
createForeignRow: opts?.createForeignRow,
})),
}
})
)
}

it("validate table schema", async () => {
const res = await makeRequest(
"get",
`/api/datasources/${postgresDatasource._id}`
)

expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
ca: false,
database: "postgres",
host,
password: "--secret-value--",
port,
rejectUnauthorized: false,
schema: "public",
ssl: false,
user: "postgres",
},
plus: true,
source: "POSTGRES",
type: "datasource",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
entities: expect.any(Object),
})
})

describe("POST /api/:tableId/rows", () => {
const createRow = (tableId: string | undefined, body: object) =>
makeRequest("post", `/api/${tableId}/rows`, body)

describe("given than no row exists", () => {
it("adding a new one persists it", async () => {
const newRow = generateRandomPrimaryRowData()

const res = await createRow(primaryPostgresTable._id, newRow)

expect(res.status).toBe(200)

const persistedRows = await config.getRows(primaryPostgresTable._id!)
expect(persistedRows).toHaveLength(1)

const expected = {
...res.body,
...newRow,
}

expect(persistedRows).toEqual([expect.objectContaining(expected)])
})

it("multiple rows can be persisted", async () => {
const numberOfRows = 10
const newRows = Array(numberOfRows).fill(generateRandomPrimaryRowData())

for (const newRow of newRows) {
const res = await createRow(primaryPostgresTable._id, newRow)
expect(res.status).toBe(200)
}

const persistedRows = await config.getRows(primaryPostgresTable._id!)
expect(persistedRows).toHaveLength(numberOfRows)
expect(persistedRows).toEqual(
expect.arrayContaining(newRows.map(expect.objectContaining))
)
})
})
})

describe("PATCH /api/:tableId/rows", () => {
const updateRow = (tableId: string | undefined, body: Row) =>
makeRequest("patch", `/api/${tableId}/rows`, body)

describe("given than a row exists", () => {
let row: Row
beforeEach(async () => {
let rowResponse = _.sample(await populatePrimaryRows(10))!
row = rowResponse.row
})

it("updating it persists it", async () => {
const newName = generator.name()
const newValue = generator.age()
const updatedRow = {
...row,
name: newName,
value: newValue,
}

const res = await updateRow(primaryPostgresTable._id, updatedRow)

expect(res.status).toBe(200)
expect(res.body).toEqual(updatedRow)

const persistedRow = await config.getRow(
primaryPostgresTable._id!,
row.id
)

expect(persistedRow).toEqual(
expect.objectContaining({
id: row.id,
name: newName,
value: newValue,
})
)
})
})
})

describe("DELETE /api/:tableId/rows", () => {
const deleteRow = (
tableId: string | undefined,
body: Row | { rows: Row[] }
) => makeRequest("delete", `/api/${tableId}/rows`, body)

describe("given than multiple row exist", () => {
const numberOfInitialRows = 5
let rows: Row[]
beforeEach(async () => {
rows = (await populatePrimaryRows(numberOfInitialRows)).map(x => x.row)
})

it("delete request removes it", async () => {
const row = _.sample(rows)!
const res = await deleteRow(primaryPostgresTable._id, row)

expect(res.status).toBe(200)

const persistedRows = await config.getRows(primaryPostgresTable._id!)
expect(persistedRows).toHaveLength(numberOfInitialRows - 1)

expect(row.id).toBeDefined()
expect(persistedRows).not.toContain(
expect.objectContaining({ _id: row.id })
)
})

it("multiple rows can be removed at once", async () => {
let rowsToDelete = _.sampleSize(rows, 3)!

const res = await deleteRow(primaryPostgresTable._id, {
rows: rowsToDelete,
})

expect(res.status).toBe(200)

const persistedRows = await config.getRows(primaryPostgresTable._id!)
expect(persistedRows).toHaveLength(numberOfInitialRows - 3)

for (const row of rowsToDelete) {
expect(persistedRows).not.toContain(
expect.objectContaining({ _id: row.id })
)
}
})
})
})

describe("GET /api/:tableId/rows/:rowId", () => {
const getRow = (tableId: string | undefined, rowId?: string | undefined) =>
makeRequest("get", `/api/${tableId}/rows/${rowId}`)

describe("given than a table have a single row", () => {
let rowData: PrimaryRowData, row: Row
beforeEach(async () => {
const [createdRow] = await populatePrimaryRows(1)
rowData = createdRow.rowData
row = createdRow.row
})

it("the row can be retrieved successfully", async () => {
const res = await getRow(primaryPostgresTable._id, row.id)

expect(res.status).toBe(200)

expect(res.body).toEqual(expect.objectContaining(rowData))
})
})

describe("given than a table have a multiple rows", () => {
let rows: { row: Row; rowData: PrimaryRowData }[]

beforeEach(async () => {
rows = await populatePrimaryRows(10)
})

it("a single row can be retrieved successfully", async () => {
const { rowData, row } = _.sample(rows)!

const res = await getRow(primaryPostgresTable._id, row.id)

expect(res.status).toBe(200)

expect(res.body).toEqual(expect.objectContaining(rowData))
})
})

describe("given a row with relation data", () => {
let row: Row
beforeEach(async () => {
let [createdRow] = await populatePrimaryRows(1, {
createForeignRow: true,
})
row = createdRow.row
})

it("foreign key fields are not retrieved", async () => {
const res = await getRow(primaryPostgresTable._id, row.id)

expect(res.status).toBe(200)

expect(res.body).toEqual({
...row,
_id: expect.any(String),
_rev: expect.any(String),
})
expect(res.body.foreignField).toBeUndefined()
})
})
})

describe("POST /api/:tableId/search", () => {
const search = (tableId: string | undefined, body?: object) =>
makeRequest("post", `/api/${tableId}/search`, body)

describe("search without parameters", () => {
describe("given than a table has no rows", () => {
it("search without query returns empty", async () => {
const res = await search(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body).toEqual({
rows: [],
bookmark: null,
hasNextPage: false,
})
})
})

describe("given than a table has multiple rows", () => {
const rowsCount = 6
let rows: {
row: Row
rowData: PrimaryRowData
}[]
beforeEach(async () => {
rows = await populatePrimaryRows(rowsCount)
})

it("search without query returns all of them", async () => {
const res = await search(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body).toEqual({
rows: expect.arrayContaining(
rows.map(r => expect.objectContaining(r.rowData))
),
bookmark: null,
hasNextPage: false,
})
expect(res.body.rows).toHaveLength(rowsCount)
})
})

describe("given than multiple tables have multiple rows", () => {
const rowsCount = 6
beforeEach(async () => {
const createRandomTableWithRows = async () =>
await config.createRow({
tableId: (await createDefaultPgTable())._id,
title: generator.name(),
})

await createRandomTableWithRows()
await createRandomTableWithRows()

await populatePrimaryRows(rowsCount)

await createRandomTableWithRows()
})
it("search only return the requested ones", async () => {
const res = await search(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body.rows).toHaveLength(rowsCount)
})
})
})

it("Querying by a string field returns the rows with field containing or starting by that value", async () => {
const name = generator.name()
const rowsToFilter = [
...Array(2).fill({
name,
description: generator.paragraph(),
value: generator.age(),
}),
...Array(2).fill({
name: `${name}${utils.newid()}`,
description: generator.paragraph(),
value: generator.age(),
}),
]

await populatePrimaryRows(3)
for (const row of rowsToFilter) {
await createPrimaryRow({
rowData: row,
})
}
await populatePrimaryRows(1)

const res = await search(primaryPostgresTable._id, {
query: {
string: {
name,
},
},
})

expect(res.status).toBe(200)

expect(res.body).toEqual({
rows: expect.arrayContaining(rowsToFilter.map(expect.objectContaining)),
bookmark: null,
hasNextPage: false,
})
expect(res.body.rows).toHaveLength(4)
})

it("Querying respects the limit fields", async () => {
await populatePrimaryRows(6)

const res = await search(primaryPostgresTable._id, {
limit: 2,
})

expect(res.status).toBe(200)

expect(res.body.rows).toHaveLength(2)
})

describe("sort", () => {
beforeEach(async () => {
const defaultValue = generateRandomPrimaryRowData()

await createPrimaryRow({
rowData: {
...defaultValue,
name: "d",
value: 3,
},
})
await createPrimaryRow({
rowData: { ...defaultValue, name: "aaa", value: 40 },
})
await createPrimaryRow({
rowData: { ...defaultValue, name: "ccccc", value: -5 },
})
await createPrimaryRow({
rowData: { ...defaultValue, name: "bb", value: 0 },
})
})

it("Querying respects the sort order when sorting ascending by a string value", async () => {
const res = await search(primaryPostgresTable._id, {
sort: "name",
sortOrder: "ascending",
sortType: "string",
})

expect(res.status).toBe(200)
expect(res.body.rows).toEqual([
expect.objectContaining({ name: "aaa" }),
expect.objectContaining({ name: "bb" }),
expect.objectContaining({ name: "ccccc" }),
expect.objectContaining({ name: "d" }),
])
})

it("Querying respects the sort order when sorting descending by a string value", async () => {
const res = await search(primaryPostgresTable._id, {
sort: "name",
sortOrder: "descending",
sortType: "string",
})

expect(res.status).toBe(200)
expect(res.body.rows).toEqual([
expect.objectContaining({ name: "d" }),
expect.objectContaining({ name: "ccccc" }),
expect.objectContaining({ name: "bb" }),
expect.objectContaining({ name: "aaa" }),
])
})

it("Querying respects the sort order when sorting ascending by a numeric value", async () => {
const res = await search(primaryPostgresTable._id, {
sort: "value",
sortOrder: "ascending",
sortType: "number",
})

expect(res.status).toBe(200)
expect(res.body.rows).toEqual([
expect.objectContaining({ value: -5 }),
expect.objectContaining({ value: 0 }),
expect.objectContaining({ value: 3 }),
expect.objectContaining({ value: 40 }),
])
})

it("Querying respects the sort order when sorting descending by a numeric value", async () => {
const res = await search(primaryPostgresTable._id, {
sort: "value",
sortOrder: "descending",
sortType: "number",
})

expect(res.status).toBe(200)
expect(res.body.rows).toEqual([
expect.objectContaining({ value: 40 }),
expect.objectContaining({ value: 3 }),
expect.objectContaining({ value: 0 }),
expect.objectContaining({ value: -5 }),
])
})
})
})

describe("GET /api/:tableId/:rowId/enrich", () => {
const getAll = (tableId: string | undefined, rowId: string | undefined) =>
makeRequest("get", `/api/${tableId}/${rowId}/enrich`)
describe("given a row with relation data", () => {
let row: Row, foreignRow: Row | undefined

beforeEach(async () => {
const rowsInfo = await createPrimaryRow({
rowData: generateRandomPrimaryRowData(),
createForeignRow: true,
})

row = rowsInfo.row
foreignRow = rowsInfo.foreignRow
})

it("enrich populates the foreign field", async () => {
const res = await getAll(primaryPostgresTable._id, row.id)

expect(res.status).toBe(200)

expect(foreignRow).toBeDefined()
expect(res.body).toEqual({
...row,
linkedField: [
{
...foreignRow,
},
],
})
})
})
})

describe("GET /api/:tableId/rows", () => {
const getAll = (tableId: string | undefined) =>
makeRequest("get", `/api/${tableId}/rows`)

describe("given a table with no rows", () => {
it("get request returns empty", async () => {
const res = await getAll(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body).toHaveLength(0)
})
})
describe("given a table with multiple rows", () => {
const rowsCount = 6
let rows: {
row: Row
foreignRow: Row | undefined
rowData: PrimaryRowData
}[]
beforeEach(async () => {
rows = await populatePrimaryRows(rowsCount)
})

it("get request returns all of them", async () => {
const res = await getAll(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body).toHaveLength(rowsCount)
expect(res.body).toEqual(
expect.arrayContaining(
rows.map(r => expect.objectContaining(r.rowData))
)
)
})
})

describe("given multiple tables with multiple rows", () => {
const rowsCount = 6

beforeEach(async () => {
const createRandomTableWithRows = async () =>
await config.createRow({
tableId: (await createDefaultPgTable())._id,
title: generator.name(),
})

await createRandomTableWithRows()
await populatePrimaryRows(rowsCount)
await createRandomTableWithRows()
})

it("get returns the requested ones", async () => {
const res = await getAll(primaryPostgresTable._id)

expect(res.status).toBe(200)

expect(res.body).toHaveLength(rowsCount)
})
})
})
})
@@ -90,10 +90,15 @@ function parseFilters(filters: SearchFilters | undefined): SearchFilters {
function generateSelectStatement(
json: QueryJson,
knex: Knex
): (string | Knex.Raw)[] {
): (string | Knex.Raw)[] | "*" {
const { resource, meta } = json

if (!resource) {
return "*"
}

const schema = meta?.table?.schema
return resource!.fields.map(field => {
return resource.fields.map(field => {
const fieldNames = field.split(/\./g)
const tableName = fieldNames[0]
const columnName = fieldNames[1]

@@ -392,11 +397,14 @@ class InternalBuilder {
delete parsedBody[key]
}
}

// mysql can't use returning
if (opts.disableReturning) {
return query.insert(parsedBody)
} else {
return query.insert(parsedBody).returning("*")
return query
.insert(parsedBody)
.returning(generateSelectStatement(json, knex))
}
}

@@ -481,7 +489,9 @@ class InternalBuilder {
if (opts.disableReturning) {
return query.update(parsedBody)
} else {
return query.update(parsedBody).returning("*")
return query
.update(parsedBody)
.returning(generateSelectStatement(json, knex))
}
}

@@ -496,7 +506,7 @@ class InternalBuilder {
if (opts.disableReturning) {
return query.delete()
} else {
return query.delete().returning("*")
return query.delete().returning(generateSelectStatement(json, knex))
}
}
}
@@ -247,7 +247,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
)
}

private internalConvertType(column: OracleColumn): { type: string } {
private internalConvertType(column: OracleColumn): { type: FieldTypes } {
if (this.isBooleanType(column)) {
return { type: FieldTypes.BOOLEAN }
}
@@ -39,8 +39,15 @@ import { cleanup } from "../../utilities/fileSystem"
import newid from "../../db/newid"
import { generateUserMetadataID } from "../../db/utils"
import { startup } from "../../startup"
import { AuthToken, Database } from "@budibase/types"
const supertest = require("supertest")
import supertest from "supertest"
import {
AuthToken,
Database,
Datasource,
Row,
SourceName,
Table,
} from "@budibase/types"

type DefaultUserValues = {
globalUserId: string

@@ -52,7 +59,7 @@ type DefaultUserValues = {

class TestConfiguration {
server: any
request: any
request: supertest.SuperTest<supertest.Test> | undefined
started: boolean
appId: string | null
allApps: any[]

@@ -197,7 +204,7 @@ class TestConfiguration {

// UTILS

async _req(body: any, params: any, controlFunc: any) {
_req(body: any, params: any, controlFunc: any) {
// create a fake request ctx
const request: any = {}
const appId = this.appId

@@ -360,6 +367,7 @@ class TestConfiguration {
[constants.Header.CSRF_TOKEN]: this.defaultUserValues.csrfToken,
...extras,
}

if (this.appId) {
headers[constants.Header.APP_ID] = this.appId
}

@@ -464,13 +472,13 @@ class TestConfiguration {

// TABLE

async updateTable(config?: any) {
async updateTable(config?: any): Promise<Table> {
config = config || basicTable()
this.table = await this._req(config, null, controllers.table.save)
return this.table
}

async createTable(config?: any) {
async createTable(config?: Table) {
if (config != null && config._id) {
delete config._id
}

@@ -514,7 +522,7 @@ class TestConfiguration {

// ROW

async createRow(config: any = null) {
async createRow(config?: Row): Promise<Row> {
if (!this.table) {
throw "Test requires table to be configured."
}

@@ -523,7 +531,7 @@ class TestConfiguration {
return this._req(config, { tableId }, controllers.row.save)
}

async getRow(tableId: string, rowId: string) {
async getRow(tableId: string, rowId: string): Promise<Row> {
return this._req(null, { tableId, rowId }, controllers.row.find)
}

@@ -605,7 +613,9 @@ class TestConfiguration {

// DATASOURCE

async createDatasource(config?: any) {
async createDatasource(config?: {
datasource: Datasource
}): Promise<Datasource> {
config = config || basicDatasource()
const response = await this._req(config, null, controllers.datasource.save)
this.datasource = response.datasource

@@ -626,7 +636,7 @@ class TestConfiguration {
return this.createDatasource({
datasource: {
...basicDatasource().datasource,
source: "REST",
source: SourceName.REST,
config: cfg || {},
},
})

@@ -635,7 +645,7 @@ class TestConfiguration {
async dynamicVariableDatasource() {
let datasource = await this.restDatasource()
const basedOnQuery = await this.createQuery({
...basicQuery(datasource._id),
...basicQuery(datasource._id!),
fields: {
path: "www.google.com",
},

@@ -663,7 +673,7 @@ class TestConfiguration {
datasource: any,
fields: any,
params: any,
verb: string
verb?: string
) {
return request
.post(`/api/queries/preview`)
@@ -7,6 +7,8 @@ import {
Automation,
AutomationActionStepId,
AutomationTriggerStepId,
Datasource,
SourceName,
} from "@budibase/types"

const { v4: uuidv4 } = require("uuid")

@@ -207,12 +209,12 @@ export function basicRole() {
}
}

export function basicDatasource() {
export function basicDatasource(): { datasource: Datasource } {
return {
datasource: {
type: "datasource",
name: "Test",
source: "POSTGRES",
source: SourceName.POSTGRES,
config: {},
},
}
@@ -14,6 +14,7 @@ export enum FieldType {
AUTO = "auto",
JSON = "json",
INTERNAL = "internal",
BARCODEQR = "barcodeqr",
}

export interface RowAttachment {
@@ -1,16 +1,22 @@
import { Document } from "../document"
import { View } from "./view"
import { RenameColumn } from "../../sdk"
import { FieldType } from "./row"

export enum RelationshipTypes {
ONE_TO_MANY = "one-to-many",
MANY_TO_ONE = "many-to-one",
MANY_TO_MANY = "many-to-many",
}

export interface FieldSchema {
// TODO: replace with field types enum when done
type: string
type: FieldType
externalType?: string
fieldName?: string
name: string
sortable?: boolean
tableId?: string
relationshipType?: string
relationshipType?: RelationshipTypes
through?: string
foreignKey?: string
icon?: string