Merge branch 'master' into grid-layout-improved
commit e58f34ef8c
@@ -175,6 +175,7 @@ jobs:
           docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
           docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
           docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
+          docker pull budibase/oracle-database:23.2-slim-faststart &
           docker pull minio/minio &
           docker pull redis &
           docker pull testcontainers/ryuk:0.5.1 &

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.27",
+  "version": "2.29.29",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

File diff suppressed because it is too large

@@ -1,87 +0,0 @@
-import { FieldType, Table, FieldSchema, SqlClient } from "@budibase/types"
-import { Knex } from "knex"
-
-export class SqlStatements {
-  client: string
-  table: Table
-  allOr: boolean | undefined
-  columnPrefix: string | undefined
-
-  constructor(
-    client: string,
-    table: Table,
-    { allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
-  ) {
-    this.client = client
-    this.table = table
-    this.allOr = allOr
-    this.columnPrefix = columnPrefix
-  }
-
-  getField(key: string): FieldSchema | undefined {
-    const fieldName = key.split(".")[1]
-    let found = this.table.schema[fieldName]
-    if (!found && this.columnPrefix) {
-      const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
-      found = this.table.schema[prefixRemovedFieldName]
-    }
-    return found
-  }
-
-  between(
-    query: Knex.QueryBuilder,
-    key: string,
-    low: number | string,
-    high: number | string
-  ) {
-    // Use a between operator if we have 2 valid range values
-    const field = this.getField(key)
-    if (
-      field?.type === FieldType.BIGINT &&
-      this.client === SqlClient.SQL_LITE
-    ) {
-      query = query.whereRaw(
-        `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
-        [low, high]
-      )
-    } else {
-      const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
-      query = query[fnc](key, [low, high])
-    }
-    return query
-  }
-
-  lte(query: Knex.QueryBuilder, key: string, low: number | string) {
-    // Use just a single greater than operator if we only have a low
-    const field = this.getField(key)
-    if (
-      field?.type === FieldType.BIGINT &&
-      this.client === SqlClient.SQL_LITE
-    ) {
-      query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
-        low,
-      ])
-    } else {
-      const fnc = this.allOr ? "orWhere" : "where"
-      query = query[fnc](key, ">=", low)
-    }
-    return query
-  }
-
-  gte(query: Knex.QueryBuilder, key: string, high: number | string) {
-    const field = this.getField(key)
-    // Use just a single less than operator if we only have a high
-    if (
-      field?.type === FieldType.BIGINT &&
-      this.client === SqlClient.SQL_LITE
-    ) {
-      query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
-        high,
-      ])
-    } else {
-      const fnc = this.allOr ? "orWhere" : "where"
-      query = query[fnc](key, "<=", high)
-    }
-    return query
-  }
-}

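Note on the file deleted above: the SQL_LITE/BIGINT branches existed, the CAST calls suggest, because bigint values reach SQLite as text, where a plain whereBetween compares lexicographically. A minimal Knex sketch of the failure mode and the CAST workaround — illustrative only; the table name and in-memory database are assumptions, not part of this diff:

import knex from "knex"

async function bigintCastDemo() {
  const db = knex({
    client: "sqlite3",
    connection: { filename: ":memory:" },
    useNullAsDefault: true,
  })
  await db.schema.createTable("t", table => {
    table.text("big") // bigints stored as text, as the deleted code implies
  })
  await db("t").insert([{ big: "9" }, { big: "10" }])

  // Lexicographic comparison: "9" > "20" as text, so only "10" matches
  const naive = await db("t").whereBetween("big", ["1", "20"])

  // Casting both sides restores numeric ordering, as the deleted helper did
  const cast = await db("t").whereRaw(
    "CAST(big AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)",
    ["1", "20"]
  )

  console.log(naive.length, cast.length) // 1, 2
  await db.destroy()
}
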
@@ -22,6 +22,7 @@ export function getNativeSql(
   query: Knex.SchemaBuilder | Knex.QueryBuilder
 ): SqlQuery | SqlQuery[] {
   let sql = query.toSQL()
 
   if (Array.isArray(sql)) {
     return sql as SqlQuery[]
   }

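Context for the Array.isArray check above: in Knex, QueryBuilder.toSQL() returns a single { sql, bindings } object, while SchemaBuilder.toSQL() returns an array of them, because one schema change can compile to several statements. A small standalone sketch (not Budibase code):

import knex from "knex"

const db = knex({ client: "pg" }) // compiles SQL without connecting

// Single statement: toSQL() yields one { sql, bindings } object
const one = db("users").select("*").where({ id: 1 }).toSQL()
console.log(one.sql) // select * from "users" where "id" = ?

// A schema builder may compile to several statements, hence the array
const many = db.schema
  .createTable("users", t => {
    t.increments("id")
    t.string("name")
  })
  .toSQL()
console.log(Array.isArray(many)) // true
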
@@ -1,9 +1,9 @@
 <script>
   import { Select, Icon } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
+  import { canBeDisplayColumn, utils } from "@budibase/shared-core"
   import { API } from "api"
   import { parseFile } from "./utils"
-  import { canBeDisplayColumn } from "@budibase/shared-core"
 
   export let rows = []
   export let schema = {}

@@ -97,6 +97,8 @@
   let errors = {}
   let selectedColumnTypes = {}
 
+  let rawRows = []
+
   $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
     return validation[column] && canBeDisplayColumn(schema[column].type)
   })

@@ -106,6 +108,8 @@
   }
 
+  $: {
+    rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
 
     // binding in consumer is causing double renders here
     const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
     if (newValidateHash !== validateHash) {

@@ -122,7 +126,7 @@
 
     try {
       const response = await parseFile(e)
-      rows = response.rows
+      rawRows = response.rows
      schema = response.schema
      fileName = response.fileName
      selectedColumnTypes = Object.entries(response.schema).reduce(

@@ -188,7 +192,7 @@
      type="file"
      on:change={handleFile}
    />
-    <label for="file-upload" class:uploaded={rows.length > 0}>
+    <label for="file-upload" class:uploaded={rawRows.length > 0}>
      {#if error}
        Error: {error}
      {:else if fileName}

@@ -198,7 +202,7 @@
      {/if}
    </label>
  </div>
-  {#if rows.length > 0 && !error}
+  {#if rawRows.length > 0 && !error}
    <div class="schema-fields">
      {#each Object.entries(schema) as [name, column]}
        <div class="field">

@@ -78,7 +78,7 @@
       await datasources.fetch()
       await afterSave(table)
     } catch (e) {
-      notifications.error(e)
+      notifications.error(e.message || e)
       // reload in case the table was created
       await tables.fetch()
     }

@@ -9,7 +9,10 @@ import { Constants } from "@budibase/frontend-core"
 
 const { TypeIconMap } = Constants
 
-export { RelationshipType } from "@budibase/types"
+export {
+  RelationshipType,
+  RowExportFormat as ROW_EXPORT_FORMATS,
+} from "@budibase/types"
 
 export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType
 

@@ -307,9 +310,3 @@ export const DatasourceTypes = {
   GRAPH: "Graph",
   API: "API",
 }
-
-export const ROW_EXPORT_FORMATS = {
-  CSV: "csv",
-  JSON: "json",
-  JSON_WITH_SCHEMA: "jsonWithSchema",
-}

@@ -1,206 +0,0 @@
-// @ts-ignore
-import fs from "fs"
-
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-module FetchMock {
-  // @ts-ignore
-  const fetch = jest.requireActual("node-fetch")
-  let failCount = 0
-  let mockSearch = false
-
-  const func = async (url: any, opts: any) => {
-    const { host, pathname } = new URL(url)
-    function json(body: any, status = 200) {
-      return {
-        status,
-        headers: {
-          raw: () => {
-            return { "content-type": ["application/json"] }
-          },
-          get: (name: string) => {
-            if (name.toLowerCase() === "content-type") {
-              return ["application/json"]
-            }
-          },
-        },
-        json: async () => {
-          //x-www-form-encoded body is a URLSearchParams
-          //The call to stringify it leaves it blank
-          if (body?.opts?.body instanceof URLSearchParams) {
-            const paramArray = Array.from(body.opts.body.entries())
-            body.opts.body = paramArray.reduce((acc: any, pair: any) => {
-              acc[pair[0]] = pair[1]
-              return acc
-            }, {})
-          }
-          return body
-        },
-      }
-    }
-
-    if (pathname.includes("/api/global")) {
-      const user = {
-        email: "test@example.com",
-        _id: "us_test@example.com",
-        status: "active",
-        roles: {},
-        builder: {
-          global: false,
-        },
-        admin: {
-          global: false,
-        },
-      }
-      return pathname.endsWith("/users") && opts.method === "GET"
-        ? json([user])
-        : json(user)
-    }
-    // mocked data based on url
-    else if (pathname.includes("api/apps")) {
-      return json({
-        app1: {
-          url: "/app1",
-        },
-      })
-    } else if (host.includes("example.com")) {
-      return json({
-        body: opts.body,
-        url,
-        method: opts.method,
-      })
-    } else if (host.includes("invalid.com")) {
-      return json(
-        {
-          invalid: true,
-        },
-        404
-      )
-    } else if (mockSearch && pathname.includes("_search")) {
-      const body = opts.body
-      const parts = body.split("tableId:")
-      let tableId
-      if (parts && parts[1]) {
-        tableId = parts[1].split('"')[0]
-      }
-      return json({
-        rows: [
-          {
-            doc: {
-              _id: "test",
-              tableId: tableId,
-              query: opts.body,
-            },
-          },
-        ],
-        bookmark: "test",
-      })
-    } else if (host.includes("google.com")) {
-      return json({
-        url,
-        opts,
-        value:
-          '<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="en-GB"></html>',
-      })
-    } else if (
-      url === "https://api.github.com/repos/my-repo/budibase-comment-box"
-    ) {
-      return Promise.resolve({
-        json: () => {
-          return {
-            name: "budibase-comment-box",
-            releases_url:
-              "https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
-          }
-        },
-      })
-    } else if (
-      url === "https://api.github.com/repos/my-repo/budibase-comment-box/latest"
-    ) {
-      return Promise.resolve({
-        json: () => {
-          return {
-            assets: [
-              {
-                content_type: "application/gzip",
-                browser_download_url:
-                  "https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
-              },
-            ],
-          }
-        },
-      })
-    } else if (
-      url ===
-        "https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
-    ) {
-      return Promise.resolve({
-        body: fs.createReadStream(
-          "src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
-        ),
-        ok: true,
-      })
-    } else if (url === "https://www.npmjs.com/package/budibase-component") {
-      return Promise.resolve({
-        status: 200,
-        json: () => {
-          return {
-            name: "budibase-component",
-            "dist-tags": {
-              latest: "1.0.0",
-            },
-            versions: {
-              "1.0.0": {
-                dist: {
-                  tarball:
-                    "https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz",
-                },
-              },
-            },
-          }
-        },
-      })
-    } else if (
-      url ===
-        "https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz"
-    ) {
-      return Promise.resolve({
-        body: fs.createReadStream(
-          "src/api/routes/tests/data/budibase-component-1.0.2.tgz"
-        ),
-        ok: true,
-      })
-    } else if (
-      url === "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz"
-    ) {
-      return Promise.resolve({
-        body: fs.createReadStream(
-          "src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
-        ),
-        ok: true,
-      })
-    } else if (url === "https://www.googleapis.com/oauth2/v4/token") {
-      // any valid response
-      return json({})
-    } else if (host.includes("failonce.com")) {
-      failCount++
-      if (failCount === 1) {
-        return json({ message: "error" }, 500)
-      } else {
-        return json({
-          fails: failCount - 1,
-          url,
-          opts,
-        })
-      }
-    }
-    return fetch(url, opts)
-  }
-
-  func.Headers = fetch.Headers
-
-  func.mockSearch = () => {
-    mockSearch = true
-  }
-
-  module.exports = func
-}

@@ -1,21 +0,0 @@
-const executeMock = jest.fn(() => ({
-  rows: [
-    {
-      a: "string",
-      b: 1,
-    },
-  ],
-}))
-
-const closeMock = jest.fn()
-
-class Connection {
-  execute = executeMock
-  close = closeMock
-}
-
-module.exports = {
-  getConnection: jest.fn(() => new Connection()),
-  executeMock,
-  closeMock,
-}

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2022-latest
+FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
 
 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd

@@ -6,9 +6,9 @@ services:
   db:
     restart: unless-stopped
     platform: linux/x86_64
-    image: container-registry.oracle.com/database/express:18.4.0-xe
+    image: gvenzl/oracle-free:23.2-slim-faststart
     environment:
-      ORACLE_PWD: oracle
+      ORACLE_PWD: Password1
     ports:
       - 1521:1521
       - 5500:5500

@@ -16,4 +16,4 @@ services:
       - oracle_data:/opt/oracle/oradata
 
 volumes:
-  oracle_data:
+  oracle_data:

@@ -1,6 +1,13 @@
 import { npmUpload, urlUpload, githubUpload } from "./uploaders"
 import { plugins as pluginCore } from "@budibase/backend-core"
-import { PluginType, FileType, PluginSource } from "@budibase/types"
+import {
+  PluginType,
+  FileType,
+  PluginSource,
+  Ctx,
+  CreatePluginRequest,
+  CreatePluginResponse,
+} from "@budibase/types"
 import env from "../../../environment"
 import { clientAppSocket } from "../../../websockets"
 import sdk from "../../../sdk"

@@ -29,7 +36,9 @@ export async function upload(ctx: any) {
   }
 }
 
-export async function create(ctx: any) {
+export async function create(
+  ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
+) {
   const { source, url, headers, githubToken } = ctx.request.body
 
   try {

@@ -75,14 +84,9 @@ export async function create(ctx: any) {
     const doc = await pro.plugins.storePlugin(metadata, directory, source)
 
     clientAppSocket?.emit("plugins-update", { name, hash: doc.hash })
-    ctx.body = {
-      message: "Plugin uploaded successfully",
-      plugins: [doc],
-    }
+    ctx.body = { plugin: doc }
   } catch (err: any) {
     const errMsg = err?.message ? err?.message : err
 
     ctx.throw(400, `Failed to import plugin: ${errMsg}`)
   }
 }

@@ -66,9 +66,14 @@ export interface RunConfig {
   includeSqlRelationships?: IncludeRelationship
 }
 
+export type ExternalReadRequestReturnType = {
+  rows: Row[]
+  rawResponseSize: number
+}
+
 export type ExternalRequestReturnType<T extends Operation> =
   T extends Operation.READ
-    ? Row[]
+    ? ExternalReadRequestReturnType
     : T extends Operation.COUNT
     ? number
    : { row: Row; table: Table }

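The conditional type above changes only the READ arm; a self-contained sketch of the same dispatch pattern, with illustrative stand-ins rather than the real Budibase types:

enum Op {
  READ = "READ",
  COUNT = "COUNT",
  CREATE = "CREATE",
}

type Row = Record<string, any>

type ReadResult = { rows: Row[]; rawResponseSize: number }

// Same shape as ExternalRequestReturnType<T>: the generic picks the result type
type ReturnTypeFor<T extends Op> = T extends Op.READ
  ? ReadResult
  : T extends Op.COUNT
  ? number
  : { row: Row }

// Each assignment type-checks against the arm selected by the Operation
const readResult: ReturnTypeFor<Op.READ> = { rows: [], rawResponseSize: 0 }
const countResult: ReturnTypeFor<Op.COUNT> = 42
const writeResult: ReturnTypeFor<Op.CREATE> = { row: { id: 1 } }
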
@@ -741,9 +746,11 @@ export class ExternalRequest<T extends Operation> {
     )
     // if reading it'll just be an array of rows, return whole thing
     if (operation === Operation.READ) {
-      return (
-        Array.isArray(output) ? output : [output]
-      ) as ExternalRequestReturnType<T>
+      const rows = Array.isArray(output) ? output : [output]
+      return {
+        rows,
+        rawResponseSize: responseRows.length,
+      } as ExternalRequestReturnType<T>
     } else {
       return { row: output[0], table } as ExternalRequestReturnType<T>
     }

@@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
   const table: Table = tables[tableName]
-  const row = response[0]
+  const row = response.rows[0]
   // this seems like a lot of work, but basically we need to dig deeper for the enrich
   // for a single row, there is probably a better way to do this with some smart multi-layer joins
   for (let [fieldName, field] of Object.entries(table.schema)) {

@@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
      },
      includeSqlRelationships: IncludeRelationship.INCLUDE,
    })
-    row[fieldName] = await outputProcessing(linkedTable, relatedRows, {
-      squash: true,
-      preserveLinks: true,
-    })
+    row[fieldName] = await outputProcessing<Row[]>(
+      linkedTable,
+      relatedRows.rows,
+      {
+        squash: true,
+        preserveLinks: true,
+      }
+    )
  }
  return row
 }

@@ -17,6 +17,7 @@ import {
   CsvToJsonRequest,
   CsvToJsonResponse,
   FetchTablesResponse,
   FieldType,
   MigrateRequest,
   MigrateResponse,
   SaveTableRequest,

@@ -33,7 +34,11 @@ import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
 import { cloneDeep, isEqual } from "lodash"
-import { helpers } from "@budibase/shared-core"
+import {
+  helpers,
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"
 
 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && isExternalTable(table)) {

@@ -166,7 +171,7 @@ export async function validateNewTableImport(
 
   if (isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
   } else {
     ctx.status = 422
   }

@@ -178,9 +183,21 @@ export async function validateExistingTableImport(
   const { rows, tableId } = ctx.request.body
 
   let schema = null
+
+  let protectedColumnNames
   if (tableId) {
     const table = await sdk.tables.getTable(tableId)
     schema = table.schema
+
+    if (!isExternalTable(table)) {
+      schema._id = {
+        name: "_id",
+        type: FieldType.STRING,
+      }
+      protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
+    } else {
+      protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
+    }
   } else {
     ctx.status = 422
     return

@@ -188,7 +205,7 @@
 
   if (tableId && isRows(rows) && isSchema(schema)) {
     ctx.status = 200
-    ctx.body = validateSchema(rows, schema)
+    ctx.body = validateSchema(rows, schema, protectedColumnNames)
   } else {
     ctx.status = 422
   }

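A hedged sketch of what the added third argument appears to do: protected (system) column names fail schema validation on import, and internal tables may still import _id because it is injected into the schema above. The helper and the exact protected names below are assumptions for illustration, not from the diff:

const PROTECTED_INTERNAL_COLUMNS = ["_id", "_rev", "type", "tableId"] // assumed values

function columnErrors(
  schema: Record<string, unknown>,
  protectedNames: string[]
): Record<string, string> {
  const errors: Record<string, string> = {}
  for (const column of Object.keys(schema)) {
    if (protectedNames.includes(column)) {
      errors[column] = `${column} is a protected column name`
    }
  }
  return errors
}

// Internal tables: _id is filtered out of the protected list first
const internalAllowingId = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
console.log(columnErrors({ _rev: {}, name: {} }, internalAllowingId))
// { _rev: "_rev is a protected column name" }
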
@@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
 import {
   BulkImportRequest,
   BulkImportResponse,
+  FieldType,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,

@@ -69,10 +70,22 @@ export async function bulkImport(
 ) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
-  await handleDataImport(table, {
-    importRows: rows,
-    identifierFields,
-    user: ctx.user,
-  })
+  await handleDataImport(
+    {
+      ...table,
+      schema: {
+        _id: {
+          name: "_id",
+          type: FieldType.STRING,
+        },
+        ...table.schema,
+      },
+    },
+    {
+      importRows: rows,
+      identifierFields,
+      user: ctx.user,
+    }
+  )
   return table
 }

@@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser
+  user?: ContextUser,
+  opts?: { keepCouchId: boolean }
 ) {
-  let originalTable = table
-  let finalData: any = []
+  const originalTable = table
+  const finalData: Row[] = []
+  const keepCouchId = !!opts?.keepCouchId
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
-    row._id = generateRowID(table._id!)
+    row._id = (keepCouchId && row._id) || generateRowID(table._id!)
     row.type = "row"
     row.tableId = table._id
 

@@ -180,7 +182,11 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)
 
-  let finalData: any = await importToRows(data, table, user)
+  const finalData = await importToRows(data, table, user, {
+    keepCouchId: identifierFields.includes("_id"),
+  })
+
+  let newRowCount = finalData.length
 
   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {

@@ -203,12 +209,14 @@
         if (match) {
           finalItem._id = doc._id
           finalItem._rev = doc._rev
+
+          newRowCount--
         }
       })
     })
   }
 
-  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
+  await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
 

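The quota change above stops billing upserts as inserts: every imported row that matches an existing document keeps its _id, picks up the existing _rev, and decrements the new-row count, so quotas.addRows charges only for genuinely new documents. A rough standalone sketch of that counting — the helper and names are hypothetical:

type ImportRow = { _id?: string; _rev?: string; [key: string]: any }

// Start from all rows, subtract one per row matched to an existing document
function countNewRows(
  finalData: ImportRow[],
  existingById: Map<string, { _id: string; _rev: string }>
): number {
  let newRowCount = finalData.length
  for (const item of finalData) {
    const doc = item._id ? existingById.get(item._id) : undefined
    if (doc) {
      item._rev = doc._rev // in-place update, like the bulkDocs upsert above
      newRowCount--
    }
  }
  return newRowCount
}

// 3 imported rows, 1 matches an existing doc -> 2 new rows counted
const existing = new Map([["row_1", { _id: "row_1", _rev: "1-abc" }]])
console.log(
  countNewRows([{ _id: "row_1" }, { name: "a" }, { name: "b" }], existing)
) // 2
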
@@ -1,4 +1,6 @@
-import { Row, TableSchema } from "@budibase/types"
+import { Row, RowExportFormat, TableSchema } from "@budibase/types"
+
+export { RowExportFormat as Format } from "@budibase/types"
 
 function getHeaders(
   headers: string[],

@@ -46,16 +48,6 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
   return JSON.stringify({ schema: newSchema, rows }, undefined, 2)
 }
 
-export enum Format {
-  CSV = "csv",
-  JSON = "json",
-  JSON_WITH_SCHEMA = "jsonWithSchema",
-}
-
-export function isFormat(format: any): format is Format {
-  return Object.values(Format).includes(format as Format)
-}
-
-export function parseCsvExport<T>(value: string) {
-  return JSON.parse(value) as T
+export function isFormat(format: any): format is RowExportFormat {
+  return Object.values(RowExportFormat).includes(format as RowExportFormat)
 }

@@ -20,6 +20,7 @@ import { type App } from "@budibase/types"
 import tk from "timekeeper"
 import * as uuid from "uuid"
 import { structures } from "@budibase/backend-core/tests"
+import nock from "nock"
 
 describe("/applications", () => {
   let config = setup.getConfig()

@@ -35,6 +36,7 @@ describe("/applications", () => {
       throw new Error("Failed to publish app")
     }
     jest.clearAllMocks()
+    nock.cleanAll()
   })
 
   // These need to go first for the app totals to make sense

@@ -324,18 +326,33 @@ describe("/applications", () => {
 
   describe("delete", () => {
     it("should delete published app and dev apps with dev app ID", async () => {
+      const prodAppId = app.appId.replace("_dev", "")
+      nock("http://localhost:10000")
+        .delete(`/api/global/roles/${prodAppId}`)
+        .reply(200, {})
+
       await config.api.application.delete(app.appId)
       expect(events.app.deleted).toHaveBeenCalledTimes(1)
       expect(events.app.unpublished).toHaveBeenCalledTimes(1)
     })
 
     it("should delete published app and dev app with prod app ID", async () => {
-      await config.api.application.delete(app.appId.replace("_dev", ""))
+      const prodAppId = app.appId.replace("_dev", "")
+      nock("http://localhost:10000")
+        .delete(`/api/global/roles/${prodAppId}`)
+        .reply(200, {})
+
+      await config.api.application.delete(prodAppId)
       expect(events.app.deleted).toHaveBeenCalledTimes(1)
       expect(events.app.unpublished).toHaveBeenCalledTimes(1)
     })
 
     it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
+      const prodAppId = app.appId.replace("_dev", "")
+      nock("http://localhost:10000")
+        .delete(`/api/global/roles/${prodAppId}`)
+        .reply(200, {})
+
       await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
         await config.api.application.delete(app.appId)
       })

@@ -19,6 +19,7 @@ import {
 } from "@budibase/types"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
+import nock from "nock"
 
 describe("/datasources", () => {
   const config = setup.getConfig()

@@ -37,6 +38,7 @@ describe("/datasources", () => {
       config: {},
     })
     jest.clearAllMocks()
+    nock.cleanAll()
   })
 
   describe("create", () => {

@@ -71,6 +73,12 @@ describe("/datasources", () => {
 
   describe("dynamic variables", () => {
     it("should invalidate changed or removed variables", async () => {
+      nock("http://www.example.com/")
+        .get("/")
+        .reply(200, [{ value: "test" }])
+        .get("/?test=test")
+        .reply(200, [{ value: 1 }])
+
       let datasource = await config.api.datasource.create({
         type: "datasource",
         name: "Rest",

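These tests swap the old global fetch mock (the node-fetch.ts file deleted above) for per-test nock interceptors scoped to a host. A minimal example of the pattern outside any Budibase helper — the host and payload are taken from the test above:

import nock from "nock"
import fetch from "node-fetch"

async function nockDemo() {
  // Intercept a single GET to the host; nock hooks the http module,
  // which is what node-fetch uses under the hood
  nock("http://www.example.com").get("/").reply(200, [{ value: "test" }])

  const res = await fetch("http://www.example.com/")
  console.log(await res.json()) // [ { value: 'test' } ]

  // Interceptors are consumed once satisfied; clear leftovers between tests
  nock.cleanAll()
}
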
@@ -81,7 +89,7 @@ describe("/datasources", () => {
       const query = await config.api.query.save({
         datasourceId: datasource._id!,
         fields: {
-          path: "www.google.com",
+          path: "www.example.com",
         },
         parameters: [],
         transformer: null,

@@ -15,6 +15,8 @@ jest.mock("@budibase/backend-core", () => {
 
 import { events, objectStore } from "@budibase/backend-core"
 import * as setup from "./utilities"
+import nock from "nock"
+import { PluginSource } from "@budibase/types"
 
 const mockUploadDirectory = objectStore.uploadDirectory as jest.Mock
 const mockDeleteFolder = objectStore.deleteFolder as jest.Mock

@@ -28,6 +30,7 @@ describe("/plugins", () => {
   beforeEach(async () => {
     await config.init()
     jest.clearAllMocks()
+    nock.cleanAll()
   })
 
   const createPlugin = async (status?: number) => {

@@ -112,67 +115,108 @@ describe("/plugins", () => {
     })
 
   describe("github", () => {
-    const createGithubPlugin = async (status?: number, url?: string) => {
-      return await request
-        .post(`/api/plugin`)
-        .send({
-          source: "Github",
-          url,
-          githubToken: "token",
+    beforeEach(async () => {
+      nock("https://api.github.com")
+        .get("/repos/my-repo/budibase-comment-box")
+        .reply(200, {
+          name: "budibase-comment-box",
+          releases_url:
+            "https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
         })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(status ? status : 200)
-    }
-    it("should be able to create a plugin from github", async () => {
-      const res = await createGithubPlugin(
-        200,
-        "https://github.com/my-repo/budibase-comment-box.git"
-      )
-      expect(res.body).toBeDefined()
-      expect(res.body.plugin).toBeDefined()
-      expect(res.body.plugin._id).toEqual("plg_comment-box")
+        .get("/repos/my-repo/budibase-comment-box/latest")
+        .reply(200, {
+          assets: [
+            {
+              content_type: "application/gzip",
+              browser_download_url:
+                "https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
+            },
+          ],
+        })
+
+      nock("https://github.com")
+        .get(
+          "/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
+        )
+        .replyWithFile(
+          200,
+          "src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
+        )
     })
 
+    it("should be able to create a plugin from github", async () => {
+      const { plugin } = await config.api.plugin.create({
+        source: PluginSource.GITHUB,
+        url: "https://github.com/my-repo/budibase-comment-box.git",
+        githubToken: "token",
+      })
+      expect(plugin._id).toEqual("plg_comment-box")
+    })
+
     it("should fail if the url is not from github", async () => {
-      const res = await createGithubPlugin(
-        400,
-        "https://notgithub.com/my-repo/budibase-comment-box"
-      )
-      expect(res.body.message).toEqual(
-        "Failed to import plugin: The plugin origin must be from Github"
-      )
+      await config.api.plugin.create(
+        {
+          source: PluginSource.GITHUB,
+          url: "https://notgithub.com/my-repo/budibase-comment-box",
+          githubToken: "token",
+        },
+        {
+          status: 400,
+          body: {
+            message:
+              "Failed to import plugin: The plugin origin must be from Github",
+          },
+        }
+      )
     })
   })
   describe("npm", () => {
     it("should be able to create a plugin from npm", async () => {
-      const res = await request
-        .post(`/api/plugin`)
-        .send({
-          source: "NPM",
-          url: "https://www.npmjs.com/package/budibase-component",
+      nock("https://registry.npmjs.org")
+        .get("/budibase-component")
+        .reply(200, {
+          name: "budibase-component",
+          "dist-tags": {
+            latest: "1.0.0",
+          },
+          versions: {
+            "1.0.0": {
+              dist: {
+                tarball:
+                  "https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz",
+              },
+            },
+          },
         })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      expect(res.body).toBeDefined()
-      expect(res.body.plugin._id).toEqual("plg_budibase-component")
+        .get("/budibase-component/-/budibase-component-1.0.1.tgz")
+        .replyWithFile(
+          200,
+          "src/api/routes/tests/data/budibase-component-1.0.1.tgz"
+        )
+
+      const { plugin } = await config.api.plugin.create({
+        source: PluginSource.NPM,
+        url: "https://www.npmjs.com/package/budibase-component",
+      })
+      expect(plugin._id).toEqual("plg_budibase-component")
       expect(events.plugin.imported).toHaveBeenCalled()
     })
   })
 
   describe("url", () => {
     it("should be able to create a plugin from a URL", async () => {
-      const res = await request
-        .post(`/api/plugin`)
-        .send({
-          source: "URL",
-          url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
-        })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      expect(res.body).toBeDefined()
-      expect(res.body.plugin._id).toEqual("plg_comment-box")
+      nock("https://www.someurl.com")
+        .get("/comment-box/comment-box-1.0.2.tar.gz")
+        .replyWithFile(
+          200,
+          "src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
+        )
+
+      const { plugin } = await config.api.plugin.create({
+        source: PluginSource.URL,
+        url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
+      })
+      expect(plugin._id).toEqual("plg_comment-box")
      expect(events.plugin.imported).toHaveBeenCalledTimes(1)
    })
  })

@@ -22,9 +22,13 @@ describe.each(
     DatabaseName.MYSQL,
     DatabaseName.SQL_SERVER,
     DatabaseName.MARIADB,
+    DatabaseName.ORACLE,
   ].map(name => [name, getDatasource(name)])
 )("queries (%s)", (dbName, dsProvider) => {
   const config = setup.getConfig()
+  const isOracle = dbName === DatabaseName.ORACLE
+  const isMsSQL = dbName === DatabaseName.SQL_SERVER
+
   let rawDatasource: Datasource
   let datasource: Datasource
   let client: Knex

@@ -97,7 +101,7 @@
     const query = await createQuery({
       name: "New Query",
       fields: {
-        sql: "SELECT * FROM test_table",
+        sql: client("test_table").select("*").toString(),
       },
     })
 

@@ -106,7 +110,7 @@
       name: "New Query",
       parameters: [],
       fields: {
-        sql: "SELECT * FROM test_table",
+        sql: client("test_table").select("*").toString(),
       },
       schema: {},
       queryVerb: "read",

@@ -125,7 +129,7 @@
     it("should be able to update a query", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table",
+          sql: client("test_table").select("*").toString(),
         },
       })
 

@@ -135,7 +139,7 @@
         ...query,
         name: "Updated Query",
         fields: {
-          sql: "SELECT * FROM test_table WHERE id = 1",
+          sql: client("test_table").where({ id: 1 }).toString(),
         },
       })
 

@@ -144,7 +148,7 @@
         name: "Updated Query",
         parameters: [],
         fields: {
-          sql: "SELECT * FROM test_table WHERE id = 1",
+          sql: client("test_table").where({ id: 1 }).toString(),
         },
         schema: {},
         queryVerb: "read",

@@ -161,7 +165,7 @@
     it("should be able to delete a query", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table",
+          sql: client("test_table").select("*").toString(),
         },
       })
 

@@ -180,7 +184,7 @@
     it("should be able to list queries", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table",
+          sql: client("test_table").select("*").toString(),
         },
       })
 

@@ -191,7 +195,7 @@
     it("should strip sensitive fields for prod apps", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table",
+          sql: client("test_table").select("*").toString(),
         },
       })
 

@@ -212,7 +216,7 @@
       datasourceId: datasource._id!,
       queryVerb: "read",
       fields: {
-        sql: `SELECT * FROM test_table WHERE id = 1`,
+        sql: client("test_table").where({ id: 1 }).toString(),
       },
       parameters: [],
       transformer: "return data",

@@ -270,7 +274,7 @@
       name: "Test Query",
       queryVerb: "read",
       fields: {
-        sql: `SELECT * FROM ${tableName}`,
+        sql: client(tableName).select("*").toString(),
       },
       parameters: [],
       transformer: "return data",

@@ -284,11 +288,13 @@
       })
     )
 
+    await client(tableName).delete()
     await client.schema.alterTable(tableName, table => {
       table.string("data").alter()
     })
 
-    await client(tableName).update({
+    await client(tableName).insert({
       name: "test",
       data: "string value",
     })
 

@@ -297,7 +303,7 @@
       name: "Test Query",
       queryVerb: "read",
       fields: {
-        sql: `SELECT * FROM ${tableName}`,
+        sql: client(tableName).select("*").toString(),
       },
       parameters: [],
       transformer: "return data",

@@ -311,6 +317,7 @@
       })
     )
   })
+
   it("should work with static variables", async () => {
     await config.api.datasource.update({
       ...datasource,

@@ -326,7 +333,7 @@
       datasourceId: datasource._id!,
       queryVerb: "read",
       fields: {
-        sql: `SELECT '{{ foo }}' as foo`,
+        sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
       },
       parameters: [],
       transformer: "return data",

@@ -337,16 +344,17 @@
 
     const response = await config.api.query.preview(request)
 
+    let key = isOracle ? "FOO" : "foo"
     expect(response.schema).toEqual({
-      foo: {
-        name: "foo",
+      [key]: {
+        name: key,
         type: "string",
       },
     })
 
     expect(response.rows).toEqual([
       {
-        foo: "bar",
+        [key]: "bar",
       },
     ])
   })

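The FOO/foo key switch above reflects Oracle's identifier folding: unquoted aliases come back upper-cased, while Postgres folds to lower case and MySQL preserves them. A short sketch of the rule, with the SQL shown in comments:

// Oracle:    SELECT 'bar' AS foo FROM dual    -> result keyed "FOO"
// Oracle:    SELECT 'bar' AS "foo" FROM dual  -> quoting preserves "foo"
// Postgres:  SELECT 'bar' AS foo              -> result keyed "foo"
// Hence the per-dialect key used by these assertions:
const isOracle = true // stand-in for the dbName check above
const key = isOracle ? "FOO" : "foo"
console.log(key) // FOO
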
@@ -354,7 +362,7 @@
     it("should work with dynamic variables", async () => {
       const basedOnQuery = await createQuery({
         fields: {
-          sql: "SELECT name FROM test_table WHERE id = 1",
+          sql: client("test_table").select("name").where({ id: 1 }).toString(),
         },
       })
 

@@ -376,7 +384,7 @@
       datasourceId: datasource._id!,
       queryVerb: "read",
       fields: {
-        sql: `SELECT '{{ foo }}' as foo`,
+        sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
       },
       parameters: [],
       transformer: "return data",

@@ -385,16 +393,17 @@
       readable: true,
     })
 
+    let key = isOracle ? "FOO" : "foo"
     expect(preview.schema).toEqual({
-      foo: {
-        name: "foo",
+      [key]: {
+        name: key,
         type: "string",
       },
     })
 
     expect(preview.rows).toEqual([
       {
-        foo: "one",
+        [key]: "one",
       },
     ])
   })

@@ -402,7 +411,7 @@
     it("should handle the dynamic base query being deleted", async () => {
       const basedOnQuery = await createQuery({
         fields: {
-          sql: "SELECT name FROM test_table WHERE id = 1",
+          sql: client("test_table").select("name").where({ id: 1 }).toString(),
         },
       })
 

@@ -426,7 +435,7 @@
       datasourceId: datasource._id!,
       queryVerb: "read",
       fields: {
-        sql: `SELECT '{{ foo }}' as foo`,
+        sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
       },
       parameters: [],
       transformer: "return data",

@@ -435,16 +444,17 @@
       readable: true,
     })
 
+    let key = isOracle ? "FOO" : "foo"
     expect(preview.schema).toEqual({
-      foo: {
-        name: "foo",
+      [key]: {
+        name: key,
         type: "string",
       },
     })
 
     expect(preview.rows).toEqual([
       {
-        foo: datasource.source === SourceName.SQL_SERVER ? "" : null,
+        [key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
       },
     ])
   })

@@ -455,7 +465,7 @@
     it("should be able to insert with bindings", async () => {
       const query = await createQuery({
         fields: {
-          sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
+          sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
         },
         parameters: [
           {

@@ -488,7 +498,7 @@
     it("should not allow handlebars as parameters", async () => {
       const query = await createQuery({
         fields: {
-          sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
+          sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
         },
         parameters: [
           {

@@ -516,46 +526,55 @@
       )
     })
 
-    it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
-      "should coerce %s into a date",
-      async datetimeStr => {
-        const date = new Date(datetimeStr)
-        const query = await createQuery({
-          fields: {
-            sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`,
-          },
-          parameters: [
-            {
-              name: "birthday",
-              default: "",
+    // Oracle doesn't automatically coerce strings into dates.
+    !isOracle &&
+      it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
+        "should coerce %s into a date",
+        async datetimeStr => {
+          const date = new Date(datetimeStr)
+          const query = await createQuery({
+            fields: {
+              sql: client("test_table")
+                .insert({
+                  name: "foo",
+                  birthday: client.raw("{{ birthday }}"),
+                })
+                .toString(),
             },
-          ],
-          queryVerb: "create",
-        })
+            parameters: [
+              {
+                name: "birthday",
+                default: "",
+              },
+            ],
+            queryVerb: "create",
+          })
 
-        const result = await config.api.query.execute(query._id!, {
-          parameters: { birthday: datetimeStr },
-        })
+          const result = await config.api.query.execute(query._id!, {
+            parameters: { birthday: datetimeStr },
+          })
 
-        expect(result.data).toEqual([{ created: true }])
+          expect(result.data).toEqual([{ created: true }])
 
-        const rows = await client("test_table")
-          .where({ birthday: datetimeStr })
-          .select()
-        expect(rows).toHaveLength(1)
+          const rows = await client("test_table")
+            .where({ birthday: datetimeStr })
+            .select()
+          expect(rows).toHaveLength(1)
 
-        for (const row of rows) {
-          expect(new Date(row.birthday)).toEqual(date)
+          for (const row of rows) {
+            expect(new Date(row.birthday)).toEqual(date)
+          }
         }
-      }
-    )
+      )
 
     it.each(["2021,02,05", "202205-1500"])(
       "should not coerce %s as a date",
       async notDateStr => {
         const query = await createQuery({
           fields: {
-            sql: "INSERT INTO test_table (name) VALUES ({{ name }})",
+            sql: client("test_table")
+              .insert({ name: client.raw("{{ name }}") })
+              .toString(),
           },
           parameters: [
             {

@@ -586,7 +605,7 @@
     it("should execute a query", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table ORDER BY id",
+          sql: client("test_table").select("*").orderBy("id").toString(),
         },
       })
 

@@ -629,7 +648,7 @@
     it("should be able to transform a query", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table WHERE id = 1",
+          sql: client("test_table").where({ id: 1 }).select("*").toString(),
         },
         transformer: `
           data[0].id = data[0].id + 1;

@@ -652,7 +671,10 @@
     it("should coerce numeric bindings", async () => {
       const query = await createQuery({
         fields: {
-          sql: "SELECT * FROM test_table WHERE id = {{ id }}",
+          sql: client("test_table")
+            .where({ id: client.raw("{{ id }}") })
+            .select("*")
+            .toString(),
         },
         parameters: [
           {

@@ -683,7 +705,10 @@
     it("should be able to update rows", async () => {
       const query = await createQuery({
         fields: {
-          sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
+          sql: client("test_table")
+            .update({ name: client.raw("{{ name }}") })
+            .where({ id: client.raw("{{ id }}") })
+            .toString(),
         },
         parameters: [
           {

@@ -698,19 +723,13 @@
       queryVerb: "update",
     })
 
-    const result = await config.api.query.execute(query._id!, {
+    await config.api.query.execute(query._id!, {
       parameters: {
         id: "1",
         name: "foo",
       },
     })
 
-    expect(result.data).toEqual([
-      {
-        updated: true,
-      },
-    ])
-
     const rows = await client("test_table").where({ id: 1 }).select()
     expect(rows).toEqual([
       { id: 1, name: "foo", birthday: null, number: null },

@@ -720,35 +739,34 @@
     it("should be able to execute an update that updates no rows", async () => {
       const query = await createQuery({
         fields: {
-          sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
+          sql: client("test_table")
+            .update({ name: "updated" })
+            .where({ id: 100 })
+            .toString(),
         },
         queryVerb: "update",
       })
 
-      const result = await config.api.query.execute(query._id!)
+      await config.api.query.execute(query._id!)
 
-      expect(result.data).toEqual([
-        {
-          updated: true,
-        },
-      ])
+      const rows = await client("test_table").select()
+      for (const row of rows) {
+        expect(row.name).not.toEqual("updated")
+      }
     })
 
     it("should be able to execute a delete that deletes no rows", async () => {
       const query = await createQuery({
         fields: {
-          sql: "DELETE FROM test_table WHERE id = 100",
+          sql: client("test_table").where({ id: 100 }).delete().toString(),
         },
         queryVerb: "delete",
       })
 
-      const result = await config.api.query.execute(query._id!)
+      await config.api.query.execute(query._id!)
 
-      expect(result.data).toEqual([
-        {
-          deleted: true,
-        },
-      ])
+      const rows = await client("test_table").select()
+      expect(rows).toHaveLength(5)
     })
   })
 

@@ -756,7 +774,10 @@
     it("should be able to delete rows", async () => {
       const query = await createQuery({
         fields: {
-          sql: "DELETE FROM test_table WHERE id = {{ id }}",
+          sql: client("test_table")
+            .where({ id: client.raw("{{ id }}") })
+            .delete()
+            .toString(),
         },
         parameters: [
           {

@@ -767,18 +788,12 @@
       queryVerb: "delete",
     })
 
-    const result = await config.api.query.execute(query._id!, {
+    await config.api.query.execute(query._id!, {
       parameters: {
         id: "1",
       },
     })
 
-    expect(result.data).toEqual([
-      {
-        deleted: true,
-      },
-    ])
-
     const rows = await client("test_table").where({ id: 1 }).select()
     expect(rows).toHaveLength(0)
   })

@@ -823,72 +838,63 @@
       })
     })
 
-    it("should be able to execute an update that updates no rows", async () => {
-      const query = await createQuery({
-        fields: {
-          sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
-        },
-        queryVerb: "update",
+    // this parameter really only impacts SQL queries
+    describe("confirm nullDefaultSupport", () => {
+      let queryParams: Partial<Query>
+      beforeAll(async () => {
+        queryParams = {
+          fields: {
+            sql: client("test_table")
+              .insert({
+                name: client.raw("{{ bindingName }}"),
+                number: client.raw("{{ bindingNumber }}"),
+              })
+              .toString(),
+          },
+          parameters: [
+            {
+              name: "bindingName",
+              default: "",
+            },
+            {
+              name: "bindingNumber",
+              default: "",
+            },
+          ],
+          queryVerb: "create",
+        }
       })
 
-      const result = await config.api.query.execute(query._id!, {})
+      it("should error for old queries", async () => {
+        const query = await createQuery(queryParams)
+        await config.api.query.save({ ...query, nullDefaultSupport: false })
+        let error: string | undefined
+        try {
+          await config.api.query.execute(query._id!, {
+            parameters: {
+              bindingName: "testing",
+            },
+          })
+        } catch (err: any) {
+          error = err.message
+        }
+        if (isMsSQL || isOracle) {
+          expect(error).toBeUndefined()
+        } else {
+          expect(error).toBeDefined()
+          expect(error).toContain("integer")
+        }
+      })
 
-      expect(result.data).toEqual([
-        {
-          updated: true,
-        },
-      ])
-    })
-  })
-
-  // this parameter really only impacts SQL queries
-  describe("confirm nullDefaultSupport", () => {
-    const queryParams = {
-      fields: {
-        sql: "INSERT INTO test_table (name, number) VALUES ({{ bindingName }}, {{ bindingNumber }})",
-      },
-      parameters: [
-        {
-          name: "bindingName",
-          default: "",
-        },
-        {
-          name: "bindingNumber",
-          default: "",
-        },
-      ],
-      queryVerb: "create",
-    }
-
-    it("should error for old queries", async () => {
-      const query = await createQuery(queryParams)
-      await config.api.query.save({ ...query, nullDefaultSupport: false })
-      let error: string | undefined
-      try {
-        await config.api.query.execute(query._id!, {
+      it("should not error for new queries", async () => {
+        const query = await createQuery(queryParams)
+        const results = await config.api.query.execute(query._id!, {
          parameters: {
            bindingName: "testing",
          },
        })
-      } catch (err: any) {
-        error = err.message
-      }
-      if (dbName === "mssql") {
-        expect(error).toBeUndefined()
-      } else {
-        expect(error).toBeDefined()
-        expect(error).toContain("integer")
-      }
-    })
-
-    it("should not error for new queries", async () => {
-      const query = await createQuery(queryParams)
-      const results = await config.api.query.execute(query._id!, {
-        parameters: {
-          bindingName: "testing",
-        },
+        expect(results).toEqual({ data: [{ created: true }] })
       })
-      expect(results).toEqual({ data: [{ created: true }] })
-    })
   })
 })

@@ -5,8 +5,6 @@ import { getCachedVariable } from "../../../../threads/utils"
 import nock from "nock"
 import { generator } from "@budibase/backend-core/tests"
 
-jest.unmock("node-fetch")
-
 describe("rest", () => {
   let config: TestConfiguration
   let datasource: Datasource

@@ -33,6 +33,7 @@ import {
   UpdatedRowEventEmitter,
   TableSchema,
   JsonFieldSubType,
+  RowExportFormat,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"

@@ -71,9 +72,11 @@ describe.each([
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
+  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
 ])("/rows (%s)", (providerType, dsProvider) => {
   const isInternal = dsProvider === undefined
   const isMSSQL = providerType === DatabaseName.SQL_SERVER
+  const isOracle = providerType === DatabaseName.ORACLE
   const config = setup.getConfig()
 
   let table: Table

@@ -128,7 +131,8 @@ describe.each([
     primary: ["id"],
     schema: defaultSchema,
   }
-  return merge(req, ...overrides)
+  const merged = merge(req, ...overrides)
+  return merged
 }
 
 function defaultTable(

@@ -1298,9 +1302,117 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
     })
 
-    // Upserting isn't yet supported in MSSQL, see:
+    isInternal &&
+      it("should be able to update existing rows on bulkImport", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+          identifierFields: ["_id"],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1")
+        expect(rows[0].description).toEqual("Row 1 description")
+        expect(rows[1].name).toEqual("Row 2")
+        expect(rows[1].description).toEqual("Row 2 description")
+        expect(rows[2].name).toEqual("Updated existing row")
+        expect(rows[2].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 2)
+      })
+
+    isInternal &&
+      it("should create new rows if not identifierFields are provided", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              name: {
+                type: FieldType.STRING,
+                name: "name",
+              },
+              description: {
+                type: FieldType.STRING,
+                name: "description",
+              },
+            },
+          })
+        )
+
+        const existingRow = await config.api.row.save(table._id!, {
+          name: "Existing row",
+          description: "Existing description",
+        })
+
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [
+            {
+              name: "Row 1",
+              description: "Row 1 description",
+            },
+            { ...existingRow, name: "Updated existing row" },
+            {
+              name: "Row 2",
+              description: "Row 2 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(4)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Existing row")
+        expect(rows[0].description).toEqual("Existing description")
+        expect(rows[1].name).toEqual("Row 1")
+        expect(rows[1].description).toEqual("Row 1 description")
+        expect(rows[2].name).toEqual("Row 2")
+        expect(rows[2].description).toEqual("Row 2 description")
+        expect(rows[3].name).toEqual("Updated existing row")
+        expect(rows[3].description).toEqual("Existing description")
+
+        await assertRowUsage(rowUsage + 3)
+      })
+
+    // Upserting isn't yet supported in MSSQL / Oracle, see:
     // https://github.com/knex/knex/pull/6050
     !isMSSQL &&
+      !isOracle &&
       it("should be able to update existing rows with bulkImport", async () => {
         const table = await config.api.table.save(
           saveTableRequest({

@@ -1370,9 +1482,10 @@ describe.each([
       expect(rows[2].description).toEqual("Row 3 description")
     })
 
-  // Upserting isn't yet supported in MSSQL, see:
+  // Upserting isn't yet supported in MSSQL or Oracle, see:
   // https://github.com/knex/knex/pull/6050
   !isMSSQL &&
+    !isOracle &&
     !isInternal &&
     it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
       const tableName = uuid.v4()

@@ -1439,9 +1552,10 @@ describe.each([
       expect(rows[2].description).toEqual("Row 3 description")
     })
 
-  // Upserting isn't yet supported in MSSQL/Oracle, see:
+  // Upserting isn't yet supported in MSSQL/Oracle, see:
   // https://github.com/knex/knex/pull/6050
   !isMSSQL &&
+    !isOracle &&
     !isInternal &&
     it("should be able to update existing rows an autoID primary key", async () => {
       const tableName = uuid.v4()

@@ -1640,23 +1754,38 @@ describe.each([
       table = await config.api.table.save(defaultTable())
     })
 
-    it("should allow exporting all columns", async () => {
-      const existing = await config.api.row.save(table._id!, {})
-      const res = await config.api.row.exportRows(table._id!, {
-        rows: [existing._id!],
-      })
-      const results = JSON.parse(res)
-      expect(results.length).toEqual(1)
-      const row = results[0]
+    isInternal &&
+      it("should not export internal couchdb fields", async () => {
+        const existing = await config.api.row.save(table._id!, {
+          name: generator.guid(),
+          description: generator.paragraph(),
+        })
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
 
-      // Ensure all original columns were exported
-      expect(Object.keys(row).length).toBeGreaterThanOrEqual(
-        Object.keys(existing).length
-      )
-      Object.keys(existing).forEach(key => {
-        expect(row[key]).toEqual(existing[key])
+        expect(Object.keys(row)).toEqual(["_id", "name", "description"])
       })
-    })
+
+    !isInternal &&
+      it("should allow exporting all columns", async () => {
+        const existing = await config.api.row.save(table._id!, {})
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: [existing._id!],
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(1)
+        const row = results[0]
+
+        // Ensure all original columns were exported
+        expect(Object.keys(row).length).toBe(Object.keys(existing).length)
+        Object.keys(existing).forEach(key => {
+          expect(row[key]).toEqual(existing[key])
+        })
+      })
 
     it("should allow exporting only certain columns", async () => {
       const existing = await config.api.row.save(table._id!, {})

@ -1689,6 +1818,7 @@ describe.each([
|
|||
await config.api.row.exportRows(
|
||||
"1234567",
|
||||
{ rows: [existing._id!] },
|
||||
RowExportFormat.JSON,
|
||||
{ status: 404 }
|
||||
)
|
||||
})
|
||||
|
@@ -1727,6 +1857,202 @@ describe.each([
      const results = JSON.parse(res)
      expect(results.length).toEqual(3)
    })

    describe("should allow exporting all column types", () => {
      let tableId: string
      let expectedRowData: Row

      beforeAll(async () => {
        const fullSchema = setup.structures.fullSchemaWithoutLinks({
          allRequired: true,
        })

        const table = await config.api.table.save(
          saveTableRequest({
            ...setup.structures.basicTable(),
            schema: fullSchema,
            primary: ["string"],
          })
        )
        tableId = table._id!

        const rowValues: Record<keyof typeof fullSchema, any> = {
          [FieldType.STRING]: generator.guid(),
          [FieldType.LONGFORM]: generator.paragraph(),
          [FieldType.OPTIONS]: "option 2",
          [FieldType.ARRAY]: ["options 2", "options 4"],
          [FieldType.NUMBER]: generator.natural(),
          [FieldType.BOOLEAN]: generator.bool(),
          [FieldType.DATETIME]: generator.date().toISOString(),
          [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
          [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
          [FieldType.FORMULA]: undefined, // generated field
          [FieldType.AUTO]: undefined, // generated field
          [FieldType.JSON]: { name: generator.guid() },
          [FieldType.INTERNAL]: generator.guid(),
          [FieldType.BARCODEQR]: generator.guid(),
          [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
          [FieldType.BIGINT]: generator.integer().toString(),
          [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
          [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
        }
        const row = await config.api.row.save(table._id!, rowValues)
        expectedRowData = {
          _id: row._id,
          [FieldType.STRING]: rowValues[FieldType.STRING],
          [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
          [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
          [FieldType.ARRAY]: rowValues[FieldType.ARRAY],
          [FieldType.NUMBER]: rowValues[FieldType.NUMBER],
          [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
          [FieldType.DATETIME]: rowValues[FieldType.DATETIME],
          [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
            (a: any) =>
              expect.objectContaining({
                ...a,
                url: expect.any(String),
              })
          ),
          [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
            ...rowValues[FieldType.ATTACHMENT_SINGLE],
            url: expect.any(String),
          }),
          [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
          [FieldType.AUTO]: expect.any(Number),
          [FieldType.JSON]: rowValues[FieldType.JSON],
          [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
          [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
          [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
            ...rowValues[FieldType.SIGNATURE_SINGLE],
            url: expect.any(String),
          }),
          [FieldType.BIGINT]: rowValues[FieldType.BIGINT],
          [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
            expect.objectContaining
          ),
          [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
            rowValues[FieldType.BB_REFERENCE_SINGLE]
          ),
        }
      })

      it("as csv", async () => {
        const exportedValue = await config.api.row.exportRows(
          tableId,
          { query: {} },
          RowExportFormat.CSV
        )

        const jsonResult = await config.api.table.csvToJson({
          csvString: exportedValue,
        })

        const stringified = (value: string) =>
          JSON.stringify(value).replace(/"/g, "'")

        const matchingObject = (key: string, value: any, isArray: boolean) => {
          const objectMatcher = `{'${key}':'${value[key]}'.*?}`
          if (isArray) {
            return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
          }
          return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
        }

        expect(jsonResult).toEqual([
          {
            ...expectedRowData,
            auto: expect.any(String),
            array: stringified(expectedRowData["array"]),
            attachment: matchingObject(
              "key",
              expectedRowData["attachment"][0].sample,
              true
            ),
            attachment_single: matchingObject(
              "key",
              expectedRowData["attachment_single"].sample,
              false
            ),
            boolean: stringified(expectedRowData["boolean"]),
            json: stringified(expectedRowData["json"]),
            number: stringified(expectedRowData["number"]),
            signature_single: matchingObject(
              "key",
              expectedRowData["signature_single"].sample,
              false
            ),
            bb_reference: matchingObject(
              "_id",
              expectedRowData["bb_reference"][0].sample,
              true
            ),
            bb_reference_single: matchingObject(
              "_id",
              expectedRowData["bb_reference_single"].sample,
              false
            ),
          },
        ])
      })

      it("as json", async () => {
        const exportedValue = await config.api.row.exportRows(
          tableId,
          { query: {} },
          RowExportFormat.JSON
        )

        const json = JSON.parse(exportedValue)
        expect(json).toEqual([expectedRowData])
      })

      it("as json with schema", async () => {
        const exportedValue = await config.api.row.exportRows(
          tableId,
          { query: {} },
          RowExportFormat.JSON_WITH_SCHEMA
        )

        const json = JSON.parse(exportedValue)
        expect(json).toEqual({
          schema: expect.any(Object),
          rows: [expectedRowData],
        })
      })

      it("exported data can be re-imported", async () => {
        // export all
        const exportedValue = await config.api.row.exportRows(
          tableId,
          { query: {} },
          RowExportFormat.CSV
        )

        // import all twice
        const rows = await config.api.table.csvToJson({
          csvString: exportedValue,
        })
        await config.api.row.bulkImport(tableId, {
          rows,
        })
        await config.api.row.bulkImport(tableId, {
          rows,
        })

        const { rows: allRows } = await config.api.row.search(tableId)

        const expectedRow = {
          ...expectedRowData,
          _id: expect.any(String),
          _rev: expect.any(String),
          type: "row",
          tableId: tableId,
          createdAt: new Date().toISOString(),
          updatedAt: new Date().toISOString(),
        }
        expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
      })
    })
  })

  let o2mTable: Table
@@ -48,11 +48,13 @@ describe.each([
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => {
  const isSqs = name === "sqs"
  const isLucene = name === "lucene"
  const isInMemory = name === "in-memory"
  const isInternal = isSqs || isLucene || isInMemory
  const isSql = !isInMemory && !isLucene
  const config = setup.getConfig()

  let envCleanup: (() => void) | undefined

@@ -192,7 +194,8 @@ describe.each([
    // different to the one passed in will cause the assertion to fail. Extra
    // rows returned by the query will also cause the assertion to fail.
    async toMatchExactly(expectedRows: any[]) {
      const { rows: foundRows } = await this.performSearch()
      const response = await this.performSearch()
      const foundRows = response.rows

      // eslint-disable-next-line jest/no-standalone-expect
      expect(foundRows).toHaveLength(expectedRows.length)

@@ -202,13 +205,15 @@ describe.each([
          expect.objectContaining(this.popRow(expectedRow, foundRows))
        )
      )
      return response
    }

    // Asserts that the query returns rows matching exactly the set of rows
    // passed in. The order of the rows is not important, but extra rows will
    // cause the assertion to fail.
    async toContainExactly(expectedRows: any[]) {
      const { rows: foundRows } = await this.performSearch()
      const response = await this.performSearch()
      const foundRows = response.rows

      // eslint-disable-next-line jest/no-standalone-expect
      expect(foundRows).toHaveLength(expectedRows.length)

@@ -220,6 +225,7 @@ describe.each([
          )
        )
      )
      return response
    }
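Returning the response from these assertion helpers (rather than discarding it) lets a test chain a row assertion with follow-up checks on the same search response, such as pagination metadata. A short usage sketch, with an illustrative params object and fixtures from this file:

    // expectSearch and row are the helper/fixture defined in this suite
    const page = await expectSearch({ query: {}, paginate: true, limit: 3 }).toContain([row])
    expect(page.hasNextPage).toBe(true)
    expect(page.bookmark).toBeDefined()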
    // Asserts that the query returns some property values - this cannot be used

@@ -236,6 +242,7 @@ describe.each([
          expect(response[key]).toEqual(properties[key])
        }
      }
      return response
    }

    // Asserts that the query doesn't return a property, e.g. pagination parameters.

@@ -245,13 +252,15 @@ describe.each([
        // eslint-disable-next-line jest/no-standalone-expect
        expect(response[property]).toBeUndefined()
      }
      return response
    }

    // Asserts that the query returns rows matching the set of rows passed in.
    // The order of the rows is not important. Extra rows will not cause the
    // assertion to fail.
    async toContain(expectedRows: any[]) {
      const { rows: foundRows } = await this.performSearch()
      const response = await this.performSearch()
      const foundRows = response.rows

      // eslint-disable-next-line jest/no-standalone-expect
      expect([...foundRows]).toEqual(

@@ -261,6 +270,7 @@ describe.each([
          )
        )
      )
      return response
    }

    async toFindNothing() {

@@ -1585,7 +1595,10 @@ describe.each([
    const MEDIUM = "10000000"

    // Our bigints are int64s in most datasources.
    const BIG = "9223372036854775807"
    let BIG = "9223372036854775807"
    if (name === DatabaseName.ORACLE) {
      // BIG = "9223372036854775808"
    }
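For context, 9223372036854775807 is 2^63 - 1, the largest value a signed 64-bit integer column can hold; the commented-out Oracle value is one past that bound. The limit can be checked with BigInt arithmetic:

    // 2^63 - 1: the int64 maximum that BIG is pinned to above
    const INT64_MAX = 2n ** 63n - 1n // 9223372036854775807n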
    beforeAll(async () => {
      table = await createTable({

@@ -2608,4 +2621,79 @@ describe.each([
      }).toContainExactly([row])
    })
  })

  isSql &&
    describe("pagination edge case with relationships", () => {
      let mainRows: Row[] = []

      beforeAll(async () => {
        const toRelateTable = await createTable({
          name: {
            name: "name",
            type: FieldType.STRING,
          },
        })
        table = await createTable({
          name: {
            name: "name",
            type: FieldType.STRING,
          },
          rel: {
            name: "rel",
            type: FieldType.LINK,
            relationshipType: RelationshipType.MANY_TO_ONE,
            tableId: toRelateTable._id!,
            fieldName: "rel",
          },
        })
        const relatedRows = await Promise.all([
          config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
          config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
          config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
          config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
          config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
          config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
        ])
        mainRows = await Promise.all([
          config.api.row.save(table._id!, {
            name: "product 1",
            rel: relatedRows.map(row => row._id),
          }),
          config.api.row.save(table._id!, {
            name: "product 2",
            rel: [],
          }),
          config.api.row.save(table._id!, {
            name: "product 3",
            rel: [],
          }),
        ])
      })

      it("can still page when the hard limit is hit", async () => {
        await config.withCoreEnv(
          {
            SQL_MAX_ROWS: "6",
          },
          async () => {
            const params: Omit<RowSearchParams, "tableId"> = {
              query: {},
              paginate: true,
              limit: 3,
              sort: "name",
              sortType: SortType.STRING,
              sortOrder: SortOrder.ASCENDING,
            }
            const page1 = await expectSearch(params).toContain([mainRows[0]])
            expect(page1.hasNextPage).toBe(true)
            expect(page1.bookmark).toBeDefined()
            const page2 = await expectSearch({
              ...params,
              bookmark: page1.bookmark,
            }).toContain([mainRows[1], mainRows[2]])
            expect(page2.hasNextPage).toBe(false)
          }
        )
      })
    })
})
@@ -1,4 +1,8 @@
import { context, events } from "@budibase/backend-core"
import { context, docIds, events } from "@budibase/backend-core"
import {
  PROTECTED_EXTERNAL_COLUMNS,
  PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import {
  AutoFieldSubType,
  BBReferenceFieldSubType,

@@ -10,10 +14,13 @@ import {
  Row,
  SaveTableRequest,
  Table,
  TableSchema,
  TableSourceType,
  User,
  ValidateTableImportResponse,
  ViewCalculation,
  ViewV2Enriched,
  RowExportFormat,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"

@@ -33,7 +40,8 @@ describe.each([
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => {
  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/tables (%s)", (name, dsProvider) => {
  const isInternal: boolean = !dsProvider
  let datasource: Datasource | undefined
  let config = setup.getConfig()

@@ -52,15 +60,20 @@ describe.each([
    jest.clearAllMocks()
  })

  it.each([
  let names = [
    "alphanum",
    "with spaces",
    "with-dashes",
    "with_underscores",
    'with "double quotes"',
    "with 'single quotes'",
    "with `backticks`",
  ])("creates a table with name: %s", async name => {
  ]

  if (name !== DatabaseName.ORACLE) {
    names.push(`with "double quotes"`)
    names.push(`with 'single quotes'`)
  }

  it.each(names)("creates a table with name: %s", async name => {
    const table = await config.api.table.save(
      tableForDatasource(datasource, { name })
    )

@@ -118,6 +131,64 @@ describe.each([
      body: basicTable(),
    })
  })

  it("does not persist the row fields that are not on the table schema", async () => {
    const table: SaveTableRequest = basicTable()
    table.rows = [
      {
        name: "test-name",
        description: "test-desc",
        nonValid: "test-non-valid",
      },
    ]

    const res = await config.api.table.save(table)

    const persistedRows = await config.api.row.search(res._id!)

    expect(persistedRows.rows).toEqual([
      expect.objectContaining({
        name: "test-name",
        description: "test-desc",
      }),
    ])
    expect(persistedRows.rows[0].nonValid).toBeUndefined()
  })

  it.each(
    isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
  )(
    "cannot use protected column names (%s) while importing a table",
    async columnName => {
      const table: SaveTableRequest = basicTable()
      table.rows = [
        {
          name: "test-name",
          description: "test-desc",
        },
      ]

      await config.api.table.save(
        {
          ...table,
          schema: {
            ...table.schema,
            [columnName]: {
              name: columnName,
              type: FieldType.STRING,
            },
          },
        },
        {
          status: 400,
          body: {
            message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
            status: 400,
          },
        }
      )
    }
  )
})

describe("update", () => {

@@ -1022,4 +1093,371 @@ describe.each([
    })
  })
})
describe.each([
  [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
  [RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
  const basicSchema: TableSchema = {
    id: {
      type: FieldType.NUMBER,
      name: "id",
    },
    name: {
      type: FieldType.STRING,
      name: "name",
    },
  }

  const importCases: [
    string,
    (rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
  ][] = [
    [
      "validateNewTableImport",
      async (rows: Row[], schema: TableSchema) => {
        const result = await config.api.table.validateNewTableImport({
          rows,
          schema,
        })
        return result
      },
    ],
    [
      "validateExistingTableImport",
      async (rows: Row[], schema: TableSchema) => {
        const table = await config.api.table.save(
          tableForDatasource(datasource, {
            primary: ["id"],
            schema,
          })
        )
        const result = await config.api.table.validateExistingTableImport({
          tableId: table._id,
          rows,
        })
        return result
      },
    ],
  ]

  describe.each(importCases)("%s", (_, testDelegate) => {
    it("validates basic imports", async () => {
      const result = await testDelegate(
        [{ id: generator.natural(), name: generator.first() }],
        basicSchema
      )

      expect(result).toEqual({
        allValid: true,
        errors: {},
        invalidColumns: [],
        schemaValidation: {
          id: true,
          name: true,
        },
      })
    })

    it.each(
      isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
    )("don't allow protected names in schema (%s)", async columnName => {
      const result = await config.api.table.validateNewTableImport({
        rows: [
          {
            id: generator.natural(),
            name: generator.first(),
            [columnName]: generator.word(),
          },
        ],
        schema: {
          ...basicSchema,
        },
      })

      expect(result).toEqual({
        allValid: false,
        errors: {
          [columnName]: `${columnName} is a protected column name`,
        },
        invalidColumns: [],
        schemaValidation: {
          id: true,
          name: true,
          [columnName]: false,
        },
      })
    })

    it("does not allow imports without rows", async () => {
      const result = await testDelegate([], basicSchema)

      expect(result).toEqual({
        allValid: false,
        errors: {},
        invalidColumns: [],
        schemaValidation: {},
      })
    })

    it("validates imports with some empty rows", async () => {
      const result = await testDelegate(
        [{}, { id: generator.natural(), name: generator.first() }, {}],
        basicSchema
      )

      expect(result).toEqual({
        allValid: true,
        errors: {},
        invalidColumns: [],
        schemaValidation: {
          id: true,
          name: true,
        },
      })
    })

    isInternal &&
      it.each(
        isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
      )("don't allow protected names in the rows (%s)", async columnName => {
        const result = await config.api.table.validateNewTableImport({
          rows: [
            {
              id: generator.natural(),
              name: generator.first(),
            },
          ],
          schema: {
            ...basicSchema,
            [columnName]: {
              name: columnName,
              type: FieldType.STRING,
            },
          },
        })

        expect(result).toEqual({
          allValid: false,
          errors: {
            [columnName]: `${columnName} is a protected column name`,
          },
          invalidColumns: [],
          schemaValidation: {
            id: true,
            name: true,
            [columnName]: false,
          },
        })
      })
it("validates required fields and valid rows", async () => {
|
||||
const schema: TableSchema = {
|
||||
...basicSchema,
|
||||
name: {
|
||||
type: FieldType.STRING,
|
||||
name: "name",
|
||||
constraints: { presence: true },
|
||||
},
|
||||
}
|
||||
|
||||
const result = await testDelegate(
|
||||
[
|
||||
{ id: generator.natural(), name: generator.first() },
|
||||
{ id: generator.natural(), name: generator.first() },
|
||||
],
|
||||
schema
|
||||
)
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: true,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
id: true,
|
||||
name: true,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("validates required fields and non-valid rows", async () => {
|
||||
const schema: TableSchema = {
|
||||
...basicSchema,
|
||||
name: {
|
||||
type: FieldType.STRING,
|
||||
name: "name",
|
||||
constraints: { presence: true },
|
||||
},
|
||||
}
|
||||
|
||||
const result = await testDelegate(
|
||||
[
|
||||
{ id: generator.natural(), name: generator.first() },
|
||||
{ id: generator.natural(), name: "" },
|
||||
],
|
||||
schema
|
||||
)
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: false,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
id: true,
|
||||
name: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe("bb references", () => {
|
||||
const getUserValues = () => ({
|
||||
_id: docIds.generateGlobalUserID(),
|
||||
primaryDisplay: generator.first(),
|
||||
email: generator.email({}),
|
||||
})
|
||||
|
||||
it("can validate user column imports", async () => {
|
||||
const schema: TableSchema = {
|
||||
...basicSchema,
|
||||
user: {
|
||||
type: FieldType.BB_REFERENCE_SINGLE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
name: "user",
|
||||
},
|
||||
}
|
||||
|
||||
const result = await testDelegate(
|
||||
[
|
||||
{
|
||||
id: generator.natural(),
|
||||
name: generator.first(),
|
||||
user: userParser(getUserValues()),
|
||||
},
|
||||
],
|
||||
schema
|
||||
)
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: true,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
id: true,
|
||||
name: true,
|
||||
user: true,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("can validate user column imports with invalid data", async () => {
|
||||
const schema: TableSchema = {
|
||||
...basicSchema,
|
||||
user: {
|
||||
type: FieldType.BB_REFERENCE_SINGLE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
name: "user",
|
||||
},
|
||||
}
|
||||
|
||||
const result = await testDelegate(
|
||||
[
|
||||
{
|
||||
id: generator.natural(),
|
||||
name: generator.first(),
|
||||
user: userParser(getUserValues()),
|
||||
},
|
||||
{
|
||||
id: generator.natural(),
|
||||
name: generator.first(),
|
||||
user: "no valid user data",
|
||||
},
|
||||
],
|
||||
schema
|
||||
)
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: false,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
id: true,
|
||||
name: true,
|
||||
user: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("can validate users column imports", async () => {
|
||||
const schema: TableSchema = {
|
||||
...basicSchema,
|
||||
user: {
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
name: "user",
|
||||
externalType: "array",
|
||||
},
|
||||
}
|
||||
|
||||
const result = await testDelegate(
|
||||
[
|
||||
{
|
||||
id: generator.natural(),
|
||||
name: generator.first(),
|
||||
user: userParser([
|
||||
getUserValues(),
|
||||
getUserValues(),
|
||||
getUserValues(),
|
||||
]),
|
||||
},
|
||||
],
|
||||
schema
|
||||
)
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: true,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
id: true,
|
||||
name: true,
|
||||
user: true,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("validateExistingTableImport", () => {
|
||||
isInternal &&
|
||||
it("can reimport _id fields for internal tables", async () => {
|
||||
const table = await config.api.table.save(
|
||||
tableForDatasource(datasource, {
|
||||
primary: ["id"],
|
||||
schema: basicSchema,
|
||||
})
|
||||
)
|
||||
const result = await config.api.table.validateExistingTableImport({
|
||||
tableId: table._id,
|
||||
rows: [
|
||||
{
|
||||
_id: docIds.generateRowID(table._id!),
|
||||
id: generator.natural(),
|
||||
name: generator.first(),
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
allValid: true,
|
||||
errors: {},
|
||||
invalidColumns: [],
|
||||
schemaValidation: {
|
||||
_id: true,
|
||||
id: true,
|
||||
name: true,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -33,6 +33,7 @@ describe.each([
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => {
  const config = setup.getConfig()
  const isSqs = name === "sqs"
@@ -20,17 +20,21 @@ import * as triggerAutomationRun from "./steps/triggerAutomationRun"
import env from "../environment"
import {
  AutomationStepSchema,
  AutomationStepInput,
  PluginType,
  AutomationStep,
  AutomationActionStepId,
  ActionImplementations,
  Hosting,
  ActionImplementation,
} from "@budibase/types"
import sdk from "../sdk"
import { getAutomationPlugin } from "../utilities/fileSystem"

const ACTION_IMPLS: Record<
  string,
  (opts: AutomationStepInput) => Promise<any>
> = {
type ActionImplType = ActionImplementations<
  typeof env.SELF_HOSTED extends "true" ? Hosting.SELF : Hosting.CLOUD
>

const ACTION_IMPLS: ActionImplType = {
  SEND_EMAIL_SMTP: sendSmtpEmail.run,
  CREATE_ROW: createRow.run,
  UPDATE_ROW: updateRow.run,
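The ActionImplType alias above uses a conditional type so that self-hosted-only steps are only part of the implementations record when SELF_HOSTED is set. A self-contained sketch of the pattern (the names below are illustrative, not the real @budibase/types definitions):

    enum Hosting {
      SELF = "self",
      CLOUD = "cloud",
    }
    type CloudSteps = { SEND_EMAIL_SMTP: () => Promise<void> }
    // bash execution is only available when self-hosting
    type SelfSteps = CloudSteps & { EXECUTE_BASH: () => Promise<void> }
    type Impls<H extends Hosting> = H extends Hosting.SELF ? SelfSteps : CloudSteps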
@@ -51,6 +55,7 @@ const ACTION_IMPLS: Record<
  integromat: make.run,
  n8n: n8n.run,
}

export const BUILTIN_ACTION_DEFINITIONS: Record<string, AutomationStepSchema> =
  {
    SEND_EMAIL_SMTP: sendSmtpEmail.definition,

@@ -86,7 +91,7 @@ if (env.SELF_HOSTED) {
  ACTION_IMPLS["EXECUTE_BASH"] = bash.run
  // @ts-ignore
  BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition

  // @ts-ignore
  ACTION_IMPLS.OPENAI = openai.run
  BUILTIN_ACTION_DEFINITIONS.OPENAI = openai.definition
}

@@ -107,10 +112,13 @@ export async function getActionDefinitions() {
}

/* istanbul ignore next */
export async function getAction(stepId: string) {
  if (ACTION_IMPLS[stepId] != null) {
    return ACTION_IMPLS[stepId]
export async function getAction(
  stepId: AutomationActionStepId
): Promise<ActionImplementation<any, any> | undefined> {
  if (ACTION_IMPLS[stepId as keyof ActionImplType] != null) {
    return ACTION_IMPLS[stepId as keyof ActionImplType]
  }

  // must be a plugin
  if (env.SELF_HOSTED) {
    const plugins = await sdk.plugins.fetch(PluginType.AUTOMATION)
@@ -4,8 +4,13 @@ import {
  encodeJSBinding,
} from "@budibase/string-templates"
import sdk from "../sdk"
import { AutomationAttachment, FieldType, Row } from "@budibase/types"
import { LoopInput, LoopStepType } from "../definitions/automations"
import {
  AutomationAttachment,
  FieldType,
  Row,
  LoopStepType,
} from "@budibase/types"
import { LoopInput } from "../definitions/automations"
import { objectStore, context } from "@budibase/backend-core"
import * as uuid from "uuid"
import path from "path"
@@ -7,9 +7,10 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  BashStepInputs,
  BashStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -51,7 +52,13 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs, context }: AutomationStepInput) {
export async function run({
  inputs,
  context,
}: {
  inputs: BashStepInputs
  context: object
}): Promise<BashStepOutputs> {
  if (inputs.code == null) {
    return {
      stdout: "Budibase bash automation failed: Invalid inputs",
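The same refactor is applied to every step below: the untyped AutomationStepInput destructure is replaced by a per-step inputs type and an explicit outputs promise. A generic sketch of the before/after shape (types here are illustrative):

    // before: run({ inputs }: AutomationStepInput) — inputs is effectively untyped
    // after: each step declares exactly what it consumes and produces
    type StepInputs = { code?: string }
    type StepOutputs = { success: boolean; stdout?: string }

    export async function run({
      inputs,
    }: {
      inputs: StepInputs
    }): Promise<StepOutputs> {
      if (inputs.code == null) {
        return { success: false, stdout: "Invalid inputs" }
      }
      return { success: true, stdout: "ok" }
    }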
@@ -1,9 +1,10 @@
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  CollectStepInputs,
  CollectStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -43,7 +44,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: CollectStepInputs
}): Promise<CollectStepOutputs> {
  if (!inputs.collection) {
    return {
      success: false,
@@ -10,10 +10,12 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  CreateRowStepInputs,
  CreateRowStepOutputs,
} from "@budibase/types"
import { EventEmitter } from "events"

export const definition: AutomationStepSchema = {
  name: "Create Row",

@@ -74,7 +76,15 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
  emitter,
}: {
  inputs: CreateRowStepInputs
  appId: string
  emitter: EventEmitter
}): Promise<CreateRowStepOutputs> {
  if (inputs.row == null || inputs.row.tableId == null) {
    return {
      success: false,

@@ -93,7 +103,7 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
  try {
    inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
    inputs.row = await sendAutomationAttachmentsToStorage(
      inputs.row.tableId,
      inputs.row.tableId!,
      inputs.row
    )
    await save(ctx)
@@ -2,9 +2,10 @@ import { wait } from "../../utilities"
import {
  AutomationActionStepId,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  DelayStepInputs,
  DelayStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -39,7 +40,11 @@ export const definition: AutomationStepSchema = {
  type: AutomationStepType.LOGIC,
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: DelayStepInputs
}): Promise<DelayStepOutputs> {
  await wait(inputs.time)
  return {
    success: true,
@@ -1,14 +1,16 @@
import { EventEmitter } from "events"
import { destroy } from "../../api/controllers/row"
import { buildCtx } from "./utils"
import { getError } from "../automationUtils"
import {
  AutomationActionStepId,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  AutomationIOType,
  AutomationCustomIOType,
  AutomationFeature,
  DeleteRowStepInputs,
  DeleteRowStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -59,7 +61,15 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
  emitter,
}: {
  inputs: DeleteRowStepInputs
  appId: string
  emitter: EventEmitter
}): Promise<DeleteRowStepOutputs> {
  if (inputs.id == null) {
    return {
      success: false,
@@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  ExternalAppStepOutputs,
  DiscordStepInputs,
} from "@budibase/types"

const DEFAULT_USERNAME = "Budibase Automate"

@@ -65,7 +66,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: DiscordStepInputs
}): Promise<ExternalAppStepOutputs> {
  let { url, username, avatar_url, content } = inputs
  if (!username) {
    username = DEFAULT_USERNAME
@@ -1,3 +1,4 @@
import { EventEmitter } from "events"
import * as queryController from "../../api/controllers/query"
import { buildCtx } from "./utils"
import * as automationUtils from "../automationUtils"

@@ -6,9 +7,10 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  ExecuteQueryStepInputs,
  ExecuteQueryStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -62,7 +64,15 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
  emitter,
}: {
  inputs: ExecuteQueryStepInputs
  appId: string
  emitter: EventEmitter
}): Promise<ExecuteQueryStepOutputs> {
  if (inputs.query == null) {
    return {
      success: false,
@@ -6,10 +6,12 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  ExecuteScriptStepInputs,
  ExecuteScriptStepOutputs,
} from "@budibase/types"
import { EventEmitter } from "events"

export const definition: AutomationStepSchema = {
  name: "JS Scripting",

@@ -55,7 +57,12 @@ export async function run({
  appId,
  context,
  emitter,
}: AutomationStepInput) {
}: {
  inputs: ExecuteScriptStepInputs
  appId: string
  context: object
  emitter: EventEmitter
}): Promise<ExecuteScriptStepOutputs> {
  if (inputs.code == null) {
    return {
      success: false,
@@ -1,9 +1,10 @@
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  FilterStepInputs,
  FilterStepOutputs,
} from "@budibase/types"

export const FilterConditions = {

@@ -69,7 +70,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: FilterStepInputs
}): Promise<FilterStepOutputs> {
  try {
    let { field, condition, value } = inputs
    // coerce types so that we can use them
@@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  ExternalAppStepOutputs,
  MakeIntegrationInputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -57,7 +58,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: MakeIntegrationInputs
}): Promise<ExternalAppStepOutputs> {
  const { url, body } = inputs

  let payload = {}
@@ -3,11 +3,12 @@ import { getFetchResponse } from "./utils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  HttpMethod,
  ExternalAppStepOutputs,
  n8nStepInputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -67,7 +68,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: n8nStepInputs
}): Promise<ExternalAppStepOutputs> {
  const { url, body, method, authorization } = inputs

  let payload = {}
@@ -3,9 +3,10 @@ import { OpenAI } from "openai"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  OpenAIStepInputs,
  OpenAIStepOutputs,
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"

@@ -59,7 +60,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: OpenAIStepInputs
}): Promise<OpenAIStepOutputs> {
  if (!env.OPENAI_API_KEY) {
    return {
      success: false,
@@ -6,9 +6,10 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  ExternalAppStepOutputs,
  OutgoingWebhookStepInputs,
} from "@budibase/types"

enum RequestType {

@@ -88,7 +89,13 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: OutgoingWebhookStepInputs
}): Promise<
  Omit<ExternalAppStepOutputs, "httpStatus"> | ExternalAppStepOutputs
> {
  let { requestMethod, url, requestBody, headers } = inputs
  if (!url.startsWith("http")) {
    url = `http://${url}`
@@ -8,13 +8,14 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  EmptyFilterOption,
  SearchFilters,
  Table,
  SortOrder,
  QueryRowsStepInputs,
  QueryRowsStepOutputs,
} from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core"

@@ -133,7 +134,13 @@ function hasNullFilters(filters: any[]) {
  )
}

export async function run({ inputs, appId }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
}: {
  inputs: QueryRowsStepInputs
  appId: string
}): Promise<QueryRowsStepOutputs> {
  const { tableId, filters, sortColumn, sortOrder, limit } = inputs
  if (!tableId) {
    return {

@@ -145,7 +152,7 @@ export async function run({ inputs, appId }: AutomationStepInput) {
  }
  const table = await getTable(appId, tableId)
  let sortType = FieldType.STRING
  if (table && table.schema && table.schema[sortColumn] && sortColumn) {
  if (sortColumn && table && table.schema && table.schema[sortColumn]) {
    const fieldType = table.schema[sortColumn].type
    sortType =
      fieldType === FieldType.NUMBER ? FieldType.NUMBER : FieldType.STRING
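Reordering the guard to check sortColumn first narrows it to a defined string before it is used as an index, which reads more naturally and satisfies strict typing. A reduced sketch of the guarded lookup:

    // illustrative reduction of the sort-type resolution above
    type Schema = Record<string, { type: string }>
    function resolveSortType(schema: Schema, sortColumn?: string): "number" | "string" {
      if (sortColumn && schema[sortColumn]) {
        return schema[sortColumn].type === "number" ? "number" : "string"
      }
      return "string"
    }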
@@ -3,11 +3,12 @@ import * as automationUtils from "../automationUtils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  AutomationCustomIOType,
  SmtpEmailStepInputs,
  BaseAutomationOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -97,7 +98,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: SmtpEmailStepInputs
}): Promise<BaseAutomationOutputs> {
  let {
    to,
    from,

@@ -116,17 +121,16 @@ export async function run({ inputs }: AutomationStepInput) {
  if (!contents) {
    contents = "<h1>No content</h1>"
  }
  to = to || undefined

  if (attachments) {
    if (Array.isArray(attachments)) {
      attachments.forEach(item => automationUtils.guardAttachment(item))
    } else {
      automationUtils.guardAttachment(attachments)
    }
  }

  try {
    if (attachments) {
      if (Array.isArray(attachments)) {
        attachments.forEach(item => automationUtils.guardAttachment(item))
      } else {
        automationUtils.guardAttachment(attachments)
      }
    }

    let response = await sendSmtpEmail({
      to,
      from,
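Moving the attachment guard inside the try block means a validation throw is converted into the step's normal failure output instead of escaping the automation runner. A self-contained sketch of the control flow (guardAttachment here is illustrative):

    function guardAttachment(a: { url?: string; filename?: string }) {
      if (!a.url || !a.filename) {
        throw new Error("attachments require both a url and a filename")
      }
    }

    async function runStep(attachments: { url?: string; filename?: string }[]) {
      try {
        // guarding inside the try: a bad attachment fails the step gracefully
        attachments.forEach(guardAttachment)
        return { success: true }
      } catch (err) {
        return { success: false, response: String(err) }
      }
    }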
@@ -1,10 +1,11 @@
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  ServerLogStepInputs,
  ServerLogStepOutputs,
} from "@budibase/types"

/**

@@ -53,7 +54,13 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs, appId }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
}: {
  inputs: ServerLogStepInputs
  appId: string
}): Promise<ServerLogStepOutputs> {
  const message = `App ${appId} - ${inputs.text}`
  console.log(message)
  return {
@@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  ExternalAppStepOutputs,
  SlackStepInputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -54,7 +55,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: SlackStepInputs
}): Promise<ExternalAppStepOutputs> {
  let { url, text } = inputs
  if (!url?.trim()?.length) {
    return {
@@ -1,12 +1,13 @@
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationResults,
  Automation,
  AutomationCustomIOType,
  TriggerAutomationStepInputs,
  TriggerAutomationStepOutputs,
} from "@budibase/types"
import * as triggers from "../triggers"
import { context } from "@budibase/backend-core"

@@ -61,7 +62,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: TriggerAutomationStepInputs
}): Promise<TriggerAutomationStepOutputs> {
  const { automationId, ...fieldParams } = inputs.automation

  if (await features.isTriggerAutomationRunEnabled()) {

@@ -88,5 +93,9 @@ export async function run({ inputs }: AutomationStepInput) {
      value: response.steps,
    }
  }
  } else {
    return {
      success: false,
    }
  }
}
@@ -1,3 +1,4 @@
import { EventEmitter } from "events"
import * as rowController from "../../api/controllers/row"
import * as automationUtils from "../automationUtils"
import { buildCtx } from "./utils"

@@ -6,9 +7,10 @@ import {
  AutomationCustomIOType,
  AutomationFeature,
  AutomationIOType,
  AutomationStepInput,
  AutomationStepSchema,
  AutomationStepType,
  UpdateRowStepInputs,
  UpdateRowStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -70,8 +72,15 @@ export const definition: AutomationStepSchema = {
    },
  },
}

export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
  inputs,
  appId,
  emitter,
}: {
  inputs: UpdateRowStepInputs
  appId: string
  emitter: EventEmitter
}): Promise<UpdateRowStepOutputs> {
  if (inputs.rowId == null || inputs.row == null) {
    return {
      success: false,
@@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  ZapierStepInputs,
  ZapierStepOutputs,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -50,7 +51,11 @@ export const definition: AutomationStepSchema = {
  },
}

export async function run({ inputs }: AutomationStepInput) {
export async function run({
  inputs,
}: {
  inputs: ZapierStepInputs
}): Promise<ZapierStepOutputs> {
  const { url, body } = inputs

  let payload = {}
|
|||
const setup = require("./utilities")
|
||||
const fetch = require("node-fetch")
|
||||
|
||||
jest.mock("node-fetch")
|
||||
|
||||
describe("test the outgoing webhook action", () => {
|
||||
let inputs
|
||||
let config = setup.getConfig()
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
inputs = {
|
||||
username: "joe_bloggs",
|
||||
url: "http://www.example.com",
|
||||
}
|
||||
})
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
it("should be able to run the action", async () => {
|
||||
const res = await setup.runStep(setup.actions.discord.stepId, inputs)
|
||||
expect(res.response.url).toEqual("http://www.example.com")
|
||||
expect(res.response.method).toEqual("post")
|
||||
expect(res.success).toEqual(true)
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,26 @@
import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
import nock from "nock"

describe("test the outgoing webhook action", () => {
  let config = getConfig()

  beforeAll(async () => {
    await config.init()
  })

  afterAll(_afterAll)

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to run the action", async () => {
    nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
    const res = await runStep(actions.discord.stepId, {
      url: "http://www.example.com",
      username: "joe_bloggs",
    })
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })
})
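The rewritten tests replace jest.mock("node-fetch") with nock, which intercepts requests at the HTTP layer, so the assertions exercise the real fetch path. A minimal sketch of the pattern used throughout these files:

    import nock from "nock"

    // register a one-shot interceptor for an exact method, path and body
    const scope = nock("http://www.example.com")
      .post("/", { a: 1 })
      .reply(200, { ok: true })

    // ...run the code under test, then optionally verify the call happened:
    // scope.done() throws if the expected request was never made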
@@ -1,27 +1,20 @@
import { Datasource, Query, SourceName } from "@budibase/types"
import { Datasource, Query } from "@budibase/types"
import * as setup from "./utilities"
import { DatabaseName, getDatasource } from "../../integrations/tests/utils"
import knex, { Knex } from "knex"
import {
  DatabaseName,
  getDatasource,
  knexClient,
} from "../../integrations/tests/utils"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

function getKnexClientName(source: SourceName) {
  switch (source) {
    case SourceName.MYSQL:
      return "mysql2"
    case SourceName.SQL_SERVER:
      return "mssql"
    case SourceName.POSTGRES:
      return "pg"
  }
  throw new Error(`Unsupported source: ${source}`)
}

describe.each(
  [
    DatabaseName.POSTGRES,
    DatabaseName.MYSQL,
    DatabaseName.SQL_SERVER,
    DatabaseName.MARIADB,
    DatabaseName.ORACLE,
  ].map(name => [name, getDatasource(name)])
)("execute query action (%s)", (_, dsProvider) => {
  let tableName: string

@@ -35,10 +28,7 @@ describe.each(

    const ds = await dsProvider
    datasource = await config.api.datasource.create(ds)
    client = knex({
      client: getKnexClientName(ds.source),
      connection: ds.config,
    })
    client = await knexClient(ds)
  })

  beforeEach(async () => {
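The inline getKnexClientName switch is replaced by a shared knexClient helper so every suite builds clients the same way (and gains Oracle support). A hedged sketch of what such a helper can look like; the real implementation lives in ../../integrations/tests/utils and may differ:

    import knex, { Knex } from "knex"
    import { Datasource, SourceName } from "@budibase/types"

    const CLIENTS: Partial<Record<SourceName, string>> = {
      [SourceName.POSTGRES]: "pg",
      [SourceName.MYSQL]: "mysql2",
      [SourceName.SQL_SERVER]: "mssql",
      [SourceName.ORACLE]: "oracledb",
    }

    export async function knexClient(ds: Datasource): Promise<Knex> {
      const client = CLIENTS[ds.source]
      if (!client) {
        throw new Error(`Unsupported source: ${ds.source}`)
      }
      return knex({ client, connection: ds.config })
    }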
@@ -3,9 +3,9 @@ import * as triggers from "../triggers"
import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
import { Table, LoopStepType } from "@budibase/types"
import * as loopUtils from "../loopUtils"
import { LoopInput, LoopStepType } from "../../definitions/automations"
import { LoopInput } from "../../definitions/automations"

describe("Attempt to run a basic loop automation", () => {
  let config = setup.getConfig(),
@@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"

describe("test the outgoing webhook action", () => {
  let config = getConfig()

@@ -9,42 +10,45 @@ describe("test the outgoing webhook action", () => {

  afterAll()

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to run the action", async () => {
    nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
    const res = await runStep(actions.integromat.stepId, {
      value1: "test",
      url: "http://www.example.com",
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("post")
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })

  it("should add the payload props when a JSON string is provided", async () => {
    const payload = `{"value1":1,"value2":2,"value3":3,"value4":4,"value5":5,"name":"Adam","age":9}`
    const payload = {
      value1: 1,
      value2: 2,
      value3: 3,
      value4: 4,
      value5: 5,
      name: "Adam",
      age: 9,
    }

    nock("http://www.example.com/")
      .post("/", payload)
      .reply(200, { foo: "bar" })

    const res = await runStep(actions.integromat.stepId, {
      value1: "ONE",
      value2: "TWO",
      value3: "THREE",
      value4: "FOUR",
      value5: "FIVE",
      body: {
        value: payload,
      },
      body: { value: JSON.stringify(payload) },
      url: "http://www.example.com",
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("post")
    expect(res.response.body).toEqual(payload)
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })

  it("should return a 400 if the JSON payload string is malformed", async () => {
    const payload = `{ value1 1 }`
    const res = await runStep(actions.integromat.stepId, {
      value1: "ONE",
      body: {
        value: payload,
      },
      body: { value: "{ invalid json }" },
      url: "http://www.example.com",
    })
    expect(res.httpStatus).toEqual(400)
@@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"

describe("test the outgoing webhook action", () => {
  let config = getConfig()

@@ -9,31 +10,33 @@ describe("test the outgoing webhook action", () => {

  afterAll()

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to run the action and default to 'get'", async () => {
    nock("http://www.example.com/").get("/").reply(200, { foo: "bar" })
    const res = await runStep(actions.n8n.stepId, {
      url: "http://www.example.com",
      body: {
        test: "IGNORE_ME",
      },
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("GET")
    expect(res.response.body).toBeUndefined()
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })

  it("should add the payload props when a JSON string is provided", async () => {
    const payload = `{ "name": "Adam", "age": 9 }`
    nock("http://www.example.com/")
      .post("/", { name: "Adam", age: 9 })
      .reply(200)
    const res = await runStep(actions.n8n.stepId, {
      body: {
        value: payload,
        value: JSON.stringify({ name: "Adam", age: 9 }),
      },
      method: "POST",
      url: "http://www.example.com",
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("POST")
    expect(res.response.body).toEqual(`{"name":"Adam","age":9}`)
    expect(res.success).toEqual(true)
  })

@@ -53,6 +56,9 @@ describe("test the outgoing webhook action", () => {
  })

  it("should not append the body if the method is HEAD", async () => {
    nock("http://www.example.com/")
      .head("/", body => body === "")
      .reply(200)
    const res = await runStep(actions.n8n.stepId, {
      url: "http://www.example.com",
      method: "HEAD",

@@ -60,9 +66,6 @@ describe("test the outgoing webhook action", () => {
        test: "IGNORE_ME",
      },
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("HEAD")
    expect(res.response.body).toBeUndefined()
    expect(res.success).toEqual(true)
  })
})
@@ -1,41 +0,0 @@
const setup = require("./utilities")
const fetch = require("node-fetch")

jest.mock("node-fetch")

describe("test the outgoing webhook action", () => {
  let inputs
  let config = setup.getConfig()

  beforeAll(async () => {
    await config.init()
    inputs = {
      requestMethod: "POST",
      url: "www.example.com",
      requestBody: JSON.stringify({
        a: 1,
      }),
    }
  })

  afterAll(setup.afterAll)

  it("should be able to run the action", async () => {
    const res = await setup.runStep(
      setup.actions.OUTGOING_WEBHOOK.stepId,
      inputs
    )
    expect(res.success).toEqual(true)
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("POST")
    expect(JSON.parse(res.response.body).a).toEqual(1)
  })

  it("should return an error if something goes wrong in fetch", async () => {
    const res = await setup.runStep(setup.actions.OUTGOING_WEBHOOK.stepId, {
      requestMethod: "GET",
      url: "www.invalid.com",
    })
    expect(res.success).toEqual(false)
  })
})
@@ -0,0 +1,37 @@
import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
import nock from "nock"

describe("test the outgoing webhook action", () => {
  const config = getConfig()

  beforeAll(async () => {
    await config.init()
  })

  afterAll(_afterAll)

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to run the action", async () => {
    nock("http://www.example.com")
      .post("/", { a: 1 })
      .reply(200, { foo: "bar" })
    const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
      requestMethod: "POST",
      url: "www.example.com",
      requestBody: JSON.stringify({ a: 1 }),
    })
    expect(res.success).toEqual(true)
    expect(res.response.foo).toEqual("bar")
  })

  it("should return an error if something goes wrong in fetch", async () => {
    const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
      requestMethod: "GET",
      url: "www.invalid.com",
    })
    expect(res.success).toEqual(false)
  })
})
@@ -0,0 +1,160 @@
import * as automation from "../../index"
import * as setup from "../utilities"
import { Table, LoopStepType } from "@budibase/types"
import { createAutomationBuilder } from "../utilities/AutomationBuilder"

describe("Automation Scenarios", () => {
  let config = setup.getConfig(),
    table: Table

  beforeEach(async () => {
    await automation.init()
    await config.init()
    table = await config.createTable()
    await config.createRow()
  })

  afterAll(setup.afterAll)

  describe("Loop automations", () => {
    it("should run an automation with a trigger, loop, and create row step", async () => {
      const builder = createAutomationBuilder({
        name: "Test Trigger with Loop and Create Row",
      })

      const results = await builder
        .rowSaved(
          { tableId: table._id! },
          {
            row: {
              name: "Trigger Row",
              description: "This row triggers the automation",
            },
            id: "1234",
            revision: "1",
          }
        )
        .loop({
          option: LoopStepType.ARRAY,
          binding: [1, 2, 3],
        })
        .createRow({
          row: {
            name: "Item {{ loop.currentItem }}",
            description: "Created from loop",
            tableId: table._id,
          },
        })
        .run()

      expect(results.trigger).toBeDefined()
      expect(results.steps).toHaveLength(1)

      expect(results.steps[0].outputs.iterations).toBe(3)
      expect(results.steps[0].outputs.items).toHaveLength(3)

      results.steps[0].outputs.items.forEach((output: any, index: number) => {
        expect(output).toMatchObject({
          success: true,
          row: {
            name: `Item ${index + 1}`,
            description: "Created from loop",
          },
        })
      })
    })
  })

  describe("Row Automations", () => {
    it("should trigger an automation which then creates a row", async () => {
      const table = await config.createTable()

      const builder = createAutomationBuilder({
        name: "Test Row Save and Create",
      })

      const results = await builder
        .rowUpdated(
          { tableId: table._id! },
          {
            row: { name: "Test", description: "TEST" },
            id: "1234",
          }
        )
        .createRow({
          row: {
            name: "{{trigger.row.name}}",
            description: "{{trigger.row.description}}",
            tableId: table._id,
          },
        })
        .run()

      expect(results.steps).toHaveLength(1)

      expect(results.steps[0].outputs).toMatchObject({
        success: true,
        row: {
          name: "Test",
          description: "TEST",
        },
      })
    })
  })

  it("should trigger an automation which queries the database", async () => {
    const table = await config.createTable()
    const row = {
      name: "Test Row",
      description: "original description",
      tableId: table._id,
    }
    await config.createRow(row)
    await config.createRow(row)
    const builder = createAutomationBuilder({
      name: "Test Row Save and Create",
    })

    const results = await builder
      .appAction({ fields: {} })
      .queryRows({
        tableId: table._id!,
      })
      .run()

    expect(results.steps).toHaveLength(1)
    expect(results.steps[0].outputs.rows).toHaveLength(2)
  })

  it("should trigger an automation which queries the database then deletes a row", async () => {
    const table = await config.createTable()
    const row = {
      name: "DFN",
      description: "original description",
      tableId: table._id,
    }
    await config.createRow(row)
    await config.createRow(row)
    const builder = createAutomationBuilder({
      name: "Test Row Save and Create",
    })

    const results = await builder
      .appAction({ fields: {} })
      .queryRows({
        tableId: table._id!,
      })
      .deleteRow({
        tableId: table._id!,
        id: "{{ steps.1.rows.0._id }}",
      })
      .queryRows({
        tableId: table._id!,
      })
      .run()

    expect(results.steps).toHaveLength(3)
    expect(results.steps[1].outputs.success).toBeTruthy()
    expect(results.steps[2].outputs.rows).toHaveLength(1)
  })
})
@@ -0,0 +1,174 @@
import { v4 as uuidv4 } from "uuid"
import { testAutomation } from "../../../api/routes/tests/utilities/TestFunctions"
import {
  RowCreatedTriggerInputs,
  RowCreatedTriggerOutputs,
} from "../../triggerInfo/rowSaved"
import {
  RowUpdatedTriggerInputs,
  RowUpdatedTriggerOutputs,
} from "../../triggerInfo/rowUpdated"
import {} from "../../steps/createRow"
import { BUILTIN_ACTION_DEFINITIONS } from "../../actions"
import { TRIGGER_DEFINITIONS } from "../../triggers"
import {
  RowDeletedTriggerInputs,
  RowDeletedTriggerOutputs,
} from "../../triggerInfo/rowDeleted"
import {
  AutomationStepSchema,
  AutomationTriggerSchema,
  LoopStepInputs,
  DeleteRowStepInputs,
  UpdateRowStepInputs,
  CreateRowStepInputs,
  Automation,
  AutomationTrigger,
  AutomationResults,
  SmtpEmailStepInputs,
  ExecuteQueryStepInputs,
  QueryRowsStepInputs,
} from "@budibase/types"
import {} from "../../steps/loop"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import * as setup from "../utilities"
import {
  AppActionTriggerInputs,
  AppActionTriggerOutputs,
} from "../../triggerInfo/app"
import { CronTriggerOutputs } from "../../triggerInfo/cron"

type TriggerOutputs =
  | RowCreatedTriggerOutputs
  | RowUpdatedTriggerOutputs
  | RowDeletedTriggerOutputs
  | AppActionTriggerOutputs
  | CronTriggerOutputs
  | undefined

class AutomationBuilder {
  private automationConfig: Automation = {
    name: "",
    definition: {
      steps: [],
      trigger: {} as AutomationTrigger,
    },
    type: "automation",
    appId: setup.getConfig().getAppId(),
  }
  private config: TestConfiguration = setup.getConfig()
  private triggerOutputs: TriggerOutputs
  private triggerSet: boolean = false

  constructor(options: { name?: string } = {}) {
    this.automationConfig.name = options.name || `Test Automation ${uuidv4()}`
  }

  // TRIGGERS
  rowSaved(inputs: RowCreatedTriggerInputs, outputs: RowCreatedTriggerOutputs) {
    this.triggerOutputs = outputs
    return this.trigger(TRIGGER_DEFINITIONS.ROW_SAVED, inputs, outputs)
  }

  rowUpdated(
    inputs: RowUpdatedTriggerInputs,
    outputs: RowUpdatedTriggerOutputs
  ) {
    this.triggerOutputs = outputs
    return this.trigger(TRIGGER_DEFINITIONS.ROW_UPDATED, inputs, outputs)
  }

  rowDeleted(
    inputs: RowDeletedTriggerInputs,
    outputs: RowDeletedTriggerOutputs
  ) {
    this.triggerOutputs = outputs
    return this.trigger(TRIGGER_DEFINITIONS.ROW_DELETED, inputs, outputs)
  }

  appAction(outputs: AppActionTriggerOutputs, inputs?: AppActionTriggerInputs) {
    this.triggerOutputs = outputs
    return this.trigger(TRIGGER_DEFINITIONS.APP, inputs, outputs)
  }

  // STEPS
  createRow(inputs: CreateRowStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.CREATE_ROW, inputs)
  }

  updateRow(inputs: UpdateRowStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW, inputs)
  }

  deleteRow(inputs: DeleteRowStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.DELETE_ROW, inputs)
  }

  sendSmtpEmail(inputs: SmtpEmailStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP, inputs)
  }

  executeQuery(inputs: ExecuteQueryStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY, inputs)
  }

  queryRows(inputs: QueryRowsStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS, inputs)
  }

  loop(inputs: LoopStepInputs): this {
    return this.step(BUILTIN_ACTION_DEFINITIONS.LOOP, inputs)
  }

  private trigger<T extends { [key: string]: any }>(
    triggerSchema: AutomationTriggerSchema,
    inputs?: T,
    outputs?: TriggerOutputs
  ): this {
    if (this.triggerSet) {
      throw new Error("Only one trigger can be set for an automation.")
    }
    this.automationConfig.definition.trigger = {
      ...triggerSchema,
      inputs: inputs || {},
      id: uuidv4(),
    }
    this.triggerOutputs = outputs
    this.triggerSet = true

    return this
  }

  private step<T extends { [key: string]: any }>(
    stepSchema: AutomationStepSchema,
    inputs: T
  ): this {
    this.automationConfig.definition.steps.push({
      ...stepSchema,
      inputs,
      id: uuidv4(),
    })
    return this
  }

  async run() {
    const automation = await this.config.createAutomation(this.automationConfig)
    const results = await testAutomation(
      this.config,
      automation,
      this.triggerOutputs
    )
    return this.processResults(results)
  }

  private processResults(results: { body: AutomationResults }) {
    results.body.steps.shift()
    return {
      trigger: results.body.trigger,
      steps: results.body.steps,
    }
  }
}

export function createAutomationBuilder(options?: { name?: string }) {
  return new AutomationBuilder(options)
}
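Aside: a minimal sketch of how this builder is meant to be driven (hypothetical test body; "table" is assumed to come from config.createTable() in the test setup, as in the scenario tests above):

it("sketch: runs a one-step automation", async () => {
  const results = await createAutomationBuilder({ name: "Sketch" })
    .appAction({ fields: {} })
    .queryRows({ tableId: table._id! })
    .run()

  // processResults() shifts the trigger off the steps array, so steps[0]
  // is the query step rather than the trigger.
  expect(results.steps[0].outputs.rows).toBeDefined()
})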
@@ -3,6 +3,7 @@ import { context } from "@budibase/backend-core"
import { BUILTIN_ACTION_DEFINITIONS, getAction } from "../../actions"
import emitter from "../../../events/index"
import env from "../../../environment"
import { AutomationActionStepId } from "@budibase/types"

let config: TestConfig

@@ -33,7 +34,7 @@ export async function runInProd(fn: any) {

export async function runStep(stepId: string, inputs: any, stepContext?: any) {
  async function run() {
    let step = await getAction(stepId)
    let step = await getAction(stepId as AutomationActionStepId)
    expect(step).toBeDefined()
    if (!step) {
      throw new Error("No step found")

@@ -41,7 +42,7 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) {
    return step({
      context: stepContext || {},
      inputs,
      appId: config ? config.getAppId() : null,
      appId: config ? config.getAppId() : "",
      // don't really need an API key, mocked out usage quota, not being tested here
      apiKey,
      emitter,
@@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"

describe("test the outgoing webhook action", () => {
  let config = getConfig()

@@ -9,44 +10,45 @@ describe("test the outgoing webhook action", () => {

  afterAll()

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to run the action", async () => {
    nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
    const res = await runStep(actions.zapier.stepId, {
      value1: "test",
      url: "http://www.example.com",
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("post")
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })

  it("should add the payload props when a JSON string is provided", async () => {
    const payload = `{ "value1": 1, "value2": 2, "value3": 3, "value4": 4, "value5": 5, "name": "Adam", "age": 9 }`
    const payload = {
      value1: 1,
      value2: 2,
      value3: 3,
      value4: 4,
      value5: 5,
      name: "Adam",
      age: 9,
    }

    nock("http://www.example.com/")
      .post("/", { ...payload, platform: "budibase" })
      .reply(200, { foo: "bar" })

    const res = await runStep(actions.zapier.stepId, {
      value1: "ONE",
      value2: "TWO",
      value3: "THREE",
      value4: "FOUR",
      value5: "FIVE",
      body: {
        value: payload,
      },
      body: { value: JSON.stringify(payload) },
      url: "http://www.example.com",
    })
    expect(res.response.url).toEqual("http://www.example.com")
    expect(res.response.method).toEqual("post")
    expect(res.response.body).toEqual(
      `{"platform":"budibase","value1":1,"value2":2,"value3":3,"value4":4,"value5":5,"name":"Adam","age":9}`
    )
    expect(res.response.foo).toEqual("bar")
    expect(res.success).toEqual(true)
  })

  it("should return a 400 if the JSON payload string is malformed", async () => {
    const payload = `{ value1 1 }`
    const res = await runStep(actions.zapier.stepId, {
      value1: "ONE",
      body: {
        value: payload,
      },
      body: { value: "{ invalid json }" },
      url: "http://www.example.com",
    })
    expect(res.httpStatus).toEqual(400)
@@ -39,3 +39,11 @@ export const definition: AutomationTriggerSchema = {
  },
  type: AutomationStepType.TRIGGER,
}

export type AppActionTriggerInputs = {
  fields: object
}

export type AppActionTriggerOutputs = {
  fields: object
}
@@ -38,3 +38,11 @@ export const definition: AutomationTriggerSchema = {
  },
  type: AutomationStepType.TRIGGER,
}

export type CronTriggerInputs = {
  cron: string
}

export type CronTriggerOutputs = {
  timestamp: number
}
@@ -5,6 +5,7 @@ import {
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
  Row,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {

@@ -39,3 +40,11 @@ export const definition: AutomationTriggerSchema = {
  },
  type: AutomationStepType.TRIGGER,
}

export type RowDeletedTriggerInputs = {
  tableId: string
}

export type RowDeletedTriggerOutputs = {
  row: Row
}
@@ -5,7 +5,9 @@ import {
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
  Row,
} from "@budibase/types"
import { SearchFilters } from "aws-sdk/clients/elasticbeanstalk"

export const definition: AutomationTriggerSchema = {
  name: "Row Created",

@@ -52,3 +54,14 @@ export const definition: AutomationTriggerSchema = {
  },
  type: AutomationStepType.TRIGGER,
}

export type RowCreatedTriggerInputs = {
  tableId: string
  filters?: SearchFilters
}

export type RowCreatedTriggerOutputs = {
  row: Row
  id: string
  revision: string
}
@@ -5,6 +5,8 @@ import {
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
  Row,
  SearchFilters,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {

@@ -59,3 +61,14 @@ export const definition: AutomationTriggerSchema = {
  },
  type: AutomationStepType.TRIGGER,
}

export type RowUpdatedTriggerInputs = {
  tableId: string
  filters?: SearchFilters
}

export type RowUpdatedTriggerOutputs = {
  row: Row
  id: string
  revision?: string
}
@@ -1,9 +1,9 @@
import { LoopStepType } from "../../definitions/automations"
import {
  typecastForLooping,
  cleanInputValues,
  substituteLoopStep,
} from "../automationUtils"
import { LoopStepType } from "@budibase/types"

describe("automationUtils", () => {
  describe("substituteLoopStep", () => {
@@ -651,10 +651,10 @@ export async function buildDefaultDocs() {
      return new LinkDocument(
        employeeData.table._id!,
        "Jobs",
        employeeData.rows[index]._id,
        employeeData.rows[index]._id!,
        jobData.table._id!,
        "Assigned",
        jobData.rows[index]._id
        jobData.rows[index]._id!
      )
    }
  )
@@ -1,9 +1,8 @@
import { AutomationResults, AutomationStep } from "@budibase/types"

export enum LoopStepType {
  ARRAY = "Array",
  STRING = "String",
}
import {
  AutomationResults,
  AutomationStep,
  LoopStepType,
} from "@budibase/types"

export interface LoopStep extends AutomationStep {
  inputs: LoopInput
@@ -104,6 +104,37 @@ export interface OracleColumnsResponse {
  SEARCH_CONDITION: null | string
}

export enum TriggeringEvent {
  INSERT = "INSERT",
  DELETE = "DELETE",
  UPDATE = "UPDATE",
  LOGON = "LOGON",
  LOGOFF = "LOGOFF",
  STARTUP = "STARTUP",
  SHUTDOWN = "SHUTDOWN",
  SERVERERROR = "SERVERERROR",
  SCHEMA = "SCHEMA",
  ALTER = "ALTER",
  DROP = "DROP",
}

export enum TriggerType {
  BEFORE_EACH_ROW = "BEFORE EACH ROW",
  AFTER_EACH_ROW = "AFTER EACH ROW",
  BEFORE_STATEMENT = "BEFORE STATEMENT",
  AFTER_STATEMENT = "AFTER STATEMENT",
  INSTEAD_OF = "INSTEAD OF",
  COMPOUND = "COMPOUND",
}

export interface OracleTriggersResponse {
  TABLE_NAME: string
  TRIGGER_NAME: string
  TRIGGER_TYPE: TriggerType
  TRIGGERING_EVENT: TriggeringEvent
  TRIGGER_BODY: string
}

/**
 * An oracle constraint
 */
@@ -29,6 +29,7 @@ import { getReadableErrorMessage } from "./base/errorMapping"
import sqlServer from "mssql"
import { sql } from "@budibase/backend-core"
import { ConfidentialClientApplication } from "@azure/msal-node"
import env from "../environment"

import { utils } from "@budibase/shared-core"

@@ -246,6 +247,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
      options: {
        encrypt,
        enableArithAbort: true,
        requestTimeout: env.QUERY_THREAD_TIMEOUT,
      },
    }
    if (encrypt) {
@@ -31,7 +31,14 @@ import oracledb, {
  ExecuteOptions,
  Result,
} from "oracledb"
import { OracleTable, OracleColumn, OracleColumnsResponse } from "./base/types"
import {
  OracleTable,
  OracleColumn,
  OracleColumnsResponse,
  OracleTriggersResponse,
  TriggeringEvent,
  TriggerType,
} from "./base/types"
import { sql } from "@budibase/backend-core"

const Sql = sql.Sql

@@ -98,7 +105,7 @@ const SCHEMA: Integration = {
  },
}

const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]
const UNSUPPORTED_TYPES = ["BLOB", "NCLOB"]

const OracleContraintTypes = {
  PRIMARY: "P",

@@ -111,7 +118,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
  private readonly config: OracleConfig
  private index: number = 1

  private readonly COLUMNS_SQL = `
  private static readonly COLUMNS_SQL = `
    SELECT
      tabs.table_name,
      cols.column_name,

@@ -139,6 +146,19 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      (cons.status = 'ENABLED'
        OR cons.status IS NULL)
  `

  private static readonly TRIGGERS_SQL = `
    SELECT
      table_name,
      trigger_name,
      trigger_type,
      triggering_event,
      trigger_body
    FROM
      all_triggers
    WHERE status = 'ENABLED'
  `

  constructor(config: OracleConfig) {
    super(SqlClient.ORACLE)
    this.config = config

@@ -211,6 +231,75 @@ class OracleIntegration extends Sql implements DatasourcePlus {
    return oracleTables
  }

  private getTriggersFor(
    tableName: string,
    triggersResponse: Result<OracleTriggersResponse>,
    opts?: { event?: TriggeringEvent; type?: TriggerType }
  ): OracleTriggersResponse[] {
    const triggers: OracleTriggersResponse[] = []
    for (const trigger of triggersResponse.rows || []) {
      if (trigger.TABLE_NAME !== tableName) {
        continue
      }
      if (opts?.event && opts.event !== trigger.TRIGGERING_EVENT) {
        continue
      }
      if (opts?.type && opts.type !== trigger.TRIGGER_TYPE) {
        continue
      }
      triggers.push(trigger)
    }
    return triggers
  }

  private markAutoIncrementColumns(
    triggersResponse: Result<OracleTriggersResponse>,
    tables: Record<string, Table>
  ) {
    for (const table of Object.values(tables)) {
      const triggers = this.getTriggersFor(table.name, triggersResponse, {
        type: TriggerType.BEFORE_EACH_ROW,
        event: TriggeringEvent.INSERT,
      })

      // This is the trigger body Knex generates for an auto increment column
      // called "id" on a table called "foo":
      //
      //   declare checking number := 1;
      //   begin if (:new. "id" is null) then while checking >= 1 loop
      //   select
      //     "foo_seq".nextval into :new. "id"
      //   from
      //     dual;
      //   select
      //     count("id") into checking
      //   from
      //     "foo"
      //   where
      //     "id" = :new. "id";
      //   end loop;
      //   end if;
      //   end;
      for (const [columnName, schema] of Object.entries(table.schema)) {
        const autoIncrementTriggers = triggers.filter(
          trigger =>
            // This is a bit heuristic, but I think it's the best we can do with
            // the information we have. We're looking for triggers that run
            // before each row is inserted, and that have a body that contains a
            // call to a function that generates a new value for the column. We
            // also check that the column name is in the trigger body, to make
            // sure we're not picking up triggers that don't affect the column.
            trigger.TRIGGER_BODY.includes(`"${columnName}"`) &&
            trigger.TRIGGER_BODY.includes(`.nextval`)
        )

        if (autoIncrementTriggers.length > 0) {
          schema.autocolumn = true
        }
      }
    }
  }

  private static isSupportedColumn(column: OracleColumn) {
    return !UNSUPPORTED_TYPES.includes(column.type)
  }

@@ -255,7 +344,10 @@ class OracleIntegration extends Sql implements DatasourcePlus {
    entities: Record<string, Table>
  ): Promise<Schema> {
    const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
      sql: this.COLUMNS_SQL,
      sql: OracleIntegration.COLUMNS_SQL,
    })
    const triggersResponse = await this.internalQuery<OracleTriggersResponse>({
      sql: OracleIntegration.TRIGGERS_SQL,
    })
    const oracleTables = this.mapColumns(columnsResponse)

@@ -318,6 +410,8 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      })
    })

    this.markAutoIncrementColumns(triggersResponse, tables)

    let externalTables = finaliseExternalTables(tables, entities)
    let errors = checkExternalTables(externalTables)
    return { tables: externalTables, errors }

@@ -325,7 +419,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {

  async getTableNames() {
    const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
      sql: this.COLUMNS_SQL,
      sql: OracleIntegration.COLUMNS_SQL,
    })
    return (columnsResponse.rows || []).map(row => row.TABLE_NAME)
  }

@@ -360,11 +454,32 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      this.index = 1
      connection = await this.getConnection()

      const options: ExecuteOptions = { autoCommit: true }
      const options: ExecuteOptions = {
        autoCommit: true,
        fetchTypeHandler: function (metaData) {
          if (metaData.dbType === oracledb.CLOB) {
            return { type: oracledb.STRING }
          } else if (
            // When we create a new table in OracleDB from Budibase, bigints get
            // created as NUMBER(20,0). Budibase expects bigints to be returned
            // as strings, which is what we're doing here. However, this is
            // likely to be brittle if we connect to externally created
            // databases that have used different precisions and scales.
            // We should find a way to do better.
            metaData.dbType === oracledb.NUMBER &&
            metaData.precision === 20 &&
            metaData.scale === 0
          ) {
            return { type: oracledb.STRING }
          }
          return undefined
        },
      }
      const bindings: BindParameters = query.bindings || []

      this.log(query.sql, bindings)
      return await connection.execute<T>(query.sql, bindings, options)
      const result = await connection.execute(query.sql, bindings, options)
      return result as Result<T>
    } finally {
      if (connection) {
        try {

@@ -377,7 +492,6 @@ class OracleIntegration extends Sql implements DatasourcePlus {
  }

  private getConnection = async (): Promise<Connection> => {
    //connectString : "(DESCRIPTION =(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))(CONNECT_DATA =(SID= ORCL)))"
    const connectString = `${this.config.host}:${this.config.port || 1521}/${
      this.config.database
    }`

@@ -386,7 +500,10 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      password: this.config.password,
      connectString,
    }
    return oracledb.getConnection(attributes)
    const tz = Intl.DateTimeFormat().resolvedOptions().timeZone
    const connection = await oracledb.getConnection(attributes)
    await connection.execute(`ALTER SESSION SET TIME_ZONE = '${tz}'`)
    return connection
  }

  async create(query: SqlQuery | string): Promise<any[]> {
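Aside: a standalone sketch of the auto-increment heuristic above, applied to a trigger body modelled on the Knex output quoted in the comment (the body text is illustrative, not captured from a real database):

const triggerBody = `declare checking number := 1;
begin if (:new. "id" is null) then while checking >= 1 loop
select "foo_seq".nextval into :new. "id" from dual;
end loop; end if; end;`

// The same two substring checks markAutoIncrementColumns() applies per column.
const looksAutoIncrement = (body: string, column: string) =>
  body.includes(`"${column}"`) && body.includes(".nextval")

looksAutoIncrement(triggerBody, "id") // true: the column name and .nextval both appear
looksAutoIncrement(triggerBody, "name") // false: "name" never appears in the body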
@@ -1,4 +1,5 @@
import {
  BodyType,
  DatasourceFieldType,
  HttpMethod,
  Integration,

@@ -15,7 +16,7 @@ import {
import get from "lodash/get"
import * as https from "https"
import qs from "querystring"
import type { Response } from "node-fetch"
import type { Response, RequestInit } from "node-fetch"
import fetch from "node-fetch"
import { formatBytes } from "../utilities"
import { performance } from "perf_hooks"

@@ -28,15 +29,6 @@ import path from "path"
import { Builder as XmlBuilder } from "xml2js"
import { getAttachmentHeaders } from "./utils/restUtils"

enum BodyType {
  NONE = "none",
  FORM_DATA = "form",
  XML = "xml",
  ENCODED = "encoded",
  JSON = "json",
  TEXT = "text",
}

const coreFields = {
  path: {
    type: DatasourceFieldType.STRING,

@@ -127,7 +119,23 @@ const SCHEMA: Integration = {
  },
}

class RestIntegration implements IntegrationBase {
interface ParsedResponse {
  data: any
  info: {
    code: number
    size: string
    time: string
  }
  extra?: {
    raw: string | undefined
    headers: Record<string, string[] | string>
  }
  pagination?: {
    cursor: any
  }
}

export class RestIntegration implements IntegrationBase {
  private config: RestConfig
  private headers: {
    [key: string]: string

@@ -138,7 +146,10 @@ class RestIntegration implements IntegrationBase {
    this.config = config
  }

  async parseResponse(response: Response, pagination: PaginationConfig | null) {
  async parseResponse(
    response: Response,
    pagination?: PaginationConfig
  ): Promise<ParsedResponse> {
    let data: any[] | string | undefined,
      raw: string | undefined,
      headers: Record<string, string[] | string> = {},

@@ -235,8 +246,8 @@ class RestIntegration implements IntegrationBase {
  getUrl(
    path: string,
    queryString: string,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
    pagination?: PaginationConfig,
    paginationValues?: PaginationValues
  ): string {
    // Add pagination params to query string if required
    if (pagination?.location === "query" && paginationValues) {

@@ -279,10 +290,10 @@ class RestIntegration implements IntegrationBase {
  addBody(
    bodyType: string,
    body: string | any,
    input: any,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
  ) {
    input: RequestInit,
    pagination?: PaginationConfig,
    paginationValues?: PaginationValues
  ): RequestInit {
    if (!input.headers) {
      input.headers = {}
    }

@@ -345,6 +356,7 @@ class RestIntegration implements IntegrationBase {
          string = new XmlBuilder().buildObject(object)
        }
        input.body = string
        // @ts-ignore
        input.headers["Content-Type"] = "application/xml"
        break
      case BodyType.JSON:

@@ -356,13 +368,14 @@ class RestIntegration implements IntegrationBase {
          object[key] = value
        })
        input.body = JSON.stringify(object)
        // @ts-ignore
        input.headers["Content-Type"] = "application/json"
        break
    }
    return input
  }

  getAuthHeaders(authConfigId: string): { [key: string]: any } {
  getAuthHeaders(authConfigId?: string): { [key: string]: any } {
    let headers: any = {}

    if (this.config.authConfigs && authConfigId) {

@@ -398,7 +411,7 @@ class RestIntegration implements IntegrationBase {
      headers = {},
      method = HttpMethod.GET,
      disabledHeaders,
      bodyType,
      bodyType = BodyType.NONE,
      requestBody,
      authConfigId,
      pagination,

@@ -407,7 +420,7 @@ class RestIntegration implements IntegrationBase {
    const authHeaders = this.getAuthHeaders(authConfigId)

    this.headers = {
      ...this.config.defaultHeaders,
      ...(this.config.defaultHeaders || {}),
      ...headers,
      ...authHeaders,
    }

@@ -420,7 +433,7 @@ class RestIntegration implements IntegrationBase {
      }
    }

    let input: any = { method, headers: this.headers }
    let input: RequestInit = { method, headers: this.headers }
    input = this.addBody(
      bodyType,
      requestBody,

@@ -437,7 +450,12 @@ class RestIntegration implements IntegrationBase {

    // Deprecated by rejectUnauthorized
    if (this.config.legacyHttpParser) {
      // NOTE(samwho): it seems like this code doesn't actually work because it requires
      // node-fetch >=3, and we're not on that because upgrading to it produces errors to
      // do with ESM that are above my pay grade.

      // https://github.com/nodejs/node/issues/43798
      // @ts-ignore
      input.extraHttpOptions = { insecureHTTPParser: true }
    }

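Aside: a rough sketch of what the JSON branch of addBody() produces now that input is typed as RequestInit (simplified hypothetical helper, not the integration code itself):

import type { RequestInit } from "node-fetch"

// Serialise the body and set the matching Content-Type header.
function addJsonBody(input: RequestInit, body: Record<string, any>): RequestInit {
  input.body = JSON.stringify(body)
  input.headers = { ...(input.headers || {}), "Content-Type": "application/json" }
  return input
}

const input = addJsonBody({ method: "POST" }, { name: "Adam" })
// input.body === `{"name":"Adam"}` and the Content-Type header is set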
@@ -1,4 +1,5 @@
import type { GoogleSpreadsheetWorksheet } from "google-spreadsheet"
import nock from "nock"

jest.mock("google-auth-library")
const { OAuth2Client } = require("google-auth-library")

@@ -62,6 +63,13 @@ describe("Google Sheets Integration", () => {
    await config.init()

    jest.clearAllMocks()

    nock.cleanAll()
    nock("https://www.googleapis.com/").post("/oauth2/v4/token").reply(200, {
      grant_type: "client_credentials",
      client_id: "your-client-id",
      client_secret: "your-client-secret",
    })
  })

  function createBasicTable(name: string, columns: string[]): Table {
@@ -1,100 +0,0 @@
const oracledb = require("oracledb")

import { default as OracleIntegration } from "../oracle"

jest.mock("oracledb")

class TestConfiguration {
  integration: any

  constructor(config: any = {}) {
    this.integration = new OracleIntegration.integration(config)
  }
}

const options = { autoCommit: true }

describe("Oracle Integration", () => {
  let config: any

  beforeEach(() => {
    jest.clearAllMocks()
    config = new TestConfiguration()
  })

  it("calls the create method with the correct params", async () => {
    const sql = "insert into users (name, age) values ('Joe', 123);"
    await config.integration.create({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the read method with the correct params", async () => {
    const sql = "select * from users;"
    await config.integration.read({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the update method with the correct params", async () => {
    const sql = "update table users set name = 'test';"
    await config.integration.update({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the delete method with the correct params", async () => {
    const sql = "delete from users where name = 'todelete';"
    await config.integration.delete({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  describe("no rows returned", () => {
    beforeEach(() => {
      oracledb.executeMock.mockImplementation(() => ({ rows: [] }))
    })

    it("returns the correct response when the create response has no rows", async () => {
      const sql = "insert into users (name, age) values ('Joe', 123);"
      const response = await config.integration.create({
        sql,
      })
      expect(response).toEqual([{ created: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })

    it("returns the correct response when the update response has no rows", async () => {
      const sql = "update table users set name = 'test';"
      const response = await config.integration.update({
        sql,
      })
      expect(response).toEqual([{ updated: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })

    it("returns the correct response when the delete response has no rows", async () => {
      const sql = "delete from users where name = 'todelete';"
      const response = await config.integration.delete({
        sql,
      })
      expect(response).toEqual([{ deleted: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })
  })
})
File diff suppressed because it is too large
@@ -1,12 +1,16 @@
import {
  FieldType,
  Operation,
  PaginationJson,
  QueryJson,
  SearchFilters,
  SortJson,
  SqlClient,
  Table,
  TableSourceType,
  SqlClient,
} from "@budibase/types"
import { sql } from "@budibase/backend-core"
import { merge } from "lodash"

const Sql = sql.Sql

@@ -25,7 +29,16 @@ const TABLE: Table = {
  primary: ["id"],
}

function endpoint(table: any, operation: any) {
const ORACLE_TABLE: Partial<Table> = {
  schema: {
    name: {
      name: "name",
      type: FieldType.STRING,
    },
  },
}

function endpoint(table: string, operation: Operation) {
  return {
    datasourceId: "Postgres",
    operation: operation,

@@ -39,19 +52,25 @@ function generateReadJson({
  filters,
  sort,
  paginate,
}: any = {}): QueryJson {
  const tableObj = { ...TABLE }
}: {
  table?: Partial<Table>
  fields?: string[]
  filters?: SearchFilters
  sort?: SortJson
  paginate?: PaginationJson
} = {}): QueryJson {
  let tableObj: Table = { ...TABLE }
  if (table) {
    tableObj.name = table
    tableObj = merge(TABLE, table)
  }
  return {
    endpoint: endpoint(table || TABLE_NAME, "READ"),
    endpoint: endpoint(tableObj.name || TABLE_NAME, Operation.READ),
    resource: {
      fields: fields || [],
    },
    filters: filters || {},
    sort: sort || {},
    paginate: paginate || {},
    paginate: paginate || undefined,
    meta: {
      table: tableObj,
    },

@@ -191,7 +210,7 @@ describe("SQL query builder", () => {
    )
    expect(query).toEqual({
      bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
      sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
      sql: `select * from (select * from (select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
    })

    query = new Sql(SqlClient.ORACLE, limit)._query(

@@ -212,6 +231,7 @@ describe("SQL query builder", () => {
  it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => {
    let query = new Sql(SqlClient.ORACLE, limit)._query(
      generateReadJson({
        table: ORACLE_TABLE,
        filters: {
          equal: {
            name: "John",

@@ -222,13 +242,14 @@ describe("SQL query builder", () => {

    expect(query).toEqual({
      bindings: ["John", limit, 5000],
      sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) = :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
      sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
    })
  })

  it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => {
    let query = new Sql(SqlClient.ORACLE, limit)._query(
      generateReadJson({
        table: ORACLE_TABLE,
        filters: {
          notEqual: {
            name: "John",

@@ -239,7 +260,7 @@ describe("SQL query builder", () => {

    expect(query).toEqual({
      bindings: ["John", limit, 5000],
      sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) != :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
      sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
    })
  })
})
@@ -5,6 +5,7 @@ import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import * as oracle from "./oracle"
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import cloneDeep from "lodash/cloneDeep"

@@ -17,6 +18,7 @@ export enum DatabaseName {
  MYSQL = "mysql",
  SQL_SERVER = "mssql",
  MARIADB = "mariadb",
  ORACLE = "oracle",
}

const providers: Record<DatabaseName, DatasourceProvider> = {

@@ -25,6 +27,7 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
  [DatabaseName.ORACLE]: oracle.getDatasource,
}

export function getDatasourceProviders(

@@ -60,6 +63,9 @@ export async function knexClient(ds: Datasource) {
    case SourceName.SQL_SERVER: {
      return mssql.knexClient(ds)
    }
    case SourceName.ORACLE: {
      return oracle.knexClient(ds)
    }
    default: {
      throw new Error(`Unsupported source: ${ds.source}`)
    }
@@ -0,0 +1,78 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"

let ports: Promise<testContainerUtils.Port[]>

export async function getDatasource(): Promise<Datasource> {
  // password needs to conform to Oracle standards
  const password = "password"
  if (!ports) {
    // couldn't build 19.3.0 for X64
    let image = "budibase/oracle-database:23.2-slim-faststart"
    if (process.arch.startsWith("arm")) {
      // there isn't an ARM compatible 23.2 build
      image = "budibase/oracle-database:19.3.0-ee-slim-faststart"
    }

    ports = startContainer(
      new GenericContainer(image)
        .withExposedPorts(1521)
        .withEnvironment({
          ORACLE_PASSWORD: password,
        })
        .withWaitStrategy(Wait.forLogMessage("DATABASE IS READY TO USE!"))
    )
  }

  const port = (await ports).find(x => x.container === 1521)?.host
  if (!port) {
    throw new Error("Oracle port not found")
  }

  const host = "127.0.0.1"
  const user = "SYSTEM"

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.ORACLE,
    plus: true,
    config: { host, port, user, password, database: "FREEPDB1" },
  }

  const newUser = "a" + generator.guid().replaceAll("-", "")
  const client = await knexClient(datasource)
  await client.raw(`CREATE USER ${newUser} IDENTIFIED BY password`)
  await client.raw(
    `GRANT CONNECT, RESOURCE, CREATE VIEW, CREATE SESSION TO ${newUser}`
  )
  await client.raw(`GRANT UNLIMITED TABLESPACE TO ${newUser}`)
  datasource.config!.user = newUser

  return datasource
}

export async function knexClient(ds: Datasource) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.ORACLE) {
    throw new Error("Datasource source is not Oracle")
  }

  const db = ds.config.database || "FREEPDB1"
  const connectString = `${ds.config.host}:${ds.config.port}/${db}`

  const c = knex({
    client: "oracledb",
    connection: {
      connectString,
      user: ds.config.user,
      password: ds.config.password,
    },
  })

  return c
}
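Aside: a sketch of how these helpers are expected to be used from a test (hypothetical; getDatasource() reuses one shared container and provisions a fresh user per call):

async function example() {
  const datasource = await getDatasource()
  const client = await knexClient(datasource)
  await client.raw("SELECT 1 FROM dual") // Oracle requires a FROM clause
  await client.destroy()
}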
@@ -21,7 +21,8 @@ export async function getRow(
        ? IncludeRelationship.INCLUDE
        : IncludeRelationship.EXCLUDE,
  })
  return response ? response[0] : response
  const rows = response?.rows || []
  return rows[0]
}

export async function save(
@@ -8,7 +8,6 @@ import {
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal"
import * as external from "./search/external"
import * as sqs from "./search/sqs"
import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"

@@ -55,9 +54,9 @@ export async function search(
  if (isExternalTable) {
    return external.search(options, table)
  } else if (dbCore.isSqsEnabledForTenant()) {
    return sqs.search(options, table)
    return internal.sqs.search(options, table)
  } else {
    return internal.search(options, table)
    return internal.lucene.search(options, table)
  }
}

@@ -47,7 +47,7 @@ function getPaginationAndLimitParameters(
      limit: limit + 1,
    }
    if (bookmark) {
      paginateObj.offset = limit * bookmark
      paginateObj.offset = bookmark
    }
  } else if (limit) {
    paginateObj = {

@@ -105,37 +105,37 @@ export async function search(
      paginate: paginateObj as PaginationJson,
      includeSqlRelationships: IncludeRelationship.INCLUDE,
    }
    const queries: Promise<Row[] | number>[] = []
    queries.push(handleRequest(Operation.READ, tableId, parameters))
    if (countRows) {
      queries.push(handleRequest(Operation.COUNT, tableId, parameters))
    }
    const responses = await Promise.all(queries)
    let rows = responses[0] as Row[]
    const totalRows =
      responses.length > 1 ? (responses[1] as number) : undefined
    const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
      handleRequest(Operation.READ, tableId, parameters),
      countRows
        ? handleRequest(Operation.COUNT, tableId, parameters)
        : Promise.resolve(undefined),
    ])

    let hasNextPage = false
    // remove the extra row if it's there
    if (paginate && limit && rows.length > limit) {
      rows.pop()
      hasNextPage = true
    }

    if (options.fields) {
      const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
      rows = rows.map((r: any) => pick(r, fields))
    }

    rows = await outputProcessing<Row[]>(table, rows, {
    let processed = await outputProcessing<Row[]>(table, rows, {
      preserveLinks: true,
      squash: true,
    })

    let hasNextPage = false
    // if the raw rows is greater than the limit then we likely need to paginate
    if (paginate && limit && rawResponseSize > limit) {
      hasNextPage = true
      // processed rows has merged relationships down, this might not be more than limit
      if (processed.length > limit) {
        processed.pop()
      }
    }

    if (options.fields) {
      const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
      processed = processed.map((r: any) => pick(r, fields))
    }

    // need wrapper object for bookmarks etc when paginating
    const response: SearchResponse<Row> = { rows, hasNextPage }
    const response: SearchResponse<Row> = { rows: processed, hasNextPage }
    if (hasNextPage && bookmark != null) {
      response.bookmark = bookmark + 1
      response.bookmark = bookmark + processed.length
    }
    if (totalRows != null) {
      response.totalRows = totalRows

@@ -147,7 +147,8 @@ export async function search(
  } catch (err: any) {
    if (err.message && err.message.includes("does not exist")) {
      throw new Error(
        `Table updated externally, please re-fetch - ${err.message}`
        `Table updated externally, please re-fetch - ${err.message}`,
        { cause: err }
      )
    } else {
      throw err
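Aside: the bookmark is now a row offset rather than a page number. A rough illustration of the arithmetic above (values are hypothetical):

// Request with bookmark = 0 and limit = 10: offset 0, reads limit + 1 = 11 raw rows.
// Suppose 10 processed rows survive relationship squashing:
//   response.bookmark = 0 + 10   // bookmark + processed.length
// The next request echoes bookmark = 10, so:
//   paginateObj.offset = 10      // previously this was limit * bookmark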
@@ -255,24 +256,21 @@
}

export async function fetch(tableId: string): Promise<Row[]> {
  const response = await handleRequest<Operation.READ>(
    Operation.READ,
    tableId,
    {
      includeSqlRelationships: IncludeRelationship.INCLUDE,
    }
  )
  const response = await handleRequest(Operation.READ, tableId, {
    includeSqlRelationships: IncludeRelationship.INCLUDE,
  })
  const table = await sdk.tables.getTable(tableId)
  return await outputProcessing<Row[]>(table, response, {
  return await outputProcessing<Row[]>(table, response.rows, {
    preserveLinks: true,
    squash: true,
  })
}

export async function fetchRaw(tableId: string): Promise<Row[]> {
  return await handleRequest<Operation.READ>(Operation.READ, tableId, {
  const response = await handleRequest(Operation.READ, tableId, {
    includeSqlRelationships: IncludeRelationship.INCLUDE,
  })
  return response.rows
}

export async function fetchView(viewName: string) {
@@ -0,0 +1,3 @@
export * as sqs from "./sqs"
export * as lucene from "./lucene"
export * from "./internal"
@ -1,89 +1,30 @@
|
|||
import { context, HTTPError } from "@budibase/backend-core"
|
||||
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
|
||||
import env from "../../../../environment"
|
||||
import { fullSearch, paginatedSearch } from "./utils"
|
||||
import { getRowParams, InternalTables } from "../../../../db/utils"
|
||||
import env from "../../../../../environment"
|
||||
import { getRowParams, InternalTables } from "../../../../../db/utils"
|
||||
import {
|
||||
Database,
|
||||
DocumentType,
|
||||
Row,
|
||||
RowSearchParams,
|
||||
SearchResponse,
|
||||
SortType,
|
||||
Table,
|
||||
User,
|
||||
TableSchema,
|
||||
} from "@budibase/types"
|
||||
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
||||
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
||||
import { outputProcessing } from "../../../../../utilities/rowProcessor"
|
||||
import {
|
||||
csv,
|
||||
Format,
|
||||
json,
|
||||
jsonWithSchema,
|
||||
} from "../../../../api/controllers/view/exporters"
|
||||
import * as inMemoryViews from "../../../../db/inMemoryView"
|
||||
} from "../../../../../api/controllers/view/exporters"
|
||||
import * as inMemoryViews from "../../../../../db/inMemoryView"
|
||||
import {
|
||||
getFromDesignDoc,
|
||||
getFromMemoryDoc,
|
||||
migrateToDesignView,
|
||||
migrateToInMemoryView,
|
||||
} from "../../../../api/controllers/view/utils"
|
||||
import sdk from "../../../../sdk"
|
||||
import { ExportRowsParams, ExportRowsResult } from "./types"
|
||||
import pick from "lodash/pick"
|
||||
import { breakRowIdField } from "../../../../integrations/utils"
|
||||
|
||||
export async function search(
|
||||
options: RowSearchParams,
|
||||
table: Table
|
||||
): Promise<SearchResponse<Row>> {
|
||||
const { tableId } = options
|
||||
|
||||
const { paginate, query } = options
|
||||
|
||||
const params: RowSearchParams = {
|
||||
tableId: options.tableId,
|
||||
sort: options.sort,
|
||||
sortOrder: options.sortOrder,
|
||||
sortType: options.sortType,
|
||||
limit: options.limit,
|
||||
bookmark: options.bookmark,
|
||||
version: options.version,
|
||||
disableEscaping: options.disableEscaping,
|
||||
query: {},
|
||||
}
|
||||
|
||||
if (params.sort && !params.sortType) {
|
||||
const schema = table.schema
|
||||
const sortField = schema[params.sort]
|
||||
params.sortType =
|
||||
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
|
||||
}
|
||||
|
||||
let response
|
||||
if (paginate) {
|
||||
response = await paginatedSearch(query, params)
|
||||
} else {
|
||||
response = await fullSearch(query, params)
|
||||
}
|
||||
|
||||
// Enrich search results with relationships
|
||||
if (response.rows && response.rows.length) {
|
||||
// enrich with global users if from users table
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
|
||||
}
|
||||
|
||||
if (options.fields) {
|
||||
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
|
||||
response.rows = response.rows.map((r: any) => pick(r, fields))
|
||||
}
|
||||
|
||||
response.rows = await outputProcessing(table, response.rows)
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
} from "../../../../../api/controllers/view/utils"
|
||||
import sdk from "../../../../../sdk"
|
||||
import { ExportRowsParams, ExportRowsResult } from "../types"
|
||||
import { breakRowIdField } from "../../../../../integrations/utils"
|
||||
|
||||
export async function exportRows(
|
||||
options: ExportRowsParams
|
||||
|
@@ -122,21 +63,21 @@ export async function exportRows(
    result = await outputProcessing<Row[]>(table, response)
  } else if (query) {
    let searchResponse = await search(
      {
        tableId,
        query,
        sort,
        sortOrder,
      },
      table
    )
    let searchResponse = await sdk.rows.search({
      tableId,
      query,
      sort,
      sortOrder,
    })
    result = searchResponse.rows
  }

  let rows: Row[] = []
  let schema = table.schema
  let headers

  result = trimFields(result, schema)

  // Filter data to only specified columns if required
  if (columns && columns.length) {
    for (let i = 0; i < result.length; i++) {
@@ -299,3 +240,13 @@ async function getView(db: Database, viewName: string) {
  }
  return viewInfo
}

function trimFields(rows: Row[], schema: TableSchema) {
  const allowedFields = ["_id", ...Object.keys(schema)]
  const result = rows.map(row =>
    Object.keys(row)
      .filter(key => allowedFields.includes(key))
      .reduce((acc, key) => ({ ...acc, [key]: row[key] }), {} as Row)
  )
  return result
}

@@ -0,0 +1,66 @@
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { fullSearch, paginatedSearch } from "../utils"
import { InternalTables } from "../../../../../db/utils"
import {
  Row,
  RowSearchParams,
  SearchResponse,
  SortType,
  Table,
  User,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"

export async function search(
  options: RowSearchParams,
  table: Table
): Promise<SearchResponse<Row>> {
  const { tableId } = options

  const { paginate, query } = options

  const params: RowSearchParams = {
    tableId: options.tableId,
    sort: options.sort,
    sortOrder: options.sortOrder,
    sortType: options.sortType,
    limit: options.limit,
    bookmark: options.bookmark,
    version: options.version,
    disableEscaping: options.disableEscaping,
    query: {},
  }

  if (params.sort && !params.sortType) {
    const schema = table.schema
    const sortField = schema[params.sort]
    params.sortType =
      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
  }

  let response
  if (paginate) {
    response = await paginatedSearch(query, params)
  } else {
    response = await fullSearch(query, params)
  }

  // Enrich search results with relationships
  if (response.rows && response.rows.length) {
    // enrich with global users if from users table
    if (tableId === InternalTables.USER_METADATA) {
      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
    }

    if (options.fields) {
      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
      response.rows = response.rows.map((r: any) => pick(r, fields))
    }

    response.rows = await outputProcessing(table, response.rows)
  }

  return response
}
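
The options.fields branch above relies on lodash's pick to whitelist the requested columns while always retaining the protected internal ones. A small illustration (the row and field list are invented; PROTECTED_INTERNAL_COLUMNS is assumed to include keys such as "_id" and "_rev"):

  import pick from "lodash/pick"

  const row = { _id: "ro_1", _rev: "1-abc", name: "Alice", internalOnly: true }
  // requested fields plus the protected internal columns
  const fields = ["name", "_id", "_rev"]
  console.log(pick(row, fields)) // => { _id: "ro_1", _rev: "1-abc", name: "Alice" }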

@@ -18,34 +18,38 @@ import {
import {
  buildInternalRelationships,
  sqlOutputProcessing,
} from "../../../../api/controllers/row/utils"
} from "../../../../../api/controllers/row/utils"
import sdk from "../../../../index"
import {
  decodeNonAscii,
  mapToUserColumn,
  USER_COLUMN_PREFIX,
} from "../../tables/internal/sqs"
import sdk from "../../../index"
} from "../../../tables/internal/sqs"
import {
  context,
  sql,
  SQLITE_DESIGN_DOC_ID,
  SQS_DATASOURCE_INTERNAL,
} from "@budibase/backend-core"
import { generateJunctionTableID } from "../../../../db/utils"
import AliasTables from "../sqlAlias"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import { generateJunctionTableID } from "../../../../../db/utils"
import AliasTables from "../../sqlAlias"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import { processRowCountResponse } from "../utils"
import { processRowCountResponse } from "../../utils"
import {
  updateFilterKeys,
  getRelationshipColumns,
  getTableIDList,
} from "./filters"
import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { isSearchingByRowID } from "./utils"
} from "../filters"
import {
  dataFilters,
  helpers,
  PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace"

const builder = new sql.Sql(SqlClient.SQL_LITE)
const SQLITE_COLUMN_LIMIT = 2000
const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
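
The three regex constants above classify raw SQLite error strings so the search layer can react to schema drift (a column or table that no longer exists, or a duplicate column name). A sketch of how such matching might be applied; the actual handler lives outside this hunk, and the classifier function here is invented:

  const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
  const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
  const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)

  // Hypothetical classifier for SQLite error messages.
  function classifySqliteError(message: string) {
    if (MISSING_COLUMN_REGEX.test(message)) return "missing-column"
    if (MISSING_TABLE_REGX.test(message)) return "missing-table"
    if (DUPLICATE_COLUMN_REGEX.test(message)) return "duplicate-column"
    return "unknown"
  }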

@@ -56,12 +60,14 @@ function buildInternalFieldList(
  opts?: { relationships?: RelationshipsJson[] }
) {
  let fieldList: string[] = []
  const addJunctionFields = (relatedTable: Table, fields: string[]) => {
  const getJunctionFields = (relatedTable: Table, fields: string[]) => {
    const junctionFields: string[] = []
    fields.forEach(field => {
      fieldList.push(
      junctionFields.push(
        `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}`
      )
    })
    return junctionFields
  }
  fieldList = fieldList.concat(
    PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)

@@ -71,18 +77,22 @@ function buildInternalFieldList(
    if (!opts?.relationships && isRelationship) {
      continue
    }
    if (isRelationship) {
    if (!isRelationship) {
      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
    } else {
      const linkCol = col as RelationshipFieldMetadata
      const relatedTable = tables.find(table => table._id === linkCol.tableId)
      // no relationships provided, don't go more than a layer deep
      if (relatedTable) {
        fieldList = fieldList.concat(
          buildInternalFieldList(relatedTable, tables)
        )
        addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
      if (!relatedTable) {
        continue
      }
    } else {
      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
      const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
        getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
      )
      // break out of the loop if we have reached the max number of columns
      if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
        break
      }
      fieldList = fieldList.concat(relatedFields)
    }
  }
  return [...new Set(fieldList)]
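
This refactor is why addJunctionFields became getJunctionFields: returning a relationship's junction columns instead of pushing them directly lets the caller measure the full column cost of a relationship before committing to it, so the overall list can be capped at SQLite's practical column limit. A condensed sketch of the guard; the helper name here is invented:

  const SQLITE_COLUMN_LIMIT = 2000

  // Returns the extended list, or null when the cap would be exceeded
  // (the code in the diff breaks out of its column loop in that case).
  function concatUnderLimit(fieldList: string[], relatedFields: string[]): string[] | null {
    if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
      return null
    }
    return fieldList.concat(relatedFields)
  }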

@@ -167,7 +177,7 @@ function reverseUserColumnMapping(rows: Row[]) {
      if (index !== -1) {
        // cut out the prefix
        const newKey = key.slice(0, index) + key.slice(index + prefixLength)
        const decoded = decodeNonAscii(newKey)
        const decoded = helpers.schema.decodeNonAscii(newKey)
        finalRow[decoded] = row[key]
      } else {
        finalRow[key] = row[key]

@@ -320,25 +330,19 @@ export async function search(
    paginate = true
    request.paginate = {
      limit: params.limit + 1,
      offset: bookmark * params.limit,
      offset: bookmark,
    }
  }

  try {
    const queries: Promise<Row[] | number>[] = []
    queries.push(runSqlQuery(request, allTables, relationships))
    if (options.countRows) {
      // get the total count of rows
      queries.push(
        runSqlQuery(request, allTables, relationships, {
          countTotalRows: true,
        })
      )
    }
    const responses = await Promise.all(queries)
    let rows = responses[0] as Row[]
    const totalRows =
      responses.length > 1 ? (responses[1] as number) : undefined
    const [rows, totalRows] = await Promise.all([
      runSqlQuery(request, allTables, relationships),
      options.countRows
        ? runSqlQuery(request, allTables, relationships, {
            countTotalRows: true,
          })
        : Promise.resolve(undefined),
    ])

    // process from the format of tableId.column to expected format also
    // make sure JSON columns corrected
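
The rewritten block trades a mutable queries array plus positional casts for a fixed-length Promise.all tuple, where the optional count query collapses to Promise.resolve(undefined). TypeScript then infers the types of rows and totalRows without any "as" casts. A minimal self-contained sketch of the pattern, with invented stand-ins for the real SQL runners:

  // Hypothetical stand-ins for the real query functions.
  async function fetchRows(): Promise<string[]> {
    return ["row1", "row2"]
  }
  async function countRows(): Promise<number> {
    return 42
  }

  async function run(withCount: boolean) {
    // Tuple form: rows is string[], total is number | undefined, no casts needed.
    const [rows, total] = await Promise.all([
      fetchRows(),
      withCount ? countRows() : Promise.resolve(undefined),
    ])
    console.log(rows.length, total)
  }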

@@ -350,10 +354,13 @@ export async function search(
    )

    // check for pagination final row
    let nextRow: Row | undefined
    let nextRow: boolean = false
    if (paginate && params.limit && rows.length > params.limit) {
      // remove the extra row that confirmed if there is another row to move to
      nextRow = processed.pop()
      nextRow = true
      if (processed.length > params.limit) {
        processed.pop()
      }
    }

    // get the rows

@@ -377,7 +384,7 @@ export async function search(
    // check for pagination
    if (paginate && nextRow) {
      response.hasNextPage = true
      response.bookmark = bookmark + 1
      response.bookmark = bookmark + processed.length
    }
    if (paginate && !nextRow) {
      response.hasNextPage = false
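
Taken together, the last three hunks change the bookmark from a page index into a row offset: the request now uses offset: bookmark directly, and the next bookmark advances by the number of rows actually returned rather than by 1. A client paging through all rows would therefore look roughly like this; the page-fetching callback is a simplified, invented stand-in for the search call above:

  type PageResponse = { rows: unknown[]; hasNextPage?: boolean; bookmark?: number }

  // Hypothetical driver showing the offset-style bookmark contract.
  async function fetchAll(searchPage: (bookmark: number) => Promise<PageResponse>) {
    const all: unknown[] = []
    let bookmark = 0
    while (true) {
      const page = await searchPage(bookmark)
      all.push(...page.rows)
      if (!page.hasNextPage || page.bookmark === undefined) {
        break
      }
      bookmark = page.bookmark // now an absolute row offset, not a page number
    }
    return all
  }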

@@ -76,7 +76,7 @@ export async function getDatasourceAndQuery(
}

export function cleanExportRows(
  rows: any[],
  rows: Row[],
  schema: TableSchema,
  format: string,
  columns?: string[],

@@ -48,9 +48,7 @@ export async function save(
  }

  // check for case sensitivity - we don't want to allow duplicated columns
  const duplicateColumn = findDuplicateInternalColumns(table, {
    ignoreProtectedColumnNames: !oldTable && !!opts?.isImport,
  })
  const duplicateColumn = findDuplicateInternalColumns(table)
  if (duplicateColumn.length) {
    throw new Error(
      `Column(s) "${duplicateColumn.join(

@@ -13,7 +13,7 @@ import tablesSdk from "../"
import { generateJunctionTableID } from "../../../../db/utils"
import { isEqual } from "lodash"
import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default"
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"

const FieldTypeMap: Record<FieldType, SQLiteType> = {
  [FieldType.BOOLEAN]: SQLiteType.NUMERIC,

@@ -63,29 +63,10 @@ function buildRelationshipDefinitions(

export const USER_COLUMN_PREFIX = "data_"

// SQS does not support non-ASCII characters in column names, so we need to
// replace them with unicode escape sequences.
function encodeNonAscii(str: string): string {
  return str
    .split("")
    .map(char => {
      return char.charCodeAt(0) > 127
        ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
        : char
    })
    .join("")
}

export function decodeNonAscii(str: string): string {
  return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) =>
    String.fromCharCode(parseInt(p1, 16))
  )
}

// utility function to denote that columns in SQLite are mapped to avoid overlap issues
// the overlaps can occur due to case insensitivity and some of the columns which Budibase requires
export function mapToUserColumn(key: string) {
  return `${USER_COLUMN_PREFIX}${encodeNonAscii(key)}`
  return `${USER_COLUMN_PREFIX}${helpers.schema.encodeNonAscii(key)}`
}

// this can generate relationship tables as part of the mapping
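
This hunk removes the local encodeNonAscii/decodeNonAscii pair in favour of the shared helpers.schema versions, but the behaviour shown in the deleted lines is unchanged: characters above code point 127 become literal \uXXXX escape sequences so SQS column names stay ASCII-only. A round-trip illustration using the deleted implementations verbatim:

  function encodeNonAscii(str: string): string {
    return str
      .split("")
      .map(char =>
        char.charCodeAt(0) > 127
          ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
          : char
      )
      .join("")
  }

  function decodeNonAscii(str: string): string {
    return str.replace(/\\u([0-9a-fA-F]{4})/g, (_match, p1) =>
      String.fromCharCode(parseInt(p1, 16))
    )
  }

  const encoded = encodeNonAscii("café")  // => the literal string caf\u00e9
  console.log(`data_${encoded}`)          // what mapToUserColumn would produce
  console.log(decodeNonAscii(encoded))    // => "café"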

@@ -1,6 +1,7 @@
import TestConfiguration from "../../tests/utilities/TestConfiguration"
import { startup } from "../index"
import { users, utils, tenancy } from "@budibase/backend-core"
import nock from "nock"

describe("check BB_ADMIN environment variables", () => {
  const config = new TestConfiguration()

@@ -8,7 +9,17 @@ describe("check BB_ADMIN environment variables", () => {
    await config.init()
  })

  beforeEach(() => {
    nock.cleanAll()
  })

  it("should be able to create a user with the BB_ADMIN environment variables", async () => {
    nock("http://localhost:10000")
      .get("/api/global/configs/checklist")
      .reply(200, {})
      .get("/api/global/self/api_key")
      .reply(200, {})

    const EMAIL = "budibase@budibase.com",
      PASSWORD = "budibase"
    await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {

@@ -1,6 +1,7 @@
import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"
import nock from "nock"

if (!process.env.CI) {
  // set a longer timeout in dev for debugging 100 seconds

@@ -9,6 +10,15 @@ if (!process.env.CI) {
  jest.setTimeout(30 * 1000)
}

nock.disableNetConnect()
nock.enableNetConnect(host => {
  return (
    host.includes("localhost") ||
    host.includes("127.0.0.1") ||
    host.includes("::1")
  )
})

testContainerUtils.setupEnv(env, coreEnv)

afterAll(() => {

@@ -15,6 +15,7 @@ import { RoleAPI } from "./role"
import { TemplateAPI } from "./template"
import { RowActionAPI } from "./rowAction"
import { AutomationAPI } from "./automation"
import { PluginAPI } from "./plugin"

export default class API {
  table: TableAPI

@@ -33,6 +34,7 @@ export default class API {
  templates: TemplateAPI
  rowAction: RowActionAPI
  automation: AutomationAPI
  plugin: PluginAPI

  constructor(config: TestConfiguration) {
    this.table = new TableAPI(config)

@@ -51,5 +53,6 @@ export default class API {
    this.templates = new TemplateAPI(config)
    this.rowAction = new RowActionAPI(config)
    this.automation = new AutomationAPI(config)
    this.plugin = new PluginAPI(config)
  }
}

@@ -1,5 +1,5 @@
import { Expectations, TestAPI } from "./base"
import { Row, View, ViewCalculation } from "@budibase/types"
import { Row, RowExportFormat, View, ViewCalculation } from "@budibase/types"

export class LegacyViewAPI extends TestAPI {
  get = async (

@@ -24,7 +24,7 @@ export class LegacyViewAPI extends TestAPI {

  export = async (
    viewName: string,
    format: "json" | "csv" | "jsonWithSchema",
    format: `${RowExportFormat}`,
    expectations?: Expectations
  ) => {
    const response = await this._requestRaw("get", `/api/views/export`, {
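
The `${RowExportFormat}` annotation is a template-literal type over the enum: callers can keep passing the plain strings the old union accepted, while the parameter type now stays in sync with the enum automatically. A sketch, with enum values inferred from the old union and the RowExportFormat.JSON default used elsewhere in this diff:

  enum RowExportFormat {
    CSV = "csv",
    JSON = "json",
    JSON_WITH_SCHEMA = "jsonWithSchema",
  }

  // `${RowExportFormat}` widens to "csv" | "json" | "jsonWithSchema".
  type Format = `${RowExportFormat}`

  const a: Format = "csv"                // plain string still accepted
  const b: Format = RowExportFormat.JSON // enum member also accepted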

@@ -0,0 +1,11 @@
import { Expectations, TestAPI } from "./base"
import { CreatePluginRequest, CreatePluginResponse } from "@budibase/types"

export class PluginAPI extends TestAPI {
  create = async (body: CreatePluginRequest, expectations?: Expectations) => {
    return await this._post<CreatePluginResponse>(`/api/plugin`, {
      body,
      expectations,
    })
  }
}

@@ -11,6 +11,7 @@ import {
  DeleteRows,
  DeleteRow,
  PaginatedSearchRowResponse,
  RowExportFormat,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
  exportRows = async (
    tableId: string,
    body: ExportRowsRequest,
    format: RowExportFormat = RowExportFormat.JSON,
    expectations?: Expectations
  ) => {
    const response = await this._requestRaw(

@@ -112,7 +114,7 @@ export class RowAPI extends TestAPI {
      `/api/${tableId}/rows/exportRows`,
      {
        body,
        query: { format: "json" },
        query: { format },
        expectations,
      }
    )

@@ -1,11 +1,16 @@
import {
  BulkImportRequest,
  BulkImportResponse,
  CsvToJsonRequest,
  CsvToJsonResponse,
  MigrateRequest,
  MigrateResponse,
  SaveTableRequest,
  SaveTableResponse,
  Table,
  ValidateNewTableImportRequest,
  ValidateTableImportRequest,
  ValidateTableImportResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -61,7 +66,43 @@ export class TableAPI extends TestAPI {
    revId: string,
    expectations?: Expectations
  ): Promise<void> => {
    return await this._delete<void>(`/api/tables/${tableId}/${revId}`, {
    return await this._delete(`/api/tables/${tableId}/${revId}`, {
      expectations,
    })
  }

  validateNewTableImport = async (
    body: ValidateNewTableImportRequest,
    expectations?: Expectations
  ): Promise<ValidateTableImportResponse> => {
    return await this._post<ValidateTableImportResponse>(
      `/api/tables/validateNewTableImport`,
      {
        body,
        expectations,
      }
    )
  }

  validateExistingTableImport = async (
    body: ValidateTableImportRequest,
    expectations?: Expectations
  ): Promise<ValidateTableImportResponse> => {
    return await this._post<ValidateTableImportResponse>(
      `/api/tables/validateExistingTableImport`,
      {
        body,
        expectations,
      }
    )
  }

  csvToJson = async (
    body: CsvToJsonRequest,
    expectations?: Expectations
  ): Promise<CsvToJsonResponse> => {
    return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
      body,
      expectations,
    })
  }

@@ -25,8 +25,13 @@ import {
  Webhook,
  WebhookActionType,
  AutomationEventType,
  LoopStepType,
  FieldSchema,
  BBReferenceFieldSubType,
  JsonFieldSubType,
  AutoFieldSubType,
} from "@budibase/types"
import { LoopInput, LoopStepType } from "../../definitions/automations"
import { LoopInput } from "../../definitions/automations"
import { merge } from "lodash"
import { generator } from "@budibase/backend-core/tests"

@@ -572,3 +577,161 @@ export function basicEnvironmentVariable(
    development: dev || prod,
  }
}

export function fullSchemaWithoutLinks({
  allRequired,
}: {
  allRequired?: boolean
}) {
  const schema: {
    [type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
  } = {
    [FieldType.STRING]: {
      name: "string",
      type: FieldType.STRING,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.LONGFORM]: {
      name: "longform",
      type: FieldType.LONGFORM,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.OPTIONS]: {
      name: "options",
      type: FieldType.OPTIONS,
      constraints: {
        presence: allRequired,
        inclusion: ["option 1", "option 2", "option 3", "option 4"],
      },
    },
    [FieldType.ARRAY]: {
      name: "array",
      type: FieldType.ARRAY,
      constraints: {
        presence: allRequired,
        type: JsonFieldSubType.ARRAY,
        inclusion: ["options 1", "options 2", "options 3", "options 4"],
      },
    },
    [FieldType.NUMBER]: {
      name: "number",
      type: FieldType.NUMBER,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.BOOLEAN]: {
      name: "boolean",
      type: FieldType.BOOLEAN,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.DATETIME]: {
      name: "datetime",
      type: FieldType.DATETIME,
      dateOnly: true,
      timeOnly: false,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.FORMULA]: {
      name: "formula",
      type: FieldType.FORMULA,
      formula: "any formula",
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.BARCODEQR]: {
      name: "barcodeqr",
      type: FieldType.BARCODEQR,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.BIGINT]: {
      name: "bigint",
      type: FieldType.BIGINT,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.BB_REFERENCE]: {
      name: "user",
      type: FieldType.BB_REFERENCE,
      subtype: BBReferenceFieldSubType.USER,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.BB_REFERENCE_SINGLE]: {
      name: "users",
      type: FieldType.BB_REFERENCE_SINGLE,
      subtype: BBReferenceFieldSubType.USER,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.ATTACHMENTS]: {
      name: "attachments",
      type: FieldType.ATTACHMENTS,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.ATTACHMENT_SINGLE]: {
      name: "attachment_single",
      type: FieldType.ATTACHMENT_SINGLE,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.AUTO]: {
      name: "auto",
      type: FieldType.AUTO,
      subtype: AutoFieldSubType.AUTO_ID,
      autocolumn: true,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.JSON]: {
      name: "json",
      type: FieldType.JSON,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.INTERNAL]: {
      name: "internal",
      type: FieldType.INTERNAL,
      constraints: {
        presence: allRequired,
      },
    },
    [FieldType.SIGNATURE_SINGLE]: {
      name: "signature_single",
      type: FieldType.SIGNATURE_SINGLE,
      constraints: {
        presence: allRequired,
      },
    },
  }

  return schema
}

export function basicAttachment() {
  return {
    key: generator.guid(),
    name: generator.word(),
    extension: generator.word(),
    size: generator.natural(),
    url: `/${generator.guid()}`,
  }
}
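
fullSchemaWithoutLinks gives tests a table schema covering every field type except LINK, with allRequired toggling a presence constraint on each column. A hypothetical test usage; the config.api.table.save helper and basicTable fixture are assumed here, not shown in this diff:

  // Assumed usage in a server test: build a table containing every
  // non-link column type, all marked required.
  const table = await config.api.table.save({
    ...basicTable(),
    schema: fullSchemaWithoutLinks({ allRequired: true }),
  })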

@@ -16,6 +16,7 @@ import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants"
import { storeLog } from "../automations/logging"
import {
  Automation,
  AutomationActionStepId,
  AutomationData,
  AutomationJob,
  AutomationMetadata,

@@ -108,7 +109,7 @@ class Orchestrator {
    return triggerOutput
  }

  async getStepFunctionality(stepId: string) {
  async getStepFunctionality(stepId: AutomationActionStepId) {
    let step = await actions.getAction(stepId)
    if (step == null) {
      throw `Cannot find automation step by name ${stepId}`

@@ -422,7 +423,9 @@ class Orchestrator {
      continue
    }

    let stepFn = await this.getStepFunctionality(step.stepId)
    let stepFn = await this.getStepFunctionality(
      step.stepId as AutomationActionStepId
    )
    let inputs = await processObject(originalStepInput, this._context)
    inputs = automationUtils.cleanInputValues(
      inputs,
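
Tightening getStepFunctionality from string to the AutomationActionStepId enum moves the looseness to the single call site, which now carries an "as" cast. A runtime guard is the usual alternative if that cast is a concern; a sketch, with the enum import assumed and its values elided:

  import { AutomationActionStepId } from "@budibase/types"

  // Hypothetical guard: narrow an arbitrary step id string to the enum.
  function isActionStepId(id: string): id is AutomationActionStepId {
    return (Object.values(AutomationActionStepId) as string[]).includes(id)
  }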

@@ -315,6 +315,21 @@ export async function outputProcessing<T extends Row[] | Row>(
          column.subtype
        )
      }
    } else if (column.type === FieldType.DATETIME && column.timeOnly) {
      for (let row of enriched) {
        if (row[property] instanceof Date) {
          const hours = row[property].getUTCHours().toString().padStart(2, "0")
          const minutes = row[property]
            .getUTCMinutes()
            .toString()
            .padStart(2, "0")
          const seconds = row[property]
            .getUTCSeconds()
            .toString()
            .padStart(2, "0")
          row[property] = `${hours}:${minutes}:${seconds}`
        }
      }
    }
  }
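
The new branch above normalises time-only datetime columns from Date objects to a plain HH:mm:ss string in UTC. Extracted as a standalone function for illustration:

  // Mirrors the timeOnly formatting added above: UTC, zero-padded HH:mm:ss.
  function formatTimeOnly(value: Date): string {
    const hours = value.getUTCHours().toString().padStart(2, "0")
    const minutes = value.getUTCMinutes().toString().padStart(2, "0")
    const seconds = value.getUTCSeconds().toString().padStart(2, "0")
    return `${hours}:${minutes}:${seconds}`
  }

  console.log(formatTimeOnly(new Date("1970-01-01T14:05:09Z"))) // "14:05:09"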

Some files were not shown because too many files have changed in this diff.