commit 83b1919019
Merge branch 'master' of github.com:Budibase/budibase into fix/formula-response-types

@@ -147,7 +147,10 @@ jobs:
           fi

   test-server:
-    runs-on: budi-tubby-tornado-quad-core-300gb
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        datasource: [mssql, mysql, postgres, mongodb, mariadb, oracle, none]
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4

@@ -170,12 +173,19 @@ jobs:

       - name: Pull testcontainers images
         run: |
-          docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
-          docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
-          docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
-          docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
-          docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
-          docker pull budibase/oracle-database:23.2-slim-faststart &
+          if [ "${{ matrix.datasource }}" == "mssql" ]; then
+            docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mysql" ]; then
+            docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }}
+          elif [ "${{ matrix.datasource }}" == "postgres" ]; then
+            docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mongodb" ]; then
+            docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mariadb" ]; then
+            docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }}
+          elif [ "${{ matrix.datasource }}" == "oracle" ]; then
+            docker pull budibase/oracle-database:23.2-slim-faststart
+          fi
           docker pull minio/minio &
           docker pull redis &
           docker pull testcontainers/ryuk:0.5.1 &

@@ -186,11 +196,19 @@ jobs:
       - run: yarn --frozen-lockfile

       - name: Test server
+        env:
+          DATASOURCE: ${{ matrix.datasource }}
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            node scripts/run-affected.js --task=test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
+            AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/server)
+            if [ -n "$AFFECTED" ]; then
+              cd packages/server
+              if [ "${{ matrix.datasource }}" == "none" ]; then
+                yarn test --filter ./src/tests/filters/non-datasource-tests.js --passWithNoTests
+              else
+                yarn test --filter ./src/tests/filters/datasource-tests.js --passWithNoTests
+              fi
           else
-            yarn test --scope=@budibase/server
+              echo "No affected tests to run"
           fi

   check-pro-submodule:

@@ -62,6 +62,7 @@ export default async function setup() {
        },
      ])
      .withLabels({ "com.budibase": "true" })
+     .withTmpFs({ "/data": "rw" })
      .withReuse()
      .withWaitStrategy(
        Wait.forSuccessfulCommand(

@@ -72,6 +73,7 @@ export default async function setup() {
   const minio = new GenericContainer("minio/minio")
     .withExposedPorts(9000)
     .withCommand(["server", "/data"])
+    .withTmpFs({ "/data": "rw" })
    .withEnvironment({
      MINIO_ACCESS_KEY: "budibase",
      MINIO_SECRET_KEY: "budibase",

@@ -50,19 +50,6 @@ http {
     ignore_invalid_headers off;
     proxy_buffering off;

-    set $csp_default "default-src 'self'";
-    set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
-    set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
-    set $csp_object "object-src 'none'";
-    set $csp_base_uri "base-uri 'self'";
-    set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
-    set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
-    set $csp_frame "frame-src 'self' https:";
-    set $csp_img "img-src http: https: data: blob:";
-    set $csp_manifest "manifest-src 'self'";
-    set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
-    set $csp_worker "worker-src blob:";
-
     error_page 502 503 504 /error.html;
     location = /error.html {
       root /usr/share/nginx/html;

@@ -73,7 +60,6 @@ http {
     add_header X-Frame-Options SAMEORIGIN always;
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
     add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

     # upstreams

@@ -120,6 +106,12 @@ http {

     location ~ ^/api/(system|admin|global)/ {
       proxy_set_header Host $host;
+
+      # Enable buffering for potentially large OIDC configs
+      proxy_buffering on;
+      proxy_buffer_size 16k;
+      proxy_buffers 4 32k;

       proxy_pass $worker;
     }

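Note: moving the CSP out of nginx and into application middleware (the new TypeScript file below) is what makes per-request nonces possible — a static nginx `add_header` cannot vary per response. The buffering block is a separate fix: identity providers can return OIDC configuration payloads larger than the default proxy buffers (as the inline comment says), so buffering is re-enabled with larger buffer sizes just for the system/admin/global API routes.
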
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.1",
+  "version": "3.2.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -10,7 +10,6 @@ import {
   DatabaseQueryOpts,
   DBError,
   Document,
-  FeatureFlag,
   isDocument,
   RowResponse,
   RowValue,

@@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
 import { DDInstrumentedDatabase } from "../instrumentation"
 import { checkSlashesInUrl } from "../../helpers"
 import { sqlLog } from "../../sql/utils"
-import { flags } from "../../features"

 const DATABASE_NOT_FOUND = "Database does not exist."

@@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
   }

   async destroy() {
-    if (
-      (await flags.isEnabled(FeatureFlag.SQS)) &&
-      (await this.exists(SQLITE_DESIGN_DOC_ID))
-    ) {
+    if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
       // delete the design document, then run the cleanup operation
       const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
       // remove all tables - save the definition then trigger a cleanup

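Note: with the SQS feature flag retired in this commit (see the FlagSet change further down), `destroy()` no longer consults `flags.isEnabled` and simply checks whether the SQLite design doc exists before running cleanup — the behaviour is now unconditional.
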
@@ -1,6 +1,7 @@
 import { existsSync, readFileSync } from "fs"
 import { ServiceType } from "@budibase/types"
 import { cloneDeep } from "lodash"
+import { createSecretKey } from "crypto"

 function isTest() {
   return isJest()

@@ -126,8 +127,12 @@ const environment = {
   },
   BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   JS_BCRYPT: process.env.JS_BCRYPT,
-  JWT_SECRET: process.env.JWT_SECRET,
-  JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,
+  JWT_SECRET: process.env.JWT_SECRET
+    ? createSecretKey(Buffer.from(process.env.JWT_SECRET))
+    : undefined,
+  JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK
+    ? createSecretKey(Buffer.from(process.env.JWT_SECRET_FALLBACK))
+    : undefined,
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",

@@ -225,6 +230,10 @@ const environment = {
   OPENAI_API_KEY: process.env.OPENAI_API_KEY,
   MIN_VERSION_WITHOUT_POWER_ROLE:
     process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
+  DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
+  // stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP
+  DISABLE_CSP_UNSAFE_INLINE_SCRIPTS:
+    process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS,
 }

 export function setEnv(newEnvVars: Partial<typeof environment>): () => void {

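Note: `createSecretKey` wraps the raw secret bytes in a Node `KeyObject`, which crypto and JWT APIs accept directly and which avoids passing bare secret strings around. A minimal sketch of the new shape — the env-var names come from the diff above, the surrounding code is illustrative:

import { createSecretKey, KeyObject } from "crypto"

// Wrap the raw JWT secret, if configured, in a KeyObject, mirroring what
// environment.ts now does at startup.
const jwtSecret: KeyObject | undefined = process.env.JWT_SECRET
  ? createSecretKey(Buffer.from(process.env.JWT_SECRET))
  : undefined

// A secret KeyObject can be exported back to raw bytes when a string is
// needed, which is exactly what the updated encryption test asserts:
//   expect(jwt).toBe(env.JWT_SECRET?.export().toString())
const rawSecret: string | undefined = jwtSecret?.export().toString()
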
@@ -269,8 +269,6 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
 export const flags = new FlagSet({
   [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
   [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
-  [FeatureFlag.SQS]: Flag.boolean(true),
-  [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true),
   [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
   [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
 })

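Note: deleting an entry from the FlagSet is only safe once every `isEnabled` call site is gone, and the rest of this commit does exactly that. A condensed before/after, taken from the table SDK changes later in this diff:

// Before: behaviour gated on the flag at runtime
if (await features.flags.isEnabled(FeatureFlag.SQS)) {
  await sdk.tables.sqs.addTable(table)
}

// After: the flag is treated as permanently on, so the gate disappears
await sdk.tables.sqs.addTable(table)
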
@@ -0,0 +1,118 @@
+import crypto from "crypto"
+import env from "../environment"
+
+const CSP_DIRECTIVES = {
+  "default-src": ["'self'"],
+  "script-src": [
+    "'self'",
+    "'unsafe-eval'",
+    "https://*.budibase.net",
+    "https://cdn.budi.live",
+    "https://js.intercomcdn.com",
+    "https://widget.intercom.io",
+    "https://d2l5prqdbvm3op.cloudfront.net",
+    "https://us-assets.i.posthog.com",
+  ],
+  "style-src": [
+    "'self'",
+    "'unsafe-inline'",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.googleapis.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+  ],
+  "object-src": ["'none'"],
+  "base-uri": ["'self'"],
+  "connect-src": [
+    "'self'",
+    "https://*.budibase.app",
+    "https://*.budibaseqa.app",
+    "https://*.budibase.net",
+    "https://api-iam.intercom.io",
+    "https://api-ping.intercom.io",
+    "https://app.posthog.com",
+    "https://us.i.posthog.com",
+    "wss://nexus-websocket-a.intercom.io",
+    "wss://nexus-websocket-b.intercom.io",
+    "https://nexus-websocket-a.intercom.io",
+    "https://nexus-websocket-b.intercom.io",
+    "https://uploads.intercomcdn.com",
+    "https://uploads.intercomusercontent.com",
+    "https://*.amazonaws.com",
+    "https://*.s3.amazonaws.com",
+    "https://*.s3.us-east-2.amazonaws.com",
+    "https://*.s3.us-east-1.amazonaws.com",
+    "https://*.s3.us-west-1.amazonaws.com",
+    "https://*.s3.us-west-2.amazonaws.com",
+    "https://*.s3.af-south-1.amazonaws.com",
+    "https://*.s3.ap-east-1.amazonaws.com",
+    "https://*.s3.ap-south-1.amazonaws.com",
+    "https://*.s3.ap-northeast-2.amazonaws.com",
+    "https://*.s3.ap-southeast-1.amazonaws.com",
+    "https://*.s3.ap-southeast-2.amazonaws.com",
+    "https://*.s3.ap-northeast-1.amazonaws.com",
+    "https://*.s3.ca-central-1.amazonaws.com",
+    "https://*.s3.cn-north-1.amazonaws.com",
+    "https://*.s3.cn-northwest-1.amazonaws.com",
+    "https://*.s3.eu-central-1.amazonaws.com",
+    "https://*.s3.eu-west-1.amazonaws.com",
+    "https://*.s3.eu-west-2.amazonaws.com",
+    "https://*.s3.eu-south-1.amazonaws.com",
+    "https://*.s3.eu-west-3.amazonaws.com",
+    "https://*.s3.eu-north-1.amazonaws.com",
+    "https://*.s3.sa-east-1.amazonaws.com",
+    "https://*.s3.me-south-1.amazonaws.com",
+    "https://*.s3.us-gov-east-1.amazonaws.com",
+    "https://*.s3.us-gov-west-1.amazonaws.com",
+    "https://api.github.com",
+  ],
+  "font-src": [
+    "'self'",
+    "data:",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.gstatic.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+    "https://js.intercomcdn.com",
+    "https://fonts.intercomcdn.com",
+  ],
+  "frame-src": ["'self'", "https:"],
+  "img-src": ["http:", "https:", "data:", "blob:"],
+  "manifest-src": ["'self'"],
+  "media-src": [
+    "'self'",
+    "https://js.intercomcdn.com",
+    "https://cdn.budi.live",
+  ],
+  "worker-src": ["blob:"],
+}
+
+export async function contentSecurityPolicy(ctx: any, next: any) {
+  try {
+    const nonce = crypto.randomBytes(16).toString("base64")
+
+    const directives = { ...CSP_DIRECTIVES }
+    directives["script-src"] = [
+      ...CSP_DIRECTIVES["script-src"],
+      `'nonce-${nonce}'`,
+    ]
+
+    if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) {
+      directives["script-src"].push("'unsafe-inline'")
+    }
+
+    ctx.state.nonce = nonce
+
+    const cspHeader = Object.entries(directives)
+      .map(([key, sources]) => `${key} ${sources.join(" ")}`)
+      .join("; ")
+    ctx.set("Content-Security-Policy", cspHeader)
+    await next()
+  } catch (err: any) {
+    console.error(
+      `Error occurred in Content-Security-Policy middleware: ${err}`
+    )
+  }
+}
+
+export default contentSecurityPolicy

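Note: a hedged sketch of how this middleware is consumed — the Koa app and import path are illustrative; the `csp` name comes from the middleware index just below. One behaviour worth noticing: the catch block logs and deliberately does not call `next()` on failure, which the accompanying test asserts.

import Koa from "koa"
// illustrative path — the real export is `csp` from backend-core's middleware index
import { csp } from "@budibase/backend-core/middleware"

const app = new Koa()
app.use(csp) // sets Content-Security-Policy and exposes ctx.state.nonce

app.use(async ctx => {
  // downstream handlers can stamp the per-request nonce onto inline scripts
  ctx.body = `<script nonce="${ctx.state.nonce}">window.INIT_TIME = Date.now()</script>`
})
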
@@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
 export { default as correlation } from "../logging/correlation/middleware"
 export { default as errorHandling } from "./errorHandling"
 export { default as querystringToBody } from "./querystringToBody"
+export { default as csp } from "./contentSecurityPolicy"
 export * as joiValidator from "./joi-validator"
 export { default as ip } from "./ip"

@@ -0,0 +1,75 @@
+import crypto from "crypto"
+import contentSecurityPolicy from "../contentSecurityPolicy"
+
+jest.mock("crypto", () => ({
+  randomBytes: jest.fn(),
+  randomUUID: jest.fn(),
+}))
+
+describe("contentSecurityPolicy middleware", () => {
+  let ctx: any
+  let next: any
+  const mockNonce = "mocked/nonce"
+
+  beforeEach(() => {
+    ctx = {
+      state: {},
+      set: jest.fn(),
+    }
+    next = jest.fn()
+    // @ts-ignore
+    crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
+  })
+
+  afterEach(() => {
+    jest.clearAllMocks()
+  })
+
+  it("should generate a nonce and set it in the script-src directive", async () => {
+    await contentSecurityPolicy(ctx, next)
+
+    expect(ctx.state.nonce).toBe(mockNonce)
+    expect(ctx.set).toHaveBeenCalledWith(
+      "Content-Security-Policy",
+      expect.stringContaining(
+        `script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
+      )
+    )
+    expect(next).toHaveBeenCalled()
+  })
+
+  it("should include all CSP directives in the header", async () => {
+    await contentSecurityPolicy(ctx, next)
+
+    const cspHeader = ctx.set.mock.calls[0][1]
+    expect(cspHeader).toContain("default-src 'self'")
+    expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
+    expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
+    expect(cspHeader).toContain("object-src 'none'")
+    expect(cspHeader).toContain("base-uri 'self'")
+    expect(cspHeader).toContain("connect-src 'self'")
+    expect(cspHeader).toContain("font-src 'self'")
+    expect(cspHeader).toContain("frame-src 'self'")
+    expect(cspHeader).toContain("img-src http: https: data: blob:")
+    expect(cspHeader).toContain("manifest-src 'self'")
+    expect(cspHeader).toContain("media-src 'self'")
+    expect(cspHeader).toContain("worker-src blob:")
+  })
+
+  it("should handle errors and log an error message", async () => {
+    const consoleSpy = jest.spyOn(console, "error").mockImplementation()
+    const error = new Error("Test error")
+    // @ts-ignore
+    crypto.randomBytes.mockImplementation(() => {
+      throw error
+    })
+
+    await contentSecurityPolicy(ctx, next)
+
+    expect(consoleSpy).toHaveBeenCalledWith(
+      `Error occurred in Content-Security-Policy middleware: ${error}`
+    )
+    expect(next).not.toHaveBeenCalled()
+    consoleSpy.mockRestore()
+  })
+})

@@ -4,7 +4,7 @@ import env from "../../environment"
 describe("encryption", () => {
   it("should throw an error if API encryption key is not set", () => {
     const jwt = getSecret(SecretOption.API)
-    expect(jwt).toBe(env.JWT_SECRET)
+    expect(jwt).toBe(env.JWT_SECRET?.export().toString())
   })

   it("should throw an error if encryption key is not set", () => {

@@ -8,6 +8,7 @@
   import Link from "../../Link/Link.svelte"
   import Tag from "../../Tags/Tag.svelte"
   import Tags from "../../Tags/Tags.svelte"
+  import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"

   const BYTES_IN_KB = 1000
   const BYTES_IN_MB = 1000000

@@ -39,12 +40,14 @@
     "jfif",
     "webp",
   ]

   const fieldId = id || uuid()

   let selectedImageIdx = 0
   let fileDragged = false
   let selectedUrl
   let fileInput
+  let loading = false

   $: selectedImage = value?.[selectedImageIdx] ?? null
   $: fileCount = value?.length ?? 0
   $: isImage =

@@ -86,10 +89,15 @@
     }

     if (processFiles) {
-      const processedFiles = await processFiles(fileList)
-      const newValue = [...value, ...processedFiles]
-      dispatch("change", newValue)
-      selectedImageIdx = newValue.length - 1
+      loading = true
+      try {
+        const processedFiles = await processFiles(fileList)
+        const newValue = [...value, ...processedFiles]
+        dispatch("change", newValue)
+        selectedImageIdx = newValue.length - 1
+      } finally {
+        loading = false
+      }
     } else {
       dispatch("change", fileList)
     }

@@ -227,7 +235,7 @@
   {#if showDropzone}
     <div
       class="spectrum-Dropzone"
-      class:disabled
+      class:disabled={disabled || loading}
       role="region"
       tabindex="0"
       on:dragover={handleDragOver}

@@ -241,7 +249,7 @@
       id={fieldId}
       {disabled}
       type="file"
-      multiple
+      multiple={maximum !== 1}
       accept={extensions}
       bind:this={fileInput}
       on:change={handleFile}

@@ -339,6 +347,12 @@
         {/if}
       {/if}
     </div>
+
+    {#if loading}
+      <div class="loading">
+        <ProgressCircle size="M" />
+      </div>
+    {/if}
   </div>
   {/if}
 </div>

@@ -464,6 +478,7 @@
   .spectrum-Dropzone {
     height: 220px;
+    position: relative;
   }
   .compact .spectrum-Dropzone {
     height: 40px;

@@ -488,4 +503,14 @@
   .tag {
     margin-top: 8px;
   }
+
+  .loading {
+    position: absolute;
+    display: grid;
+    place-items: center;
+    height: 100%;
+    width: 100%;
+    top: 0;
+    left: 0;
+  }
 </style>

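Note: the upload handler now wraps `processFiles` in try/finally so the spinner cannot get stuck if processing throws. The same guarantee, extracted into a generic self-contained helper (names are illustrative, not from the component):

async function withLoading<T>(
  setLoading: (v: boolean) => void,
  work: () => Promise<T>
): Promise<T> {
  setLoading(true)
  try {
    return await work() // may reject
  } finally {
    setLoading(false) // always runs, on success or failure
  }
}
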
@@ -11,7 +11,6 @@
   export let disabledPermissions = []
   export let columns
   export let fromRelationshipField
-  export let canSetRelationshipSchemas

   const { datasource, dispatch } = getContext("grid")

@@ -129,6 +128,8 @@
     }
   })

+  $: hasLinkColumns = columns.some(c => c.schema.type === FieldType.LINK)
+
   async function toggleColumn(column, permission) {
     const visible = permission !== FieldPermissions.HIDDEN
     const readonly = permission === FieldPermissions.READONLY

@@ -184,7 +185,7 @@
         value={columnToPermissionOptions(column)}
         options={column.options}
       />
-      {#if canSetRelationshipSchemas && column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
+      {#if column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
        <div class="relationship-columns">
          <ActionButton
            on:click={e => {

@@ -203,7 +204,7 @@
     </div>
   </div>

-{#if canSetRelationshipSchemas}
+{#if hasLinkColumns}
   <Popover
     on:close={() => (relationshipFieldName = null)}
     open={relationshipFieldName}

@@ -10,8 +10,6 @@
   import { getContext } from "svelte"
   import { ActionButton } from "@budibase/bbui"
   import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
-  import { isEnabled } from "helpers/featureFlags"
-  import { FeatureFlag } from "@budibase/types"
   import DetailPopover from "components/common/DetailPopover.svelte"

   const { tableColumns, datasource } = getContext("grid")

@@ -46,9 +44,5 @@
     {text}
   </ActionButton>
 </svelte:fragment>
-  <ColumnsSettingContent
-    columns={$tableColumns}
-    canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
-    {permissions}
-  />
+  <ColumnsSettingContent columns={$tableColumns} {permissions} />
 </DetailPopover>

@@ -53,6 +53,7 @@
     on:close={close}
     maxHeight={null}
+    resizable
     minWidth={360}
   >
     <div class="content">
       <slot />

@@ -80,7 +81,6 @@
   }

   .content {
-    width: 300px;
     padding: 20px;
     display: flex;
     flex-direction: column;

@@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
     // Nested providers should already have exposed their own schema
     return {
       schema: datasource?.value?.schema,
+      primaryDisplay: datasource?.value?.primaryDisplay,
     }
   }

@@ -1 +1 @@
-Subproject commit 80770215c6159e4d47f3529fd02e74bc8ad07543
+Subproject commit bfeece324a03a3a5f25137bf3f8c66d5ed6103d8

@@ -1,12 +1,12 @@
 #!/bin/bash
-set -e
+set -ex

 if [[ -n $CI ]]
 then
   export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
-  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
+  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail "$@"
 else
   # --maxWorkers performs better in development
   export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
-  jest --coverage --maxWorkers=2 --forceExit $@
+  jest --coverage --maxWorkers=2 --forceExit "$@"
 fi

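Note: unquoted `$@` re-splits every forwarded argument on whitespace, so a filter such as `--testPathPattern "my test"` would reach jest as three separate words; `"$@"` preserves each original argument as one word, and `set -ex` additionally echoes each command for easier CI debugging. The Node analogue of `"$@"` is passing an argv array rather than a concatenated string — an illustrative sketch:

import { spawnSync } from "child_process"

// Each array element stays a single argument, exactly like "$@" in bash,
// even when it contains spaces.
const args = ["--coverage", "--testPathPattern", "my test file"]
spawnSync("jest", args, { stdio: "inherit" })
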
@@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
         ? objectStore.getGlobalFileUrl("settings", "logoUrl")
         : "",
       appMigrating: needMigrations,
+      nonce: ctx.state.nonce,
     })
     const appHbs = loadHandlebarsFile(appHbsPath)
     ctx.body = await processString(appHbs, {

@@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) {
       css: `:root{${themeVariables}} ${css.code}`,
       appId,
       embedded: bbHeaderEmbed,
+      nonce: ctx.state.nonce,
     })
   } else {
     // just return the app info for jest to assert on

@@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
     const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
     ctx.body = await processString(previewHbs, {
       clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
+      nonce: ctx.state.nonce,
     })
   } else {
     // just return the app info for jest to assert on

@@ -16,6 +16,8 @@
   export let hideDevTools
   export let sideNav
   export let hideFooter
+
+  export let nonce
 </script>

 <svelte:head>

@@ -118,11 +120,11 @@
       <p />
     {/if}
   </div>
-  <script type="application/javascript">
+  <script type="application/javascript" {nonce}>
     window.INIT_TIME = Date.now()
   </script>
   {#if appMigrating}
-    <script type="application/javascript">
+    <script type="application/javascript" {nonce}>
       window.MIGRATING_APP = true
     </script>
   {/if}

@@ -135,7 +137,7 @@
     <script type="application/javascript" src={plugin.jsUrl}></script>
   {/each}
 {/if}
-<script type="application/javascript">
+<script type="application/javascript" {nonce}>
   if (window.loadBudibase) {
     window.loadBudibase()
   } else {

@@ -1,5 +1,5 @@
 <html>
-  <script>
+  <script nonce="{{ nonce }}">
     document.fonts.ready.then(() => {
       window.parent.postMessage({ type: "docLoaded" });
     })

@@ -9,7 +9,7 @@
     <style>{{{css}}}</style>
   </head>

-  <script>
+  <script nonce="{{ nonce }}">
     window["##BUDIBASE_APP_ID##"] = "{{appId}}"
     window["##BUDIBASE_APP_EMBEDDED##"] = "{{embedded}}"
   </script>

@@ -31,7 +31,7 @@
     }
   </style>
   <script src='{{ clientLibPath }}'></script>
-  <script>
+  <script nonce="{{ nonce }}">
     function receiveMessage(event) {
       if (!event.data) {
         return

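Note: the nonce threads through three layers — the middleware generates it and advertises it in the CSP header, the route handlers pass it into the template context, and the templates stamp it onto each inline `<script>`; the browser then executes only scripts whose nonce attribute matches the header. A self-contained sketch (a Koa-like `ctx` shape is assumed):

import crypto from "crypto"

function renderWithNonce(ctx: {
  set: (header: string, value: string) => void
  body?: string
}) {
  const nonce = crypto.randomBytes(16).toString("base64")
  // 1. advertise the nonce in the CSP header (the middleware's job)
  ctx.set("Content-Security-Policy", `script-src 'self' 'nonce-${nonce}'`)
  // 2+3. pass it to the template and stamp it onto the inline script,
  //      as the .hbs templates now do with nonce="{{ nonce }}"
  ctx.body = `<script nonce="${nonce}">window.INIT_TIME = Date.now()</script>`
}
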
@@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { context, events, features, HTTPError } from "@budibase/backend-core"
+import { context, events, HTTPError } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   Database,
   Datasource,
-  FeatureFlag,
   FieldSchema,
   FieldType,
   NumberFieldMetadata,

@@ -336,9 +335,8 @@ class TableSaveFunctions {
       importRows: this.importRows,
       userId: this.userId,
     })
-    if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-      await sdk.tables.sqs.addTable(table)
-    }
+
+    await sdk.tables.sqs.addTable(table)
     return table
   }

@@ -530,9 +528,8 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
   if (rows) {
     await AttachmentCleanup.tableDelete(table, rows)
   }
-  if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-    await sdk.tables.sqs.removeTable(table)
-  }
+
+  await sdk.tables.sqs.removeTable(table)
 }

 const _TableSaveFunctions = TableSaveFunctions

@@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 import { AppStatus } from "../../../db/utils"
-import { events, utils, context, features } from "@budibase/backend-core"
+import { events, utils, context } from "@budibase/backend-core"
 import env from "../../../environment"
 import { type App, BuiltinPermissionID } from "@budibase/types"
 import tk from "timekeeper"

@@ -355,21 +355,6 @@ describe("/applications", () => {
       expect(events.app.deleted).toHaveBeenCalledTimes(1)
       expect(events.app.unpublished).toHaveBeenCalledTimes(1)
     })
-
-    it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
-      const prodAppId = app.appId.replace("_dev", "")
-      nock("http://localhost:10000")
-        .delete(`/api/global/roles/${prodAppId}`)
-        .reply(200, {})
-
-      await features.testutils.withFeatureFlags(
-        "*",
-        { SQS: true },
-        async () => {
-          await config.api.application.delete(app.appId)
-        }
-      )
-    })
   })

   describe("POST /api/applications/:appId/duplicate", () => {

|
|||
} from "@budibase/types"
|
||||
import {
|
||||
DatabaseName,
|
||||
getDatasource,
|
||||
knexClient,
|
||||
datasourceDescribe,
|
||||
} from "../../../integrations/tests/utils"
|
||||
import { tableForDatasource } from "../../../tests/utilities/structures"
|
||||
import nock from "nock"
|
||||
|
@ -69,7 +68,7 @@ describe("/datasources", () => {
|
|||
{
|
||||
status: 500,
|
||||
body: {
|
||||
message: "No datasource implementation found.",
|
||||
message: 'No datasource implementation found called: "invalid"',
|
||||
},
|
||||
}
|
||||
)
|
||||
|
@ -163,21 +162,23 @@ describe("/datasources", () => {
|
|||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe.each([
|
||||
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
|
||||
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
|
||||
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
|
||||
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
|
||||
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
|
||||
])("%s", (_, dsProvider) => {
|
||||
datasourceDescribe(
|
||||
{ name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
|
||||
({ config, dsProvider }) => {
|
||||
let datasource: Datasource
|
||||
let rawDatasource: Datasource
|
||||
let client: Knex
|
||||
|
||||
beforeEach(async () => {
|
||||
rawDatasource = await dsProvider
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
client = await knexClient(rawDatasource)
|
||||
const ds = await dsProvider()
|
||||
rawDatasource = ds.rawDatasource!
|
||||
datasource = ds.datasource!
|
||||
client = ds.client!
|
||||
|
||||
jest.clearAllMocks()
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
describe("get", () => {
|
||||
|
@ -491,5 +492,5 @@ describe("/datasources", () => {
|
|||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
|
|
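Note: the refactor replaces per-database `describe.each` boilerplate with a shared `datasourceDescribe` helper. Judging from the call sites in this diff, it takes a name plus `only`/`exclude` datasource lists and hands the suite a config and a lazy datasource provider; the exact signature lives in integrations/tests/utils, so treat this as a hedged sketch of a call site, not its definition:

datasourceDescribe(
  { name: "my feature (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
  ({ config, dsProvider, isInternal }) => {
    let client: Knex | undefined

    beforeAll(async () => {
      // the provider resolves datasource, rawDatasource and a knex client
      const ds = await dsProvider()
      client = ds.client
    })

    it("can talk to the datasource", async () => {
      await client!.raw("select 1")
    })
  }
)
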
[3 file diffs suppressed because they are too large]

@@ -9,15 +9,20 @@ import {
 import { automations } from "@budibase/pro"
 import {
   CreateRowActionRequest,
   Datasource,
   DocumentType,
   PermissionLevel,
   RowActionResponse,
   Table,
   TableRowActions,
 } from "@budibase/types"
 import * as setup from "./utilities"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { Expectations } from "../../../tests/utilities/api/base"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  datasourceDescribe,
+} from "../../../integrations/tests/utils"
 import { generateRowActionsID } from "../../../db/utils"

 const expectAutomationId = () =>

@@ -969,70 +974,66 @@ describe("/rowsActions", () => {
       status: 200,
     })
   })
-
-  it.each([
-    [
-      "internal",
-      async () => {
-        await config.newTenant()
-        await config.api.application.addSampleData(config.getAppId())
-        const tables = await config.api.table.fetch()
-        const table = tables.find(
-          t => t.sourceId === DEFAULT_BB_DATASOURCE_ID
-        )!
-        return table
-      },
-    ],
-    [
-      "external",
-      async () => {
-        await config.newTenant()
-        const ds = await config.createDatasource({
-          datasource: await getDatasource(DatabaseName.POSTGRES),
-        })
-        const table = await config.api.table.save(
-          setup.structures.tableForDatasource(ds)
-        )
-        return table
-      },
-    ],
-  ])(
-    "should delete all the row actions (and automations) for its tables when a datasource is deleted",
-    async (_, getTable) => {
-      async function getRowActionsFromDb(tableId: string) {
-        return await context.doInAppContext(config.getAppId(), async () => {
-          const db = context.getAppDB()
-          const tableDoc = await db.tryGet<TableRowActions>(
-            generateRowActionsID(tableId)
-          )
-          return tableDoc
-        })
-      }
-
-      const table = await getTable()
-      const tableId = table._id!
-
-      await config.api.rowAction.save(tableId, {
-        name: generator.guid(),
-      })
-      await config.api.rowAction.save(tableId, {
-        name: generator.guid(),
-      })
-
-      const { actions } = (await getRowActionsFromDb(tableId))!
-      expect(Object.entries(actions)).toHaveLength(2)
-
-      const { automations } = await config.api.automation.fetch()
-      expect(automations).toHaveLength(2)
-
-      const datasource = await config.api.datasource.get(table.sourceId)
-      await config.api.datasource.delete(datasource)
-
-      const automationsResp = await config.api.automation.fetch()
-      expect(automationsResp.automations).toHaveLength(0)
-
-      expect(await getRowActionsFromDb(tableId)).toBeUndefined()
-    }
-  )
 })
 })

+datasourceDescribe(
+  { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
+  ({ config, dsProvider, isInternal }) => {
+    let datasource: Datasource | undefined
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource
+    })
+
+    async function getTable(): Promise<Table> {
+      if (isInternal) {
+        await config.api.application.addSampleData(config.getAppId())
+        const tables = await config.api.table.fetch()
+        return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
+      } else {
+        const table = await config.api.table.save(
+          setup.structures.tableForDatasource(datasource!)
+        )
+        return table
+      }
+    }
+
+    it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
+      async function getRowActionsFromDb(tableId: string) {
+        return await context.doInAppContext(config.getAppId(), async () => {
+          const db = context.getAppDB()
+          const tableDoc = await db.tryGet<TableRowActions>(
+            generateRowActionsID(tableId)
+          )
+          return tableDoc
+        })
+      }
+
+      const table = await getTable()
+      const tableId = table._id!
+
+      await config.api.rowAction.save(tableId, {
+        name: generator.guid(),
+      })
+      await config.api.rowAction.save(tableId, {
+        name: generator.guid(),
+      })
+
+      const { actions } = (await getRowActionsFromDb(tableId))!
+      expect(Object.entries(actions)).toHaveLength(2)
+
+      const { automations } = await config.api.automation.fetch()
+      expect(automations).toHaveLength(2)
+
+      const datasource = await config.api.datasource.get(table.sourceId)
+      await config.api.datasource.delete(datasource)
+
+      const automationsResp = await config.api.automation.fetch()
+      expect(automationsResp.automations).toHaveLength(0)
+
+      expect(await getRowActionsFromDb(tableId)).toBeUndefined()
+    })
+  }
+)

[2 file diffs suppressed because they are too large]

@@ -2,7 +2,6 @@ import * as setup from "./utilities"
 import path from "path"
 import nock from "nock"
 import { generator } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"

 interface App {
   background: string

@@ -82,48 +81,36 @@ describe("/templates", () => {
   })

   describe("create app from template", () => {
-    it.each(["sqs", "lucene"])(
-      `should be able to create an app from a template (%s)`,
-      async source => {
-        await features.testutils.withFeatureFlags(
-          "*",
-          { SQS: source === "sqs" },
-          async () => {
-            const name = generator.guid().replaceAll("-", "")
-            const url = `/${name}`
-
-            const app = await config.api.application.create({
-              name,
-              url,
-              useTemplate: "true",
-              templateName: "Agency Client Portal",
-              templateKey: "app/agency-client-portal",
-            })
-            expect(app.name).toBe(name)
-            expect(app.url).toBe(url)
-
-            await config.withApp(app, async () => {
-              const tables = await config.api.table.fetch()
-              expect(tables).toHaveLength(2)
-
-              tables.sort((a, b) => a.name.localeCompare(b.name))
-              const [agencyProjects, users] = tables
-              expect(agencyProjects.name).toBe("Agency Projects")
-              expect(users.name).toBe("Users")
-
-              const { rows } = await config.api.row.search(
-                agencyProjects._id!,
-                {
-                  tableId: agencyProjects._id!,
-                  query: {},
-                }
-              )
-
-              expect(rows).toHaveLength(3)
-            })
-          }
-        )
-      }
-    )
+    it("should be able to create an app from a template", async () => {
+      const name = generator.guid().replaceAll("-", "")
+      const url = `/${name}`
+
+      const app = await config.api.application.create({
+        name,
+        url,
+        useTemplate: "true",
+        templateName: "Agency Client Portal",
+        templateKey: "app/agency-client-portal",
+      })
+      expect(app.name).toBe(name)
+      expect(app.url).toBe(url)
+
+      await config.withApp(app, async () => {
+        const tables = await config.api.table.fetch()
+        expect(tables).toHaveLength(2)
+
+        tables.sort((a, b) => a.name.localeCompare(b.name))
+        const [agencyProjects, users] = tables
+        expect(agencyProjects.name).toBe("Agency Projects")
+        expect(users.name).toBe("Users")
+
+        const { rows } = await config.api.row.search(agencyProjects._id!, {
+          tableId: agencyProjects._id!,
+          query: {},
+        })
+
+        expect(rows).toHaveLength(3)
+      })
+    })
   })
 })

[file diff suppressed because it is too large]

@@ -1,10 +1,6 @@
 import * as setup from "../../../api/routes/tests/utilities"
 import { basicTable } from "../../../tests/utilities/structures"
-import {
-  db as dbCore,
-  features,
-  SQLITE_DESIGN_DOC_ID,
-} from "@budibase/backend-core"
+import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
 import {
   LinkDocument,
   DocumentType,

@@ -70,24 +66,14 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
   }
 }

-async function sqsDisabled(cb: () => Promise<void>) {
-  await features.testutils.withFeatureFlags("*", { SQS: false }, cb)
-}
-
-async function sqsEnabled(cb: () => Promise<void>) {
-  await features.testutils.withFeatureFlags("*", { SQS: true }, cb)
-}
-
 describe("SQS migration", () => {
   beforeAll(async () => {
-    await sqsDisabled(async () => {
-      await config.init()
-      const table = await config.api.table.save(basicTable())
-      tableId = table._id!
-      const db = dbCore.getDB(config.appId!)
-      // old link document
-      await db.put(oldLinkDocument())
-    })
+    await config.init()
+    const table = await config.api.table.save(basicTable())
+    tableId = table._id!
+    const db = dbCore.getDB(config.appId!)
+    // old link document
+    await db.put(oldLinkDocument())
   })

   beforeEach(async () => {

@@ -101,43 +87,32 @@ describe("SQS migration", () => {

   it("test migration runs as expected against an older DB", async () => {
     const db = dbCore.getDB(config.appId!)
-    // confirm nothing exists initially
-    await sqsDisabled(async () => {
-      let error: any | undefined
-      try {
-        await db.get(SQLITE_DESIGN_DOC_ID)
-      } catch (err: any) {
-        error = err
-      }
-      expect(error).toBeDefined()
-      expect(error.status).toBe(404)
-    })

-    await sqsEnabled(async () => {
-      await processMigrations(config.appId!, MIGRATIONS)
-      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
-      expect(designDoc.sql.tables).toBeDefined()
-      const mainTableDef = designDoc.sql.tables[tableId]
-      expect(mainTableDef).toBeDefined()
-      expect(mainTableDef.fields[prefix("name")]).toEqual({
-        field: "name",
-        type: SQLiteType.TEXT,
-      })
-      expect(mainTableDef.fields[prefix("description")]).toEqual({
-        field: "description",
-        type: SQLiteType.TEXT,
-      })
-
-      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
-      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
-      expect(linkDoc.tableId).toEqual(
-        generateJunctionTableID(tableId1, tableId2)
-      )
-      // should have swapped the documents
-      expect(linkDoc.doc1.tableId).toEqual(tableId2)
-      expect(linkDoc.doc1.rowId).toEqual(rowId2)
-      expect(linkDoc.doc2.tableId).toEqual(tableId1)
-      expect(linkDoc.doc2.rowId).toEqual(rowId1)
-    })
+    // remove sqlite design doc to simulate it comes from an older installation
+    const doc = await db.get(SQLITE_DESIGN_DOC_ID)
+    await db.remove({ _id: doc._id, _rev: doc._rev })
+
+    await processMigrations(config.appId!, MIGRATIONS)
+    const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+    expect(designDoc.sql.tables).toBeDefined()
+    const mainTableDef = designDoc.sql.tables[tableId]
+    expect(mainTableDef).toBeDefined()
+    expect(mainTableDef.fields[prefix("name")]).toEqual({
+      field: "name",
+      type: SQLiteType.TEXT,
+    })
+    expect(mainTableDef.fields[prefix("description")]).toEqual({
+      field: "description",
+      type: SQLiteType.TEXT,
+    })
+
+    const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
+    const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
+    expect(linkDoc.tableId).toEqual(generateJunctionTableID(tableId1, tableId2))
+    // should have swapped the documents
+    expect(linkDoc.doc1.tableId).toEqual(tableId2)
+    expect(linkDoc.doc1.rowId).toEqual(rowId2)
+    expect(linkDoc.doc2.tableId).toEqual(tableId1)
+    expect(linkDoc.doc2.rowId).toEqual(rowId1)
   })
 })

@@ -1,15 +1,15 @@
-const setup = require("./utilities")
+import { getConfig, afterAll as _afterAll, runStep } from "./utilities"

 describe("test the bash action", () => {
-  let config = setup.getConfig()
+  let config = getConfig()

   beforeAll(async () => {
     await config.init()
   })
-  afterAll(setup.afterAll)
+  afterAll(_afterAll)

   it("should be able to execute a script", async () => {
-    let res = await setup.runStep("EXECUTE_BASH", {
+    let res = await runStep(config, "EXECUTE_BASH", {
       code: "echo 'test'",
     })
     expect(res.stdout).toEqual("test\n")

@@ -17,7 +17,7 @@ describe("test the bash action", () => {
   })

   it("should handle a null value", async () => {
-    let res = await setup.runStep("EXECUTE_BASH", {
+    let res = await runStep(config, "EXECUTE_BASH", {
       code: null,
     })
     expect(res.stdout).toEqual(

@@ -31,7 +31,7 @@ describe("test the create row action", () => {
   afterAll(setup.afterAll)

   it("should be able to run the action", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row,
     })
     expect(res.id).toBeDefined()

@@ -43,7 +43,7 @@ describe("test the create row action", () => {
   })

   it("should return an error (not throw) when bad info provided", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row: {
         tableId: "invalid",
         invalid: "invalid",

@@ -53,7 +53,7 @@ describe("test the create row action", () => {
   })

   it("should check invalid inputs return an error", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })

@@ -76,7 +76,7 @@ describe("test the create row action", () => {
   ]

   attachmentRow.file_attachment = attachmentObject
-  const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+  const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
     row: attachmentRow,
   })

@@ -111,7 +111,7 @@ describe("test the create row action", () => {
   }

   attachmentRow.single_file_attachment = attachmentObject
-  const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+  const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
     row: attachmentRow,
   })

@@ -146,7 +146,7 @@ describe("test the create row action", () => {
   }

   attachmentRow.single_file_attachment = attachmentObject
-  const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+  const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
     row: attachmentRow,
   })

@@ -1,14 +1,20 @@
-const setup = require("./utilities")
+import { runStep, actions, getConfig } from "./utilities"
+import { reset } from "timekeeper"

 // need real Date for this test
-const tk = require("timekeeper")
-tk.reset()
+reset()

 describe("test the delay logic", () => {
+  const config = getConfig()
+
+  beforeAll(async () => {
+    await config.init()
+  })
+
   it("should be able to run the delay", async () => {
     const time = 100
     const before = Date.now()
-    await setup.runStep(setup.actions.DELAY.stepId, { time: time })
+    await runStep(config, actions.DELAY.stepId, { time: time })
     const now = Date.now()
     // divide by two just so that test will always pass as long as there was some sort of delay
     expect(now - before).toBeGreaterThanOrEqual(time / 2)

@@ -1,4 +1,4 @@
-const setup = require("./utilities")
+import * as setup from "./utilities"

 describe("test the delete row action", () => {
   let table: any

@@ -20,32 +20,29 @@ describe("test the delete row action", () => {
   afterAll(setup.afterAll)

   it("should be able to run the action", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+    const res = await setup.runStep(
+      config,
+      setup.actions.DELETE_ROW.stepId,
+      inputs
+    )
     expect(res.success).toEqual(true)
     expect(res.response).toBeDefined()
     expect(res.row._id).toEqual(row._id)
-    let error
-    try {
-      await config.getRow(table._id, res.row._id)
-    } catch (err) {
-      error = err
-    }
-    expect(error).toBeDefined()
   })

   it("check usage quota attempts", async () => {
     await setup.runInProd(async () => {
-      await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+      await setup.runStep(config, setup.actions.DELETE_ROW.stepId, inputs)
     })
   })

   it("should check invalid inputs return an error", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {})
+    const res = await setup.runStep(config, setup.actions.DELETE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })

   it("should return an error when table doesn't exist", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.DELETE_ROW.stepId, {
       tableId: "invalid",
       id: "invalid",
       revision: "invalid",

@@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {

   it("should be able to run the action", async () => {
     nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
-    const res = await runStep(actions.discord.stepId, {
+    const res = await runStep(config, actions.discord.stepId, {
       url: "http://www.example.com",
       username: "joe_bloggs",
     })

@@ -1,65 +1,77 @@
 import { Datasource, Query } from "@budibase/types"
 import * as setup from "./utilities"
-import { DatabaseName } from "../../integrations/tests/utils"
+import {
+  DatabaseName,
+  datasourceDescribe,
+} from "../../integrations/tests/utils"
 import { Knex } from "knex"
+import { generator } from "@budibase/backend-core/tests"

-describe.each([
-  DatabaseName.POSTGRES,
-  DatabaseName.MYSQL,
-  DatabaseName.SQL_SERVER,
-  DatabaseName.MARIADB,
-  DatabaseName.ORACLE,
-])("execute query action (%s)", name => {
-  let tableName: string
-  let client: Knex
-  let datasource: Datasource
-  let query: Query
-  const config = setup.getConfig()
+datasourceDescribe(
+  {
+    name: "execute query action",
+    exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  },
+  ({ config, dsProvider }) => {
+    let tableName: string
+    let client: Knex
+    let datasource: Datasource
+    let query: Query

-  beforeAll(async () => {
-    await config.init()
-
-    const testSetup = await setup.setupTestDatasource(config, name)
-    datasource = testSetup.datasource
-    client = testSetup.client
-  })
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      client = ds.client!
+    })

-  beforeEach(async () => {
-    tableName = await setup.createTestTable(client, {
-      a: { type: "string" },
-      b: { type: "number" },
-    })
-    await setup.insertTestData(client, tableName, [{ a: "string", b: 1 }])
-    query = await setup.saveTestQuery(config, client, tableName, datasource)
-  })
+    beforeEach(async () => {
+      tableName = generator.guid()
+      await client.schema.createTable(tableName, table => {
+        table.string("a")
+        table.integer("b")
+      })
+      await client(tableName).insert({ a: "string", b: 1 })
+      query = await setup.saveTestQuery(config, client, tableName, datasource)
+    })

-  afterEach(async () => {
-    await client.schema.dropTable(tableName)
-  })
-
-  afterAll(setup.afterAll)
+    afterEach(async () => {
+      await client.schema.dropTable(tableName)
+    })

-  it("should be able to execute a query", async () => {
-    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-      query: { queryId: query._id },
-    })
-    expect(res.response).toEqual([{ a: "string", b: 1 }])
-    expect(res.success).toEqual(true)
-  })
+    it("should be able to execute a query", async () => {
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: { queryId: query._id },
+        }
+      )
+      expect(res.response).toEqual([{ a: "string", b: 1 }])
+      expect(res.success).toEqual(true)
+    })

-  it("should handle a null query value", async () => {
-    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-      query: null,
-    })
-    expect(res.response.message).toEqual("Invalid inputs")
-    expect(res.success).toEqual(false)
-  })
+    it("should handle a null query value", async () => {
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: null,
+        }
+      )
+      expect(res.response.message).toEqual("Invalid inputs")
+      expect(res.success).toEqual(false)
+    })

-  it("should handle an error executing a query", async () => {
-    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-      query: { queryId: "wrong_id" },
-    })
-    expect(res.response).toBeDefined()
-    expect(res.success).toEqual(false)
-  })
-})
+    it("should handle an error executing a query", async () => {
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: { queryId: "wrong_id" },
+        }
+      )
+      expect(res.response).toBeDefined()
+      expect(res.success).toEqual(false)
+    })
+  }
+)

@@ -1,15 +1,15 @@
-const setup = require("./utilities")
+import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"

 describe("test the execute script action", () => {
-  let config = setup.getConfig()
+  let config = getConfig()

   beforeAll(async () => {
     await config.init()
   })
-  afterAll(setup.afterAll)
+  afterAll(_afterAll)

   it("should be able to execute a script", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: "return 1 + 1",
     })
     expect(res.value).toEqual(2)

@@ -17,7 +17,7 @@ describe("test the execute script action", () => {
   })

   it("should handle a null value", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: null,
     })
     expect(res.response.message).toEqual("Invalid inputs")

@@ -25,8 +25,9 @@ describe("test the execute script action", () => {
   })

   it("should be able to get a value from context", async () => {
-    const res = await setup.runStep(
-      setup.actions.EXECUTE_SCRIPT.stepId,
+    const res = await runStep(
+      config,
+      actions.EXECUTE_SCRIPT.stepId,
       {
         code: "return steps.map(d => d.value)",
       },

@@ -40,7 +41,7 @@ describe("test the execute script action", () => {
   })

   it("should be able to handle an error gracefully", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: "return something.map(x => x.name)",
     })
     expect(res.response).toEqual("ReferenceError: something is not defined")

@@ -2,13 +2,19 @@ import * as setup from "./utilities"
 import { FilterConditions } from "../steps/filter"

 describe("test the filter logic", () => {
+  const config = setup.getConfig()
+
+  beforeAll(async () => {
+    await config.init()
+  })
+
   async function checkFilter(
     field: any,
     condition: string,
     value: any,
     pass = true
   ) {
-    let res = await setup.runStep(setup.actions.FILTER.stepId, {
+    let res = await setup.runStep(config, setup.actions.FILTER.stepId, {
       field,
       condition,
       value,

@@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {

   it("should be able to run the action", async () => {
     nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
-    const res = await runStep(actions.integromat.stepId, {
+    const res = await runStep(config, actions.integromat.stepId, {
       url: "http://www.example.com",
     })
     expect(res.response.foo).toEqual("bar")

@@ -38,7 +38,7 @@ describe("test the outgoing webhook action", () => {
       .post("/", payload)
       .reply(200, { foo: "bar" })

-    const res = await runStep(actions.integromat.stepId, {
+    const res = await runStep(config, actions.integromat.stepId, {
       body: { value: JSON.stringify(payload) },
       url: "http://www.example.com",
     })

@@ -47,7 +47,7 @@ describe("test the outgoing webhook action", () => {
   })

   it("should return a 400 if the JSON payload string is malformed", async () => {
-    const res = await runStep(actions.integromat.stepId, {
+    const res = await runStep(config, actions.integromat.stepId, {
       body: { value: "{ invalid json }" },
       url: "http://www.example.com",
     })

@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {
|
|||
|
||||
it("should be able to run the action and default to 'get'", async () => {
|
||||
nock("http://www.example.com/").get("/").reply(200, { foo: "bar" })
|
||||
const res = await runStep(actions.n8n.stepId, {
|
||||
const res = await runStep(config, actions.n8n.stepId, {
|
||||
url: "http://www.example.com",
|
||||
body: {
|
||||
test: "IGNORE_ME",
|
||||
|
@ -30,7 +30,7 @@ describe("test the outgoing webhook action", () => {
|
|||
nock("http://www.example.com/")
|
||||
.post("/", { name: "Adam", age: 9 })
|
||||
.reply(200)
|
||||
const res = await runStep(actions.n8n.stepId, {
|
||||
const res = await runStep(config, actions.n8n.stepId, {
|
||||
body: {
|
||||
value: JSON.stringify({ name: "Adam", age: 9 }),
|
||||
},
|
||||
|
@ -42,7 +42,7 @@ describe("test the outgoing webhook action", () => {
|
|||
|
||||
it("should return a 400 if the JSON payload string is malformed", async () => {
|
||||
const payload = `{ value1 1 }`
|
||||
const res = await runStep(actions.n8n.stepId, {
|
||||
const res = await runStep(config, actions.n8n.stepId, {
|
||||
value1: "ONE",
|
||||
body: {
|
||||
value: payload,
|
||||
|
@ -59,7 +59,7 @@ describe("test the outgoing webhook action", () => {
|
|||
nock("http://www.example.com/")
|
||||
.head("/", body => body === "")
|
||||
.reply(200)
|
||||
const res = await runStep(actions.n8n.stepId, {
|
||||
const res = await runStep(config, actions.n8n.stepId, {
|
||||
url: "http://www.example.com",
|
||||
method: "HEAD",
|
||||
body: {
|
||||
|
|
|
@ -62,13 +62,13 @@ describe("test the openai action", () => {
|
|||
afterAll(_afterAll)
|
||||
|
||||
it("should be able to receive a response from ChatGPT given a prompt", async () => {
|
||||
const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
|
||||
const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT })
|
||||
expect(res.response).toEqual("This is a test")
|
||||
expect(res.success).toBeTruthy()
|
||||
})
|
||||
|
||||
it("should present the correct error message when a prompt is not provided", async () => {
|
||||
const res = await runStep("OPENAI", { prompt: null })
|
||||
const res = await runStep(config, "OPENAI", { prompt: null })
|
||||
expect(res.response).toEqual(
|
||||
"Budibase OpenAI Automation Failed: No prompt supplied"
|
||||
)
|
||||
|
@ -91,7 +91,7 @@ describe("test the openai action", () => {
|
|||
} as any)
|
||||
)
|
||||
|
||||
const res = await runStep("OPENAI", {
|
||||
const res = await runStep(config, "OPENAI", {
|
||||
prompt: OPENAI_PROMPT,
|
||||
})
|
||||
|
||||
|
@ -106,7 +106,7 @@ describe("test the openai action", () => {
|
|||
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
|
||||
|
||||
const prompt = "What is the meaning of life?"
|
||||
await runStep("OPENAI", {
|
||||
await runStep(config, "OPENAI", {
|
||||
model: "gpt-4o-mini",
|
||||
prompt,
|
||||
})
|
||||
|
|
|
@ -18,7 +18,7 @@ describe("test the outgoing webhook action", () => {
|
|||
nock("http://www.example.com")
|
||||
.post("/", { a: 1 })
|
||||
.reply(200, { foo: "bar" })
|
||||
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
|
||||
const res = await runStep(config, actions.OUTGOING_WEBHOOK.stepId, {
|
||||
requestMethod: "POST",
|
||||
url: "www.example.com",
|
||||
requestBody: JSON.stringify({ a: 1 }),
|
||||
|
@ -28,7 +28,7 @@ describe("test the outgoing webhook action", () => {
|
|||
})
|
||||
|
||||
it("should return an error if something goes wrong in fetch", async () => {
|
||||
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
|
||||
const res = await runStep(config, actions.OUTGOING_WEBHOOK.stepId, {
|
||||
requestMethod: "GET",
|
||||
url: "www.invalid.com",
|
||||
})
|
||||
|
|
|
@ -33,7 +33,11 @@ describe("Test a query step automation", () => {
|
|||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.QUERY_ROWS.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toBe(true)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(2)
|
||||
|
@ -48,7 +52,11 @@ describe("Test a query step automation", () => {
|
|||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.QUERY_ROWS.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toBe(true)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(2)
|
||||
|
@ -65,7 +73,11 @@ describe("Test a query step automation", () => {
|
|||
limit: 10,
|
||||
onEmptyFilter: "none",
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.QUERY_ROWS.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toBe(false)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(0)
|
||||
|
@ -85,7 +97,11 @@ describe("Test a query step automation", () => {
|
|||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.QUERY_ROWS.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toBe(false)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(0)
|
||||
|
@ -100,7 +116,11 @@ describe("Test a query step automation", () => {
|
|||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.QUERY_ROWS.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toBe(true)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(2)
|
||||
|
|
|
@@ -1,9 +1,14 @@
import * as automation from "../../index"
import * as setup from "../utilities"
import { LoopStepType, FieldType, Table } from "@budibase/types"
import { LoopStepType, FieldType, Table, Datasource } from "@budibase/types"
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import { DatabaseName } from "../../../integrations/tests/utils"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { FilterConditions } from "../../../automations/steps/filter"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

describe("Automation Scenarios", () => {
let config = setup.getConfig()

@@ -107,96 +112,6 @@ describe("Automation Scenarios", () => {
expect(results.steps[2].outputs.rows).toHaveLength(1)
})

it("should query an external database for some data then insert them into an internal table", async () => {
const { datasource, client } = await setup.setupTestDatasource(
config,
DatabaseName.MYSQL
)

const newTable = await config.createTable({
name: "table",
type: "table",
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
constraints: {
presence: true,
},
},
},
})

const tableName = await setup.createTestTable(client, {
name: { type: "string" },
age: { type: "number" },
})

const rows = [
{ name: "Joe", age: 20 },
{ name: "Bob", age: 25 },
{ name: "Paul", age: 30 },
]

await setup.insertTestData(client, tableName, rows)

const query = await setup.saveTestQuery(
config,
client,
tableName,
datasource
)

const builder = createAutomationBuilder({
name: "Test external query and save",
})

const results = await builder
.appAction({
fields: {},
})
.executeQuery({
query: {
queryId: query._id!,
},
})
.loop({
option: LoopStepType.ARRAY,
binding: "{{ steps.1.response }}",
})
.createRow({
row: {
name: "{{ loop.currentItem.name }}",
age: "{{ loop.currentItem.age }}",
tableId: newTable._id!,
},
})
.queryRows({
tableId: newTable._id!,
})
.run()

expect(results.steps).toHaveLength(3)

expect(results.steps[1].outputs.iterations).toBe(3)
expect(results.steps[1].outputs.items).toHaveLength(3)

expect(results.steps[2].outputs.rows).toHaveLength(3)

rows.forEach(expectedRow => {
expect(results.steps[2].outputs.rows).toEqual(
expect.arrayContaining([expect.objectContaining(expectedRow)])
)
})
})

it("should trigger an automation which creates and then updates a row", async () => {
const table = await config.createTable({
name: "TestTable",

@@ -517,3 +432,104 @@ describe("Automation Scenarios", () => {
expect(results.steps[0].outputs.message).toContain("example.com")
})
})

datasourceDescribe(
{ name: "", only: [DatabaseName.MYSQL] },
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex

beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
})

it("should query an external database for some data then insert them into an internal table", async () => {
const newTable = await config.createTable({
name: "table",
type: "table",
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
constraints: {
presence: true,
},
},
},
})

const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.string("name")
table.integer("age")
})

const rows = [
{ name: "Joe", age: 20 },
{ name: "Bob", age: 25 },
{ name: "Paul", age: 30 },
]

await client(tableName).insert(rows)

const query = await setup.saveTestQuery(
config,
client,
tableName,
datasource
)

const builder = createAutomationBuilder({
name: "Test external query and save",
config,
})

const results = await builder
.appAction({
fields: {},
})
.executeQuery({
query: {
queryId: query._id!,
},
})
.loop({
option: LoopStepType.ARRAY,
binding: "{{ steps.1.response }}",
})
.createRow({
row: {
name: "{{ loop.currentItem.name }}",
age: "{{ loop.currentItem.age }}",
tableId: newTable._id!,
},
})
.queryRows({
tableId: newTable._id!,
})
.run()

expect(results.steps).toHaveLength(3)

expect(results.steps[1].outputs.iterations).toBe(3)
expect(results.steps[1].outputs.items).toHaveLength(3)

expect(results.steps[2].outputs.rows).toHaveLength(3)

rows.forEach(expectedRow => {
expect(results.steps[2].outputs.rows).toEqual(
expect.arrayContaining([expect.objectContaining(expectedRow)])
)
})
})
}
)

@@ -18,7 +18,7 @@ function generateResponse(to: string, from: string) {
}
}

const setup = require("./utilities")
import * as setup from "./utilities"

describe("test the outgoing webhook action", () => {
let inputs

@@ -58,6 +58,7 @@ describe("test the outgoing webhook action", () => {
}
let resp = generateResponse(inputs.to, inputs.from)
const res = await setup.runStep(
config,
setup.actions.SEND_EMAIL_SMTP.stepId,
inputs
)

@@ -1,8 +1,8 @@
const setup = require("./utilities")
import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"

describe("test the server log action", () => {
let config = setup.getConfig()
let inputs
let config = getConfig()
let inputs: any

beforeAll(async () => {
await config.init()

@@ -10,10 +10,10 @@ describe("test the server log action", () => {
text: "log message",
}
})
afterAll(setup.afterAll)
afterAll(_afterAll)

it("should be able to log the text", async () => {
let res = await setup.runStep(setup.actions.SERVER_LOG.stepId, inputs)
let res = await runStep(config, actions.SERVER_LOG.stepId, inputs)
expect(res.message).toEqual(`App ${config.getAppId()} - ${inputs.text}`)
expect(res.success).toEqual(true)
})

@ -29,6 +29,7 @@ describe("Test triggering an automation from another automation", () => {
|
|||
},
|
||||
}
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
||||
inputs
|
||||
)
|
||||
|
@ -44,6 +45,7 @@ describe("Test triggering an automation from another automation", () => {
|
|||
},
|
||||
}
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
||||
inputs
|
||||
)
|
||||
|
|
|
@ -34,7 +34,11 @@ describe("test the update row action", () => {
|
|||
afterAll(setup.afterAll)
|
||||
|
||||
it("should be able to run the action", async () => {
|
||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
|
||||
const res = await setup.runStep(
|
||||
config,
|
||||
setup.actions.UPDATE_ROW.stepId,
|
||||
inputs
|
||||
)
|
||||
expect(res.success).toEqual(true)
|
||||
const updatedRow = await config.api.row.get(table._id!, res.id)
|
||||
expect(updatedRow.name).toEqual("Updated name")
|
||||
|
@ -42,12 +46,12 @@ describe("test the update row action", () => {
|
|||
})
|
||||
|
||||
it("should check invalid inputs return an error", async () => {
|
||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {})
|
||||
const res = await setup.runStep(config, setup.actions.UPDATE_ROW.stepId, {})
|
||||
expect(res.success).toEqual(false)
|
||||
})
|
||||
|
||||
it("should return an error when table doesn't exist", async () => {
|
||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
const res = await setup.runStep(config, setup.actions.UPDATE_ROW.stepId, {
|
||||
row: { _id: "invalid" },
|
||||
rowId: "invalid",
|
||||
})
|
||||
|
@ -90,16 +94,20 @@ describe("test the update row action", () => {
|
|||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
row: {
|
||||
_id: row._id,
|
||||
_rev: row._rev,
|
||||
tableId: row.tableId,
|
||||
user1: [user2._id],
|
||||
user2: "",
|
||||
},
|
||||
})
|
||||
let stepResp = await setup.runStep(
|
||||
config,
|
||||
setup.actions.UPDATE_ROW.stepId,
|
||||
{
|
||||
rowId: row._id,
|
||||
row: {
|
||||
_id: row._id,
|
||||
_rev: row._rev,
|
||||
tableId: row.tableId,
|
||||
user1: [user2._id],
|
||||
user2: "",
|
||||
},
|
||||
}
|
||||
)
|
||||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
|
@ -143,23 +151,27 @@ describe("test the update row action", () => {
|
|||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
row: {
|
||||
_id: row._id,
|
||||
_rev: row._rev,
|
||||
tableId: row.tableId,
|
||||
user1: [user2._id],
|
||||
user2: "",
|
||||
},
|
||||
meta: {
|
||||
fields: {
|
||||
user2: {
|
||||
clearRelationships: true,
|
||||
let stepResp = await setup.runStep(
|
||||
config,
|
||||
setup.actions.UPDATE_ROW.stepId,
|
||||
{
|
||||
rowId: row._id,
|
||||
row: {
|
||||
_id: row._id,
|
||||
_rev: row._rev,
|
||||
tableId: row.tableId,
|
||||
user1: [user2._id],
|
||||
user2: "",
|
||||
},
|
||||
meta: {
|
||||
fields: {
|
||||
user2: {
|
||||
clearRelationships: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
)
|
||||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
|
|
|
@@ -1,22 +1,16 @@
import TestConfig from "../../../tests/utilities/TestConfiguration"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { context } from "@budibase/backend-core"
import { BUILTIN_ACTION_DEFINITIONS, getAction } from "../../actions"
import emitter from "../../../events/index"
import env from "../../../environment"
import { AutomationActionStepId, Datasource } from "@budibase/types"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"
import {
getDatasource,
knexClient,
DatabaseName,
} from "../../../integrations/tests/utils"

let config: TestConfig
let config: TestConfiguration

export function getConfig(): TestConfig {
export function getConfig(): TestConfiguration {
if (!config) {
config = new TestConfig(true)
config = new TestConfiguration(true)
}
return config
}

@@ -39,7 +33,12 @@ export async function runInProd(fn: any) {
}
}

export async function runStep(stepId: string, inputs: any, stepContext?: any) {
export async function runStep(
config: TestConfiguration,
stepId: string,
inputs: any,
stepContext?: any
) {
async function run() {
let step = await getAction(stepId as AutomationActionStepId)
expect(step).toBeDefined()

@@ -55,7 +54,7 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) {
emitter,
})
}
if (config?.appId) {
if (config.appId) {
return context.doInContext(config?.appId, async () => {
return run()
})

@@ -64,31 +63,8 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) {
}
}

export async function createTestTable(client: Knex, schema: any) {
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
for (const fieldName in schema) {
const field = schema[fieldName]
if (field.type === "string") {
table.string(fieldName)
} else if (field.type === "number") {
table.integer(fieldName)
}
}
})
return tableName
}

export async function insertTestData(
client: Knex,
tableName: string,
rows: any[]
) {
await client(tableName).insert(rows)
}

export async function saveTestQuery(
config: TestConfig,
config: TestConfiguration,
client: Knex,
tableName: string,
datasource: Datasource

@@ -107,15 +83,5 @@ export async function saveTestQuery(
})
}

export async function setupTestDatasource(
config: TestConfig,
dbName: DatabaseName
) {
const db = await getDatasource(dbName)
const datasource = await config.api.datasource.create(db)
const client = await knexClient(db)
return { datasource, client }
}

export const apiKey = "test"
export const actions = BUILTIN_ACTION_DEFINITIONS

@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {
|
|||
|
||||
it("should be able to run the action", async () => {
|
||||
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
|
||||
const res = await runStep(actions.zapier.stepId, {
|
||||
const res = await runStep(config, actions.zapier.stepId, {
|
||||
url: "http://www.example.com",
|
||||
})
|
||||
expect(res.response.foo).toEqual("bar")
|
||||
|
@ -38,7 +38,7 @@ describe("test the outgoing webhook action", () => {
|
|||
.post("/", { ...payload, platform: "budibase" })
|
||||
.reply(200, { foo: "bar" })
|
||||
|
||||
const res = await runStep(actions.zapier.stepId, {
|
||||
const res = await runStep(config, actions.zapier.stepId, {
|
||||
body: { value: JSON.stringify(payload) },
|
||||
url: "http://www.example.com",
|
||||
})
|
||||
|
@ -47,7 +47,7 @@ describe("test the outgoing webhook action", () => {
|
|||
})
|
||||
|
||||
it("should return a 400 if the JSON payload string is malformed", async () => {
|
||||
const res = await runStep(actions.zapier.stepId, {
|
||||
const res = await runStep(config, actions.zapier.stepId, {
|
||||
body: { value: "{ invalid json }" },
|
||||
url: "http://www.example.com",
|
||||
})
|
||||
|
|
|
@@ -14,11 +14,10 @@ import {
coreOutputProcessing,
processFormulas,
} from "../../utilities/rowProcessor"
import { context, features } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import {
ContextUser,
EventType,
FeatureFlag,
FieldType,
LinkDocumentValue,
Row,

@@ -251,19 +250,13 @@ export async function squashLinks<T = Row[] | Row>(
source: Table | ViewV2,
enriched: T
): Promise<T> {
const allowRelationshipSchemas = await features.flags.isEnabled(
FeatureFlag.ENRICHED_RELATIONSHIPS
)

let viewSchema: ViewV2Schema = {}
if (sdk.views.isView(source)) {
if (helpers.views.isCalculationView(source)) {
return enriched
}

if (allowRelationshipSchemas) {
viewSchema = source.schema || {}
}
viewSchema = source.schema || {}
}

let table: Table

@@ -1,10 +1,5 @@
import * as setup from "../api/routes/tests/utilities"
import { Datasource, FieldType } from "@budibase/types"
import {
DatabaseName,
getDatasource,
knexClient,
} from "../integrations/tests/utils"
import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
import { generator } from "@budibase/backend-core/tests"
import { Knex } from "knex"

@@ -15,31 +10,24 @@ function uniqueTableName(length?: number): string {
.substring(0, length || 10)
}

const config = setup.getConfig()!

describe("mysql integrations", () => {
let datasource: Datasource
let client: Knex

beforeAll(async () => {
await config.init()
const rawDatasource = await getDatasource(DatabaseName.MYSQL)
datasource = await config.api.datasource.create(rawDatasource)
client = await knexClient(rawDatasource)
})

afterAll(config.end)

describe("Integration compatibility with mysql search_path", () => {
let datasource: Datasource
datasourceDescribe(
{
name: "Integration compatibility with mysql search_path",
only: [DatabaseName.MYSQL],
},
({ config, dsProvider }) => {
let rawDatasource: Datasource
let datasource: Datasource
let client: Knex

const database = generator.guid()
const database2 = generator.guid()

beforeAll(async () => {
rawDatasource = await getDatasource(DatabaseName.MYSQL)
client = await knexClient(rawDatasource)
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!

await client.raw(`CREATE DATABASE \`${database}\`;`)
await client.raw(`CREATE DATABASE \`${database2}\`;`)

@@ -87,11 +75,25 @@ describe("mysql integrations", () => {
const schema = res.datasource.entities![repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
}
)

datasourceDescribe(
{
name: "POST /api/datasources/:datasourceId/schema",
only: [DatabaseName.MYSQL],
},
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex

beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
})

describe("POST /api/datasources/:datasourceId/schema", () => {
let tableName: string

beforeEach(async () => {
tableName = uniqueTableName()
})

@@ -122,5 +124,5 @@ describe("mysql integrations", () => {
expect(table).toBeDefined()
expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
})
})
})
}
)

@@ -1,105 +1,230 @@
import * as setup from "../api/routes/tests/utilities"
import { Datasource, FieldType, Table } from "@budibase/types"
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import {
DatabaseName,
getDatasource,
datasourceDescribe,
knexClient,
} from "../integrations/tests/utils"
import { Knex } from "knex"

const config = setup.getConfig()!
datasourceDescribe(
{ name: "postgres integrations", only: [DatabaseName.POSTGRES] },
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex

describe("postgres integrations", () => {
let datasource: Datasource
let client: Knex

beforeAll(async () => {
await config.init()
const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
datasource = await config.api.datasource.create(rawDatasource)
client = await knexClient(rawDatasource)
})

afterAll(config.end)

describe("POST /api/datasources/:datasourceId/schema", () => {
let tableName: string

beforeEach(async () => {
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
})

afterEach(async () => {
await client.schema.dropTableIfExists(tableName)
})
afterAll(config.end)

it("recognises when a table has no primary key", async () => {
await client.schema.createTable(tableName, table => {
table.increments("id", { primaryKey: false })
describe("POST /api/datasources/:datasourceId/schema", () => {
let tableName: string

beforeEach(async () => {
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
})

const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
afterEach(async () => {
await client.schema.dropTableIfExists(tableName)
})

expect(response.errors).toEqual({
[tableName]: "Table must have a primary key.",
})
})
it("recognises when a table has no primary key", async () => {
await client.schema.createTable(tableName, table => {
table.increments("id", { primaryKey: false })
})

it("recognises when a table is using a reserved column name", async () => {
await client.schema.createTable(tableName, table => {
table.increments("_id").primary()
})
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})

const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})

expect(response.errors).toEqual({
[tableName]: "Table contains invalid columns.",
})
})

it("recognises enum columns as options", async () => {
const tableName = `orders_${generator
.guid()
.replaceAll("-", "")
.substring(0, 6)}`

await client.schema.createTable(tableName, table => {
table.increments("order_id").primary()
table.string("customer_name").notNullable()
table.enum("status", ["pending", "processing", "shipped"], {
useNative: true,
enumName: `${tableName}_status`,
expect(response.errors).toEqual({
[tableName]: "Table must have a primary key.",
})
})

const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
it("recognises when a table is using a reserved column name", async () => {
await client.schema.createTable(tableName, table => {
table.increments("_id").primary()
})

const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})

expect(response.errors).toEqual({
[tableName]: "Table contains invalid columns.",
})
})

const table = response.datasource.entities?.[tableName]
it("recognises enum columns as options", async () => {
const tableName = `orders_${generator
.guid()
.replaceAll("-", "")
.substring(0, 6)}`

expect(table).toBeDefined()
expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
await client.schema.createTable(tableName, table => {
table.increments("order_id").primary()
table.string("customer_name").notNullable()
table.enum("status", ["pending", "processing", "shipped"], {
useNative: true,
enumName: `${tableName}_status`,
})
})

const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})

const table = response.datasource.entities?.[tableName]

expect(table).toBeDefined()
expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
})
})
})

describe("Integration compatibility with postgres search_path", () => {
describe("check custom column types", () => {
beforeAll(async () => {
await client.schema.createTable("binaryTable", table => {
table.binary("id").primary()
table.string("column1")
table.integer("column2")
})
})

it("should handle binary columns", async () => {
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binaryTable"]
expect(table).toBeDefined()
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
id: "1111",
column1: "hello",
column2: 222,
})
expect(row._id).toBeDefined()
const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
expect(JSON.parse(decoded)[0]).toBe("1111")
})
})

describe("check fetching null/not null table", () => {
beforeAll(async () => {
await client.schema.createTable("nullableTable", table => {
table.increments("order_id").primary()
table.integer("order_number").notNullable()
})
})

it("should be able to change the table to allow nullable and refetch this", async () => {
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entities = response.datasource.entities
expect(entities).toBeDefined()
const nullableTable = entities?.["nullableTable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)

// need to perform these calls raw to the DB so that the external state of the DB differs from what Budibase
// is aware of - therefore we can try to fetch and make sure BB updates correctly
await client.schema.alterTable("nullableTable", table => {
table.setNullable("order_number")
})

const responseAfter = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullableTable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence
).toBeUndefined()
})
})

describe("money field 💰", () => {
const tableName = "moneytable"
let table: Table

beforeAll(async () => {
await client.raw(`
CREATE TABLE ${tableName} (
id serial PRIMARY KEY,
price money
)
`)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
table = response.datasource.entities![tableName]
})

it("should be able to import a money field", async () => {
expect(table).toBeDefined()
expect(table?.schema.price.type).toBe(FieldType.NUMBER)
})

it("should be able to search a money field", async () => {
await config.api.row.bulkImport(table._id!, {
rows: [{ price: 200 }, { price: 300 }],
})

const { rows } = await config.api.row.search(table._id!, {
query: {
equal: {
price: 200,
},
},
})
expect(rows).toHaveLength(1)
expect(rows[0].price).toBe("200.00")
})

it("should be able to update a money field", async () => {
let row = await config.api.row.save(table._id!, { price: 200 })
expect(row.price).toBe("200.00")

row = await config.api.row.save(table._id!, { ...row, price: 300 })
expect(row.price).toBe("300.00")

row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
expect(row.price).toBe("400.00")
})
})
}
)

datasourceDescribe(
{
name: "Integration compatibility with postgres search_path",
only: [DatabaseName.POSTGRES],
},
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
let schema1: string
let schema2: string

beforeEach(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
const rawDatasource = ds.rawDatasource!

schema1 = generator.guid().replaceAll("-", "")
schema2 = generator.guid().replaceAll("-", "")

const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
client = await knexClient(rawDatasource)

await client.schema.createSchema(schema1)

@@ -161,122 +286,5 @@ describe("postgres integrations", () => {
const schema = response.datasource.entities?.[repeated_table_name].schema
expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
})
})

describe("check custom column types", () => {
beforeAll(async () => {
await client.schema.createTable("binaryTable", table => {
table.binary("id").primary()
table.string("column1")
table.integer("column2")
})
})

it("should handle binary columns", async () => {
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binaryTable"]
expect(table).toBeDefined()
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
id: "1111",
column1: "hello",
column2: 222,
})
expect(row._id).toBeDefined()
const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
expect(JSON.parse(decoded)[0]).toBe("1111")
})
})

describe("check fetching null/not null table", () => {
beforeAll(async () => {
await client.schema.createTable("nullableTable", table => {
table.increments("order_id").primary()
table.integer("order_number").notNullable()
})
})

it("should be able to change the table to allow nullable and refetch this", async () => {
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entities = response.datasource.entities
expect(entities).toBeDefined()
const nullableTable = entities?.["nullableTable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)

// need to perform these calls raw to the DB so that the external state of the DB differs from what Budibase
// is aware of - therefore we can try to fetch and make sure BB updates correctly
await client.schema.alterTable("nullableTable", table => {
table.setNullable("order_number")
})

const responseAfter = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullableTable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence
).toBeUndefined()
})
})

describe("money field 💰", () => {
const tableName = "moneytable"
let table: Table

beforeAll(async () => {
await client.raw(`
CREATE TABLE ${tableName} (
id serial PRIMARY KEY,
price money
)
`)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
table = response.datasource.entities![tableName]
})

it("should be able to import a money field", async () => {
expect(table).toBeDefined()
expect(table?.schema.price.type).toBe(FieldType.NUMBER)
})

it("should be able to search a money field", async () => {
await config.api.row.bulkImport(table._id!, {
rows: [{ price: 200 }, { price: 300 }],
})

const { rows } = await config.api.row.search(table._id!, {
query: {
equal: {
price: 200,
},
},
})
expect(rows).toHaveLength(1)
expect(rows[0].price).toBe("200.00")
})

it("should be able to update a money field", async () => {
let row = await config.api.row.save(table._id!, { price: 200 })
expect(row.price).toBe("200.00")

row = await config.api.row.save(table._id!, { ...row, price: 300 })
expect(row.price).toBe("300.00")

row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
expect(row.price).toBe("400.00")
})
})
})
}
)

@@ -120,7 +120,7 @@ export async function getIntegration(integration: SourceName) {
}
}
}
throw new Error("No datasource implementation found.")
throw new Error(`No datasource implementation found called: "${integration}"`)
}

export default {

@ -7,8 +7,10 @@ import * as mssql from "./mssql"
|
|||
import * as mariadb from "./mariadb"
|
||||
import * as oracle from "./oracle"
|
||||
import { testContainerUtils } from "@budibase/backend-core/tests"
|
||||
import { Knex } from "knex"
|
||||
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
|
||||
|
||||
export type DatasourceProvider = () => Promise<Datasource>
|
||||
export type DatasourceProvider = () => Promise<Datasource | undefined>
|
||||
|
||||
export const { startContainer } = testContainerUtils
|
||||
|
||||
|
@ -19,6 +21,7 @@ export enum DatabaseName {
|
|||
SQL_SERVER = "mssql",
|
||||
MARIADB = "mariadb",
|
||||
ORACLE = "oracle",
|
||||
SQS = "sqs",
|
||||
}
|
||||
|
||||
const providers: Record<DatabaseName, DatasourceProvider> = {
|
||||
|
@ -28,30 +31,143 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
|
|||
[DatabaseName.SQL_SERVER]: mssql.getDatasource,
|
||||
[DatabaseName.MARIADB]: mariadb.getDatasource,
|
||||
[DatabaseName.ORACLE]: oracle.getDatasource,
|
||||
[DatabaseName.SQS]: async () => undefined,
|
||||
}
|
||||
|
||||
export function getDatasourceProviders(
|
||||
...sourceNames: DatabaseName[]
|
||||
): Promise<Datasource>[] {
|
||||
return sourceNames.map(sourceName => providers[sourceName]())
|
||||
export interface DatasourceDescribeOpts {
|
||||
name: string
|
||||
only?: DatabaseName[]
|
||||
exclude?: DatabaseName[]
|
||||
}
|
||||
|
||||
export function getDatasourceProvider(
|
||||
export interface DatasourceDescribeReturnPromise {
|
||||
rawDatasource: Datasource | undefined
|
||||
datasource: Datasource | undefined
|
||||
client: Knex | undefined
|
||||
}
|
||||
|
||||
export interface DatasourceDescribeReturn {
|
||||
name: DatabaseName
|
||||
config: TestConfiguration
|
||||
dsProvider: () => Promise<DatasourceDescribeReturnPromise>
|
||||
isInternal: boolean
|
||||
isExternal: boolean
|
||||
isSql: boolean
|
||||
isMySQL: boolean
|
||||
isPostgres: boolean
|
||||
isMongodb: boolean
|
||||
isMSSQL: boolean
|
||||
isOracle: boolean
|
||||
}
|
||||
|
||||
async function createDatasources(
|
||||
config: TestConfiguration,
|
||||
name: DatabaseName
|
||||
): Promise<DatasourceDescribeReturnPromise> {
|
||||
await config.init()
|
||||
|
||||
const rawDatasource = await getDatasource(name)
|
||||
|
||||
let datasource: Datasource | undefined
|
||||
if (rawDatasource) {
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
}
|
||||
|
||||
let client: Knex | undefined
|
||||
if (rawDatasource) {
|
||||
try {
|
||||
client = await knexClient(rawDatasource)
|
||||
} catch (e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
rawDatasource,
|
||||
datasource,
|
||||
client,
|
||||
}
|
||||
}
|
||||
|
||||
// Jest doesn't allow test files to exist with no tests in them. When we run
|
||||
// these tests in CI, we break them out by data source, and there are a bunch of
|
||||
// test files that only run for a subset of data sources, and for the rest of
|
||||
// them they will be empty test files. Defining a dummy test makes it so that
|
||||
// Jest doesn't error in this situation.
|
||||
function createDummyTest() {
|
||||
describe("no tests", () => {
|
||||
it("no tests", () => {
|
||||
// no tests
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export function datasourceDescribe(
|
||||
opts: DatasourceDescribeOpts,
|
||||
cb: (args: DatasourceDescribeReturn) => void
|
||||
) {
|
||||
if (process.env.DATASOURCE === "none") {
|
||||
createDummyTest()
|
||||
return
|
||||
}
|
||||
|
||||
const { name, only, exclude } = opts
|
||||
|
||||
if (only && exclude) {
|
||||
throw new Error("you can only supply one of 'only' or 'exclude'")
|
||||
}
|
||||
|
||||
let databases = Object.values(DatabaseName)
|
||||
if (only) {
|
||||
databases = only
|
||||
} else if (exclude) {
|
||||
databases = databases.filter(db => !exclude.includes(db))
|
||||
}
|
||||
|
||||
if (process.env.DATASOURCE) {
|
||||
databases = databases.filter(db => db === process.env.DATASOURCE)
|
||||
}
|
||||
|
||||
if (databases.length === 0) {
|
||||
createDummyTest()
|
||||
return
|
||||
}
|
||||
|
||||
describe.each(databases)(name, name => {
|
||||
const config = new TestConfiguration()
|
||||
|
||||
afterAll(() => {
|
||||
config.end()
|
||||
})
|
||||
|
||||
cb({
|
||||
name,
|
||||
config,
|
||||
dsProvider: () => createDatasources(config, name),
|
||||
isInternal: name === DatabaseName.SQS,
|
||||
isExternal: name !== DatabaseName.SQS,
|
||||
isSql: [
|
||||
DatabaseName.MARIADB,
|
||||
DatabaseName.MYSQL,
|
||||
DatabaseName.POSTGRES,
|
||||
DatabaseName.SQL_SERVER,
|
||||
DatabaseName.ORACLE,
|
||||
].includes(name),
|
||||
isMySQL: name === DatabaseName.MYSQL,
|
||||
isPostgres: name === DatabaseName.POSTGRES,
|
||||
isMongodb: name === DatabaseName.MONGODB,
|
||||
isMSSQL: name === DatabaseName.SQL_SERVER,
|
||||
isOracle: name === DatabaseName.ORACLE,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function getDatasource(
|
||||
sourceName: DatabaseName
|
||||
): DatasourceProvider {
|
||||
return providers[sourceName]
|
||||
}
|
||||
|
||||
export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
|
||||
): Promise<Datasource | undefined> {
|
||||
return providers[sourceName]()
|
||||
}
|
||||
|
||||
export async function getDatasources(
|
||||
...sourceNames: DatabaseName[]
|
||||
): Promise<Datasource[]> {
|
||||
return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
|
||||
}
|
||||
|
||||
export async function knexClient(ds: Datasource) {
|
||||
switch (ds.source) {
|
||||
case SourceName.POSTGRES: {
|
||||
|
|
|
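For orientation, a minimal sketch of how a test file consumes the datasourceDescribe helper above — the suite name and assertions here are illustrative placeholders rather than code from this commit:

import { Knex } from "knex"
import { Datasource } from "@budibase/types"
import {
  DatabaseName,
  datasourceDescribe,
} from "../integrations/tests/utils"

datasourceDescribe(
  // run against every datasource except MongoDB; "only" pins the list instead
  { name: "example suite", exclude: [DatabaseName.MONGODB] },
  ({ config, dsProvider, isSql }) => {
    let datasource: Datasource | undefined
    let client: Knex | undefined

    beforeAll(async () => {
      // dsProvider() initialises the TestConfiguration and creates the
      // datasource, leaving fields undefined where none exists (e.g. sqs)
      const ds = await dsProvider()
      datasource = ds.datasource
      client = ds.client
    })

    it("runs once per matching datasource", () => {
      // flags such as isSql/isInternal gate datasource-specific assertions
      expect(typeof isSql).toBe("boolean")
    })
  }
)

When the DATASOURCE environment variable is set, the helper narrows the describe.each list to that single database and falls back to createDummyTest when nothing matches, which keeps per-datasource CI shards from failing on empty test files.
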
@ -31,7 +31,7 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
new GenericContainer(MARIADB_IMAGE)
|
||||
.withExposedPorts(3306)
|
||||
.withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
|
||||
.withWaitStrategy(new MariaDBWaitStrategy())
|
||||
.withWaitStrategy(new MariaDBWaitStrategy().withStartupTimeout(20000))
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
.withWaitStrategy(
|
||||
Wait.forSuccessfulCommand(
|
||||
`mongosh --eval "db.version()"`
|
||||
).withStartupTimeout(10000)
|
||||
).withStartupTimeout(20000)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
.withWaitStrategy(
|
||||
Wait.forSuccessfulCommand(
|
||||
"/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
|
||||
)
|
||||
).withStartupTimeout(20000)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
new GenericContainer(MYSQL_IMAGE)
|
||||
.withExposedPorts(3306)
|
||||
.withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
|
||||
.withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
|
||||
.withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(20000))
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -23,7 +23,11 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
.withEnvironment({
|
||||
ORACLE_PASSWORD: password,
|
||||
})
|
||||
.withWaitStrategy(Wait.forLogMessage("DATABASE IS READY TO USE!"))
|
||||
.withWaitStrategy(
|
||||
Wait.forLogMessage("DATABASE IS READY TO USE!").withStartupTimeout(
|
||||
20000
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@ export async function getDatasource(): Promise<Datasource> {
|
|||
.withWaitStrategy(
|
||||
Wait.forSuccessfulCommand(
|
||||
"pg_isready -h localhost -p 5432"
|
||||
).withStartupTimeout(10000)
|
||||
).withStartupTimeout(20000)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
|
|
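All of these bumps follow the same testcontainers pattern: every wait strategy accepts a startup timeout in milliseconds. A generic sketch, with the image name, port, and log line as placeholders:

import { GenericContainer, Wait } from "testcontainers"

// any wait strategy can be capped with withStartupTimeout(ms); the container
// start fails if the condition is not met within that window
const container = await new GenericContainer("some/image:tag")
  .withExposedPorts(5432)
  .withWaitStrategy(
    Wait.forLogMessage("ready to accept connections").withStartupTimeout(20000)
  )
  .start()
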
@ -6,7 +6,13 @@ import * as api from "./api"
|
|||
import * as automations from "./automations"
|
||||
import { Thread } from "./threads"
|
||||
import * as redis from "./utilities/redis"
|
||||
import { events, logging, middleware, timers } from "@budibase/backend-core"
|
||||
import {
|
||||
events,
|
||||
logging,
|
||||
middleware,
|
||||
timers,
|
||||
env as coreEnv,
|
||||
} from "@budibase/backend-core"
|
||||
import destroyable from "server-destroy"
|
||||
import { userAgent } from "koa-useragent"
|
||||
|
||||
|
@ -37,6 +43,9 @@ export default function createKoaApp() {
|
|||
app.use(middleware.correlation)
|
||||
app.use(middleware.pino)
|
||||
app.use(middleware.ip)
|
||||
if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
|
||||
app.use(middleware.csp)
|
||||
}
|
||||
app.use(userAgent)
|
||||
|
||||
const server = http.createServer(app.callback())
|
||||
|
|
|
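The CSP middleware itself is unchanged; the new coreEnv.DISABLE_CONTENT_SECURITY_POLICY check simply lets a deployment opt out via the environment — presumably read by backend-core's env module like its other flags — with the default (unset) keeping the middleware enabled.
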
@@ -1,11 +1,8 @@
import {
EmptyFilterOption,
FeatureFlag,
LegacyFilter,
LogicalOperator,
Row,
RowSearchParams,
SearchFilterKey,
SearchFilters,
SearchResponse,
SortOrder,

@@ -19,7 +16,6 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { checkFilters, searchInputMapping } from "./search/utils"
import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"

@@ -104,44 +100,14 @@ export async function search(
}
viewQuery = checkFilters(table, viewQuery)

const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
const supportsLogicalOperators =
isExternalTableID(view.tableId) || sqsEnabled

if (!supportsLogicalOperators) {
// In the unlikely event that a Grouped Filter is in a non-SQS environment
// It needs to be ignored entirely
let queryFilters: LegacyFilter[] = Array.isArray(view.query)
? view.query
: []

const { filters } = dataFilters.splitFiltersArray(queryFilters)

// Extract existing fields
const existingFields = filters.map(filter =>
db.removeKeyNumbering(filter.field)
)

// Carry over filters for unused fields
Object.keys(options.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(options.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
viewQuery[operator]![field] = options.query[operator]![field]
}
})
})
options.query = viewQuery
} else {
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
conditions: [...conditions, options.query],
},
}
if (viewQuery.onEmptyFilter) {
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
conditions: [...conditions, options.query],
},
}
if (viewQuery.onEmptyFilter) {
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
}

@@ -170,12 +136,9 @@ export async function search(
if (isExternalTable) {
span?.addTags({ searchType: "external" })
result = await external.search(options, source)
} else if (await features.flags.isEnabled(FeatureFlag.SQS)) {
} else {
span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, source)
} else {
span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, source)
}

span.addTags({

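To make the simplified merge concrete, a sketch of the query shape it now always produces — the field names are invented for illustration:

// a view saved with { equal: { status: "open" } } combined with a caller's
// { equal: { owner: "me" } } becomes a single $and of both conditions:
const merged = {
  $and: {
    conditions: [
      { equal: { status: "open" } }, // the view's own filter
      { equal: { owner: "me" } }, // the incoming options.query
    ],
  },
}
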
@@ -1,3 +1,2 @@
export * as sqs from "./sqs"
export * as lucene from "./lucene"
export * from "./internal"

@@ -1,79 +0,0 @@
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { fullSearch, paginatedSearch } from "../utils"
import { InternalTables } from "../../../../../db/utils"
import {
Row,
RowSearchParams,
SearchResponse,
SortType,
Table,
User,
ViewV2,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import sdk from "../../../../"

export async function search(
options: RowSearchParams,
source: Table | ViewV2
): Promise<SearchResponse<Row>> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}

const { paginate, query } = options

const params: RowSearchParams = {
tableId: options.tableId,
viewId: options.viewId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
limit: options.limit,
bookmark: options.bookmark,
version: options.version,
disableEscaping: options.disableEscaping,
query: {},
}

if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
params.sortType =
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
}

let response
if (paginate) {
response = await paginatedSearch(query, params)
} else {
response = await fullSearch(query, params)
}

// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}

const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))

response.rows = await outputProcessing(source, response.rows, {
squash: true,
})
}

return response
}

@@ -7,211 +7,184 @@ import {
   Table,
 } from "@budibase/types"
 
 import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"
 
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
 } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"
 
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-describe.each([
-  ["lucene", undefined],
-  ["sqs", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("search sdk (%s)", (name, dsProvider) => {
-  const isSqs = name === "sqs"
-  const isLucene = name === "lucene"
-  const isInternal = isLucene || isSqs
-  const config = new TestConfiguration()
-
-  let envCleanup: (() => void) | undefined
-  let datasource: Datasource | undefined
-  let table: Table
-
-  beforeAll(async () => {
-    await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
-      config.init()
-    )
-
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: isSqs,
-    })
-
-    if (dsProvider) {
-      datasource = await config.createDatasource({
-        datasource: await dsProvider,
-      })
-    }
-  })
+datasourceDescribe(
+  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, dsProvider, isInternal }) => {
+    let datasource: Datasource | undefined
+    let table: Table
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource
+    })
 
     beforeEach(async () => {
       const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
         isInternal
           ? {
               name: "id",
               type: FieldType.AUTO,
               subtype: AutoFieldSubType.AUTO_ID,
               autocolumn: true,
             }
           : {
               name: "id",
               type: FieldType.NUMBER,
               autocolumn: true,
             }
 
       table = await config.api.table.save(
         tableForDatasource(datasource, {
           primary: ["id"],
           schema: {
             id: idFieldSchema,
             name: {
               name: "name",
               type: FieldType.STRING,
             },
             surname: {
               name: "surname",
               type: FieldType.STRING,
             },
             age: {
               name: "age",
               type: FieldType.NUMBER,
             },
             address: {
               name: "address",
               type: FieldType.STRING,
             },
           },
         })
       )
 
       for (let i = 0; i < 10; i++) {
         await config.api.row.save(table._id!, {
           name: generator.first(),
           surname: generator.last(),
           age: generator.age(),
           address: generator.address(),
         })
       }
     })
 
-  afterAll(async () => {
-    config.end()
-    if (envCleanup) {
-      envCleanup()
-    }
-  })
+    afterAll(async () => {
+      config.end()
+    })
 
     it("querying by fields will always return data attribute columns", async () => {
       await config.doInContext(config.appId, async () => {
         const { rows } = await search({
           tableId: table._id!,
           query: {},
           fields: ["name", "age"],
         })
 
         expect(rows).toHaveLength(10)
         for (const row of rows) {
           const keys = Object.keys(row)
           expect(keys).toContain("name")
           expect(keys).toContain("age")
           expect(keys).not.toContain("surname")
           expect(keys).not.toContain("address")
         }
       })
     })
 
     !isInternal &&
       it("will decode _id in oneOf query", async () => {
         await config.doInContext(config.appId, async () => {
           const result = await search({
             tableId: table._id!,
             query: {
               oneOf: {
                 _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
               },
             },
           })
 
           expect(result.rows).toHaveLength(3)
           expect(result.rows.map(row => row.id)).toEqual(
             expect.arrayContaining([1, 4, 8])
           )
         })
       })
 
     it("does not allow accessing hidden fields", async () => {
       await config.doInContext(config.appId, async () => {
         await config.api.table.save({
           ...table,
           schema: {
             ...table.schema,
             name: {
               ...table.schema.name,
               visible: true,
             },
             age: {
               ...table.schema.age,
               visible: false,
             },
           },
         })
         const result = await search({
           tableId: table._id!,
           query: {},
         })
         expect(result.rows).toHaveLength(10)
         for (const row of result.rows) {
           const keys = Object.keys(row)
           expect(keys).toContain("name")
           expect(keys).toContain("surname")
           expect(keys).toContain("address")
           expect(keys).not.toContain("age")
         }
       })
     })
 
     it("does not allow accessing hidden fields even if requested", async () => {
       await config.doInContext(config.appId, async () => {
         await config.api.table.save({
           ...table,
           schema: {
             ...table.schema,
             name: {
               ...table.schema.name,
               visible: true,
             },
             age: {
               ...table.schema.age,
               visible: false,
             },
           },
         })
         const result = await search({
           tableId: table._id!,
           query: {},
           fields: ["name", "age"],
         })
         expect(result.rows).toHaveLength(10)
         for (const row of result.rows) {
           const keys = Object.keys(row)
           expect(keys).toContain("name")
           expect(keys).not.toContain("age")
           expect(keys).not.toContain("surname")
           expect(keys).not.toContain("address")
         }
       })
     })
 
-  !isLucene &&
-    it.each([
+    it.each([
       [["id", "name", "age"], 3],
       [["name", "age"], 10],
 
@@ -243,4 +216,5 @@ describe.each([
         })
       })
     })
-})
+  }
+)
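Context for the rewrite above: datasourceDescribe replaces the hand-maintained describe.each table, so suites no longer enumerate every datasource (or the lucene/sqs variants) themselves. The helper lives in integrations/tests/utils and its real signature isn't shown in this diff; the sketch below is only an assumed shape — expand one spec body into a describe block per datasource and inject a provider plus an isInternal flag — not the actual implementation.

// Hypothetical sketch of a datasourceDescribe-style helper; all names
// and the datasource list are assumptions. Relies on Jest globals, as
// any *.spec.ts file does.
interface DescribeOpts {
  name: string
  exclude?: string[]
}

interface SpecContext {
  dsName: string
  isInternal: boolean
}

function datasourceDescribeSketch(
  opts: DescribeOpts,
  spec: (ctx: SpecContext) => void
) {
  const all = ["internal", "postgres", "mysql", "mssql", "mariadb", "mongodb"]
  for (const dsName of all) {
    // honour the exclude list, e.g. MONGODB in the suite above
    if (opts.exclude?.includes(dsName)) continue
    describe(opts.name.replace("%s", dsName), () => {
      spec({ dsName, isInternal: dsName === "internal" })
    })
  }
}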
@@ -1,4 +1,4 @@
-import { context, features } from "@budibase/backend-core"
+import { context } from "@budibase/backend-core"
 import { getTableParams } from "../../../db/utils"
 import {
   breakExternalTableId,
@@ -12,7 +12,6 @@ import {
   TableResponse,
   TableSourceType,
   TableViewsResponse,
-  FeatureFlag,
 } from "@budibase/types"
 import datasources from "../datasources"
 import sdk from "../../../sdk"
@@ -49,10 +48,7 @@ export async function processTable(table: Table): Promise<Table> {
     type: "table",
     sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
     sourceType: TableSourceType.INTERNAL,
+    sql: true,
   }
-  const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
-  if (sqsEnabled) {
-    processed.sql = true
-  }
   return processed
 }
@@ -0,0 +1,9 @@
+const { isDatasourceTest } = require(".")
+
+module.exports = paths => {
+  return {
+    filtered: paths
+      .filter(path => isDatasourceTest(path))
+      .map(path => ({ test: path })),
+  }
+}
@@ -0,0 +1,10 @@
+const fs = require("fs")
+
+function isDatasourceTest(path) {
+  const content = fs.readFileSync(path, "utf8")
+  return content.includes("datasourceDescribe(")
+}
+
+module.exports = {
+  isDatasourceTest,
+}
@@ -0,0 +1,9 @@
+const { isDatasourceTest } = require(".")
+
+module.exports = paths => {
+  return {
+    filtered: paths
+      .filter(path => !isDatasourceTest(path))
+      .map(path => ({ test: path })),
+  }
+}
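How these three modules are consumed: Jest's --filter option (see the workflow's `yarn test --filter ./src/tests/filters/datasource-tests.js` invocations) loads the given module, hands it every collected test path, and runs only the paths returned under `filtered`. A minimal TypeScript sketch of that contract — makeFilter and the path-based predicate are illustrative only; the real modules above decide by reading each file and checking for a datasourceDescribe( call.

// The shape Jest expects a --filter module to return.
type JestFilterResult = { filtered: Array<{ test: string }> }

// Hypothetical helper: build a filter function from a predicate.
const makeFilter =
  (keep: (path: string) => boolean) =>
  (paths: string[]): JestFilterResult => ({
    filtered: paths.filter(keep).map(test => ({ test })),
  })

// Example predicate (by path, purely for illustration):
const isDatasourceSpec = (p: string) => p.includes("datasource")
export const datasourceFilter = makeFilter(isDatasourceSpec)
export const nonDatasourceFilter = makeFilter(p => !isDatasourceSpec(p))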
@@ -3,7 +3,6 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
 import {
   cache,
   context,
-  features,
   HTTPError,
   objectStore,
   utils,

@@ -19,7 +18,6 @@ import {
   Table,
   User,
   ViewV2,
-  FeatureFlag,
 } from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {
@@ -423,45 +421,43 @@ export async function coreOutputProcessing(
 
   // remove null properties to match internal API
-  const isExternal = isExternalTableID(table._id!)
-  if (isExternal || (await features.flags.isEnabled(FeatureFlag.SQS))) {
-    for (const row of rows) {
-      for (const key of Object.keys(row)) {
-        if (row[key] === null) {
-          delete row[key]
-        } else if (row[key] && table.schema[key]?.type === FieldType.LINK) {
-          for (const link of row[key] || []) {
-            for (const linkKey of Object.keys(link)) {
-              if (link[linkKey] === null) {
-                delete link[linkKey]
-              }
-            }
-          }
-        }
-      }
-    }
-  }
+  for (const row of rows) {
+    for (const key of Object.keys(row)) {
+      if (row[key] === null) {
+        delete row[key]
+      } else if (row[key] && table.schema[key]?.type === FieldType.LINK) {
+        for (const link of row[key] || []) {
+          for (const linkKey of Object.keys(link)) {
+            if (link[linkKey] === null) {
+              delete link[linkKey]
+            }
+          }
+        }
+      }
+    }
+  }
 
   if (sdk.views.isView(source)) {
     // We ensure calculation fields are returned as numbers. During the
     // testing of this feature it was discovered that the COUNT operation
     // returns a string for MySQL, MariaDB, and Postgres. But given that all
     // calculation fields (except ones operating on BIGINTs) should be
     // numbers, we blanket make sure of that here.
     for (const [name, field] of Object.entries(
       helpers.views.calculationFields(source)
     )) {
       if ("field" in field) {
         const targetSchema = table.schema[field.field]
         // We don't convert BIGINT fields to floats because we could lose
         // precision.
         if (targetSchema.type === FieldType.BIGINT) {
           continue
         }
       }
 
       for (const row of rows) {
         if (typeof row[name] === "string") {
           row[name] = parseFloat(row[name])
         }
       }
     }
@@ -8,7 +8,7 @@ import {
 } from "@budibase/types"
 import { outputProcessing } from ".."
 import { generator, structures } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"
 
 import * as bbReferenceProcessor from "../bbReferenceProcessor"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"

@@ -21,7 +21,6 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
 
 describe("rowProcessor - outputProcessing", () => {
   const config = new TestConfiguration()
-  let cleanupFlags: () => void = () => {}
 
   beforeAll(async () => {
     await config.init()

@@ -33,11 +32,6 @@ describe("rowProcessor - outputProcessing", () => {
 
   beforeEach(() => {
     jest.resetAllMocks()
-    cleanupFlags = features.testutils.setFeatureFlags("*", { SQS: true })
-  })
-
-  afterEach(() => {
-    cleanupFlags()
   })
 
   const processOutputBBReferenceMock =
@@ -527,7 +527,12 @@ export function search<T extends Record<string, any>>(
 ): SearchResponse<T> {
   let result = runQuery(docs, query.query)
   if (query.sort) {
-    result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+    result = sort(
+      result,
+      query.sort,
+      query.sortOrder || SortOrder.ASCENDING,
+      query.sortType
+    )
   }
   const totalRows = result.length
   if (query.limit) {
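Threading query.sortType through to sort matters because plain string comparison mis-orders digit strings: "10" sorts before "9". A standalone illustration of the two orderings (this shows the underlying comparison problem, not the shared-core sort implementation itself):

// Comparing the same rows under string vs number semantics.
const rows = [{ age: "10" }, { age: "9" }, { age: "101" }]

// Lexicographic, what you get without a numeric sort type:
const byString = [...rows].sort((a, b) => a.age.localeCompare(b.age))
// -> "10", "101", "9"

// Numeric, what an explicit number sortType should produce:
const byNumber = [...rows].sort((a, b) => Number(a.age) - Number(b.age))
// -> "9", "10", "101"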
@@ -48,7 +48,7 @@ export function validate(
   cronExpression: string
 ): { valid: false; err: string[] } | { valid: true } {
   const result = cronValidate(cronExpression, {
-    preset: "npm-node-cron",
+    preset: "npm-cron-schedule",
     override: {
       useSeconds: false,
     },
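The preset tells cron-validate which cron dialect to accept; judging by the preset names, validation now tracks the five-field grammar of the cron-schedule npm package rather than node-cron's. A minimal usage sketch against cron-validate's documented API (result objects expose isValid()/getError()):

import cronValidate from "cron-validate"

// "At 08:00 on weekdays" — five fields, no seconds.
const result = cronValidate("0 8 * * 1-5", {
  preset: "npm-cron-schedule",
  override: { useSeconds: false },
})

if (result.isValid()) {
  // expression is safe to store and schedule
} else {
  console.error(result.getError()) // human-readable reasons
}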
@@ -12,7 +12,6 @@ import type PouchDB from "pouchdb-find"
 
 export enum SearchIndex {
-  ROWS = "rows",
   AUDIT = "audit",
   USER = "user",
 }
 
@@ -2,10 +2,9 @@ export enum FeatureFlag {
   PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
   PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
   AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
-  SQS = "SQS",
   AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
   DEFAULT_VALUES = "DEFAULT_VALUES",
   ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",
 
   BUDIBASE_AI = "BUDIBASE_AI",
 }
@@ -48,6 +48,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
   request: BBRequest<RequestBody>
   body: ResponseBody
   userAgent: UserAgentContext["userAgent"]
+  state: { nonce?: string }
 }
 
 /**

@@ -56,6 +57,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
 export interface UserCtx<RequestBody = any, ResponseBody = any>
   extends Ctx<RequestBody, ResponseBody> {
   user: ContextUser
+  state: { nonce?: string }
   roleId?: string
   eventEmitter?: ContextEmitter
   loginMethod?: LoginMethod
@@ -1,6 +1,6 @@
-import { Ctx, MaintenanceType, FeatureFlag } from "@budibase/types"
+import { Ctx, MaintenanceType } from "@budibase/types"
 import env from "../../../environment"
-import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
+import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
 import nodeFetch from "node-fetch"
 import { helpers } from "@budibase/shared-core"

@@ -35,10 +35,7 @@ async function isSqsAvailable() {
 }
 
 async function isSqsMissing() {
-  return (
-    (await features.flags.isEnabled(FeatureFlag.SQS)) &&
-    !(await isSqsAvailable())
-  )
+  return !(await isSqsAvailable())
 }
 
 export const fetch = async (ctx: Ctx) => {
@@ -1,5 +1,5 @@
 import { mocks, structures } from "@budibase/backend-core/tests"
-import { context, events, features } from "@budibase/backend-core"
+import { context, events } from "@budibase/backend-core"
 import { Event, IdentityType } from "@budibase/types"
 import { TestConfiguration } from "../../../../tests"

@@ -12,19 +12,14 @@ const BASE_IDENTITY = {
 const USER_AUDIT_LOG_COUNT = 3
 const APP_ID = "app_1"
 
-describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
+describe("/api/global/auditlogs (%s)", () => {
   const config = new TestConfiguration()
-  let envCleanup: (() => void) | undefined
 
   beforeAll(async () => {
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: method === "sql",
-    })
     await config.beforeAll()
   })
 
   afterAll(async () => {
-    envCleanup?.()
     await config.afterAll()
   })
 
@@ -56,6 +56,9 @@ app.use(koaSession(app))
 app.use(middleware.correlation)
 app.use(middleware.pino)
 app.use(middleware.ip)
+if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
+  app.use(middleware.csp)
+}
 app.use(userAgent)
 
 // authentication
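The two changes above are halves of one feature: Ctx gains state.nonce (earlier hunk) and the server registers middleware.csp unless DISABLE_CONTENT_SECURITY_POLICY is set. A CSP middleware of this kind typically mints a per-request nonce, stores it on ctx.state, and echoes it in the response header so inline scripts can opt in. The real middleware.csp lives in backend-core; the sketch below is a plausible shape only, not its actual code.

import crypto from "crypto"
import { Middleware } from "koa"

// Hypothetical CSP middleware: per-request nonce on ctx.state, echoed
// into the script-src directive. The directive list is illustrative only.
export const csp: Middleware = async (ctx, next) => {
  const nonce = crypto.randomBytes(16).toString("base64")
  ctx.state.nonce = nonce
  ctx.set(
    "Content-Security-Policy",
    `default-src 'self'; script-src 'self' 'nonce-${nonce}'`
  )
  await next()
}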