Merge branch 'master' of github.com:Budibase/budibase into security-updates

commit 52dc2b942f

@@ -147,7 +150,10 @@ jobs:
       fi

   test-server:
-    runs-on: budi-tubby-tornado-quad-core-300gb
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        datasource: [mssql, mysql, postgres, mongodb, mariadb, oracle, none]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
@@ -170,12 +173,19 @@ jobs:

      - name: Pull testcontainers images
        run: |
-          docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
-          docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
-          docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
-          docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
-          docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
-          docker pull budibase/oracle-database:23.2-slim-faststart &
+          if [ "${{ matrix.datasource }}" == "mssql" ]; then
+            docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mysql" ]; then
+            docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }}
+          elif [ "${{ matrix.datasource }}" == "postgres" ]; then
+            docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mongodb" ]; then
+            docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }}
+          elif [ "${{ matrix.datasource }}" == "mariadb" ]; then
+            docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }}
+          elif [ "${{ matrix.datasource }}" == "oracle" ]; then
+            docker pull budibase/oracle-database:23.2-slim-faststart
+          fi
          docker pull minio/minio &
          docker pull redis &
          docker pull testcontainers/ryuk:0.5.1 &
@@ -186,12 +196,24 @@ jobs:
      - run: yarn --frozen-lockfile

      - name: Test server
+        env:
+          DATASOURCE: ${{ matrix.datasource }}
        run: |
          if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            node scripts/run-affected.js --task=test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
-          else
-            yarn test --scope=@budibase/server
+            AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/server)
+            if [ -z "$AFFECTED" ]; then
+              echo "No affected tests to run"
+              exit 0
            fi
+          fi
+
+          FILTER="./src/tests/filters/datasource-tests.js"
+          if [ "${{ matrix.datasource }}" == "none" ]; then
+            FILTER="./src/tests/filters/non-datasource-tests.js"
+          fi
+
+          cd packages/server
+          yarn test --filter $FILTER --passWithNoTests

  check-pro-submodule:
    runs-on: ubuntu-latest

@@ -62,6 +62,7 @@ export default async function setup() {
      },
    ])
    .withLabels({ "com.budibase": "true" })
+    .withTmpFs({ "/data": "rw" })
    .withReuse()
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
@@ -72,6 +73,7 @@ export default async function setup() {
  const minio = new GenericContainer("minio/minio")
    .withExposedPorts(9000)
    .withCommand(["server", "/data"])
+    .withTmpFs({ "/data": "rw" })
    .withEnvironment({
      MINIO_ACCESS_KEY: "budibase",
      MINIO_SECRET_KEY: "budibase",
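
Reviewer note: the two `.withTmpFs()` calls above mount `/data` as an in-memory filesystem, so container writes never touch the CI host's disk and nothing persists between reuse cycles. A minimal sketch of the pattern with testcontainers-node (the image, port, and wait strategy here are illustrative, not the exact setup above):

```ts
import { GenericContainer, Wait } from "testcontainers"

async function main() {
  const container = await new GenericContainer("minio/minio")
    .withExposedPorts(9000)
    .withCommand(["server", "/data"])
    .withTmpFs({ "/data": "rw" }) // /data lives in RAM: fast, and gone on stop
    .withWaitStrategy(Wait.forListeningPorts())
    .start()

  console.log(`container mapped to localhost:${container.getMappedPort(9000)}`)
  await container.stop()
}

main()
```
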
@@ -50,19 +50,6 @@ http {
        ignore_invalid_headers off;
        proxy_buffering off;

-        set $csp_default "default-src 'self'";
-        set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
-        set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
-        set $csp_object "object-src 'none'";
-        set $csp_base_uri "base-uri 'self'";
-        set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
-        set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
-        set $csp_frame "frame-src 'self' https:";
-        set $csp_img "img-src http: https: data: blob:";
-        set $csp_manifest "manifest-src 'self'";
-        set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
-        set $csp_worker "worker-src blob:";
-
        error_page 502 503 504 /error.html;
        location = /error.html {
            root /usr/share/nginx/html;

@@ -73,7 +60,6 @@ http {
        add_header X-Frame-Options SAMEORIGIN always;
        add_header X-Content-Type-Options nosniff always;
        add_header X-XSS-Protection "1; mode=block" always;
-        add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

        # upstreams

@@ -120,6 +106,12 @@ http {

        location ~ ^/api/(system|admin|global)/ {
            proxy_set_header Host $host;
+
+            # Enable buffering for potentially large OIDC configs
+            proxy_buffering on;
+            proxy_buffer_size 16k;
+            proxy_buffers 4 32k;
+
            proxy_pass $worker;
        }

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.1",
+  "version": "3.2.5",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -10,7 +10,6 @@ import {
  DatabaseQueryOpts,
  DBError,
  Document,
-  FeatureFlag,
  isDocument,
  RowResponse,
  RowValue,

@@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import { sqlLog } from "../../sql/utils"
-import { flags } from "../../features"

const DATABASE_NOT_FOUND = "Database does not exist."

@@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
  }

  async destroy() {
-    if (
-      (await flags.isEnabled(FeatureFlag.SQS)) &&
-      (await this.exists(SQLITE_DESIGN_DOC_ID))
-    ) {
+    if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
      // delete the design document, then run the cleanup operation
      const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
      // remove all tables - save the definition then trigger a cleanup

@@ -1,6 +1,7 @@
import { existsSync, readFileSync } from "fs"
import { ServiceType } from "@budibase/types"
import { cloneDeep } from "lodash"
+import { createSecretKey } from "crypto"

function isTest() {
  return isJest()

@@ -126,8 +127,12 @@ const environment = {
  },
  BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
  JS_BCRYPT: process.env.JS_BCRYPT,
-  JWT_SECRET: process.env.JWT_SECRET,
-  JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,
+  JWT_SECRET: process.env.JWT_SECRET
+    ? createSecretKey(Buffer.from(process.env.JWT_SECRET))
+    : undefined,
+  JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK
+    ? createSecretKey(Buffer.from(process.env.JWT_SECRET_FALLBACK))
+    : undefined,
  ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
  API_ENCRYPTION_KEY: getAPIEncryptionKey(),
  COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",

@@ -225,6 +230,7 @@ const environment = {
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
  MIN_VERSION_WITHOUT_POWER_ROLE:
    process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
+  DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
}

export function setEnv(newEnvVars: Partial<typeof environment>): () => void {

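
Reviewer note: `JWT_SECRET` changes type here from a plain string to a Node `KeyObject`, so the raw secret no longer sits on the env object as text. Consumers that need the original string back must export it explicitly, which is what the updated encryption test further down relies on. A small sketch of the round trip (the secret value is illustrative):

```ts
import { createSecretKey } from "crypto"

// Wrap the raw secret in a KeyObject, as the environment module now does.
const secret = "super-secret-value" // illustrative value only
const key = createSecretKey(Buffer.from(secret))

// A KeyObject won't leak the secret accidentally (e.g. via JSON.stringify),
// but the bytes can be recovered deliberately when an API needs the string:
const roundTripped = key.export().toString()
console.log(roundTripped === secret) // true
```
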
@@ -269,8 +269,6 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
export const flags = new FlagSet({
  [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
  [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
-  [FeatureFlag.SQS]: Flag.boolean(true),
-  [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true),
  [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
  [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
})

@@ -0,0 +1,113 @@
+import crypto from "crypto"
+
+const CSP_DIRECTIVES = {
+  "default-src": ["'self'"],
+  "script-src": [
+    "'self'",
+    "'unsafe-eval'",
+    "https://*.budibase.net",
+    "https://cdn.budi.live",
+    "https://js.intercomcdn.com",
+    "https://widget.intercom.io",
+    "https://d2l5prqdbvm3op.cloudfront.net",
+    "https://us-assets.i.posthog.com",
+  ],
+  "style-src": [
+    "'self'",
+    "'unsafe-inline'",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.googleapis.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+  ],
+  "object-src": ["'none'"],
+  "base-uri": ["'self'"],
+  "connect-src": [
+    "'self'",
+    "https://*.budibase.app",
+    "https://*.budibaseqa.app",
+    "https://*.budibase.net",
+    "https://api-iam.intercom.io",
+    "https://api-ping.intercom.io",
+    "https://app.posthog.com",
+    "https://us.i.posthog.com",
+    "wss://nexus-websocket-a.intercom.io",
+    "wss://nexus-websocket-b.intercom.io",
+    "https://nexus-websocket-a.intercom.io",
+    "https://nexus-websocket-b.intercom.io",
+    "https://uploads.intercomcdn.com",
+    "https://uploads.intercomusercontent.com",
+    "https://*.amazonaws.com",
+    "https://*.s3.amazonaws.com",
+    "https://*.s3.us-east-2.amazonaws.com",
+    "https://*.s3.us-east-1.amazonaws.com",
+    "https://*.s3.us-west-1.amazonaws.com",
+    "https://*.s3.us-west-2.amazonaws.com",
+    "https://*.s3.af-south-1.amazonaws.com",
+    "https://*.s3.ap-east-1.amazonaws.com",
+    "https://*.s3.ap-south-1.amazonaws.com",
+    "https://*.s3.ap-northeast-2.amazonaws.com",
+    "https://*.s3.ap-southeast-1.amazonaws.com",
+    "https://*.s3.ap-southeast-2.amazonaws.com",
+    "https://*.s3.ap-northeast-1.amazonaws.com",
+    "https://*.s3.ca-central-1.amazonaws.com",
+    "https://*.s3.cn-north-1.amazonaws.com",
+    "https://*.s3.cn-northwest-1.amazonaws.com",
+    "https://*.s3.eu-central-1.amazonaws.com",
+    "https://*.s3.eu-west-1.amazonaws.com",
+    "https://*.s3.eu-west-2.amazonaws.com",
+    "https://*.s3.eu-south-1.amazonaws.com",
+    "https://*.s3.eu-west-3.amazonaws.com",
+    "https://*.s3.eu-north-1.amazonaws.com",
+    "https://*.s3.sa-east-1.amazonaws.com",
+    "https://*.s3.me-south-1.amazonaws.com",
+    "https://*.s3.us-gov-east-1.amazonaws.com",
+    "https://*.s3.us-gov-west-1.amazonaws.com",
+    "https://api.github.com",
+  ],
+  "font-src": [
+    "'self'",
+    "data:",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.gstatic.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+    "https://js.intercomcdn.com",
+    "https://fonts.intercomcdn.com",
+  ],
+  "frame-src": ["'self'", "https:"],
+  "img-src": ["http:", "https:", "data:", "blob:"],
+  "manifest-src": ["'self'"],
+  "media-src": [
+    "'self'",
+    "https://js.intercomcdn.com",
+    "https://cdn.budi.live",
+  ],
+  "worker-src": ["blob:"],
+}
+
+export async function contentSecurityPolicy(ctx: any, next: any) {
+  try {
+    const nonce = crypto.randomBytes(16).toString("base64")
+
+    const directives = { ...CSP_DIRECTIVES }
+    directives["script-src"] = [
+      ...CSP_DIRECTIVES["script-src"],
+      `'nonce-${nonce}'`,
+    ]
+
+    ctx.state.nonce = nonce
+
+    const cspHeader = Object.entries(directives)
+      .map(([key, sources]) => `${key} ${sources.join(" ")}`)
+      .join("; ")
+    ctx.set("Content-Security-Policy", cspHeader)
+    await next()
+  } catch (err: any) {
+    console.error(
+      `Error occurred in Content-Security-Policy middleware: ${err}`
+    )
+  }
+}
+
+export default contentSecurityPolicy
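
Reviewer note: because the nonce is generated per request, the header value changes on every response, and anything inline must echo `ctx.state.nonce` to be allowed to run. A hedged sketch of how this middleware composes in a Koa app (the wiring below is illustrative; the real registration happens via the middleware index in the next hunk):

```ts
import Koa from "koa"
import contentSecurityPolicy from "./contentSecurityPolicy"

const app = new Koa()
app.use(contentSecurityPolicy) // sets the CSP header and ctx.state.nonce

// Downstream handlers can stamp the nonce onto inline scripts they render.
app.use(async ctx => {
  const nonce = ctx.state.nonce
  ctx.body = `<script nonce="${nonce}">window.INIT_TIME = Date.now()</script>`
})

app.listen(3000)
```
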
@@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
export { default as correlation } from "../logging/correlation/middleware"
export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
+export { default as csp } from "./contentSecurityPolicy"
export * as joiValidator from "./joi-validator"
export { default as ip } from "./ip"

@ -0,0 +1,75 @@
|
||||||
|
import crypto from "crypto"
|
||||||
|
import contentSecurityPolicy from "../contentSecurityPolicy"
|
||||||
|
|
||||||
|
jest.mock("crypto", () => ({
|
||||||
|
randomBytes: jest.fn(),
|
||||||
|
randomUUID: jest.fn(),
|
||||||
|
}))
|
||||||
|
|
||||||
|
describe("contentSecurityPolicy middleware", () => {
|
||||||
|
let ctx: any
|
||||||
|
let next: any
|
||||||
|
const mockNonce = "mocked/nonce"
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
ctx = {
|
||||||
|
state: {},
|
||||||
|
set: jest.fn(),
|
||||||
|
}
|
||||||
|
next = jest.fn()
|
||||||
|
// @ts-ignore
|
||||||
|
crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
jest.clearAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate a nonce and set it in the script-src directive", async () => {
|
||||||
|
await contentSecurityPolicy(ctx, next)
|
||||||
|
|
||||||
|
expect(ctx.state.nonce).toBe(mockNonce)
|
||||||
|
expect(ctx.set).toHaveBeenCalledWith(
|
||||||
|
"Content-Security-Policy",
|
||||||
|
expect.stringContaining(
|
||||||
|
`script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
|
||||||
|
)
|
||||||
|
)
|
||||||
|
expect(next).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all CSP directives in the header", async () => {
|
||||||
|
await contentSecurityPolicy(ctx, next)
|
||||||
|
|
||||||
|
const cspHeader = ctx.set.mock.calls[0][1]
|
||||||
|
expect(cspHeader).toContain("default-src 'self'")
|
||||||
|
expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
|
||||||
|
expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
|
||||||
|
expect(cspHeader).toContain("object-src 'none'")
|
||||||
|
expect(cspHeader).toContain("base-uri 'self'")
|
||||||
|
expect(cspHeader).toContain("connect-src 'self'")
|
||||||
|
expect(cspHeader).toContain("font-src 'self'")
|
||||||
|
expect(cspHeader).toContain("frame-src 'self'")
|
||||||
|
expect(cspHeader).toContain("img-src http: https: data: blob:")
|
||||||
|
expect(cspHeader).toContain("manifest-src 'self'")
|
||||||
|
expect(cspHeader).toContain("media-src 'self'")
|
||||||
|
expect(cspHeader).toContain("worker-src blob:")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle errors and log an error message", async () => {
|
||||||
|
const consoleSpy = jest.spyOn(console, "error").mockImplementation()
|
||||||
|
const error = new Error("Test error")
|
||||||
|
// @ts-ignore
|
||||||
|
crypto.randomBytes.mockImplementation(() => {
|
||||||
|
throw error
|
||||||
|
})
|
||||||
|
|
||||||
|
await contentSecurityPolicy(ctx, next)
|
||||||
|
|
||||||
|
expect(consoleSpy).toHaveBeenCalledWith(
|
||||||
|
`Error occurred in Content-Security-Policy middleware: ${error}`
|
||||||
|
)
|
||||||
|
expect(next).not.toHaveBeenCalled()
|
||||||
|
consoleSpy.mockRestore()
|
||||||
|
})
|
||||||
|
})
|
|
@@ -4,7 +4,7 @@ import env from "../../environment"
describe("encryption", () => {
  it("should throw an error if API encryption key is not set", () => {
    const jwt = getSecret(SecretOption.API)
-    expect(jwt).toBe(env.JWT_SECRET)
+    expect(jwt).toBe(env.JWT_SECRET?.export().toString())
  })

  it("should throw an error if encryption key is not set", () => {

@@ -81,6 +81,7 @@
    "@spectrum-css/typography": "3.0.1",
    "@spectrum-css/underlay": "2.0.9",
    "@spectrum-css/vars": "3.0.1",
+    "atrament": "^4.3.0",
    "dayjs": "^1.10.8",
    "easymde": "^2.16.1",
    "svelte-dnd-action": "^0.9.8",

@@ -8,6 +8,7 @@
  import Link from "../../Link/Link.svelte"
  import Tag from "../../Tags/Tag.svelte"
  import Tags from "../../Tags/Tags.svelte"
+  import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"

  const BYTES_IN_KB = 1000
  const BYTES_IN_MB = 1000000

@@ -39,12 +40,14 @@
    "jfif",
    "webp",
  ]

  const fieldId = id || uuid()

  let selectedImageIdx = 0
  let fileDragged = false
  let selectedUrl
  let fileInput
+  let loading = false

  $: selectedImage = value?.[selectedImageIdx] ?? null
  $: fileCount = value?.length ?? 0
  $: isImage =

@@ -86,10 +89,15 @@
    }

    if (processFiles) {
+      loading = true
+      try {
        const processedFiles = await processFiles(fileList)
        const newValue = [...value, ...processedFiles]
        dispatch("change", newValue)
        selectedImageIdx = newValue.length - 1
+      } finally {
+        loading = false
+      }
    } else {
      dispatch("change", fileList)
    }

@@ -227,7 +235,7 @@
  {#if showDropzone}
    <div
      class="spectrum-Dropzone"
-      class:disabled
+      class:disabled={disabled || loading}
      role="region"
      tabindex="0"
      on:dragover={handleDragOver}

@@ -241,7 +249,7 @@
        id={fieldId}
        {disabled}
        type="file"
-        multiple
+        multiple={maximum !== 1}
        accept={extensions}
        bind:this={fileInput}
        on:change={handleFile}

@@ -339,6 +347,12 @@
          {/if}
        {/if}
      </div>
+
+      {#if loading}
+        <div class="loading">
+          <ProgressCircle size="M" />
+        </div>
+      {/if}
    </div>
  {/if}
</div>

@@ -464,6 +478,7 @@

  .spectrum-Dropzone {
    height: 220px;
+    position: relative;
  }
  .compact .spectrum-Dropzone {
    height: 40px;

@@ -488,4 +503,14 @@
  .tag {
    margin-top: 8px;
  }
+
+  .loading {
+    position: absolute;
+    display: grid;
+    place-items: center;
+    height: 100%;
+    width: 100%;
+    top: 0;
+    left: 0;
+  }
</style>
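
Reviewer note: the `try`/`finally` around `processFiles` guarantees the spinner clears even when processing throws, and the `disabled || loading` binding keeps further drops out while an upload is in flight. The same guard pattern, reduced to its core (function and parameter names are illustrative):

```ts
let loading = false

async function guardedUpload(
  files: File[],
  process: (f: File[]) => Promise<unknown>
) {
  if (loading) return // ignore re-entrant calls while busy
  loading = true
  try {
    return await process(files)
  } finally {
    loading = false // always clears, even if process() rejects
  }
}
```
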
@@ -1,4 +1,5 @@
<script>
+  import { tick } from "svelte"
  import {
    ModalContent,
    TextArea,

@@ -8,7 +9,6 @@
  import { automationStore, selectedAutomation } from "stores/builder"
  import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
  import { cloneDeep } from "lodash/fp"
-  import { memo } from "@budibase/frontend-core"
  import { AutomationEventType } from "@budibase/types"

  let failedParse = null

@@ -63,8 +63,7 @@
    return true
  }

-  const memoTestData = memo(parseTestData($selectedAutomation.data.testData))
-  $: memoTestData.set(parseTestData($selectedAutomation.data.testData))
+  $: testData = testData || parseTestData($selectedAutomation.data.testData)

  $: {
    // clone the trigger so we're not mutating the reference

@@ -83,7 +82,7 @@
  $: isError =
    !isTriggerValid(trigger) ||
    !(trigger.schema.outputs.required || []).every(
-      required => $memoTestData?.[required] || required !== "row"
+      required => testData?.[required] || required !== "row"
    )

  function parseTestJSON(e) {

@@ -110,11 +109,10 @@
  }

  const testAutomation = async () => {
+    // Ensure testData reactiveness is processed
+    await tick()
    try {
-      await automationStore.actions.test(
-        $selectedAutomation.data,
-        $memoTestData
-      )
+      await automationStore.actions.test($selectedAutomation.data, testData)
      $automationStore.showTestPanel = true
    } catch (error) {
      notifications.error(error)

@@ -152,7 +150,7 @@
  {#if selectedValues}
    <div class="tab-content-padding">
      <AutomationBlockSetup
-        testData={$memoTestData}
+        bind:testData
        {schemaProperties}
        isTestModal
        block={trigger}

@@ -503,7 +503,15 @@
      row: { "Active": true, "Order Id" : 14, ... }
    })
  */
-  const onChange = Utils.sequential(async update => {
+  const onChange = async update => {
+    if (isTestModal) {
+      testData = update
+    }
+
+    updateAutomation(update)
+  }
+
+  const updateAutomation = Utils.sequential(async update => {
    const request = cloneDeep(update)
    // Process app trigger updates
    if (isTrigger && !isTestModal) {
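
Reviewer note: splitting `onChange` apart matters for the test modal: the local `testData` assignment now happens synchronously on every change, while only the network save is funnelled through `Utils.sequential`, which queues calls so saves cannot interleave. A sketch of what a `sequential` wrapper of this kind does (this is an assumed shape for illustration, not the actual `Utils.sequential` source):

```ts
// Assumed shape: run an async fn's invocations one at a time, in call order.
function sequential<T extends unknown[]>(fn: (...args: T) => Promise<void>) {
  let queue: Promise<unknown> = Promise.resolve()
  return (...args: T) => {
    const run = queue.then(() => fn(...args))
    queue = run.catch(() => {}) // keep the chain alive after a failure
    return run
  }
}

const save = sequential(async (update: object) => {
  // e.g. persist the update over the network (illustrative)
})
save({ step: 1 })
save({ step: 2 }) // starts only after the first save settles
```
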
@@ -11,7 +11,6 @@
  export let disabledPermissions = []
  export let columns
  export let fromRelationshipField
-  export let canSetRelationshipSchemas

  const { datasource, dispatch } = getContext("grid")

@@ -129,6 +128,8 @@
    }
  })

+  $: hasLinkColumns = columns.some(c => c.schema.type === FieldType.LINK)
+
  async function toggleColumn(column, permission) {
    const visible = permission !== FieldPermissions.HIDDEN
    const readonly = permission === FieldPermissions.READONLY

@@ -184,7 +185,7 @@
        value={columnToPermissionOptions(column)}
        options={column.options}
      />
-      {#if canSetRelationshipSchemas && column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
+      {#if column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
        <div class="relationship-columns">
          <ActionButton
            on:click={e => {

@@ -203,7 +204,7 @@
    </div>
  </div>

-{#if canSetRelationshipSchemas}
+{#if hasLinkColumns}
  <Popover
    on:close={() => (relationshipFieldName = null)}
    open={relationshipFieldName}

@@ -10,8 +10,6 @@
  import { getContext } from "svelte"
  import { ActionButton } from "@budibase/bbui"
  import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
-  import { isEnabled } from "helpers/featureFlags"
-  import { FeatureFlag } from "@budibase/types"
  import DetailPopover from "components/common/DetailPopover.svelte"

  const { tableColumns, datasource } = getContext("grid")

@@ -46,9 +44,5 @@
    {text}
  </ActionButton>
</svelte:fragment>
-<ColumnsSettingContent
-  columns={$tableColumns}
-  canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
-  {permissions}
-/>
+<ColumnsSettingContent columns={$tableColumns} {permissions} />
</DetailPopover>

@@ -84,8 +84,8 @@
  on:mouseleave
  on:click={onClick}
  on:contextmenu
-  ondragover="return false"
-  ondragenter="return false"
+  on:dragover={e => e.preventDefault()}
+  on:dragenter={e => e.preventDefault()}
  {id}
  {style}
  {draggable}

@@ -68,8 +68,8 @@
  on:scroll
  bind:this={scrollRef}
  on:drop={onDrop}
-  ondragover="return false"
-  ondragenter="return false"
+  on:dragover={e => e.preventDefault()}
+  on:dragenter={e => e.preventDefault()}
>
  <slot />
</div>
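
Reviewer note: the old string attributes `ondragover="return false"` bypass Svelte's event system; the `on:dragover` replacements are the idiomatic form. The `preventDefault()` is load-bearing: under the HTML drag-and-drop model, a drop target must cancel `dragover` (and `dragenter`) or the browser never fires `drop`. The minimal DOM equivalent (the `#drop-zone` selector is illustrative):

```ts
const target = document.querySelector<HTMLDivElement>("#drop-zone")!

// Cancelling dragover/dragenter marks the element as a valid drop target.
target.addEventListener("dragover", e => e.preventDefault())
target.addEventListener("dragenter", e => e.preventDefault())
target.addEventListener("drop", e => {
  e.preventDefault()
  console.log(e.dataTransfer?.files) // files only arrive because of the above
})
```
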
@@ -33,8 +33,7 @@
    "sanitize-html": "^2.13.0",
    "screenfull": "^6.0.1",
    "shortid": "^2.2.15",
-    "svelte-spa-router": "^4.0.1",
-    "atrament": "^4.3.0"
+    "svelte-spa-router": "^4.0.1"
  },
  "devDependencies": {
    "@rollup/plugin-alias": "^5.1.0",

@@ -53,6 +53,7 @@
  on:close={close}
  maxHeight={null}
  resizable
+  minWidth={360}
>
  <div class="content">
    <slot />

@@ -80,7 +81,6 @@
  }

  .content {
-    width: 300px;
    padding: 20px;
    display: flex;
    flex-direction: column;

@@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
    // Nested providers should already have exposed their own schema
    return {
      schema: datasource?.value?.schema,
+      primaryDisplay: datasource?.value?.primaryDisplay,
    }
  }
}

@@ -1 +1 @@
-Subproject commit 80770215c6159e4d47f3529fd02e74bc8ad07543
+Subproject commit bfeece324a03a3a5f25137bf3f8c66d5ed6103d8
@@ -1,12 +1,12 @@
#!/bin/bash
-set -e
+set -ex

if [[ -n $CI ]]
then
  export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
-  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
+  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail "$@"
else
  # --maxWorkers performs better in development
  export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
-  jest --coverage --maxWorkers=2 --forceExit $@
+  jest --coverage --maxWorkers=2 --forceExit "$@"
fi
@@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
        ? objectStore.getGlobalFileUrl("settings", "logoUrl")
        : "",
      appMigrating: needMigrations,
+      nonce: ctx.state.nonce,
    })
    const appHbs = loadHandlebarsFile(appHbsPath)
    ctx.body = await processString(appHbs, {

@@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) {
      css: `:root{${themeVariables}} ${css.code}`,
      appId,
      embedded: bbHeaderEmbed,
+      nonce: ctx.state.nonce,
    })
  } else {
    // just return the app info for jest to assert on

@@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
    const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
    ctx.body = await processString(previewHbs, {
      clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
+      nonce: ctx.state.nonce,
    })
  } else {
    // just return the app info for jest to assert on
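
Reviewer note: `ctx.state.nonce` is set by the new `csp` middleware earlier in the chain, so these handlers just thread it into the template context; the `{{ nonce }}` placeholders in the templates below pick it up. A reduced sketch of the flow, assuming `processString` renders a handlebars-style template against a context object as the calls above suggest (the template text is illustrative):

```ts
import { processString } from "@budibase/string-templates"

async function render(nonce: string): Promise<string> {
  // The nonce travels from middleware -> handler -> template context.
  const template = `<script nonce="{{ nonce }}">window.loadBudibase()</script>`
  return await processString(template, { nonce })
}
```
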
@@ -16,6 +16,8 @@
  export let hideDevTools
  export let sideNav
  export let hideFooter
+
+  export let nonce
</script>

<svelte:head>

@@ -118,11 +120,11 @@
          <p />
        {/if}
      </div>
-      <script type="application/javascript">
+      <script type="application/javascript" {nonce}>
        window.INIT_TIME = Date.now()
      </script>
      {#if appMigrating}
-        <script type="application/javascript">
+        <script type="application/javascript" {nonce}>
          window.MIGRATING_APP = true
        </script>
      {/if}

@@ -135,7 +137,7 @@
        <script type="application/javascript" src={plugin.jsUrl}></script>
      {/each}
    {/if}
-    <script type="application/javascript">
+    <script type="application/javascript" {nonce}>
      if (window.loadBudibase) {
        window.loadBudibase()
      } else {

@@ -1,5 +1,5 @@
<html>
-  <script>
+  <script nonce="{{ nonce }}">
    document.fonts.ready.then(() => {
      window.parent.postMessage({ type: "docLoaded" });
    })

@@ -9,7 +9,7 @@
    <style>{{{css}}}</style>
  </head>

-  <script>
+  <script nonce="{{ nonce }}">
    window["##BUDIBASE_APP_ID##"] = "{{appId}}"
    window["##BUDIBASE_APP_EMBEDDED##"] = "{{embedded}}"
  </script>

@@ -31,7 +31,7 @@
    }
  </style>
  <script src='{{ clientLibPath }}'></script>
-  <script>
+  <script nonce="{{ nonce }}">
    function receiveMessage(event) {
      if (!event.data) {
        return

@@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
-import { context, events, features, HTTPError } from "@budibase/backend-core"
+import { context, events, HTTPError } from "@budibase/backend-core"
import {
  AutoFieldSubType,
  Database,
  Datasource,
-  FeatureFlag,
  FieldSchema,
  FieldType,
  NumberFieldMetadata,

@@ -336,9 +335,8 @@ class TableSaveFunctions {
      importRows: this.importRows,
      userId: this.userId,
    })
-    if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-      await sdk.tables.sqs.addTable(table)
-    }
+    await sdk.tables.sqs.addTable(table)
    return table
  }

@@ -530,9 +528,8 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
  if (rows) {
    await AttachmentCleanup.tableDelete(table, rows)
  }
-  if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-    await sdk.tables.sqs.removeTable(table)
-  }
+  await sdk.tables.sqs.removeTable(table)
}

const _TableSaveFunctions = TableSaveFunctions

@@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
-import { events, utils, context, features } from "@budibase/backend-core"
+import { events, utils, context } from "@budibase/backend-core"
import env from "../../../environment"
import { type App, BuiltinPermissionID } from "@budibase/types"
import tk from "timekeeper"

@@ -355,21 +355,6 @@ describe("/applications", () => {
      expect(events.app.deleted).toHaveBeenCalledTimes(1)
      expect(events.app.unpublished).toHaveBeenCalledTimes(1)
    })
-
-    it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
-      const prodAppId = app.appId.replace("_dev", "")
-      nock("http://localhost:10000")
-        .delete(`/api/global/roles/${prodAppId}`)
-        .reply(200, {})
-
-      await features.testutils.withFeatureFlags(
-        "*",
-        { SQS: true },
-        async () => {
-          await config.api.application.delete(app.appId)
-        }
-      )
-    })
  })

  describe("POST /api/applications/:appId/duplicate", () => {

@@ -19,8 +19,7 @@ import {
} from "@budibase/types"
import {
  DatabaseName,
-  getDatasource,
-  knexClient,
+  datasourceDescribe,
} from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"

@@ -69,7 +68,7 @@ describe("/datasources", () => {
      {
        status: 500,
        body: {
-          message: "No datasource implementation found.",
+          message: 'No datasource implementation found called: "invalid"',
        },
      }
    )

@@ -163,21 +162,23 @@ describe("/datasources", () => {
      })
    })
  })
+})

-describe.each([
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
-])("%s", (_, dsProvider) => {
+datasourceDescribe(
+  { name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
    let rawDatasource: Datasource
    let client: Knex

    beforeEach(async () => {
-      rawDatasource = await dsProvider
-      datasource = await config.api.datasource.create(rawDatasource)
-      client = await knexClient(rawDatasource)
+      const ds = await dsProvider()
+      rawDatasource = ds.rawDatasource!
+      datasource = ds.datasource!
+      client = ds.client!
+
+      jest.clearAllMocks()
+      nock.cleanAll()
    })

    describe("get", () => {

@@ -491,5 +492,5 @@ describe("/datasources", () => {
        )
      })
    })
-  })
-})
+  }
+)
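
Reviewer note: the `datasourceDescribe` helper replaces the hand-rolled `describe.each` tables in these specs; it owns datasource, raw-config, and knex-client setup and hands them to the suite body, and the `exclude` list pairs well with the new per-datasource CI matrix, since a suite can be skipped wholesale for a datasource. A rough sketch of the shape implied by the call sites (the signature and types below are inferred from usage, not the helper's actual source):

```ts
import { Knex } from "knex"

// Placeholder types for illustration; the real ones live in the test utils.
type Datasource = Record<string, any>
type DatabaseName = string
type TestConfiguration = Record<string, any>

interface DescribeArgs {
  config: TestConfiguration
  dsProvider: () => Promise<{
    datasource?: Datasource
    rawDatasource?: Datasource
    client?: Knex
  }>
  isOracle: boolean
  isMSSQL: boolean
  isPostgres: boolean
}

// Inferred signature: register a describe block per included datasource.
declare function datasourceDescribe(
  opts: { name: string; exclude?: DatabaseName[] },
  cb: (args: DescribeArgs) => void
): void
```
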
@@ -3,38 +3,26 @@ import {
  Operation,
  Query,
  QueryPreview,
-  SourceName,
  TableSourceType,
} from "@budibase/types"
-import * as setup from "../utilities"
import {
  DatabaseName,
-  getDatasource,
-  knexClient,
+  datasourceDescribe,
} from "../../../../integrations/tests/utils"
import { Expectations } from "src/tests/utilities/api/base"
import { events } from "@budibase/backend-core"
import { Knex } from "knex"
+import { generator } from "@budibase/backend-core/tests"

-describe.each(
-  [
-    DatabaseName.POSTGRES,
-    DatabaseName.MYSQL,
-    DatabaseName.SQL_SERVER,
-    DatabaseName.MARIADB,
-    DatabaseName.ORACLE,
-  ].map(name => [name, getDatasource(name)])
-)("queries (%s)", (dbName, dsProvider) => {
-  const config = setup.getConfig()
-  const isOracle = dbName === DatabaseName.ORACLE
-  const isMsSQL = dbName === DatabaseName.SQL_SERVER
-  const isPostgres = dbName === DatabaseName.POSTGRES
-  const mainTableName = "test_table"
-
+datasourceDescribe(
+  { name: "queries (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
+  ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => {
    let rawDatasource: Datasource
    let datasource: Datasource
    let client: Knex
+
+    let tableName: string

    async function createQuery(
      query: Partial<Query>,
      expectations?: Expectations

@@ -59,28 +47,28 @@
    }

    beforeAll(async () => {
-      await config.init()
+      const ds = await dsProvider()
+      rawDatasource = ds.rawDatasource!
+      datasource = ds.datasource!
+      client = ds.client!
    })

    beforeEach(async () => {
-      rawDatasource = await dsProvider
-      datasource = await config.api.datasource.create(rawDatasource)
-
      // The Datasource API does not return the password, but we need it later to
      // connect to the underlying database, so we fill it back in here.
      datasource.config!.password = rawDatasource.config!.password

-      client = await knexClient(rawDatasource)
+      tableName = generator.guid()

-      await client.schema.dropTableIfExists(mainTableName)
-      await client.schema.createTable(mainTableName, table => {
+      await client.schema.dropTableIfExists(tableName)
+      await client.schema.createTable(tableName, table => {
        table.increments("id").primary()
        table.string("name")
        table.timestamp("birthday")
        table.integer("number")
      })

-      await client(mainTableName).insert([
+      await client(tableName).insert([
        { name: "one" },
        { name: "two" },
        { name: "three" },

@@ -91,22 +79,13 @@
      jest.clearAllMocks()
    })

-    afterEach(async () => {
-      const ds = await config.api.datasource.get(datasource._id!)
-      await config.api.datasource.delete(ds)
-    })
-
-    afterAll(async () => {
-      setup.afterAll()
-    })
-
    describe("query admin", () => {
      describe("create", () => {
        it("should be able to create a query", async () => {
          const query = await createQuery({
            name: "New Query",
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
          })

@@ -115,7 +94,7 @@
            name: "New Query",
            parameters: [],
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
            schema: {},
            queryVerb: "read",

@@ -134,7 +113,7 @@
        it("should be able to update a query", async () => {
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
          })

@@ -144,7 +123,7 @@
            ...query,
            name: "Updated Query",
            fields: {
-              sql: client(mainTableName).where({ id: 1 }).toString(),
+              sql: client(tableName).where({ id: 1 }).toString(),
            },
          })

@@ -153,7 +132,7 @@
            name: "Updated Query",
            parameters: [],
            fields: {
-              sql: client(mainTableName).where({ id: 1 }).toString(),
+              sql: client(tableName).where({ id: 1 }).toString(),
            },
            schema: {},
            queryVerb: "read",

@@ -170,7 +149,7 @@
        it("should be able to delete a query", async () => {
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
          })

@@ -189,7 +168,7 @@
        it("should be able to list queries", async () => {
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
          })

@@ -200,7 +179,7 @@
        it("should strip sensitive fields for prod apps", async () => {
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName).select("*").toString(),
+              sql: client(tableName).select("*").toString(),
            },
          })

@@ -218,7 +197,7 @@
          const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName)
+              sql: client(tableName)
                .select([
                  "*",
                  client.raw(

@@ -246,7 +225,7 @@
            datasourceId: datasource._id!,
            queryVerb: "read",
            fields: {
-              sql: client(mainTableName).where({ id: 1 }).toString(),
+              sql: client(tableName).where({ id: 1 }).toString(),
            },
            parameters: [],
            transformer: "return data",

@@ -349,10 +328,10 @@
        })

        it("should work with static variables", async () => {
-          await config.api.datasource.update({
-            ...datasource,
+          const datasource = await config.api.datasource.create({
+            ...rawDatasource,
            config: {
-              ...datasource.config,
+              ...rawDatasource.config,
              staticVariables: {
                foo: "bar",
              },

@@ -390,9 +369,12 @@
        })

        it("should work with dynamic variables", async () => {
+          const datasource = await config.api.datasource.create(rawDatasource)
+
          const basedOnQuery = await createQuery({
+            datasourceId: datasource._id!,
            fields: {
-              sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
+              sql: client(tableName).select("name").where({ id: 1 }).toString(),
            },
          })

@@ -439,9 +421,12 @@
        })

        it("should handle the dynamic base query being deleted", async () => {
+          const datasource = await config.api.datasource.create(rawDatasource)
+
          const basedOnQuery = await createQuery({
+            datasourceId: datasource._id!,
            fields: {
-              sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
+              sql: client(tableName).select("name").where({ id: 1 }).toString(),
            },
          })

@@ -482,11 +467,7 @@
            },
          })

-          expect(preview.rows).toEqual([
-            {
-              [key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
-            },
-          ])
+          expect(preview.rows).toEqual([{ [key]: isMSSQL ? "" : null }])
        })
      })

@@ -495,7 +476,7 @@
      it("should be able to insert with bindings", async () => {
        const query = await createQuery({
          fields: {
-            sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
+            sql: client(tableName).insert({ name: "{{ foo }}" }).toString(),
          },
          parameters: [
            {

@@ -518,7 +499,7 @@
          },
        ])

-        const rows = await client(mainTableName).where({ name: "baz" }).select()
+        const rows = await client(tableName).where({ name: "baz" }).select()
        expect(rows).toHaveLength(1)
        for (const row of rows) {
          expect(row).toMatchObject({ name: "baz" })

@@ -528,7 +509,7 @@
      it("should not allow handlebars as parameters", async () => {
        const query = await createQuery({
          fields: {
-            sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
+            sql: client(tableName).insert({ name: "{{ foo }}" }).toString(),
          },
          parameters: [
            {

@@ -564,7 +545,7 @@
        const date = new Date(datetimeStr)
        const query = await createQuery({
          fields: {
-            sql: client(mainTableName)
+            sql: client(tableName)
              .insert({
                name: "foo",
                birthday: client.raw("{{ birthday }}"),

@@ -586,7 +567,7 @@

        expect(result.data).toEqual([{ created: true }])

-        const rows = await client(mainTableName)
+        const rows = await client(tableName)
          .where({ birthday: datetimeStr })
          .select()
        expect(rows).toHaveLength(1)

@@ -602,7 +583,7 @@
        async notDateStr => {
          const query = await createQuery({
            fields: {
-              sql: client(mainTableName)
+              sql: client(tableName)
                .insert({ name: client.raw("{{ name }}") })
                .toString(),
            },

@@ -623,7 +604,7 @@

        expect(result.data).toEqual([{ created: true }])
|
|
||||||
const rows = await client(mainTableName)
|
const rows = await client(tableName)
|
||||||
.where({ name: notDateStr })
|
.where({ name: notDateStr })
|
||||||
.select()
|
.select()
|
||||||
expect(rows).toHaveLength(1)
|
expect(rows).toHaveLength(1)
|
||||||
|
@ -635,7 +616,7 @@ describe.each(
|
||||||
it("should execute a query", async () => {
|
it("should execute a query", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName).select("*").orderBy("id").toString(),
|
sql: client(tableName).select("*").orderBy("id").toString(),
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -678,7 +659,7 @@ describe.each(
|
||||||
it("should be able to transform a query", async () => {
|
it("should be able to transform a query", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName).where({ id: 1 }).select("*").toString(),
|
sql: client(tableName).where({ id: 1 }).select("*").toString(),
|
||||||
},
|
},
|
||||||
transformer: `
|
transformer: `
|
||||||
data[0].id = data[0].id + 1;
|
data[0].id = data[0].id + 1;
|
||||||
|
@ -701,7 +682,7 @@ describe.each(
|
||||||
it("should coerce numeric bindings", async () => {
|
it("should coerce numeric bindings", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.where({ id: client.raw("{{ id }}") })
|
.where({ id: client.raw("{{ id }}") })
|
||||||
.select("*")
|
.select("*")
|
||||||
.toString(),
|
.toString(),
|
||||||
|
@ -735,7 +716,7 @@ describe.each(
|
||||||
it("should be able to update rows", async () => {
|
it("should be able to update rows", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.update({ name: client.raw("{{ name }}") })
|
.update({ name: client.raw("{{ name }}") })
|
||||||
.where({ id: client.raw("{{ id }}") })
|
.where({ id: client.raw("{{ id }}") })
|
||||||
.toString(),
|
.toString(),
|
||||||
|
@ -760,7 +741,7 @@ describe.each(
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const rows = await client(mainTableName).where({ id: 1 }).select()
|
const rows = await client(tableName).where({ id: 1 }).select()
|
||||||
expect(rows).toEqual([
|
expect(rows).toEqual([
|
||||||
{ id: 1, name: "foo", birthday: null, number: null },
|
{ id: 1, name: "foo", birthday: null, number: null },
|
||||||
])
|
])
|
||||||
|
@ -769,7 +750,7 @@ describe.each(
|
||||||
it("should be able to execute an update that updates no rows", async () => {
|
it("should be able to execute an update that updates no rows", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.update({ name: "updated" })
|
.update({ name: "updated" })
|
||||||
.where({ id: 100 })
|
.where({ id: 100 })
|
||||||
.toString(),
|
.toString(),
|
||||||
|
@ -779,7 +760,7 @@ describe.each(
|
||||||
|
|
||||||
await config.api.query.execute(query._id!)
|
await config.api.query.execute(query._id!)
|
||||||
|
|
||||||
const rows = await client(mainTableName).select()
|
const rows = await client(tableName).select()
|
||||||
for (const row of rows) {
|
for (const row of rows) {
|
||||||
expect(row.name).not.toEqual("updated")
|
expect(row.name).not.toEqual("updated")
|
||||||
}
|
}
|
||||||
|
@ -788,14 +769,14 @@ describe.each(
|
||||||
it("should be able to execute a delete that deletes no rows", async () => {
|
it("should be able to execute a delete that deletes no rows", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName).where({ id: 100 }).delete().toString(),
|
sql: client(tableName).where({ id: 100 }).delete().toString(),
|
||||||
},
|
},
|
||||||
queryVerb: "delete",
|
queryVerb: "delete",
|
||||||
})
|
})
|
||||||
|
|
||||||
await config.api.query.execute(query._id!)
|
await config.api.query.execute(query._id!)
|
||||||
|
|
||||||
const rows = await client(mainTableName).select()
|
const rows = await client(tableName).select()
|
||||||
expect(rows).toHaveLength(5)
|
expect(rows).toHaveLength(5)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -804,7 +785,7 @@ describe.each(
|
||||||
it("should be able to delete rows", async () => {
|
it("should be able to delete rows", async () => {
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.where({ id: client.raw("{{ id }}") })
|
.where({ id: client.raw("{{ id }}") })
|
||||||
.delete()
|
.delete()
|
||||||
.toString(),
|
.toString(),
|
||||||
|
@ -824,7 +805,7 @@ describe.each(
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const rows = await client(mainTableName).where({ id: 1 }).select()
|
const rows = await client(tableName).where({ id: 1 }).select()
|
||||||
expect(rows).toHaveLength(0)
|
expect(rows).toHaveLength(0)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -832,7 +813,9 @@ describe.each(
|
||||||
|
|
||||||
describe("query through datasource", () => {
|
describe("query through datasource", () => {
|
||||||
it("should be able to query the datasource", async () => {
|
it("should be able to query the datasource", async () => {
|
||||||
const entityId = mainTableName
|
const datasource = await config.api.datasource.create(rawDatasource)
|
||||||
|
|
||||||
|
const entityId = tableName
|
||||||
await config.api.datasource.update({
|
await config.api.datasource.update({
|
||||||
...datasource,
|
...datasource,
|
||||||
entities: {
|
entities: {
|
||||||
|
@ -846,6 +829,7 @@ describe.each(
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const res = await config.api.datasource.query({
|
const res = await config.api.datasource.query({
|
||||||
endpoint: {
|
endpoint: {
|
||||||
datasourceId: datasource._id!,
|
datasourceId: datasource._id!,
|
||||||
|
@ -877,7 +861,7 @@ describe.each(
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
queryParams = {
|
queryParams = {
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.insert({
|
.insert({
|
||||||
name: client.raw("{{ bindingName }}"),
|
name: client.raw("{{ bindingName }}"),
|
||||||
number: client.raw("{{ bindingNumber }}"),
|
number: client.raw("{{ bindingNumber }}"),
|
||||||
|
@ -911,7 +895,7 @@ describe.each(
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
error = err.message
|
error = err.message
|
||||||
}
|
}
|
||||||
if (isMsSQL || isOracle) {
|
if (isMSSQL || isOracle) {
|
||||||
expect(error).toBeUndefined()
|
expect(error).toBeUndefined()
|
||||||
} else {
|
} else {
|
||||||
expect(error).toBeDefined()
|
expect(error).toBeDefined()
|
||||||
|
@ -934,11 +918,11 @@ describe.each(
|
||||||
describe("edge cases", () => {
|
describe("edge cases", () => {
|
||||||
it("should find rows with a binding containing a slash", async () => {
|
it("should find rows with a binding containing a slash", async () => {
|
||||||
const slashValue = "1/10"
|
const slashValue = "1/10"
|
||||||
await client(mainTableName).insert([{ name: slashValue }])
|
await client(tableName).insert([{ name: slashValue }])
|
||||||
|
|
||||||
const query = await createQuery({
|
const query = await createQuery({
|
||||||
fields: {
|
fields: {
|
||||||
sql: client(mainTableName)
|
sql: client(tableName)
|
||||||
.select("*")
|
.select("*")
|
||||||
.where("name", "=", client.raw("{{ bindingName }}"))
|
.where("name", "=", client.raw("{{ bindingName }}"))
|
||||||
.toString(),
|
.toString(),
|
||||||
|
@ -960,4 +944,5 @@ describe.each(
|
||||||
expect(results.data.length).toEqual(1)
|
expect(results.data.length).toEqual(1)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
|
@@ -1,8 +1,7 @@
  import { Datasource, Query } from "@budibase/types"
- import * as setup from "../utilities"
  import {
    DatabaseName,
-   getDatasource,
+   datasourceDescribe,
  } from "../../../../integrations/tests/utils"
  import { MongoClient, type Collection, BSON, Db } from "mongodb"
  import { generator } from "@budibase/backend-core/tests"
@@ -10,9 +9,10 @@ import { generator } from "@budibase/backend-core/tests"
  const expectValidId = expect.stringMatching(/^\w{24}$/)
  const expectValidBsonObjectId = expect.any(BSON.ObjectId)

- describe("/queries", () => {
+ datasourceDescribe(
+   { name: "/queries", only: [DatabaseName.MONGODB] },
+   ({ config, dsProvider }) => {
    let collection: string
-   let config = setup.getConfig()
    let datasource: Datasource

    async function createQuery(query: Partial<Query>): Promise<Query> {
@@ -63,15 +63,9 @@ describe("/queries", () => {
      })
    }

-   afterAll(async () => {
-     setup.afterAll()
-   })
-
    beforeAll(async () => {
-     await config.init()
+     const ds = await dsProvider()
-     datasource = await config.api.datasource.create(
+     datasource = ds.datasource!
-       await getDatasource(DatabaseName.MONGODB)
-     )
    })

    beforeEach(async () => {
@@ -319,7 +313,9 @@ describe("/queries", () => {
      const query = await createQuery({
        fields: {
          json: {
-           filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
+           filter: {
+             _id: { $eq: `ObjectId("${insertResult.insertedId}")` },
+           },
            update: { $set: { name: "newName" } },
          },
          extra: {
@@ -717,4 +713,5 @@ describe("/queries", () => {
      })
    })
  })
- })
+ }
+ )

@@ -2,34 +2,24 @@ import * as setup from "./utilities"

  import {
    DatabaseName,
-   getDatasource,
+   datasourceDescribe,
-   knexClient,
  } from "../../../integrations/tests/utils"

  import tk from "timekeeper"
  import emitter from "../../../../src/events"
  import { outputProcessing } from "../../../utilities/rowProcessor"
- import {
+ import { context, InternalTable, tenancy, utils } from "@budibase/backend-core"
-   context,
-   InternalTable,
-   tenancy,
-   features,
-   utils,
- } from "@budibase/backend-core"
  import { quotas } from "@budibase/pro"
  import {
    AIOperationEnum,
-   AttachmentFieldMetadata,
    AutoFieldSubType,
    Datasource,
-   DateFieldMetadata,
    DeleteRow,
    FieldSchema,
    FieldType,
    BBReferenceFieldSubType,
    FormulaType,
    INTERNAL_TABLE_SOURCE_ID,
-   NumberFieldMetadata,
    QuotaUsageType,
    RelationshipType,
    Row,
@@ -89,49 +79,21 @@ async function waitForEvent(
    return await p
  }

- describe.each([
+ datasourceDescribe(
-   ["lucene", undefined],
+   { name: "/rows (%s)", exclude: [DatabaseName.MONGODB] },
-   ["sqs", undefined],
+   ({ config, dsProvider, isInternal, isMSSQL, isOracle }) => {
-   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-   [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
- ])("/rows (%s)", (providerType, dsProvider) => {
-   const isInternal = dsProvider === undefined
-   const isLucene = providerType === "lucene"
-   const isSqs = providerType === "sqs"
-   const isMSSQL = providerType === DatabaseName.SQL_SERVER
-   const isOracle = providerType === DatabaseName.ORACLE
-   const config = setup.getConfig()

    let table: Table
    let datasource: Datasource | undefined
    let client: Knex | undefined
-   let envCleanup: (() => void) | undefined

    beforeAll(async () => {
-     await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
+     const ds = await dsProvider()
-       config.init()
+     datasource = ds.datasource
-     )
+     client = ds.client
-     envCleanup = features.testutils.setFeatureFlags("*", {
-       SQS: isSqs,
-     })
-
-     if (dsProvider) {
-       const rawDatasource = await dsProvider
-       datasource = await config.createDatasource({
-         datasource: rawDatasource,
-       })
-       client = await knexClient(rawDatasource)
-     }
    })

    afterAll(async () => {
      setup.afterAll()
-     if (envCleanup) {
-       envCleanup()
-     }
    })

    function saveTableRequest(
@@ -207,7 +169,10 @@ describe.each([

    const getRowUsage = async () => {
      const { total } = await config.doInContext(undefined, () =>
-       quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
+       quotas.getCurrentUsageValues(
+         QuotaUsageType.STATIC,
+         StaticQuotaName.ROWS
+       )
      )
      return total
    }
@@ -381,185 +346,6 @@ describe.each([
      expect(ids).toEqual(expect.arrayContaining(sequence))
    })

-   isLucene &&
-     it("row values are coerced", async () => {
-       const str: FieldSchema = {
-         type: FieldType.STRING,
-         name: "str",
-         constraints: { type: "string", presence: false },
-       }
-       const singleAttachment: FieldSchema = {
-         type: FieldType.ATTACHMENT_SINGLE,
-         name: "single attachment",
-         constraints: { presence: false },
-       }
-       const attachmentList: AttachmentFieldMetadata = {
-         type: FieldType.ATTACHMENTS,
-         name: "attachments",
-         constraints: { type: "array", presence: false },
-       }
-       const signature: FieldSchema = {
-         type: FieldType.SIGNATURE_SINGLE,
-         name: "signature",
-         constraints: { presence: false },
-       }
-       const bool: FieldSchema = {
-         type: FieldType.BOOLEAN,
-         name: "boolean",
-         constraints: { type: "boolean", presence: false },
-       }
-       const number: NumberFieldMetadata = {
-         type: FieldType.NUMBER,
-         name: "str",
-         constraints: { type: "number", presence: false },
-       }
-       const datetime: DateFieldMetadata = {
-         type: FieldType.DATETIME,
-         name: "datetime",
-         constraints: {
-           type: "string",
-           presence: false,
-           datetime: { earliest: "", latest: "" },
-         },
-       }
-       const arrayField: FieldSchema = {
-         type: FieldType.ARRAY,
-         constraints: {
-           type: JsonFieldSubType.ARRAY,
-           presence: false,
-           inclusion: ["One", "Two", "Three"],
-         },
-         name: "Sample Tags",
-         sortable: false,
-       }
-       const optsField: FieldSchema = {
-         name: "Sample Opts",
-         type: FieldType.OPTIONS,
-         constraints: {
-           type: "string",
-           presence: false,
-           inclusion: ["Alpha", "Beta", "Gamma"],
-         },
-       }
-       const table = await config.api.table.save(
-         saveTableRequest({
-           schema: {
-             name: str,
-             stringUndefined: str,
-             stringNull: str,
-             stringString: str,
-             numberEmptyString: number,
-             numberNull: number,
-             numberUndefined: number,
-             numberString: number,
-             numberNumber: number,
-             datetimeEmptyString: datetime,
-             datetimeNull: datetime,
-             datetimeUndefined: datetime,
-             datetimeString: datetime,
-             datetimeDate: datetime,
-             boolNull: bool,
-             boolEmpty: bool,
-             boolUndefined: bool,
-             boolString: bool,
-             boolBool: bool,
-             singleAttachmentNull: singleAttachment,
-             singleAttachmentUndefined: singleAttachment,
-             attachmentListNull: attachmentList,
-             attachmentListUndefined: attachmentList,
-             attachmentListEmpty: attachmentList,
-             attachmentListEmptyArrayStr: attachmentList,
-             signatureNull: signature,
-             signatureUndefined: signature,
-             arrayFieldEmptyArrayStr: arrayField,
-             arrayFieldArrayStrKnown: arrayField,
-             arrayFieldNull: arrayField,
-             arrayFieldUndefined: arrayField,
-             optsFieldEmptyStr: optsField,
-             optsFieldUndefined: optsField,
-             optsFieldNull: optsField,
-             optsFieldStrKnown: optsField,
-           },
-         })
-       )
-
-       const datetimeStr = "1984-04-20T00:00:00.000Z"
-
-       const row = await config.api.row.save(table._id!, {
-         name: "Test Row",
-         stringUndefined: undefined,
-         stringNull: null,
-         stringString: "i am a string",
-         numberEmptyString: "",
-         numberNull: null,
-         numberUndefined: undefined,
-         numberString: "123",
-         numberNumber: 123,
-         datetimeEmptyString: "",
-         datetimeNull: null,
-         datetimeUndefined: undefined,
-         datetimeString: datetimeStr,
-         datetimeDate: new Date(datetimeStr),
-         boolNull: null,
-         boolEmpty: "",
-         boolUndefined: undefined,
-         boolString: "true",
-         boolBool: true,
-         tableId: table._id,
-         singleAttachmentNull: null,
-         singleAttachmentUndefined: undefined,
-         attachmentListNull: null,
-         attachmentListUndefined: undefined,
-         attachmentListEmpty: "",
-         attachmentListEmptyArrayStr: "[]",
-         signatureNull: null,
-         signatureUndefined: undefined,
-         arrayFieldEmptyArrayStr: "[]",
-         arrayFieldUndefined: undefined,
-         arrayFieldNull: null,
-         arrayFieldArrayStrKnown: "['One']",
-         optsFieldEmptyStr: "",
-         optsFieldUndefined: undefined,
-         optsFieldNull: null,
-         optsFieldStrKnown: "Alpha",
-       })
-
-       expect(row.stringUndefined).toBe(undefined)
-       expect(row.stringNull).toBe(null)
-       expect(row.stringString).toBe("i am a string")
-       expect(row.numberEmptyString).toBe(null)
-       expect(row.numberNull).toBe(null)
-       expect(row.numberUndefined).toBe(undefined)
-       expect(row.numberString).toBe(123)
-       expect(row.numberNumber).toBe(123)
-       expect(row.datetimeEmptyString).toBe(null)
-       expect(row.datetimeNull).toBe(null)
-       expect(row.datetimeUndefined).toBe(undefined)
-       expect(row.datetimeString).toBe(new Date(datetimeStr).toISOString())
-       expect(row.datetimeDate).toBe(new Date(datetimeStr).toISOString())
-       expect(row.boolNull).toBe(null)
-       expect(row.boolEmpty).toBe(null)
-       expect(row.boolUndefined).toBe(undefined)
-       expect(row.boolString).toBe(true)
-       expect(row.boolBool).toBe(true)
-       expect(row.singleAttachmentNull).toEqual(null)
-       expect(row.singleAttachmentUndefined).toBe(undefined)
-       expect(row.attachmentListNull).toEqual([])
-       expect(row.attachmentListUndefined).toBe(undefined)
-       expect(row.attachmentListEmpty).toEqual([])
-       expect(row.attachmentListEmptyArrayStr).toEqual([])
-       expect(row.signatureNull).toEqual(null)
-       expect(row.signatureUndefined).toBe(undefined)
-       expect(row.arrayFieldEmptyArrayStr).toEqual([])
-       expect(row.arrayFieldNull).toEqual([])
-       expect(row.arrayFieldUndefined).toEqual(undefined)
-       expect(row.optsFieldEmptyStr).toEqual(null)
-       expect(row.optsFieldUndefined).toEqual(undefined)
-       expect(row.optsFieldNull).toEqual(null)
-       expect(row.arrayFieldArrayStrKnown).toEqual(["One"])
-       expect(row.optsFieldStrKnown).toEqual("Alpha")
-     })
-
    isInternal &&
      it("doesn't allow creating in user table", async () => {
        const response = await config.api.row.save(
@@ -1023,7 +809,6 @@ describe.each(
      })
    })

-   !isLucene &&
    describe("relations to same table", () => {
      let relatedRows: Row[]

@@ -1224,7 +1009,6 @@ describe.each(
      expect(rows).toHaveLength(1)
    })

-   !isLucene &&
    describe("relations to same table", () => {
      let relatedRows: Row[]

@@ -1541,7 +1325,7 @@ describe.each(
    !isInternal &&
      // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
      // to identity columns. This is not something Budibase does currently.
-     providerType !== DatabaseName.SQL_SERVER &&
+     !isMSSQL &&
      it("should support updating fields that are part of a composite key", async () => {
        const tableRequest = saveTableRequest({
          primary: ["number", "string"],
@@ -1572,7 +1356,9 @@ describe.each(
          number: naturalValue,
        })

-       expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`)
+       expect(existing._id).toEqual(
+         `%5B${naturalValue}%2C'${stringValue}'%5D`
+       )

        const row = await config.api.row.patch(table._id!, {
          _id: existing._id!,
@@ -1628,7 +1414,6 @@ describe.each(
      expect(res.length).toEqual(2)
    })

-   !isLucene &&
    describe("relations to same table", () => {
      let relatedRows: Row[]

@@ -2433,7 +2218,7 @@ describe.each(

    // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
    // to identity columns. This is not something Budibase does currently.
-   providerType !== DatabaseName.SQL_SERVER &&
+   !isMSSQL &&
      it("should handle filtering by composite primary keys", async () => {
        const tableRequest = saveTableRequest({
          primary: ["number", "string"],
@@ -2560,10 +2345,16 @@ describe.each(
      const stringified = (value: string) =>
        JSON.stringify(value).replace(/"/g, "'")

-     const matchingObject = (key: string, value: any, isArray: boolean) => {
+     const matchingObject = (
+       key: string,
+       value: any,
+       isArray: boolean
+     ) => {
        const objectMatcher = `{'${key}':'${value[key]}'.*?}`
        if (isArray) {
-         return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
+         return expect.stringMatching(
+           new RegExp(`^\\[${objectMatcher}\\]$`)
+         )
        }
        return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
      }
@@ -2836,7 +2627,9 @@ describe.each(
        name: "foo",
        description: "bar",
        tableId,
-       users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))),
+       users: expect.arrayContaining(
+         selectedUsers.map(u => resultMapper(u))
+       ),
        _id: expect.any(String),
        _rev: expect.any(String),
        id: isInternal ? undefined : expect.any(Number),
@@ -2881,7 +2674,9 @@ describe.each(
        description: "bar",
        tableId,
        user: expect.arrayContaining([user1].map(u => resultMapper(u))),
-       users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))),
+       users: expect.arrayContaining(
+         [user2, user3].map(u => resultMapper(u))
+       ),
        _id: row._id,
        _rev: expect.any(String),
        id: isInternal ? undefined : expect.any(Number),
@@ -3061,13 +2856,7 @@ describe.each(

    let auxData: Row[] = []

-   let flagCleanup: (() => void) | undefined
-
    beforeAll(async () => {
-     flagCleanup = features.testutils.setFeatureFlags("*", {
-       ENRICHED_RELATIONSHIPS: true,
-     })
-
      const aux2Table = await config.api.table.save(saveTableRequest())
      const aux2Data = await config.api.row.save(aux2Table._id!, {})

@@ -3214,10 +3003,6 @@ describe.each(
      viewId = view.id
    })

-   afterAll(() => {
-     flagCleanup?.()
-   })
-
    const testScenarios: [string, (row: Row) => Promise<Row> | Row][] = [
      ["get row", (row: Row) => config.api.row.get(viewId, row._id!)],
      [
@@ -3290,68 +3075,6 @@ describe.each(
      }
    )

-   it.each(testScenarios)(
-     "does not enrich relationships when not enabled (via %s)",
-     async (__, retrieveDelegate) => {
-       await features.testutils.withFeatureFlags(
-         "*",
-         {
-           ENRICHED_RELATIONSHIPS: false,
-         },
-         async () => {
-           const otherRows = _.sampleSize(auxData, 5)
-
-           const row = await config.api.row.save(viewId, {
-             title: generator.word(),
-             relWithNoSchema: [otherRows[0]],
-             relWithEmptySchema: [otherRows[1]],
-             relWithFullSchema: [otherRows[2]],
-             relWithHalfSchema: [otherRows[3]],
-             relWithIllegalSchema: [otherRows[4]],
-           })
-
-           const retrieved = await retrieveDelegate(row)
-
-           expect(retrieved).toEqual(
-             expect.objectContaining({
-               title: row.title,
-               relWithNoSchema: [
-                 {
-                   _id: otherRows[0]._id,
-                   primaryDisplay: otherRows[0].name,
-                 },
-               ],
-               relWithEmptySchema: [
-                 {
-                   _id: otherRows[1]._id,
-                   primaryDisplay: otherRows[1].name,
-                 },
-               ],
-               relWithFullSchema: [
-                 {
-                   _id: otherRows[2]._id,
-                   primaryDisplay: otherRows[2].name,
-                 },
-               ],
-               relWithHalfSchema: [
-                 {
-                   _id: otherRows[3]._id,
-                   primaryDisplay: otherRows[3].name,
-                 },
-               ],
-               relWithIllegalSchema: [
-                 {
-                   _id: otherRows[4]._id,
-                   primaryDisplay: otherRows[4].name,
-                 },
-               ],
-             })
-           )
-         }
-       )
-     }
-   )
-
    it.each([
      [
        "from table fetch",
@@ -3422,7 +3145,7 @@ describe.each(
      )
    })

-   isSqs &&
+   isInternal &&
      describe("AI fields", () => {
        let table: Table

@@ -3671,6 +3394,7 @@ describe.each(
      )
    })
  })
- })
+ }
+ )

  // todo: remove me

@@ -9,15 +9,20 @@ import {
  import { automations } from "@budibase/pro"
  import {
    CreateRowActionRequest,
+   Datasource,
    DocumentType,
    PermissionLevel,
    RowActionResponse,
+   Table,
    TableRowActions,
  } from "@budibase/types"
  import * as setup from "./utilities"
  import { generator, mocks } from "@budibase/backend-core/tests"
  import { Expectations } from "../../../tests/utilities/api/base"
- import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+ import {
+   DatabaseName,
+   datasourceDescribe,
+ } from "../../../integrations/tests/utils"
  import { generateRowActionsID } from "../../../db/utils"

  const expectAutomationId = () =>
@@ -969,36 +974,33 @@ describe("/rowsActions", () => {
        status: 200,
      })
    })
  })
+ })
+ })

- it.each([
+ datasourceDescribe(
-   [
+   { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
-     "internal",
+   ({ config, dsProvider, isInternal }) => {
-     async () => {
+     let datasource: Datasource | undefined
-       await config.newTenant()
+     beforeAll(async () => {
+       const ds = await dsProvider()
+       datasource = ds.datasource
+     })
+
+     async function getTable(): Promise<Table> {
+       if (isInternal) {
        await config.api.application.addSampleData(config.getAppId())
        const tables = await config.api.table.fetch()
-       const table = tables.find(
+       return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
-         t => t.sourceId === DEFAULT_BB_DATASOURCE_ID
+       } else {
-       )!
-       return table
-     },
-   ],
-   [
-     "external",
-     async () => {
-       await config.newTenant()
-       const ds = await config.createDatasource({
-         datasource: await getDatasource(DatabaseName.POSTGRES),
-       })
        const table = await config.api.table.save(
-         setup.structures.tableForDatasource(ds)
+         setup.structures.tableForDatasource(datasource!)
        )
        return table
-     },
+       }
-   ],
+     }
- ])(
-   "should delete all the row actions (and automations) for its tables when a datasource is deleted",
+     it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
-   async (_, getTable) => {
      async function getRowActionsFromDb(tableId: string) {
        return await context.doInAppContext(config.getAppId(), async () => {
          const db = context.getAppDB()
@@ -1032,7 +1034,6 @@ describe("/rowsActions", () => {
      expect(automationsResp.automations).toHaveLength(0)

      expect(await getRowActionsFromDb(tableId)).toBeUndefined()
-   }
- )
    })
  })
- })
+ }
+ )

@@ -1,14 +1,12 @@
  import { tableForDatasource } from "../../../tests/utilities/structures"
  import {
    DatabaseName,
-   getDatasource,
+   datasourceDescribe,
-   knexClient,
  } from "../../../integrations/tests/utils"
  import {
    context,
    db as dbCore,
    docIds,
-   features,
    MAX_VALID_DATE,
    MIN_VALID_DATE,
    SQLITE_DESIGN_DOC_ID,
@@ -16,7 +14,6 @@ import {
    withEnv as withCoreEnv,
  } from "@budibase/backend-core"

- import * as setup from "./utilities"
  import {
    AIOperationEnum,
    AutoFieldSubType,
@@ -62,25 +59,12 @@ jest.mock("@budibase/pro", () => ({
    },
  }))

- describe.each([
+ datasourceDescribe(
-   ["in-memory", undefined],
+   {
-   ["lucene", undefined],
+     name: "search (%s)",
-   ["sqs", undefined],
+     exclude: [DatabaseName.MONGODB],
-   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+   },
-   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+   ({ config, dsProvider, isInternal, isOracle, isSql }) => {
-   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-   [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
- ])("search (%s)", (name, dsProvider) => {
-   const isSqs = name === "sqs"
-   const isLucene = name === "lucene"
-   const isInMemory = name === "in-memory"
-   const isInternal = isSqs || isLucene || isInMemory
-   const isOracle = name === DatabaseName.ORACLE
-   const isSql = !isInMemory && !isLucene
-   const config = setup.getConfig()
-
-   let envCleanup: (() => void) | undefined
    let datasource: Datasource | undefined
    let client: Knex | undefined
    let tableOrViewId: string
@@ -111,38 +95,24 @@ describe.each([
    }

    beforeAll(async () => {
-     await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
+     const ds = await dsProvider()
-       config.init()
+     datasource = ds.datasource
-     )
+     client = ds.client
-     envCleanup = features.testutils.setFeatureFlags("*", {
-       SQS: isSqs,
-     })
-
-     if (config.app?.appId) {
+     config.app = await config.api.application.update(config.getAppId(), {
-       config.app = await config.api.application.update(config.app?.appId, {
        snippets: [
          {
            name: "WeeksAgo",
-           code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`,
+           code: `
+             return function (weeks) {
+               const currentTime = new Date(${Date.now()});
+               currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));
+               return currentTime.toISOString();
+             }
+           `,
          },
        ],
      })
-     }
-
-     if (dsProvider) {
-       const rawDatasource = await dsProvider
-       client = await knexClient(rawDatasource)
-       datasource = await config.createDatasource({
-         datasource: rawDatasource,
-       })
-     }
-   })
-
-   afterAll(async () => {
-     setup.afterAll()
-     if (envCleanup) {
-       envCleanup()
-     }
    })

    async function createTable(schema?: TableSchema) {
@@ -195,9 +165,17 @@ describe.each([
      expect(count).toEqual(numRows)
    }

-   describe.each([
+   describe.each([true, false])("in-memory: %s", isInMemory => {
-     ["table", createTable],
+     // We only run the in-memory tests during the SQS (isInternal) run
-     [
+     if (isInMemory && !isInternal) {
+       return
+     }
+
+     type CreateFn = (schema?: TableSchema) => Promise<string>
+     let tableOrView: [string, CreateFn][] = [["table", createTable]]
+
+     if (!isInMemory) {
+       tableOrView.push([
        "view",
        async (schema?: TableSchema) => {
          const tableId = await createTable(schema)
@@ -217,15 +195,12 @@ describe.each([
          )
          return viewId
        },
-     ],
+       ])
-   ])("from %s", (sourceType, createTableOrView) => {
-     const isView = sourceType === "view"
-
-     if (isView && isLucene) {
-       // Some tests don't have the expected result in views via lucene, and given that it is getting deprecated, we exclude them from the tests
-       return
      }

+     describe.each(tableOrView)("from %s", (sourceType, createTableOrView) => {
+       const isView = sourceType === "view"
+
      class SearchAssertion {
        constructor(private readonly query: SearchRowRequest) {}

@@ -246,7 +221,10 @@ describe.each([
      // the source array is an exact match of the target.
      //
      // _.isMatch("100", "1") also returns true which is not what we want.
-     private isMatch<T extends Record<string, any>>(expected: T, found: T) {
+     private isMatch<T extends Record<string, any>>(
+       expected: T,
+       found: T
+     ) {
        if (!expected) {
          throw new Error("Expected is undefined")
        }
@@ -351,7 +329,9 @@ describe.each([
      async toMatch(properties: Record<string, any>) {
        const response = await this.performSearch()
        const cloned = cloneDeep(response)
-       const keys = Object.keys(properties) as Array<keyof SearchResponse<Row>>
+       const keys = Object.keys(properties) as Array<
+         keyof SearchResponse<Row>
+       >
        for (let key of keys) {
          // eslint-disable-next-line jest/no-standalone-expect
          expect(response[key]).toBeDefined()
@@ -437,29 +417,29 @@ describe.each([

      describe("notEqual", () => {
        it("successfully finds false row", async () => {
-         await expectQuery({ notEqual: { isTrue: true } }).toContainExactly([
+         await expectQuery({
-           { isTrue: false },
+           notEqual: { isTrue: true },
-         ])
+         }).toContainExactly([{ isTrue: false }])
        })

        it("successfully finds true row", async () => {
-         await expectQuery({ notEqual: { isTrue: false } }).toContainExactly([
+         await expectQuery({
-           { isTrue: true },
+           notEqual: { isTrue: false },
-         ])
+         }).toContainExactly([{ isTrue: true }])
        })
      })

      describe("oneOf", () => {
        it("successfully finds true row", async () => {
-         await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([
+         await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly(
-           { isTrue: true },
+           [{ isTrue: true }]
-         ])
+         )
        })

        it("successfully finds false row", async () => {
-         await expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([
+         await expectQuery({
-           { isTrue: false },
+           oneOf: { isTrue: [false] },
-         ])
+         }).toContainExactly([{ isTrue: false }])
        })
      })

@@ -493,13 +473,18 @@ describe.each([
      // expected.
      serverTime.setMilliseconds(0)

-     const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
+     const future = new Date(
+       serverTime.getTime() + 1000 * 60 * 60 * 24 * 30
+     )

      const rows = (currentUser: User) => {
        return [
          { name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
          { name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
-         { name: currentUser.firstName, appointment: future.toISOString() },
+         {
+           name: currentUser.firstName,
+           appointment: future.toISOString(),
+         },
          { name: "serverDate", appointment: serverTime.toISOString() },
          {
            name: "single user, session user",
@@ -598,7 +583,6 @@ describe.each([
        ])
      })

-     !isLucene &&
      it("should return all rows matching the session user firstname when logical operator used", async () => {
        await expectQuery({
          $and: {
@@ -913,15 +897,15 @@ describe.each([

      describe("notEqual", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ notEqual: { name: "foo" } }).toContainExactly([
+         await expectQuery({ notEqual: { name: "foo" } }).toContainExactly(
-           { name: "bar" },
+           [{ name: "bar" }]
-         ])
+         )
        })

        it("fails to find nonexistent row", async () => {
-         await expectQuery({ notEqual: { name: "bar" } }).toContainExactly([
+         await expectQuery({ notEqual: { name: "bar" } }).toContainExactly(
-           { name: "foo" },
+           [{ name: "foo" }]
-         ])
+         )
        })
      })

@@ -1034,7 +1018,6 @@ describe.each([
        }).toFindNothing()
      })

-     !isLucene &&
      it("ignores low if it's an empty object", async () => {
        await expectQuery({
          // @ts-ignore
@@ -1042,7 +1025,6 @@ describe.each([
        }).toContainExactly([{ name: "foo" }, { name: "bar" }])
      })

-     !isLucene &&
      it("ignores high if it's an empty object", async () => {
        await expectQuery({
          // @ts-ignore
@@ -1202,10 +1184,6 @@ describe.each([
        await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
      })

-     // I couldn't find a way to make this work in Lucene and given that
-     // we're getting rid of Lucene soon I wasn't inclined to spend time on
-     // it.
-     !isLucene &&
      it("can convert from a string", async () => {
        await expectQuery({
          oneOf: {
@@ -1215,10 +1193,6 @@ describe.each([
        }).toContainExactly([{ age: 1 }])
      })

-     // I couldn't find a way to make this work in Lucene and given that
-     // we're getting rid of Lucene soon I wasn't inclined to spend time on
-     // it.
-     !isLucene &&
      it("can find multiple values for same column", async () => {
        await expectQuery({
          oneOf: {
@@ -1355,23 +1329,23 @@ describe.each([

      describe("notEqual", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([
+         await expectQuery({
-           { dob: JAN_10TH },
+           notEqual: { dob: JAN_1ST },
-         ])
+         }).toContainExactly([{ dob: JAN_10TH }])
        })

        it("fails to find nonexistent row", async () => {
-         await expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([
+         await expectQuery({
-           { dob: JAN_1ST },
+           notEqual: { dob: JAN_10TH },
-         ])
+         }).toContainExactly([{ dob: JAN_1ST }])
        })
      })

      describe("oneOf", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([
+         await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly(
-           { dob: JAN_1ST },
+           [{ dob: JAN_1ST }]
-         ])
+         )
        })

        it("fails to find nonexistent row", async () => {
@@ -1491,7 +1465,11 @@ describe.each([
      beforeAll(async () => {
        tableOrViewId = await createTableOrView({
          timeid: { name: "timeid", type: FieldType.STRING },
-         time: { name: "time", type: FieldType.DATETIME, timeOnly: true },
+         time: {
+           name: "time",
+           type: FieldType.DATETIME,
+           timeOnly: true,
+         },
        })

        await createRows([
@@ -1506,9 +1484,9 @@ describe.each([

      describe("equal", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ equal: { time: T_1000 } }).toContainExactly([
+         await expectQuery({ equal: { time: T_1000 } }).toContainExactly(
-           { time: "10:00:00" },
+           [{ time: "10:00:00" }]
-         ])
+         )
        })

        it("fails to find nonexistent row", async () => {
@@ -1520,7 +1498,9 @@ describe.each([

      describe("notEqual", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([
+         await expectQuery({
+           notEqual: { time: T_1000 },
+         }).toContainExactly([
            { timeid: NULL_TIME__ID },
            { time: "10:45:00" },
            { time: "12:00:00" },
@@ -1545,9 +1525,9 @@ describe.each([

      describe("oneOf", () => {
        it("successfully finds a row", async () => {
-         await expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([
+         await expectQuery({
-           { time: "10:00:00" },
+           oneOf: { time: [T_1000] },
-         ])
+         }).toContainExactly([{ time: "10:00:00" }])
        })

        it("fails to find nonexistent row", async () => {
@@ -1576,7 +1556,9 @@ describe.each([

      it("successfully finds no rows", async () => {
        await expectQuery({
-         range: { time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME } },
+         range: {
+           time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME },
+         },
        }).toFindNothing()
      })
    })
@@ -1648,7 +1630,8 @@ describe.each([
      })
    })

-   isSqs &&
+   isInternal &&
+     !isInMemory &&
      describe("AI Column", () => {
        const UNEXISTING_AI_COLUMN = "Real LLM Response"

@@ -1666,7 +1649,10 @@ describe.each([
          },
        })

-       await createRows([{ product: "Big Mac" }, { product: "McCrispy" }])
+       await createRows([
+         { product: "Big Mac" },
+         { product: "McCrispy" },
+       ])
      })

      describe("equal", () => {
@@ -1733,7 +1719,10 @@ describe.each([
          },
        },
      })
-     await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
+     await createRows([
+       { numbers: ["one", "two"] },
+       { numbers: ["three"] },
+     ])
    })

    describe("contains", () => {
@@ -1744,7 +1733,9 @@ describe.each([
      })

      it("fails to find nonexistent row", async () => {
-       await expectQuery({ contains: { numbers: ["none"] } }).toFindNothing()
+       await expectQuery({
+         contains: { numbers: ["none"] },
+       }).toFindNothing()
      })

      it("fails to find row containing all", async () => {
@@ -1754,10 +1745,9 @@ describe.each([
      })

      it("finds all with empty list", async () => {
-       await expectQuery({ contains: { numbers: [] } }).toContainExactly([
+       await expectQuery({ contains: { numbers: [] } }).toContainExactly(
-         { numbers: ["one", "two"] },
+         [{ numbers: ["one", "two"] }, { numbers: ["three"] }]
-         { numbers: ["three"] },
+       )
-       ])
      })
    })

@@ -1780,7 +1770,9 @@ describe.each([
      // Not sure if this is correct behaviour but changing it would be a
      // breaking change.
      it("finds all with empty list", async () => {
-       await expectQuery({ notContains: { numbers: [] } }).toContainExactly([
+       await expectQuery({
+         notContains: { numbers: [] },
+       }).toContainExactly([
          { numbers: ["one", "two"] },
          { numbers: ["three"] },
        ])
@@ -1804,7 +1796,9 @@ describe.each([
      })

      it("finds all with empty list", async () => {
-       await expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
+       await expectQuery({
+         containsAny: { numbers: [] },
+       }).toContainExactly([
          { numbers: ["one", "two"] },
          { numbers: ["three"] },
        ])
@@ -1871,7 +1865,11 @@ describe.each([
      it("successfully finds all rows", async () => {
        await expectQuery({
          oneOf: { num: [SMALL, MEDIUM, BIG] },
-       }).toContainExactly([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
+       }).toContainExactly([
+         { num: SMALL },
+         { num: MEDIUM },
+         { num: BIG },
+       ])
      })

      it("fails to find nonexistent row", async () => {
@ -1879,10 +1877,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// Range searches against bigints don't seem to work at all in Lucene, and I
|
|
||||||
// couldn't figure out why. Given that we're replacing Lucene with SQS,
|
|
||||||
// we've decided not to spend time on it.
|
|
||||||
!isLucene &&
|
|
||||||
describe("range", () => {
|
describe("range", () => {
|
||||||
it("successfully finds a row", async () => {
|
it("successfully finds a row", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
|
@ -2016,14 +2010,12 @@ describe.each([
|
||||||
}).toFindNothing()
|
}).toFindNothing()
|
||||||
})
|
})
|
||||||
|
|
||||||
isSqs &&
|
|
||||||
it("can search using just a low value", async () => {
|
it("can search using just a low value", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
range: { auto: { low: 9 } },
|
range: { auto: { low: 9 } },
|
||||||
}).toContainExactly([{ auto: 9 }, { auto: 10 }])
|
}).toContainExactly([{ auto: 9 }, { auto: 10 }])
|
||||||
})
|
})
|
||||||
|
|
||||||
isSqs &&
|
|
||||||
it("can search using just a high value", async () => {
|
it("can search using just a high value", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
range: { auto: { high: 2 } },
|
range: { auto: { high: 2 } },
|
||||||
|
@ -2031,13 +2023,13 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
isSqs &&
|
|
||||||
describe("sort", () => {
|
describe("sort", () => {
|
||||||
it("sorts ascending", async () => {
|
it("sorts ascending", async () => {
|
||||||
await expectSearch({
|
await expectSearch({
|
||||||
query: {},
|
query: {},
|
||||||
sort: "auto",
|
sort: "auto",
|
||||||
sortOrder: SortOrder.ASCENDING,
|
sortOrder: SortOrder.ASCENDING,
|
||||||
|
sortType: SortType.NUMBER,
|
||||||
}).toMatchExactly([
|
}).toMatchExactly([
|
||||||
{ auto: 1 },
|
{ auto: 1 },
|
||||||
{ auto: 2 },
|
{ auto: 2 },
|
||||||
|
@ -2057,6 +2049,7 @@ describe.each([
|
||||||
query: {},
|
query: {},
|
||||||
sort: "auto",
|
sort: "auto",
|
||||||
sortOrder: SortOrder.DESCENDING,
|
sortOrder: SortOrder.DESCENDING,
|
||||||
|
sortType: SortType.NUMBER,
|
||||||
}).toMatchExactly([
|
}).toMatchExactly([
|
||||||
{ auto: 10 },
|
{ auto: 10 },
|
||||||
{ auto: 9 },
|
{ auto: 9 },
|
||||||
|
@ -2128,7 +2121,9 @@ describe.each([
|
||||||
bookmark = response.bookmark
|
bookmark = response.bookmark
|
||||||
}
|
}
|
||||||
|
|
||||||
const autoValues = rows.map(row => row.auto).sort((a, b) => a - b)
|
const autoValues = rows
|
||||||
|
.map(row => row.auto)
|
||||||
|
.sort((a, b) => a - b)
|
||||||
expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
|
expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -2143,9 +2138,9 @@ describe.each([
|
||||||
})
|
})
|
||||||
|
|
||||||
it("successfully finds a row", async () => {
|
it("successfully finds a row", async () => {
|
||||||
await expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([
|
await expectQuery({
|
||||||
{ "1:name": "bar" },
|
equal: { "1:1:name": "bar" },
|
||||||
])
|
}).toContainExactly([{ "1:name": "bar" }])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("fails to find nonexistent row", async () => {
|
it("fails to find nonexistent row", async () => {
|
||||||
|
@ -2204,7 +2199,9 @@ describe.each([
|
||||||
})
|
})
|
||||||
|
|
||||||
it("formula is correct with relationship arrays", async () => {
|
it("formula is correct with relationship arrays", async () => {
|
||||||
await expectQuery({}).toContain([{ formula: "option 1,option 2" }])
|
await expectQuery({}).toContain([
|
||||||
|
{ formula: "option 1,option 2" },
|
||||||
|
])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -2229,9 +2226,9 @@ describe.each([
|
||||||
|
|
||||||
describe("equal", () => {
|
describe("equal", () => {
|
||||||
it("successfully finds a row", async () => {
|
it("successfully finds a row", async () => {
|
||||||
await expectQuery({ equal: { user: user1._id } }).toContainExactly([
|
await expectQuery({
|
||||||
{ user: { _id: user1._id } },
|
equal: { user: user1._id },
|
||||||
])
|
}).toContainExactly([{ user: { _id: user1._id } }])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("fails to find nonexistent row", async () => {
|
it("fails to find nonexistent row", async () => {
|
||||||
|
@ -2241,33 +2238,41 @@ describe.each([
|
||||||
|
|
||||||
describe("notEqual", () => {
|
describe("notEqual", () => {
|
||||||
it("successfully finds a row", async () => {
|
it("successfully finds a row", async () => {
|
||||||
await expectQuery({ notEqual: { user: user1._id } }).toContainExactly(
|
await expectQuery({
|
||||||
[{ user: { _id: user2._id } }, {}]
|
notEqual: { user: user1._id },
|
||||||
)
|
}).toContainExactly([{ user: { _id: user2._id } }, {}])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("fails to find nonexistent row", async () => {
|
it("fails to find nonexistent row", async () => {
|
||||||
await expectQuery({ notEqual: { user: "us_none" } }).toContainExactly(
|
await expectQuery({
|
||||||
[{ user: { _id: user1._id } }, { user: { _id: user2._id } }, {}]
|
notEqual: { user: "us_none" },
|
||||||
)
|
}).toContainExactly([
|
||||||
|
{ user: { _id: user1._id } },
|
||||||
|
{ user: { _id: user2._id } },
|
||||||
|
{},
|
||||||
|
])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("oneOf", () => {
|
describe("oneOf", () => {
|
||||||
it("successfully finds a row", async () => {
|
it("successfully finds a row", async () => {
|
||||||
await expectQuery({ oneOf: { user: [user1._id] } }).toContainExactly([
|
await expectQuery({
|
||||||
{ user: { _id: user1._id } },
|
oneOf: { user: [user1._id] },
|
||||||
])
|
}).toContainExactly([{ user: { _id: user1._id } }])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("fails to find nonexistent row", async () => {
|
it("fails to find nonexistent row", async () => {
|
||||||
await expectQuery({ oneOf: { user: ["us_none"] } }).toFindNothing()
|
await expectQuery({
|
||||||
|
oneOf: { user: ["us_none"] },
|
||||||
|
}).toFindNothing()
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("empty", () => {
|
describe("empty", () => {
|
||||||
it("finds empty rows", async () => {
|
it("finds empty rows", async () => {
|
||||||
await expectQuery({ empty: { user: null } }).toContainExactly([{}])
|
await expectQuery({ empty: { user: null } }).toContainExactly([
|
||||||
|
{},
|
||||||
|
])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -2392,8 +2397,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// This will never work for Lucene.
|
|
||||||
!isLucene &&
|
|
||||||
// It also can't work for in-memory searching because the related table name
|
// It also can't work for in-memory searching because the related table name
|
||||||
// isn't available.
|
// isn't available.
|
||||||
!isInMemory &&
|
!isInMemory &&
|
||||||
|
@ -2468,7 +2471,10 @@ describe.each([
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
|
{
|
||||||
|
name: "foo",
|
||||||
|
productCat: [{ _id: productCatRows[0]._id }],
|
||||||
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -2567,7 +2573,10 @@ describe.each([
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
|
{
|
||||||
|
name: "foo",
|
||||||
|
productCat: [{ _id: productCatRows[0]._id }],
|
||||||
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -2582,8 +2591,14 @@ describe.each([
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
|
{
|
||||||
{ name: "bar", productCat: [{ _id: productCatRows[1]._id }] },
|
name: "foo",
|
||||||
|
productCat: [{ _id: productCatRows[0]._id }],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "bar",
|
||||||
|
productCat: [{ _id: productCatRows[1]._id }],
|
||||||
|
},
|
||||||
{ name: "baz", productCat: undefined },
|
{ name: "baz", productCat: undefined },
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
@ -2663,8 +2678,14 @@ describe.each([
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
|
{
|
||||||
{ name: "bar", productCat: [{ _id: productCatRows[1]._id }] },
|
name: "foo",
|
||||||
|
productCat: [{ _id: productCatRows[0]._id }],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "bar",
|
||||||
|
productCat: [{ _id: productCatRows[1]._id }],
|
||||||
|
},
|
||||||
{ name: "baz", productCat: undefined },
|
{ name: "baz", productCat: undefined },
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
@ -2695,13 +2716,17 @@ describe.each([
|
||||||
|
|
||||||
it("can only pull 10 related rows", async () => {
|
it("can only pull 10 related rows", async () => {
|
||||||
await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => {
|
await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => {
|
||||||
const response = await expectQuery({}).toContain([{ name: "foo" }])
|
const response = await expectQuery({}).toContain([
|
||||||
|
{ name: "foo" },
|
||||||
|
])
|
||||||
expect(response.rows[0].productCat).toBeArrayOfSize(10)
|
expect(response.rows[0].productCat).toBeArrayOfSize(10)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
it("can pull max rows when env not set (defaults to 500)", async () => {
|
it("can pull max rows when env not set (defaults to 500)", async () => {
|
||||||
const response = await expectQuery({}).toContain([{ name: "foo" }])
|
const response = await expectQuery({}).toContain([
|
||||||
|
{ name: "foo" },
|
||||||
|
])
|
||||||
expect(response.rows[0].productCat).toBeArrayOfSize(11)
|
expect(response.rows[0].productCat).toBeArrayOfSize(11)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -2847,8 +2872,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// lucene can't count the total rows
|
|
||||||
!isLucene &&
|
|
||||||
describe("row counting", () => {
|
describe("row counting", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
tableOrViewId = await createTableOrView({
|
tableOrViewId = await createTableOrView({
|
||||||
|
@ -3065,9 +3088,7 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// This was never actually supported in Lucene but SQS does support it, so may
|
isInternal &&
|
||||||
// as well have a test for it.
|
|
||||||
;(isSqs || isInMemory) &&
|
|
||||||
describe("space at start of column name", () => {
|
describe("space at start of column name", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
tableOrViewId = await createTableOrView({
|
tableOrViewId = await createTableOrView({
|
||||||
|
@ -3100,7 +3121,7 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
isSqs &&
|
isInternal &&
|
||||||
!isView &&
|
!isView &&
|
||||||
describe("duplicate columns", () => {
|
describe("duplicate columns", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
|
@ -3262,7 +3283,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
!isLucene &&
|
|
||||||
describe("$and", () => {
|
describe("$and", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
tableOrViewId = await createTableOrView({
|
tableOrViewId = await createTableOrView({
|
||||||
|
@ -3280,7 +3300,10 @@ describe.each([
|
||||||
it("successfully finds a row for one level condition", async () => {
|
it("successfully finds a row for one level condition", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$and: {
|
$and: {
|
||||||
conditions: [{ equal: { age: 10 } }, { equal: { name: "Jack" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 10 } },
|
||||||
|
{ equal: { name: "Jack" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([{ age: 10, name: "Jack" }])
|
}).toContainExactly([{ age: 10, name: "Jack" }])
|
||||||
})
|
})
|
||||||
|
@ -3288,7 +3311,10 @@ describe.each([
|
||||||
it("successfully finds a row for one level with multiple conditions", async () => {
|
it("successfully finds a row for one level with multiple conditions", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$and: {
|
$and: {
|
||||||
conditions: [{ equal: { age: 10 } }, { equal: { name: "Jack" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 10 } },
|
||||||
|
{ equal: { name: "Jack" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([{ age: 10, name: "Jack" }])
|
}).toContainExactly([{ age: 10, name: "Jack" }])
|
||||||
})
|
})
|
||||||
|
@ -3330,7 +3356,10 @@ describe.each([
|
||||||
it("returns nothing when filtering out all data", async () => {
|
it("returns nothing when filtering out all data", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$and: {
|
$and: {
|
||||||
conditions: [{ equal: { age: 7 } }, { equal: { name: "Jack" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 7 } },
|
||||||
|
{ equal: { name: "Jack" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toFindNothing()
|
}).toFindNothing()
|
||||||
})
|
})
|
||||||
|
@ -3396,7 +3425,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
!isLucene &&
|
|
||||||
describe("$or", () => {
|
describe("$or", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
tableOrViewId = await createTableOrView({
|
tableOrViewId = await createTableOrView({
|
||||||
|
@ -3414,7 +3442,10 @@ describe.each([
|
||||||
it("successfully finds a row for one level condition", async () => {
|
it("successfully finds a row for one level condition", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$or: {
|
$or: {
|
||||||
conditions: [{ equal: { age: 7 } }, { equal: { name: "Jack" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 7 } },
|
||||||
|
{ equal: { name: "Jack" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ age: 10, name: "Jack" },
|
{ age: 10, name: "Jack" },
|
||||||
|
@ -3425,7 +3456,10 @@ describe.each([
|
||||||
it("successfully finds a row for one level with multiple conditions", async () => {
|
it("successfully finds a row for one level with multiple conditions", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$or: {
|
$or: {
|
||||||
conditions: [{ equal: { age: 7 } }, { equal: { name: "Jack" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 7 } },
|
||||||
|
{ equal: { name: "Jack" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{ age: 10, name: "Jack" },
|
{ age: 10, name: "Jack" },
|
||||||
|
@ -3475,7 +3509,10 @@ describe.each([
|
||||||
it("returns nothing when filtering out all data", async () => {
|
it("returns nothing when filtering out all data", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
$or: {
|
$or: {
|
||||||
conditions: [{ equal: { age: 6 } }, { equal: { name: "John" } }],
|
conditions: [
|
||||||
|
{ equal: { age: 6 } },
|
||||||
|
{ equal: { name: "John" } },
|
||||||
|
],
|
||||||
},
|
},
|
||||||
}).toFindNothing()
|
}).toFindNothing()
|
||||||
})
|
})
|
||||||
|
@ -3590,8 +3627,7 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
isSql &&
|
!isInternal &&
|
||||||
!isSqs &&
|
|
||||||
describe("SQL injection", () => {
|
describe("SQL injection", () => {
|
||||||
const badStrings = [
|
const badStrings = [
|
||||||
"1; DROP TABLE %table_name%;",
|
"1; DROP TABLE %table_name%;",
|
||||||
|
@ -3613,7 +3649,8 @@ describe.each([
|
||||||
// The SQL that knex generates when you try to use a double quote in a
|
// The SQL that knex generates when you try to use a double quote in a
|
||||||
// field name is always invalid and never works, so we skip it for these
|
// field name is always invalid and never works, so we skip it for these
|
||||||
// tests.
|
// tests.
|
||||||
const skipFieldNameCheck = isOracle && badStringTemplate.includes('"')
|
const skipFieldNameCheck =
|
||||||
|
isOracle && badStringTemplate.includes('"')
|
||||||
|
|
||||||
!skipFieldNameCheck &&
|
!skipFieldNameCheck &&
|
||||||
it("should not allow SQL injection as a field name", async () => {
|
it("should not allow SQL injection as a field name", async () => {
|
||||||
|
@ -3642,7 +3679,9 @@ describe.each([
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
await config.api.row.save(tableOrViewId, { [badString]: "foo" })
|
await config.api.row.save(tableOrViewId, {
|
||||||
|
[badString]: "foo",
|
||||||
|
})
|
||||||
|
|
||||||
await assertTableExists(table)
|
await assertTableExists(table)
|
||||||
await assertTableNumRows(table, 1)
|
await assertTableNumRows(table, 1)
|
||||||
|
@ -3690,4 +3729,6 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
|
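A note on the recurring `isInternal && describe(...)` / `!isInMemory && describe(...)` guards above: they rely on short-circuit evaluation. Jest only registers a suite if `describe` is actually called, so a falsy left operand silently drops the whole block. A minimal standalone sketch of the pattern (the flag here is illustrative, not one of the suite's real flags):

// Conditional suite registration via short-circuiting: when the flag is
// false, `describe` never runs and none of the inner tests are collected.
const supportsRangeQueries = process.env.DATASOURCE !== "none" // illustrative flag

supportsRangeQueries &&
  describe("range", () => {
    it("runs only when the guard is truthy", () => {
      expect(1 + 1).toBe(2)
    })
  })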
@@ -28,32 +28,24 @@ import * as setup from "./utilities"
 import * as uuid from "uuid"

 import { generator } from "@budibase/backend-core/tests"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  datasourceDescribe,
+} from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
 import timekeeper from "timekeeper"

 const { basicTable } = setup.structures
 const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/

-describe.each([
-  ["sqs", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
-])("/tables (%s)", (name, dsProvider) => {
-  const isInternal: boolean = !dsProvider
+datasourceDescribe(
+  { name: "/tables (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, dsProvider, isInternal, isOracle }) => {
   let datasource: Datasource | undefined
-  let config = setup.getConfig()
-
-  afterAll(setup.afterAll)

   beforeAll(async () => {
-    await config.init()
-    if (dsProvider) {
-      datasource = await config.api.datasource.create(await dsProvider)
-    }
+    const ds = await dsProvider()
+    datasource = ds.datasource
   })

   describe("create", () => {
@@ -69,7 +61,7 @@ describe.each([
        "with `backticks`",
      ]

-     if (name !== DatabaseName.ORACLE) {
+     if (!isOracle) {
        names.push(`with "double quotes"`)
        names.push(`with 'single quotes'`)
      }
@@ -229,12 +221,10 @@ describe.each([
      }

      // check base permissions
-     const { permissions: basePermissions } = await config.api.permission.get(
-       table._id!,
-       {
+     const { permissions: basePermissions } =
+       await config.api.permission.get(table._id!, {
          status: 200,
-       }
-     )
+       })
      const basePerms = { role: "BASIC", permissionType: "BASE" }
      expect(basePermissions.write).toEqual(basePerms)
      expect(basePermissions.read).toEqual(basePerms)
@@ -1147,7 +1137,10 @@ describe.each([
  })

  describe.each([
-   [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
+   [
+     RowExportFormat.CSV,
+     (val: any) => JSON.stringify(val).replace(/"/g, "'"),
+   ],
    [RowExportFormat.JSON, (val: any) => val],
  ])("import validation (%s)", (_, userParser) => {
    const basicSchema: TableSchema = {
@@ -1163,7 +1156,10 @@ describe.each([

    const importCases: [
      string,
-     (rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
+     (
+       rows: Row[],
+       schema: TableSchema
+     ) => Promise<ValidateTableImportResponse>
    ][] = [
      [
        "validateNewTableImport",
@@ -1272,7 +1268,9 @@ describe.each([
    isInternal &&
      it.each(
        isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
-     )("don't allow protected names in the rows (%s)", async columnName => {
+     )(
+       "don't allow protected names in the rows (%s)",
+       async columnName => {
        const result = await config.api.table.validateNewTableImport({
          rows: [
            {
@@ -1301,7 +1299,8 @@ describe.each([
            [columnName]: false,
          },
        })
-     })
+       }
+     )

    it("validates required fields and valid rows", async () => {
      const schema: TableSchema = {
@@ -1512,4 +1511,5 @@ describe.each([
      })
    })
  })
-})
+  }
+)
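The core of this refactor is swapping a hand-maintained `describe.each` datasource matrix for a `datasourceDescribe` wrapper that owns the matrix and hands each suite a prepared context (`config`, `dsProvider`, `isInternal`, `isOracle`, ...). A rough sketch of the shape implied by the call sites — the names and details below are inferred assumptions, not the actual helper in `integrations/tests/utils`:

// Sketch only: a datasourceDescribe-style wrapper inferred from this diff.
type DatabaseName =
  | "postgres" | "mysql" | "mssql" | "mariadb" | "oracle" | "mongodb" | "sqs"

interface Opts {
  name: string // suite title, e.g. "/tables (%s)"
  exclude?: DatabaseName[] // datasources to leave out of the matrix
}

interface Ctx {
  dbName: DatabaseName
  isOracle: boolean
  isInternal: boolean
}

const ALL: DatabaseName[] = [
  "postgres", "mysql", "mssql", "mariadb", "oracle", "mongodb", "sqs",
]

function datasourceDescribe(opts: Opts, cb: (ctx: Ctx) => void) {
  // One describe block per non-excluded datasource; the callback receives a
  // context instead of every spec re-deriving flags like isOracle by hand.
  for (const dbName of ALL.filter(n => !opts.exclude?.includes(n))) {
    describe(opts.name.replace("%s", dbName), () =>
      cb({ dbName, isOracle: dbName === "oracle", isInternal: dbName === "sqs" })
    )
  }
}

This shape also explains the trailing `}` / `)` churn at the end of each spec: the suite body is now a callback argument, so the old `})` that closed `describe.each(...)` becomes a `}` plus a `)`.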
@@ -2,7 +2,6 @@ import * as setup from "./utilities"
 import path from "path"
 import nock from "nock"
 import { generator } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"

 interface App {
   background: string
@@ -82,13 +81,7 @@ describe("/templates", () => {
  })

  describe("create app from template", () => {
-   it.each(["sqs", "lucene"])(
-     `should be able to create an app from a template (%s)`,
-     async source => {
-       await features.testutils.withFeatureFlags(
-         "*",
-         { SQS: source === "sqs" },
-         async () => {
+   it("should be able to create an app from a template", async () => {
      const name = generator.guid().replaceAll("-", "")
      const url = `/${name}`

@@ -111,19 +104,13 @@ describe("/templates", () => {
      expect(agencyProjects.name).toBe("Agency Projects")
      expect(users.name).toBe("Users")

-     const { rows } = await config.api.row.search(
-       agencyProjects._id!,
-       {
+     const { rows } = await config.api.row.search(agencyProjects._id!, {
        tableId: agencyProjects._id!,
        query: {},
-       }
-     )
+     })

      expect(rows).toHaveLength(3)
    })
-       }
-     )
-   }
-   )
+   })
  })
 })
@@ -1,4 +1,3 @@
-import * as setup from "./utilities"
 import {
   CreateViewRequest,
   Datasource,
@@ -37,29 +36,20 @@ import {
   SearchFilters,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  datasourceDescribe,
+} from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
-import { db, roles, features, context } from "@budibase/backend-core"
+import { db, roles, context } from "@budibase/backend-core"

-describe.each([
-  ["lucene", undefined],
-  ["sqs", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
-])("/v2/views (%s)", (name, dsProvider) => {
-  const config = setup.getConfig()
-  const isSqs = name === "sqs"
-  const isLucene = name === "lucene"
-  const isInternal = isSqs || isLucene
-
+datasourceDescribe(
+  { name: "/v2/views (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, isInternal, dsProvider }) => {
   let table: Table
   let rawDatasource: Datasource | undefined
   let datasource: Datasource | undefined
-  let envCleanup: (() => void) | undefined

   function saveTableRequest(
     ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@@ -106,30 +96,14 @@ describe.each([
   }

   beforeAll(async () => {
-    await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
-      config.init()
-    )
+    await config.init()

-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: isSqs,
-    })
-
-    if (dsProvider) {
-      rawDatasource = await dsProvider
-      datasource = await config.createDatasource({
-        datasource: rawDatasource,
-      })
-    }
+    const ds = await dsProvider()
+    rawDatasource = ds.rawDatasource
+    datasource = ds.datasource
     table = await config.api.table.save(priceTable())
   })

-  afterAll(async () => {
-    setup.afterAll()
-    if (envCleanup) {
-      envCleanup()
-    }
-  })
-
   beforeEach(() => {
     jest.clearAllMocks()
     mocks.licenses.useCloudFree()
@@ -220,7 +194,8 @@ describe.each([
    })

    it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => {
-     const newView: Required<Omit<CreateViewRequest, "queryUI" | "type">> = {
+     const newView: Required<Omit<CreateViewRequest, "queryUI" | "type">> =
+       {
        name: generator.name(),
        tableId: table._id!,
        primaryDisplay: "id",
@@ -390,7 +365,8 @@ describe.each([
      await config.api.viewV2.create(newView, {
        status: 400,
        body: {
-         message: 'Field "nonExisting" is not valid for the requested table',
+         message:
+           'Field "nonExisting" is not valid for the requested table',
        },
      })
    })
@@ -585,7 +561,8 @@ describe.each([
      await config.api.viewV2.create(newView, {
        status: 400,
        body: {
-         message: 'You can\'t hide "name" because it is the display column.',
+         message:
+           'You can\'t hide "name" because it is the display column.',
          status: 400,
        },
      })
@@ -719,7 +696,8 @@ describe.each([
        {
          status: 400,
          body: {
-           message: "Calculation views can only have a maximum of 5 fields",
+           message:
+             "Calculation views can only have a maximum of 5 fields",
          },
        }
      )
@@ -855,7 +833,6 @@ describe.each([
      })
    })

-   !isLucene &&
      it("does not get confused when a calculation field shadows a basic one", async () => {
        const table = await config.api.table.save(
          saveTableRequest({
@@ -1093,7 +1070,11 @@ describe.each([
      expect(await config.api.table.get(tableId)).toEqual(
        expect.objectContaining({
          views: {
-           [newName]: { ...view, name: newName, schema: expect.anything() },
+           [newName]: {
+             ...view,
+             name: newName,
+             schema: expect.anything(),
+           },
          },
        })
      )
@@ -1306,7 +1287,8 @@ describe.each([
      {
        status: 400,
        body: {
-         message: 'You can\'t hide "id" because it is a required field.',
+         message:
+           'You can\'t hide "id" because it is a required field.',
          status: 400,
        },
      }
@@ -1453,7 +1435,6 @@ describe.each([
      )
    })

-   !isLucene &&
      describe("calculation views", () => {
        let table: Table
        let view: ViewV2
@@ -2293,7 +2274,6 @@ describe.each([
      })
    })

-   !isLucene &&
      describe("calculation views", () => {
        it("should not remove calculation columns when modifying table schema", async () => {
          let table = await config.api.table.save(
@@ -2721,7 +2701,6 @@ describe.each([
      })
    })

-   !isLucene &&
      describe("search", () => {
        it("returns empty rows from view when no schema is passed", async () => {
          const rows = await Promise.all(
@@ -2923,9 +2902,6 @@ describe.each([
          hasNextPage: false,
          totalRows: 10,
        }
-       if (isLucene) {
-         expectation.bookmark = expect.anything()
-       }
        expect(page3).toEqual(expectation)
      })

@@ -3211,7 +3187,6 @@ describe.each([
        )
      })

-     !isLucene &&
        it.each([true, false])(
          "can filter a view without a view filter",
          async allOr => {
@@ -3249,7 +3224,6 @@ describe.each([
          }
        )

-     !isLucene &&
        it.each([true, false])("cannot bypass a view filter", async allOr => {
          await config.api.row.save(table._id!, {
            one: "foo",
@@ -3295,17 +3269,6 @@ describe.each([
      })

      describe("foreign relationship columns", () => {
-       let envCleanup: () => void
-       beforeAll(() => {
-         envCleanup = features.testutils.setFeatureFlags("*", {
-           ENRICHED_RELATIONSHIPS: true,
-         })
-       })
-
-       afterAll(() => {
-         envCleanup?.()
-       })
-
        const createMainTable = async (
          links: {
            name: string
@@ -3455,7 +3418,6 @@ describe.each([
        })
      })

-     !isLucene &&
        describe("calculations", () => {
          let table: Table
          let rows: Row[]
@@ -3508,10 +3470,7 @@ describe.each([
          expect(response.rows).toEqual(
            expect.arrayContaining([
              expect.objectContaining({
-               "Quantity Sum": rows.reduce(
-                 (acc, r) => acc + r.quantity,
-                 0
-               ),
+               "Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
              }),
            ])
          )
@@ -3552,9 +3511,7 @@ describe.each([
          }

          for (const row of response.rows) {
-           expect(row["Total Price"]).toEqual(
-             priceByQuantity[row.quantity]
-           )
+           expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
          }
        })

@@ -3744,12 +3701,9 @@ describe.each([
          },
        })

-       const apertureScience = await config.api.row.save(
-         companies._id!,
-         {
+       const apertureScience = await config.api.row.save(companies._id!, {
          name: "Aperture Science Laboratories",
-         }
-       )
+       })

        const blackMesa = await config.api.row.save(companies._id!, {
          name: "Black Mesa",
@@ -4075,7 +4029,6 @@ describe.each([
      })
    })

-   !isLucene &&
      it("should not need required fields to be present", async () => {
        const table = await config.api.table.save(
          saveTableRequest({
@@ -4614,7 +4567,9 @@ describe.each([
    describe("permissions", () => {
      beforeEach(async () => {
        await Promise.all(
-         Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
+         Array.from({ length: 10 }, () =>
+           config.api.row.save(table._id!, {})
+         )
        )
      })

@@ -4675,4 +4630,5 @@ describe.each([
    })
  })
 })
-})
+  }
+)
@@ -1,10 +1,6 @@
 import * as setup from "../../../api/routes/tests/utilities"
 import { basicTable } from "../../../tests/utilities/structures"
-import {
-  db as dbCore,
-  features,
-  SQLITE_DESIGN_DOC_ID,
-} from "@budibase/backend-core"
+import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
 import {
   LinkDocument,
   DocumentType,
@@ -70,17 +66,8 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
   }
 }

-async function sqsDisabled(cb: () => Promise<void>) {
-  await features.testutils.withFeatureFlags("*", { SQS: false }, cb)
-}
-
-async function sqsEnabled(cb: () => Promise<void>) {
-  await features.testutils.withFeatureFlags("*", { SQS: true }, cb)
-}
-
 describe("SQS migration", () => {
   beforeAll(async () => {
-    await sqsDisabled(async () => {
     await config.init()
     const table = await config.api.table.save(basicTable())
     tableId = table._id!
@@ -88,7 +75,6 @@ describe("SQS migration", () => {
     // old link document
     await db.put(oldLinkDocument())
   })
-    })

   beforeEach(async () => {
     await config.doInTenant(async () => {
@@ -101,19 +87,11 @@ describe("SQS migration", () => {

   it("test migration runs as expected against an older DB", async () => {
     const db = dbCore.getDB(config.appId!)
-    // confirm nothing exists initially
-    await sqsDisabled(async () => {
-      let error: any | undefined
-      try {
-        await db.get(SQLITE_DESIGN_DOC_ID)
-      } catch (err: any) {
-        error = err
-      }
-      expect(error).toBeDefined()
-      expect(error.status).toBe(404)
-    })
-
-    await sqsEnabled(async () => {
+    // remove sqlite design doc to simulate it comes from an older installation
+    const doc = await db.get(SQLITE_DESIGN_DOC_ID)
+    await db.remove({ _id: doc._id, _rev: doc._rev })
+
     await processMigrations(config.appId!, MIGRATIONS)
     const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
     expect(designDoc.sql.tables).toBeDefined()
@@ -130,14 +108,11 @@ describe("SQS migration", () => {

     const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
     const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
-    expect(linkDoc.tableId).toEqual(
-      generateJunctionTableID(tableId1, tableId2)
-    )
+    expect(linkDoc.tableId).toEqual(generateJunctionTableID(tableId1, tableId2))
     // should have swapped the documents
     expect(linkDoc.doc1.tableId).toEqual(tableId2)
     expect(linkDoc.doc1.rowId).toEqual(rowId2)
     expect(linkDoc.doc2.tableId).toEqual(tableId1)
     expect(linkDoc.doc2.rowId).toEqual(rowId1)
   })
-    })
 })
@@ -1,15 +1,15 @@
-const setup = require("./utilities")
+import { getConfig, afterAll as _afterAll, runStep } from "./utilities"

 describe("test the bash action", () => {
-  let config = setup.getConfig()
+  let config = getConfig()

   beforeAll(async () => {
     await config.init()
   })
-  afterAll(setup.afterAll)
+  afterAll(_afterAll)

   it("should be able to execute a script", async () => {
-    let res = await setup.runStep("EXECUTE_BASH", {
+    let res = await runStep(config, "EXECUTE_BASH", {
       code: "echo 'test'",
     })
     expect(res.stdout).toEqual("test\n")
@@ -17,7 +17,7 @@ describe("test the bash action", () => {
   })

   it("should handle a null value", async () => {
-    let res = await setup.runStep("EXECUTE_BASH", {
+    let res = await runStep(config, "EXECUTE_BASH", {
       code: null,
     })
     expect(res.stdout).toEqual(
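From here on, the automation-step specs all make the same mechanical change: `runStep` now takes the test config as an explicit first argument instead of resolving it internally. A small sketch of why threading the config through is preferable — the types below are placeholders, not the helper's real signature:

// Sketch only: an explicitly-passed config pins each step run to one app
// context, so suites using different apps can't trample shared module state.
interface TestConfig {
  appId: string
}

async function runStep(
  config: TestConfig,
  stepId: string,
  inputs: Record<string, unknown>
): Promise<{ success: boolean }> {
  // Stand-in for actually executing the automation step against config's app.
  console.log(`running ${stepId} against ${config.appId}`, inputs)
  return { success: true }
}

// usage, mirroring the updated tests
runStep({ appId: "app_123" }, "EXECUTE_BASH", { code: "echo 'test'" })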
@@ -31,7 +31,7 @@ describe("test the create row action", () => {
   afterAll(setup.afterAll)

   it("should be able to run the action", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row,
     })
     expect(res.id).toBeDefined()
@@ -43,7 +43,7 @@ describe("test the create row action", () => {
   })

   it("should return an error (not throw) when bad info provided", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row: {
         tableId: "invalid",
         invalid: "invalid",
@@ -53,7 +53,7 @@ describe("test the create row action", () => {
   })

   it("should check invalid inputs return an error", async () => {
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })

@@ -76,7 +76,7 @@ describe("test the create row action", () => {
     ]

     attachmentRow.file_attachment = attachmentObject
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row: attachmentRow,
     })

@@ -111,7 +111,7 @@ describe("test the create row action", () => {
     }

     attachmentRow.single_file_attachment = attachmentObject
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row: attachmentRow,
     })

@@ -146,7 +146,7 @@ describe("test the create row action", () => {
     }

     attachmentRow.single_file_attachment = attachmentObject
-    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.CREATE_ROW.stepId, {
       row: attachmentRow,
     })
@@ -1,14 +1,20 @@
-const setup = require("./utilities")
+import { runStep, actions, getConfig } from "./utilities"
+import { reset } from "timekeeper"

 // need real Date for this test
-const tk = require("timekeeper")
-tk.reset()
+reset()

 describe("test the delay logic", () => {
+  const config = getConfig()
+
+  beforeAll(async () => {
+    await config.init()
+  })
+
   it("should be able to run the delay", async () => {
     const time = 100
     const before = Date.now()
-    await setup.runStep(setup.actions.DELAY.stepId, { time: time })
+    await runStep(config, actions.DELAY.stepId, { time: time })
     const now = Date.now()
     // divide by two just so that test will always pass as long as there was some sort of delay
     expect(now - before).toBeGreaterThanOrEqual(time / 2)
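The delay test needs the real clock, hence the `reset()` from timekeeper before anything runs. For context, timekeeper's API is tiny; a sketch of the freeze/reset cycle that the `reset()` call is undoing (the frozen date is illustrative):

import tk from "timekeeper"

// Freeze Date.now() at a fixed instant, then restore the real clock.
tk.freeze(new Date("2020-01-01T00:00:00.000Z"))
console.log(new Date().toISOString()) // 2020-01-01T00:00:00.000Z
tk.reset() // back to the real clock — the state the delay test requires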
@@ -1,4 +1,4 @@
-const setup = require("./utilities")
+import * as setup from "./utilities"

 describe("test the delete row action", () => {
   let table: any
@@ -20,32 +20,29 @@ describe("test the delete row action", () => {
   afterAll(setup.afterAll)

   it("should be able to run the action", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+    const res = await setup.runStep(
+      config,
+      setup.actions.DELETE_ROW.stepId,
+      inputs
+    )
     expect(res.success).toEqual(true)
     expect(res.response).toBeDefined()
     expect(res.row._id).toEqual(row._id)
-    let error
-    try {
-      await config.getRow(table._id, res.row._id)
-    } catch (err) {
-      error = err
-    }
-    expect(error).toBeDefined()
   })

   it("check usage quota attempts", async () => {
     await setup.runInProd(async () => {
-      await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+      await setup.runStep(config, setup.actions.DELETE_ROW.stepId, inputs)
     })
   })

   it("should check invalid inputs return an error", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {})
+    const res = await setup.runStep(config, setup.actions.DELETE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })

   it("should return an error when table doesn't exist", async () => {
-    const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {
+    const res = await setup.runStep(config, setup.actions.DELETE_ROW.stepId, {
       tableId: "invalid",
       id: "invalid",
       revision: "invalid",
@@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {

   it("should be able to run the action", async () => {
     nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
-    const res = await runStep(actions.discord.stepId, {
+    const res = await runStep(config, actions.discord.stepId, {
       url: "http://www.example.com",
       username: "joe_bloggs",
     })
@ -1,35 +1,36 @@
|
||||||
import { Datasource, Query } from "@budibase/types"
|
import { Datasource, Query } from "@budibase/types"
|
||||||
import * as setup from "./utilities"
|
import * as setup from "./utilities"
|
||||||
import { DatabaseName } from "../../integrations/tests/utils"
|
import {
|
||||||
|
DatabaseName,
|
+  datasourceDescribe,
 } from "../../integrations/tests/utils"
 import { Knex } from "knex"
+import { generator } from "@budibase/backend-core/tests"
 
-describe.each([
-  DatabaseName.POSTGRES,
-  DatabaseName.MYSQL,
-  DatabaseName.SQL_SERVER,
-  DatabaseName.MARIADB,
-  DatabaseName.ORACLE,
-])("execute query action (%s)", name => {
+datasourceDescribe(
+  {
+    name: "execute query action",
+    exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  },
+  ({ config, dsProvider }) => {
     let tableName: string
     let client: Knex
     let datasource: Datasource
     let query: Query
-    const config = setup.getConfig()
 
     beforeAll(async () => {
-      await config.init()
-
-      const testSetup = await setup.setupTestDatasource(config, name)
-      datasource = testSetup.datasource
-      client = testSetup.client
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      client = ds.client!
     })
 
     beforeEach(async () => {
-      tableName = await setup.createTestTable(client, {
-        a: { type: "string" },
-        b: { type: "number" },
+      tableName = generator.guid()
+      await client.schema.createTable(tableName, table => {
+        table.string("a")
+        table.integer("b")
       })
-      await setup.insertTestData(client, tableName, [{ a: "string", b: 1 }])
+      await client(tableName).insert({ a: "string", b: 1 })
       query = await setup.saveTestQuery(config, client, tableName, datasource)
     })
 
@@ -37,29 +38,40 @@ describe.each([
       await client.schema.dropTable(tableName)
     })
 
-    afterAll(setup.afterAll)
-
     it("should be able to execute a query", async () => {
-      let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-        query: { queryId: query._id },
-      })
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: { queryId: query._id },
+        }
+      )
      expect(res.response).toEqual([{ a: "string", b: 1 }])
       expect(res.success).toEqual(true)
     })
 
     it("should handle a null query value", async () => {
-      let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-        query: null,
-      })
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: null,
+        }
+      )
       expect(res.response.message).toEqual("Invalid inputs")
       expect(res.success).toEqual(false)
     })
 
     it("should handle an error executing a query", async () => {
-      let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
-        query: { queryId: "wrong_id" },
-      })
+      let res = await setup.runStep(
+        config,
+        setup.actions.EXECUTE_QUERY.stepId,
+        {
+          query: { queryId: "wrong_id" },
+        }
+      )
       expect(res.response).toBeDefined()
       expect(res.success).toEqual(false)
     })
-})
+  }
+)
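The shape above is the pattern the rest of this commit repeats: datasourceDescribe replaces hand-rolled describe.each lists, and the callback receives the per-datasource config plus a lazy dsProvider. A minimal sketch of a consuming test file, using only names visible in this diff (the suite name is a placeholder):

import { DatabaseName, datasourceDescribe } from "../../integrations/tests/utils"

datasourceDescribe(
  // Placeholder suite; runs against everything except MongoDB and internal SQS.
  { name: "example suite", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
  ({ config, dsProvider }) => {
    it("can obtain a datasource and a Knex client", async () => {
      // dsProvider defers datasource creation until a test actually asks for it.
      const ds = await dsProvider()
      expect(ds.datasource).toBeDefined()
      expect(ds.client).toBeDefined()
    })
  }
)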
@@ -1,15 +1,15 @@
-const setup = require("./utilities")
+import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
 
 describe("test the execute script action", () => {
-  let config = setup.getConfig()
+  let config = getConfig()
 
   beforeAll(async () => {
     await config.init()
   })
-  afterAll(setup.afterAll)
+  afterAll(_afterAll)
 
   it("should be able to execute a script", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: "return 1 + 1",
     })
     expect(res.value).toEqual(2)
@@ -17,7 +17,7 @@ describe("test the execute script action", () => {
   })
 
   it("should handle a null value", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: null,
     })
     expect(res.response.message).toEqual("Invalid inputs")
@@ -25,8 +25,9 @@ describe("test the execute script action", () => {
   })
 
   it("should be able to get a value from context", async () => {
-    const res = await setup.runStep(
-      setup.actions.EXECUTE_SCRIPT.stepId,
+    const res = await runStep(
+      config,
+      actions.EXECUTE_SCRIPT.stepId,
       {
         code: "return steps.map(d => d.value)",
       },
@@ -40,7 +41,7 @@ describe("test the execute script action", () => {
   })
 
   it("should be able to handle an error gracefully", async () => {
-    const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
+    const res = await runStep(config, actions.EXECUTE_SCRIPT.stepId, {
       code: "return something.map(x => x.name)",
     })
     expect(res.response).toEqual("ReferenceError: something is not defined")
@ -2,13 +2,19 @@ import * as setup from "./utilities"
|
||||||
import { FilterConditions } from "../steps/filter"
|
import { FilterConditions } from "../steps/filter"
|
||||||
|
|
||||||
describe("test the filter logic", () => {
|
describe("test the filter logic", () => {
|
||||||
|
const config = setup.getConfig()
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
await config.init()
|
||||||
|
})
|
||||||
|
|
||||||
async function checkFilter(
|
async function checkFilter(
|
||||||
field: any,
|
field: any,
|
||||||
condition: string,
|
condition: string,
|
||||||
value: any,
|
value: any,
|
||||||
pass = true
|
pass = true
|
||||||
) {
|
) {
|
||||||
let res = await setup.runStep(setup.actions.FILTER.stepId, {
|
let res = await setup.runStep(config, setup.actions.FILTER.stepId, {
|
||||||
field,
|
field,
|
||||||
condition,
|
condition,
|
||||||
value,
|
value,
|
||||||
|
|
|
@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
|
|
||||||
it("should be able to run the action", async () => {
|
it("should be able to run the action", async () => {
|
||||||
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
|
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
|
||||||
const res = await runStep(actions.integromat.stepId, {
|
const res = await runStep(config, actions.integromat.stepId, {
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
expect(res.response.foo).toEqual("bar")
|
expect(res.response.foo).toEqual("bar")
|
||||||
|
@ -38,7 +38,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
.post("/", payload)
|
.post("/", payload)
|
||||||
.reply(200, { foo: "bar" })
|
.reply(200, { foo: "bar" })
|
||||||
|
|
||||||
const res = await runStep(actions.integromat.stepId, {
|
const res = await runStep(config, actions.integromat.stepId, {
|
||||||
body: { value: JSON.stringify(payload) },
|
body: { value: JSON.stringify(payload) },
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
|
@ -47,7 +47,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return a 400 if the JSON payload string is malformed", async () => {
|
it("should return a 400 if the JSON payload string is malformed", async () => {
|
||||||
const res = await runStep(actions.integromat.stepId, {
|
const res = await runStep(config, actions.integromat.stepId, {
|
||||||
body: { value: "{ invalid json }" },
|
body: { value: "{ invalid json }" },
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
|
|
|
@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
|
|
||||||
it("should be able to run the action and default to 'get'", async () => {
|
it("should be able to run the action and default to 'get'", async () => {
|
||||||
nock("http://www.example.com/").get("/").reply(200, { foo: "bar" })
|
nock("http://www.example.com/").get("/").reply(200, { foo: "bar" })
|
||||||
const res = await runStep(actions.n8n.stepId, {
|
const res = await runStep(config, actions.n8n.stepId, {
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
body: {
|
body: {
|
||||||
test: "IGNORE_ME",
|
test: "IGNORE_ME",
|
||||||
|
@ -30,7 +30,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
nock("http://www.example.com/")
|
nock("http://www.example.com/")
|
||||||
.post("/", { name: "Adam", age: 9 })
|
.post("/", { name: "Adam", age: 9 })
|
||||||
.reply(200)
|
.reply(200)
|
||||||
const res = await runStep(actions.n8n.stepId, {
|
const res = await runStep(config, actions.n8n.stepId, {
|
||||||
body: {
|
body: {
|
||||||
value: JSON.stringify({ name: "Adam", age: 9 }),
|
value: JSON.stringify({ name: "Adam", age: 9 }),
|
||||||
},
|
},
|
||||||
|
@ -42,7 +42,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
|
|
||||||
it("should return a 400 if the JSON payload string is malformed", async () => {
|
it("should return a 400 if the JSON payload string is malformed", async () => {
|
||||||
const payload = `{ value1 1 }`
|
const payload = `{ value1 1 }`
|
||||||
const res = await runStep(actions.n8n.stepId, {
|
const res = await runStep(config, actions.n8n.stepId, {
|
||||||
value1: "ONE",
|
value1: "ONE",
|
||||||
body: {
|
body: {
|
||||||
value: payload,
|
value: payload,
|
||||||
|
@ -59,7 +59,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
nock("http://www.example.com/")
|
nock("http://www.example.com/")
|
||||||
.head("/", body => body === "")
|
.head("/", body => body === "")
|
||||||
.reply(200)
|
.reply(200)
|
||||||
const res = await runStep(actions.n8n.stepId, {
|
const res = await runStep(config, actions.n8n.stepId, {
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
method: "HEAD",
|
method: "HEAD",
|
||||||
body: {
|
body: {
|
||||||
|
|
|
@ -62,13 +62,13 @@ describe("test the openai action", () => {
|
||||||
afterAll(_afterAll)
|
afterAll(_afterAll)
|
||||||
|
|
||||||
it("should be able to receive a response from ChatGPT given a prompt", async () => {
|
it("should be able to receive a response from ChatGPT given a prompt", async () => {
|
||||||
const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
|
const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT })
|
||||||
expect(res.response).toEqual("This is a test")
|
expect(res.response).toEqual("This is a test")
|
||||||
expect(res.success).toBeTruthy()
|
expect(res.success).toBeTruthy()
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should present the correct error message when a prompt is not provided", async () => {
|
it("should present the correct error message when a prompt is not provided", async () => {
|
||||||
const res = await runStep("OPENAI", { prompt: null })
|
const res = await runStep(config, "OPENAI", { prompt: null })
|
||||||
expect(res.response).toEqual(
|
expect(res.response).toEqual(
|
||||||
"Budibase OpenAI Automation Failed: No prompt supplied"
|
"Budibase OpenAI Automation Failed: No prompt supplied"
|
||||||
)
|
)
|
||||||
|
@ -91,7 +91,7 @@ describe("test the openai action", () => {
|
||||||
} as any)
|
} as any)
|
||||||
)
|
)
|
||||||
|
|
||||||
const res = await runStep("OPENAI", {
|
const res = await runStep(config, "OPENAI", {
|
||||||
prompt: OPENAI_PROMPT,
|
prompt: OPENAI_PROMPT,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -106,7 +106,7 @@ describe("test the openai action", () => {
|
||||||
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
|
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
|
||||||
|
|
||||||
const prompt = "What is the meaning of life?"
|
const prompt = "What is the meaning of life?"
|
||||||
await runStep("OPENAI", {
|
await runStep(config, "OPENAI", {
|
||||||
model: "gpt-4o-mini",
|
model: "gpt-4o-mini",
|
||||||
prompt,
|
prompt,
|
||||||
})
|
})
|
||||||
|
|
|
@ -18,7 +18,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
nock("http://www.example.com")
|
nock("http://www.example.com")
|
||||||
.post("/", { a: 1 })
|
.post("/", { a: 1 })
|
||||||
.reply(200, { foo: "bar" })
|
.reply(200, { foo: "bar" })
|
||||||
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
|
const res = await runStep(config, actions.OUTGOING_WEBHOOK.stepId, {
|
||||||
requestMethod: "POST",
|
requestMethod: "POST",
|
||||||
url: "www.example.com",
|
url: "www.example.com",
|
||||||
requestBody: JSON.stringify({ a: 1 }),
|
requestBody: JSON.stringify({ a: 1 }),
|
||||||
|
@ -28,7 +28,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return an error if something goes wrong in fetch", async () => {
|
it("should return an error if something goes wrong in fetch", async () => {
|
||||||
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
|
const res = await runStep(config, actions.OUTGOING_WEBHOOK.stepId, {
|
||||||
requestMethod: "GET",
|
requestMethod: "GET",
|
||||||
url: "www.invalid.com",
|
url: "www.invalid.com",
|
||||||
})
|
})
|
||||||
|
|
|
@ -33,7 +33,11 @@ describe("Test a query step automation", () => {
|
||||||
sortOrder: "ascending",
|
sortOrder: "ascending",
|
||||||
limit: 10,
|
limit: 10,
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.QUERY_ROWS.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toBe(true)
|
expect(res.success).toBe(true)
|
||||||
expect(res.rows).toBeDefined()
|
expect(res.rows).toBeDefined()
|
||||||
expect(res.rows.length).toBe(2)
|
expect(res.rows.length).toBe(2)
|
||||||
|
@ -48,7 +52,11 @@ describe("Test a query step automation", () => {
|
||||||
sortOrder: "ascending",
|
sortOrder: "ascending",
|
||||||
limit: 10,
|
limit: 10,
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.QUERY_ROWS.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toBe(true)
|
expect(res.success).toBe(true)
|
||||||
expect(res.rows).toBeDefined()
|
expect(res.rows).toBeDefined()
|
||||||
expect(res.rows.length).toBe(2)
|
expect(res.rows.length).toBe(2)
|
||||||
|
@ -65,7 +73,11 @@ describe("Test a query step automation", () => {
|
||||||
limit: 10,
|
limit: 10,
|
||||||
onEmptyFilter: "none",
|
onEmptyFilter: "none",
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.QUERY_ROWS.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toBe(false)
|
expect(res.success).toBe(false)
|
||||||
expect(res.rows).toBeDefined()
|
expect(res.rows).toBeDefined()
|
||||||
expect(res.rows.length).toBe(0)
|
expect(res.rows.length).toBe(0)
|
||||||
|
@ -85,7 +97,11 @@ describe("Test a query step automation", () => {
|
||||||
sortOrder: "ascending",
|
sortOrder: "ascending",
|
||||||
limit: 10,
|
limit: 10,
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.QUERY_ROWS.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toBe(false)
|
expect(res.success).toBe(false)
|
||||||
expect(res.rows).toBeDefined()
|
expect(res.rows).toBeDefined()
|
||||||
expect(res.rows.length).toBe(0)
|
expect(res.rows.length).toBe(0)
|
||||||
|
@ -100,7 +116,11 @@ describe("Test a query step automation", () => {
|
||||||
sortOrder: "ascending",
|
sortOrder: "ascending",
|
||||||
limit: 10,
|
limit: 10,
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.QUERY_ROWS.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toBe(true)
|
expect(res.success).toBe(true)
|
||||||
expect(res.rows).toBeDefined()
|
expect(res.rows).toBeDefined()
|
||||||
expect(res.rows.length).toBe(2)
|
expect(res.rows.length).toBe(2)
|
||||||
|
|
|
@@ -1,9 +1,14 @@
 import * as automation from "../../index"
 import * as setup from "../utilities"
-import { LoopStepType, FieldType, Table } from "@budibase/types"
+import { LoopStepType, FieldType, Table, Datasource } from "@budibase/types"
 import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
-import { DatabaseName } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  datasourceDescribe,
+} from "../../../integrations/tests/utils"
 import { FilterConditions } from "../../../automations/steps/filter"
+import { Knex } from "knex"
+import { generator } from "@budibase/backend-core/tests"
 
 describe("Automation Scenarios", () => {
   let config = setup.getConfig()
@@ -107,96 +112,6 @@ describe("Automation Scenarios", () => {
       expect(results.steps[2].outputs.rows).toHaveLength(1)
     })
 
-    it("should query an external database for some data then insert than into an internal table", async () => {
-      const { datasource, client } = await setup.setupTestDatasource(
-        config,
-        DatabaseName.MYSQL
-      )
-
-      const newTable = await config.createTable({
-        name: "table",
-        type: "table",
-        schema: {
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-            constraints: {
-              presence: true,
-            },
-          },
-          age: {
-            name: "age",
-            type: FieldType.NUMBER,
-            constraints: {
-              presence: true,
-            },
-          },
-        },
-      })
-
-      const tableName = await setup.createTestTable(client, {
-        name: { type: "string" },
-        age: { type: "number" },
-      })
-
-      const rows = [
-        { name: "Joe", age: 20 },
-        { name: "Bob", age: 25 },
-        { name: "Paul", age: 30 },
-      ]
-
-      await setup.insertTestData(client, tableName, rows)
-
-      const query = await setup.saveTestQuery(
-        config,
-        client,
-        tableName,
-        datasource
-      )
-
-      const builder = createAutomationBuilder({
-        name: "Test external query and save",
-      })
-
-      const results = await builder
-        .appAction({
-          fields: {},
-        })
-        .executeQuery({
-          query: {
-            queryId: query._id!,
-          },
-        })
-        .loop({
-          option: LoopStepType.ARRAY,
-          binding: "{{ steps.1.response }}",
-        })
-        .createRow({
-          row: {
-            name: "{{ loop.currentItem.name }}",
-            age: "{{ loop.currentItem.age }}",
-            tableId: newTable._id!,
-          },
-        })
-        .queryRows({
-          tableId: newTable._id!,
-        })
-        .run()
-
-      expect(results.steps).toHaveLength(3)
-
-      expect(results.steps[1].outputs.iterations).toBe(3)
-      expect(results.steps[1].outputs.items).toHaveLength(3)
-
-      expect(results.steps[2].outputs.rows).toHaveLength(3)
-
-      rows.forEach(expectedRow => {
-        expect(results.steps[2].outputs.rows).toEqual(
-          expect.arrayContaining([expect.objectContaining(expectedRow)])
-        )
-      })
-    })
-
     it("should trigger an automation which creates and then updates a row", async () => {
       const table = await config.createTable({
         name: "TestTable",
@@ -517,3 +432,104 @@ describe("Automation Scenarios", () => {
     expect(results.steps[0].outputs.message).toContain("example.com")
   })
 })
+
+datasourceDescribe(
+  { name: "", only: [DatabaseName.MYSQL] },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      client = ds.client!
+    })
+
+    it("should query an external database for some data then insert than into an internal table", async () => {
+      const newTable = await config.createTable({
+        name: "table",
+        type: "table",
+        schema: {
+          name: {
+            name: "name",
+            type: FieldType.STRING,
+            constraints: {
+              presence: true,
+            },
+          },
+          age: {
+            name: "age",
+            type: FieldType.NUMBER,
+            constraints: {
+              presence: true,
+            },
+          },
+        },
+      })
+
+      const tableName = generator.guid()
+      await client.schema.createTable(tableName, table => {
+        table.string("name")
+        table.integer("age")
+      })
+
+      const rows = [
+        { name: "Joe", age: 20 },
+        { name: "Bob", age: 25 },
+        { name: "Paul", age: 30 },
+      ]
+
+      await client(tableName).insert(rows)
+
+      const query = await setup.saveTestQuery(
+        config,
+        client,
+        tableName,
+        datasource
+      )
+
+      const builder = createAutomationBuilder({
+        name: "Test external query and save",
+        config,
+      })
+
+      const results = await builder
+        .appAction({
+          fields: {},
+        })
+        .executeQuery({
+          query: {
+            queryId: query._id!,
+          },
+        })
+        .loop({
+          option: LoopStepType.ARRAY,
+          binding: "{{ steps.1.response }}",
+        })
+        .createRow({
+          row: {
+            name: "{{ loop.currentItem.name }}",
+            age: "{{ loop.currentItem.age }}",
+            tableId: newTable._id!,
+          },
+        })
+        .queryRows({
+          tableId: newTable._id!,
+        })
+        .run()
+
+      expect(results.steps).toHaveLength(3)
+
+      expect(results.steps[1].outputs.iterations).toBe(3)
+      expect(results.steps[1].outputs.items).toHaveLength(3)
+
+      expect(results.steps[2].outputs.rows).toHaveLength(3)
+
+      rows.forEach(expectedRow => {
+        expect(results.steps[2].outputs.rows).toEqual(
+          expect.arrayContaining([expect.objectContaining(expectedRow)])
+        )
+      })
+    })
+  }
+)
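Where the removed test pinned itself to MySQL imperatively, via setup.setupTestDatasource(config, DatabaseName.MYSQL), the replacement block expresses the same constraint declaratively with the `only` option. `only` and `exclude` are mutually exclusive (the helper implementation later in this diff throws if both are supplied); a sketch of the two forms, with placeholder suite names:

// Run a suite against a single engine...
datasourceDescribe({ name: "mysql-only", only: [DatabaseName.MYSQL] }, ctx => {
  // ...tests...
})

// ...or against every engine except some.
datasourceDescribe({ name: "no mongo", exclude: [DatabaseName.MONGODB] }, ctx => {
  // ...tests...
})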
@@ -18,7 +18,7 @@ function generateResponse(to: string, from: string) {
   }
 }
 
-const setup = require("./utilities")
+import * as setup from "./utilities"
 
 describe("test the outgoing webhook action", () => {
   let inputs
@@ -58,6 +58,7 @@ describe("test the outgoing webhook action", () => {
     }
     let resp = generateResponse(inputs.to, inputs.from)
     const res = await setup.runStep(
+      config,
       setup.actions.SEND_EMAIL_SMTP.stepId,
       inputs
     )
@@ -1,8 +1,8 @@
-const setup = require("./utilities")
+import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
 
 describe("test the server log action", () => {
-  let config = setup.getConfig()
-  let inputs
+  let config = getConfig()
+  let inputs: any
 
   beforeAll(async () => {
     await config.init()
@@ -10,10 +10,10 @@ describe("test the server log action", () => {
       text: "log message",
     }
   })
-  afterAll(setup.afterAll)
+  afterAll(_afterAll)
 
   it("should be able to log the text", async () => {
-    let res = await setup.runStep(setup.actions.SERVER_LOG.stepId, inputs)
+    let res = await runStep(config, actions.SERVER_LOG.stepId, inputs)
     expect(res.message).toEqual(`App ${config.getAppId()} - ${inputs.text}`)
     expect(res.success).toEqual(true)
   })
@ -29,6 +29,7 @@ describe("Test triggering an automation from another automation", () => {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
||||||
inputs
|
inputs
|
||||||
)
|
)
|
||||||
|
@ -44,6 +45,7 @@ describe("Test triggering an automation from another automation", () => {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const res = await setup.runStep(
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
setup.actions.TRIGGER_AUTOMATION_RUN.stepId,
|
||||||
inputs
|
inputs
|
||||||
)
|
)
|
||||||
|
|
|
@ -34,7 +34,11 @@ describe("test the update row action", () => {
|
||||||
afterAll(setup.afterAll)
|
afterAll(setup.afterAll)
|
||||||
|
|
||||||
it("should be able to run the action", async () => {
|
it("should be able to run the action", async () => {
|
||||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
|
const res = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.UPDATE_ROW.stepId,
|
||||||
|
inputs
|
||||||
|
)
|
||||||
expect(res.success).toEqual(true)
|
expect(res.success).toEqual(true)
|
||||||
const updatedRow = await config.api.row.get(table._id!, res.id)
|
const updatedRow = await config.api.row.get(table._id!, res.id)
|
||||||
expect(updatedRow.name).toEqual("Updated name")
|
expect(updatedRow.name).toEqual("Updated name")
|
||||||
|
@ -42,12 +46,12 @@ describe("test the update row action", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should check invalid inputs return an error", async () => {
|
it("should check invalid inputs return an error", async () => {
|
||||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {})
|
const res = await setup.runStep(config, setup.actions.UPDATE_ROW.stepId, {})
|
||||||
expect(res.success).toEqual(false)
|
expect(res.success).toEqual(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return an error when table doesn't exist", async () => {
|
it("should return an error when table doesn't exist", async () => {
|
||||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
const res = await setup.runStep(config, setup.actions.UPDATE_ROW.stepId, {
|
||||||
row: { _id: "invalid" },
|
row: { _id: "invalid" },
|
||||||
rowId: "invalid",
|
rowId: "invalid",
|
||||||
})
|
})
|
||||||
|
@ -90,7 +94,10 @@ describe("test the update row action", () => {
|
||||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||||
|
|
||||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
let stepResp = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.UPDATE_ROW.stepId,
|
||||||
|
{
|
||||||
rowId: row._id,
|
rowId: row._id,
|
||||||
row: {
|
row: {
|
||||||
_id: row._id,
|
_id: row._id,
|
||||||
|
@ -99,7 +106,8 @@ describe("test the update row action", () => {
|
||||||
user1: [user2._id],
|
user1: [user2._id],
|
||||||
user2: "",
|
user2: "",
|
||||||
},
|
},
|
||||||
})
|
}
|
||||||
|
)
|
||||||
expect(stepResp.success).toEqual(true)
|
expect(stepResp.success).toEqual(true)
|
||||||
|
|
||||||
getResp = await config.api.row.get(table._id!, row._id!)
|
getResp = await config.api.row.get(table._id!, row._id!)
|
||||||
|
@ -143,7 +151,10 @@ describe("test the update row action", () => {
|
||||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||||
|
|
||||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
let stepResp = await setup.runStep(
|
||||||
|
config,
|
||||||
|
setup.actions.UPDATE_ROW.stepId,
|
||||||
|
{
|
||||||
rowId: row._id,
|
rowId: row._id,
|
||||||
row: {
|
row: {
|
||||||
_id: row._id,
|
_id: row._id,
|
||||||
|
@ -159,7 +170,8 @@ describe("test the update row action", () => {
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
}
|
||||||
|
)
|
||||||
expect(stepResp.success).toEqual(true)
|
expect(stepResp.success).toEqual(true)
|
||||||
|
|
||||||
getResp = await config.api.row.get(table._id!, row._id!)
|
getResp = await config.api.row.get(table._id!, row._id!)
|
||||||
|
|
|
@@ -1,22 +1,16 @@
-import TestConfig from "../../../tests/utilities/TestConfiguration"
+import TestConfiguration from "../../../tests/utilities/TestConfiguration"
 import { context } from "@budibase/backend-core"
 import { BUILTIN_ACTION_DEFINITIONS, getAction } from "../../actions"
 import emitter from "../../../events/index"
 import env from "../../../environment"
 import { AutomationActionStepId, Datasource } from "@budibase/types"
 import { Knex } from "knex"
-import { generator } from "@budibase/backend-core/tests"
-import {
-  getDatasource,
-  knexClient,
-  DatabaseName,
-} from "../../../integrations/tests/utils"
 
-let config: TestConfig
+let config: TestConfiguration
 
-export function getConfig(): TestConfig {
+export function getConfig(): TestConfiguration {
   if (!config) {
-    config = new TestConfig(true)
+    config = new TestConfiguration(true)
   }
   return config
 }
@@ -39,7 +33,12 @@ export async function runInProd(fn: any) {
   }
 }
 
-export async function runStep(stepId: string, inputs: any, stepContext?: any) {
+export async function runStep(
+  config: TestConfiguration,
+  stepId: string,
+  inputs: any,
+  stepContext?: any
+) {
   async function run() {
     let step = await getAction(stepId as AutomationActionStepId)
     expect(step).toBeDefined()
@@ -55,7 +54,7 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) {
       emitter,
     })
   }
-  if (config?.appId) {
+  if (config.appId) {
     return context.doInContext(config?.appId, async () => {
       return run()
     })
@@ -64,31 +63,8 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) {
   }
 }
 
-export async function createTestTable(client: Knex, schema: any) {
-  const tableName = generator.guid()
-  await client.schema.createTable(tableName, table => {
-    for (const fieldName in schema) {
-      const field = schema[fieldName]
-      if (field.type === "string") {
-        table.string(fieldName)
-      } else if (field.type === "number") {
-        table.integer(fieldName)
-      }
-    }
-  })
-  return tableName
-}
-
-export async function insertTestData(
-  client: Knex,
-  tableName: string,
-  rows: any[]
-) {
-  await client(tableName).insert(rows)
-}
-
 export async function saveTestQuery(
-  config: TestConfig,
+  config: TestConfiguration,
   client: Knex,
   tableName: string,
   datasource: Datasource
@@ -107,15 +83,5 @@ export async function saveTestQuery(
   })
 }
 
-export async function setupTestDatasource(
-  config: TestConfig,
-  dbName: DatabaseName
-) {
-  const db = await getDatasource(dbName)
-  const datasource = await config.api.datasource.create(db)
-  const client = await knexClient(db)
-  return { datasource, client }
-}
-
 export const apiKey = "test"
 export const actions = BUILTIN_ACTION_DEFINITIONS
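Every automation test in this commit follows from this signature change: runStep no longer reaches for a module-level config, so each call site passes its own TestConfiguration, which is where the appId used by context.doInContext comes from. A sketch of the new call shape, reusing the step and inputs already shown in the server-log test above:

import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"

describe("example", () => {
  const config = getConfig()

  beforeAll(async () => {
    await config.init()
  })
  afterAll(_afterAll)

  it("passes the config through to runStep", async () => {
    // config supplies the appId that runStep uses to enter the app context.
    const res = await runStep(config, actions.SERVER_LOG.stepId, {
      text: "log message",
    })
    expect(res.success).toEqual(true)
  })
})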
@ -16,7 +16,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
|
|
||||||
it("should be able to run the action", async () => {
|
it("should be able to run the action", async () => {
|
||||||
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
|
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
|
||||||
const res = await runStep(actions.zapier.stepId, {
|
const res = await runStep(config, actions.zapier.stepId, {
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
expect(res.response.foo).toEqual("bar")
|
expect(res.response.foo).toEqual("bar")
|
||||||
|
@ -38,7 +38,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
.post("/", { ...payload, platform: "budibase" })
|
.post("/", { ...payload, platform: "budibase" })
|
||||||
.reply(200, { foo: "bar" })
|
.reply(200, { foo: "bar" })
|
||||||
|
|
||||||
const res = await runStep(actions.zapier.stepId, {
|
const res = await runStep(config, actions.zapier.stepId, {
|
||||||
body: { value: JSON.stringify(payload) },
|
body: { value: JSON.stringify(payload) },
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
|
@ -47,7 +47,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return a 400 if the JSON payload string is malformed", async () => {
|
it("should return a 400 if the JSON payload string is malformed", async () => {
|
||||||
const res = await runStep(actions.zapier.stepId, {
|
const res = await runStep(config, actions.zapier.stepId, {
|
||||||
body: { value: "{ invalid json }" },
|
body: { value: "{ invalid json }" },
|
||||||
url: "http://www.example.com",
|
url: "http://www.example.com",
|
||||||
})
|
})
|
||||||
|
|
|
@@ -14,11 +14,10 @@ import {
   coreOutputProcessing,
   processFormulas,
 } from "../../utilities/rowProcessor"
-import { context, features } from "@budibase/backend-core"
+import { context } from "@budibase/backend-core"
 import {
   ContextUser,
   EventType,
-  FeatureFlag,
   FieldType,
   LinkDocumentValue,
   Row,
@@ -251,20 +250,14 @@ export async function squashLinks<T = Row[] | Row>(
   source: Table | ViewV2,
   enriched: T
 ): Promise<T> {
-  const allowRelationshipSchemas = await features.flags.isEnabled(
-    FeatureFlag.ENRICHED_RELATIONSHIPS
-  )
-
   let viewSchema: ViewV2Schema = {}
   if (sdk.views.isView(source)) {
     if (helpers.views.isCalculationView(source)) {
       return enriched
     }
 
-    if (allowRelationshipSchemas) {
-      viewSchema = source.schema || {}
-    }
+    viewSchema = source.schema || {}
   }
 
   let table: Table
   if (sdk.views.isView(source)) {
@@ -1,10 +1,5 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
-import {
-  DatabaseName,
-  getDatasource,
-  knexClient,
-} from "../integrations/tests/utils"
+import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
 import { Knex } from "knex"
@@ -15,31 +10,24 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }
 
-const config = setup.getConfig()!
-
-describe("mysql integrations", () => {
-  let datasource: Datasource
-  let client: Knex
-
-  beforeAll(async () => {
-    await config.init()
-    const rawDatasource = await getDatasource(DatabaseName.MYSQL)
-    datasource = await config.api.datasource.create(rawDatasource)
-    client = await knexClient(rawDatasource)
-  })
-
-  afterAll(config.end)
-
-  describe("Integration compatibility with mysql search_path", () => {
-    let datasource: Datasource
+datasourceDescribe(
+  {
+    name: "Integration compatibility with mysql search_path",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
     let rawDatasource: Datasource
+    let datasource: Datasource
     let client: Knex
 
     const database = generator.guid()
     const database2 = generator.guid()
 
     beforeAll(async () => {
-      rawDatasource = await getDatasource(DatabaseName.MYSQL)
-      client = await knexClient(rawDatasource)
+      const ds = await dsProvider()
+      rawDatasource = ds.rawDatasource!
+      datasource = ds.datasource!
+      client = ds.client!
 
       await client.raw(`CREATE DATABASE \`${database}\`;`)
       await client.raw(`CREATE DATABASE \`${database2}\`;`)
@@ -87,11 +75,25 @@ describe("mysql integrations", () => {
     const schema = res.datasource.entities![repeated_table_name].schema
     expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
     })
-  })
-
-  describe("POST /api/datasources/:datasourceId/schema", () => {
+  }
+)
+
+datasourceDescribe(
+  {
+    name: "POST /api/datasources/:datasourceId/schema",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      client = ds.client!
+    })
+
     let tableName: string
 
     beforeEach(async () => {
       tableName = uniqueTableName()
     })
@@ -122,5 +124,5 @@ describe("mysql integrations", () => {
     expect(table).toBeDefined()
     expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
     })
-  })
-})
+  }
+)
@@ -1,25 +1,23 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType, Table } from "@budibase/types"
 import _ from "lodash"
 import { generator } from "@budibase/backend-core/tests"
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
   knexClient,
 } from "../integrations/tests/utils"
 import { Knex } from "knex"
 
-const config = setup.getConfig()!
-
-describe("postgres integrations", () => {
+datasourceDescribe(
+  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
+  ({ config, dsProvider }) => {
     let datasource: Datasource
     let client: Knex
 
     beforeAll(async () => {
-      await config.init()
-      const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-      datasource = await config.api.datasource.create(rawDatasource)
-      client = await knexClient(rawDatasource)
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      client = ds.client!
     })
 
     afterAll(config.end)
@@ -89,80 +87,6 @@ describe("postgres integrations", () => {
       })
     })
 
-  describe("Integration compatibility with postgres search_path", () => {
-    let datasource: Datasource
-    let client: Knex
-    let schema1: string
-    let schema2: string
-
-    beforeEach(async () => {
-      schema1 = generator.guid().replaceAll("-", "")
-      schema2 = generator.guid().replaceAll("-", "")
-
-      const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-      client = await knexClient(rawDatasource)
-
-      await client.schema.createSchema(schema1)
-      await client.schema.createSchema(schema2)
-
-      rawDatasource.config!.schema = `${schema1}, ${schema2}`
-
-      client = await knexClient(rawDatasource)
-      datasource = await config.api.datasource.create(rawDatasource)
-    })
-
-    afterEach(async () => {
-      await client.schema.dropSchema(schema1, true)
-      await client.schema.dropSchema(schema2, true)
-    })
-
-    it("discovers tables from any schema in search path", async () => {
-      await client.schema.createTable(`${schema1}.table1`, table => {
-        table.increments("id1").primary()
-      })
-
-      await client.schema.createTable(`${schema2}.table2`, table => {
-        table.increments("id2").primary()
-      })
-
-      const response = await config.api.datasource.info(datasource)
-      expect(response.tableNames).toBeDefined()
-      expect(response.tableNames).toEqual(
-        expect.arrayContaining(["table1", "table2"])
-      )
-    })
-
-    it("does not mix columns from different tables", async () => {
-      const repeated_table_name = "table_same_name"
-
-      await client.schema.createTable(
-        `${schema1}.${repeated_table_name}`,
-        table => {
-          table.increments("id").primary()
-          table.string("val1")
-        }
-      )
-
-      await client.schema.createTable(
-        `${schema2}.${repeated_table_name}`,
-        table => {
-          table.increments("id2").primary()
-          table.string("val2")
-        }
-      )
-
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-        tablesFilter: [repeated_table_name],
-      })
-      expect(
-        response.datasource.entities?.[repeated_table_name].schema
-      ).toBeDefined()
-      const schema = response.datasource.entities?.[repeated_table_name].schema
-      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
-    })
-  })
-
   describe("check custom column types", () => {
     beforeAll(async () => {
       await client.schema.createTable("binaryTable", table => {
@@ -279,4 +203,88 @@ describe("postgres integrations", () => {
       expect(row.price).toBe("400.00")
     })
   })
-})
+  }
+)
+
+datasourceDescribe(
+  {
+    name: "Integration compatibility with postgres search_path",
+    only: [DatabaseName.POSTGRES],
+  },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+    let schema1: string
+    let schema2: string
+
+    beforeEach(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+      const rawDatasource = ds.rawDatasource!
+
+      schema1 = generator.guid().replaceAll("-", "")
+      schema2 = generator.guid().replaceAll("-", "")
+
+      client = await knexClient(rawDatasource)
+
+      await client.schema.createSchema(schema1)
+      await client.schema.createSchema(schema2)
+
+      rawDatasource.config!.schema = `${schema1}, ${schema2}`
+
+      client = await knexClient(rawDatasource)
+      datasource = await config.api.datasource.create(rawDatasource)
+    })
+
+    afterEach(async () => {
+      await client.schema.dropSchema(schema1, true)
+      await client.schema.dropSchema(schema2, true)
+    })
+
+    it("discovers tables from any schema in search path", async () => {
+      await client.schema.createTable(`${schema1}.table1`, table => {
+        table.increments("id1").primary()
+      })
+
+      await client.schema.createTable(`${schema2}.table2`, table => {
+        table.increments("id2").primary()
+      })
+
+      const response = await config.api.datasource.info(datasource)
+      expect(response.tableNames).toBeDefined()
+      expect(response.tableNames).toEqual(
+        expect.arrayContaining(["table1", "table2"])
+      )
+    })
+
+    it("does not mix columns from different tables", async () => {
+      const repeated_table_name = "table_same_name"
+
+      await client.schema.createTable(
+        `${schema1}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
+        }
+      )
+
+      await client.schema.createTable(
+        `${schema2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
+      )
+
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+        tablesFilter: [repeated_table_name],
+      })
+      expect(
+        response.datasource.entities?.[repeated_table_name].schema
+      ).toBeDefined()
+      const schema = response.datasource.entities?.[repeated_table_name].schema
+      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
+    })
+  }
+)
@@ -120,7 +120,7 @@ export async function getIntegration(integration: SourceName) {
       }
     }
   }
-  throw new Error("No datasource implementation found.")
+  throw new Error(`No datasource implementation found called: "${integration}"`)
 }
 
 export default {
@ -7,8 +7,10 @@ import * as mssql from "./mssql"
|
||||||
import * as mariadb from "./mariadb"
|
import * as mariadb from "./mariadb"
|
||||||
import * as oracle from "./oracle"
|
import * as oracle from "./oracle"
|
||||||
import { testContainerUtils } from "@budibase/backend-core/tests"
|
import { testContainerUtils } from "@budibase/backend-core/tests"
|
||||||
|
import { Knex } from "knex"
|
||||||
|
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
|
||||||
|
|
||||||
export type DatasourceProvider = () => Promise<Datasource>
|
export type DatasourceProvider = () => Promise<Datasource | undefined>
|
||||||
|
|
||||||
export const { startContainer } = testContainerUtils
|
export const { startContainer } = testContainerUtils
|
||||||
|
|
||||||
|
@ -19,6 +21,7 @@ export enum DatabaseName {
|
||||||
SQL_SERVER = "mssql",
|
SQL_SERVER = "mssql",
|
||||||
MARIADB = "mariadb",
|
MARIADB = "mariadb",
|
||||||
ORACLE = "oracle",
|
ORACLE = "oracle",
|
||||||
|
SQS = "sqs",
|
||||||
}
|
}
|
||||||
|
|
||||||
const providers: Record<DatabaseName, DatasourceProvider> = {
|
const providers: Record<DatabaseName, DatasourceProvider> = {
|
||||||
|
@ -28,30 +31,143 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
|
||||||
[DatabaseName.SQL_SERVER]: mssql.getDatasource,
|
[DatabaseName.SQL_SERVER]: mssql.getDatasource,
|
||||||
[DatabaseName.MARIADB]: mariadb.getDatasource,
|
[DatabaseName.MARIADB]: mariadb.getDatasource,
|
||||||
[DatabaseName.ORACLE]: oracle.getDatasource,
|
[DatabaseName.ORACLE]: oracle.getDatasource,
|
||||||
|
[DatabaseName.SQS]: async () => undefined,
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getDatasourceProviders(
|
export interface DatasourceDescribeOpts {
|
||||||
...sourceNames: DatabaseName[]
|
name: string
|
||||||
): Promise<Datasource>[] {
|
only?: DatabaseName[]
|
||||||
return sourceNames.map(sourceName => providers[sourceName]())
|
exclude?: DatabaseName[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getDatasourceProvider(
|
export interface DatasourceDescribeReturnPromise {
|
||||||
|
rawDatasource: Datasource | undefined
|
||||||
|
datasource: Datasource | undefined
|
||||||
|
client: Knex | undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DatasourceDescribeReturn {
|
||||||
|
name: DatabaseName
|
||||||
|
config: TestConfiguration
|
||||||
|
dsProvider: () => Promise<DatasourceDescribeReturnPromise>
|
||||||
|
isInternal: boolean
|
||||||
|
isExternal: boolean
|
||||||
|
isSql: boolean
|
||||||
|
isMySQL: boolean
|
||||||
|
isPostgres: boolean
|
||||||
|
isMongodb: boolean
|
||||||
|
isMSSQL: boolean
|
||||||
|
isOracle: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createDatasources(
|
||||||
|
config: TestConfiguration,
|
||||||
|
name: DatabaseName
|
||||||
|
): Promise<DatasourceDescribeReturnPromise> {
|
||||||
|
await config.init()
|
||||||
|
|
||||||
|
const rawDatasource = await getDatasource(name)
|
||||||
|
|
||||||
|
let datasource: Datasource | undefined
|
||||||
|
if (rawDatasource) {
|
||||||
|
datasource = await config.api.datasource.create(rawDatasource)
|
||||||
|
}
|
||||||
|
|
||||||
|
let client: Knex | undefined
|
||||||
|
if (rawDatasource) {
|
||||||
|
try {
|
||||||
|
client = await knexClient(rawDatasource)
|
||||||
|
} catch (e) {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
rawDatasource,
|
||||||
|
datasource,
|
||||||
|
client,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Jest doesn't allow test files to exist with no tests in them. When we run
|
||||||
|
// these tests in CI, we break them out by data source, and there are a bunch of
|
||||||
|
// test files that only run for a subset of data sources, and for the rest of
|
||||||
|
// them they will be empty test files. Defining a dummy test makes it so that
|
||||||
|
// Jest doesn't error in this situation.
|
||||||
|
function createDummyTest() {
|
||||||
|
describe("no tests", () => {
|
||||||
|
it("no tests", () => {
|
||||||
|
// no tests
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export function datasourceDescribe(
|
||||||
|
opts: DatasourceDescribeOpts,
|
||||||
|
cb: (args: DatasourceDescribeReturn) => void
|
||||||
|
) {
|
||||||
|
if (process.env.DATASOURCE === "none") {
|
||||||
|
createDummyTest()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const { name, only, exclude } = opts
|
||||||
|
|
||||||
|
if (only && exclude) {
|
||||||
|
throw new Error("you can only supply one of 'only' or 'exclude'")
|
||||||
|
}
|
||||||
|
|
||||||
|
let databases = Object.values(DatabaseName)
|
||||||
|
if (only) {
|
||||||
|
databases = only
|
||||||
|
} else if (exclude) {
|
||||||
|
databases = databases.filter(db => !exclude.includes(db))
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.DATASOURCE) {
|
||||||
|
databases = databases.filter(db => db === process.env.DATASOURCE)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (databases.length === 0) {
|
||||||
|
createDummyTest()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
describe.each(databases)(name, name => {
|
||||||
|
const config = new TestConfiguration()
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
config.end()
|
||||||
|
})
|
||||||
|
|
||||||
|
cb({
|
||||||
|
name,
|
||||||
|
config,
|
||||||
|
dsProvider: () => createDatasources(config, name),
|
||||||
|
isInternal: name === DatabaseName.SQS,
|
||||||
|
isExternal: name !== DatabaseName.SQS,
|
||||||
|
isSql: [
|
||||||
|
DatabaseName.MARIADB,
|
||||||
|
DatabaseName.MYSQL,
|
||||||
|
DatabaseName.POSTGRES,
|
||||||
|
DatabaseName.SQL_SERVER,
|
||||||
|
DatabaseName.ORACLE,
|
||||||
|
].includes(name),
|
||||||
|
isMySQL: name === DatabaseName.MYSQL,
|
||||||
|
isPostgres: name === DatabaseName.POSTGRES,
|
||||||
|
isMongodb: name === DatabaseName.MONGODB,
|
||||||
|
isMSSQL: name === DatabaseName.SQL_SERVER,
|
||||||
|
isOracle: name === DatabaseName.ORACLE,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDatasource(
|
||||||
sourceName: DatabaseName
|
sourceName: DatabaseName
|
||||||
): DatasourceProvider {
|
): Promise<Datasource | undefined> {
|
||||||
return providers[sourceName]
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
|
|
||||||
return providers[sourceName]()
|
return providers[sourceName]()
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getDatasources(
|
|
||||||
...sourceNames: DatabaseName[]
|
|
||||||
): Promise<Datasource[]> {
|
|
||||||
return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function knexClient(ds: Datasource) {
|
export async function knexClient(ds: Datasource) {
|
||||||
switch (ds.source) {
|
switch (ds.source) {
|
||||||
case SourceName.POSTGRES: {
|
case SourceName.POSTGRES: {
|
||||||
|
|
|
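The DATASOURCE environment variable is the hook the CI matrix keys on: each matrix leg exports one datasource name and every datasourceDescribe in that run narrows itself to it, while DATASOURCE=none (or an empty post-filter list) swaps in the dummy suite so Jest does not fail on files left with no tests. A sketch of the effect, assuming the helper above and a placeholder suite name:

// With DATASOURCE=postgres in the environment, this suite runs exactly once,
// against Postgres; with DATASOURCE=none it is replaced by the dummy test.
datasourceDescribe({ name: "narrowed suite" }, ({ name, isPostgres }) => {
  it("only sees the selected engine", () => {
    // Holds when the run was started with DATASOURCE=postgres.
    expect(isPostgres).toBe(true)
    expect(name).toEqual(DatabaseName.POSTGRES)
  })
})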
@@ -31,7 +31,7 @@ export async function getDatasource(): Promise<Datasource> {
     new GenericContainer(MARIADB_IMAGE)
       .withExposedPorts(3306)
       .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
-      .withWaitStrategy(new MariaDBWaitStrategy())
+      .withWaitStrategy(new MariaDBWaitStrategy().withStartupTimeout(20000))
   )
 }
 
@@ -18,7 +18,7 @@ export async function getDatasource(): Promise<Datasource> {
       .withWaitStrategy(
         Wait.forSuccessfulCommand(
           `mongosh --eval "db.version()"`
-        ).withStartupTimeout(10000)
+        ).withStartupTimeout(20000)
       )
   )
 }
@@ -24,7 +24,7 @@ export async function getDatasource(): Promise<Datasource> {
       .withWaitStrategy(
         Wait.forSuccessfulCommand(
           "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
-        )
+        ).withStartupTimeout(20000)
       )
   )
 }
@@ -34,7 +34,7 @@ export async function getDatasource(): Promise<Datasource> {
     new GenericContainer(MYSQL_IMAGE)
       .withExposedPorts(3306)
       .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
-      .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
+      .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(20000))
   )
 }
 
@ -23,7 +23,11 @@ export async function getDatasource(): Promise<Datasource> {
|
||||||
.withEnvironment({
|
.withEnvironment({
|
||||||
ORACLE_PASSWORD: password,
|
ORACLE_PASSWORD: password,
|
||||||
})
|
})
|
||||||
.withWaitStrategy(Wait.forLogMessage("DATABASE IS READY TO USE!"))
|
.withWaitStrategy(
|
||||||
|
Wait.forLogMessage("DATABASE IS READY TO USE!").withStartupTimeout(
|
||||||
|
20000
|
||||||
|
)
|
||||||
|
)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -16,7 +16,7 @@ export async function getDatasource(): Promise<Datasource> {
|
||||||
.withWaitStrategy(
|
.withWaitStrategy(
|
||||||
Wait.forSuccessfulCommand(
|
Wait.forSuccessfulCommand(
|
||||||
"pg_isready -h localhost -p 5432"
|
"pg_isready -h localhost -p 5432"
|
||||||
).withStartupTimeout(10000)
|
).withStartupTimeout(20000)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
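The six hunks above all make the same change: every container wait strategy now carries an explicit 20-second startup timeout instead of a mix of defaults and 10-second limits. The pattern in isolation, as a runnable sketch (image tag and credentials are placeholders):

import { GenericContainer, Wait } from "testcontainers"

async function startPostgres() {
  // Wait until pg_isready succeeds inside the container, giving up
  // (and failing the run) if it is not healthy within 20 seconds.
  return await new GenericContainer("postgres:16")
    .withExposedPorts(5432)
    .withEnvironment({ POSTGRES_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand("pg_isready -h localhost -p 5432")
        .withStartupTimeout(20000)
    )
    .start()
}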
@@ -6,7 +6,13 @@ import * as api from "./api"
 import * as automations from "./automations"
 import { Thread } from "./threads"
 import * as redis from "./utilities/redis"
-import { events, logging, middleware, timers } from "@budibase/backend-core"
+import {
+  events,
+  logging,
+  middleware,
+  timers,
+  env as coreEnv,
+} from "@budibase/backend-core"
 import destroyable from "server-destroy"
 import { userAgent } from "koa-useragent"

@@ -37,6 +43,9 @@ export default function createKoaApp() {
   app.use(middleware.correlation)
   app.use(middleware.pino)
   app.use(middleware.ip)
+  if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
+    app.use(middleware.csp)
+  }
   app.use(userAgent)
 
   const server = http.createServer(app.callback())
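Both of these hunks serve the CSP rollout: `middleware.csp` is mounted unless the `DISABLE_CONTENT_SECURITY_POLICY` kill switch is set. The real middleware lives in `@budibase/backend-core` and is not shown in this diff; a plausible shape, assuming a per-request nonce design (see the `Ctx` typing change further down), would be:

import crypto from "crypto"
import { Middleware } from "koa"

// Hypothetical sketch, not the actual backend-core implementation:
// issue a fresh nonce per request, expose it on ctx.state, and emit a
// matching Content-Security-Policy header.
export const csp: Middleware = async (ctx, next) => {
  const nonce = crypto.randomBytes(16).toString("base64")
  ctx.state.nonce = nonce
  ctx.set(
    "Content-Security-Policy",
    `script-src 'self' 'nonce-${nonce}'; object-src 'none'`
  )
  await next()
}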
@@ -1,11 +1,8 @@
 import {
   EmptyFilterOption,
-  FeatureFlag,
   LegacyFilter,
-  LogicalOperator,
   Row,
   RowSearchParams,
-  SearchFilterKey,
   SearchFilters,
   SearchResponse,
   SortOrder,

@@ -19,7 +16,6 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
 import { checkFilters, searchInputMapping } from "./search/utils"
-import { db, features } from "@budibase/backend-core"
 import tracer from "dd-trace"
 import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
 import { enrichSearchContext } from "../../../api/controllers/row/utils"

@@ -104,35 +100,6 @@ export async function search(
       }
       viewQuery = checkFilters(table, viewQuery)
-
-      const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
-      const supportsLogicalOperators =
-        isExternalTableID(view.tableId) || sqsEnabled
-
-      if (!supportsLogicalOperators) {
-        // In the unlikely event that a Grouped Filter is in a non-SQS environment
-        // It needs to be ignored entirely
-        let queryFilters: LegacyFilter[] = Array.isArray(view.query)
-          ? view.query
-          : []
-
-        const { filters } = dataFilters.splitFiltersArray(queryFilters)
-
-        // Extract existing fields
-        const existingFields = filters.map(filter =>
-          db.removeKeyNumbering(filter.field)
-        )
-
-        // Carry over filters for unused fields
-        Object.keys(options.query).forEach(key => {
-          const operator = key as Exclude<SearchFilterKey, LogicalOperator>
-          Object.keys(options.query[operator] || {}).forEach(field => {
-            if (!existingFields.includes(db.removeKeyNumbering(field))) {
-              viewQuery[operator]![field] = options.query[operator]![field]
-            }
-          })
-        })
-        options.query = viewQuery
-      } else {
       const conditions = viewQuery ? [viewQuery] : []
       options.query = {
         $and: {

@@ -143,7 +110,6 @@ export async function search(
         options.query.onEmptyFilter = viewQuery.onEmptyFilter
       }
     }
-  }
 
   options.query = dataFilters.cleanupQuery(options.query)
   options.query = dataFilters.fixupFilterArrays(options.query)

@@ -170,12 +136,9 @@ export async function search(
     if (isExternalTable) {
       span?.addTags({ searchType: "external" })
       result = await external.search(options, source)
-    } else if (await features.flags.isEnabled(FeatureFlag.SQS)) {
+    } else {
       span?.addTags({ searchType: "sqs" })
       result = await internal.sqs.search(options, source)
-    } else {
-      span?.addTags({ searchType: "lucene" })
-      result = await internal.lucene.search(options, source)
     }
 
     span.addTags({

@@ -1,3 +1,2 @@
 export * as sqs from "./sqs"
-export * as lucene from "./lucene"
 export * from "./internal"
@@ -1,79 +0,0 @@
-import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
-import { fullSearch, paginatedSearch } from "../utils"
-import { InternalTables } from "../../../../../db/utils"
-import {
-  Row,
-  RowSearchParams,
-  SearchResponse,
-  SortType,
-  Table,
-  User,
-  ViewV2,
-} from "@budibase/types"
-import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
-import { outputProcessing } from "../../../../../utilities/rowProcessor"
-import pick from "lodash/pick"
-import sdk from "../../../../"
-
-export async function search(
-  options: RowSearchParams,
-  source: Table | ViewV2
-): Promise<SearchResponse<Row>> {
-  let table: Table
-  if (sdk.views.isView(source)) {
-    table = await sdk.views.getTable(source.id)
-  } else {
-    table = source
-  }
-
-  const { paginate, query } = options
-
-  const params: RowSearchParams = {
-    tableId: options.tableId,
-    viewId: options.viewId,
-    sort: options.sort,
-    sortOrder: options.sortOrder,
-    sortType: options.sortType,
-    limit: options.limit,
-    bookmark: options.bookmark,
-    version: options.version,
-    disableEscaping: options.disableEscaping,
-    query: {},
-  }
-
-  if (params.sort && !params.sortType) {
-    const schema = table.schema
-    const sortField = schema[params.sort]
-    params.sortType =
-      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
-  }
-
-  let response
-  if (paginate) {
-    response = await paginatedSearch(query, params)
-  } else {
-    response = await fullSearch(query, params)
-  }
-
-  // Enrich search results with relationships
-  if (response.rows && response.rows.length) {
-    // enrich with global users if from users table
-    if (table._id === InternalTables.USER_METADATA) {
-      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
-    }
-
-    const visibleFields =
-      options.fields ||
-      Object.keys(source.schema || {}).filter(
-        key => source.schema?.[key].visible !== false
-      )
-    const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
-    response.rows = response.rows.map((r: any) => pick(r, allowedFields))
-
-    response.rows = await outputProcessing(source, response.rows, {
-      squash: true,
-    })
-  }
-
-  return response
-}
@@ -7,50 +7,27 @@ import {
   Table,
 } from "@budibase/types"
 
-import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
 } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"
 
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-describe.each([
-  ["lucene", undefined],
-  ["sqs", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("search sdk (%s)", (name, dsProvider) => {
-  const isSqs = name === "sqs"
-  const isLucene = name === "lucene"
-  const isInternal = isLucene || isSqs
-  const config = new TestConfiguration()
-
-  let envCleanup: (() => void) | undefined
+datasourceDescribe(
+  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, dsProvider, isInternal }) => {
   let datasource: Datasource | undefined
   let table: Table
 
   beforeAll(async () => {
-    await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
-      config.init()
-    )
-
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: isSqs,
-    })
-
-    if (dsProvider) {
-      datasource = await config.createDatasource({
-        datasource: await dsProvider,
-      })
-    }
+    const ds = await dsProvider()
+    datasource = ds.datasource
   })
 
   beforeEach(async () => {

@@ -105,9 +82,6 @@
 
   afterAll(async () => {
     config.end()
-    if (envCleanup) {
-      envCleanup()
-    }
   })
 
   it("querying by fields will always return data attribute columns", async () => {

@@ -211,7 +185,6 @@
     })
   })
 
-  !isLucene &&
     it.each([
       [["id", "name", "age"], 3],
      [["name", "age"], 10],

@@ -243,4 +216,5 @@
       })
     }
   )
-})
+  }
+)
@@ -1,4 +1,4 @@
-import { context, features } from "@budibase/backend-core"
+import { context } from "@budibase/backend-core"
 import { getTableParams } from "../../../db/utils"
 import {
   breakExternalTableId,

@@ -12,7 +12,6 @@ import {
   TableResponse,
   TableSourceType,
   TableViewsResponse,
-  FeatureFlag,
 } from "@budibase/types"
 import datasources from "../datasources"
 import sdk from "../../../sdk"

@@ -49,10 +48,7 @@ export async function processTable(table: Table): Promise<Table> {
     type: "table",
     sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
     sourceType: TableSourceType.INTERNAL,
+    sql: true,
   }
-  const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
-  if (sqsEnabled) {
-    processed.sql = true
-  }
   return processed
 }
@@ -0,0 +1,9 @@
+const { isDatasourceTest } = require(".")
+
+module.exports = paths => {
+  return {
+    filtered: paths
+      .filter(path => isDatasourceTest(path))
+      .map(path => ({ test: path })),
+  }
+}

@@ -0,0 +1,10 @@
+const fs = require("fs")
+
+function isDatasourceTest(path) {
+  const content = fs.readFileSync(path, "utf8")
+  return content.includes("datasourceDescribe(")
+}
+
+module.exports = {
+  isDatasourceTest,
+}

@@ -0,0 +1,9 @@
+const { isDatasourceTest } = require(".")
+
+module.exports = paths => {
+  return {
+    filtered: paths
+      .filter(path => !isDatasourceTest(path))
+      .map(path => ({ test: path })),
+  }
+}
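These three new files implement jest's `--filter` contract: jest hands the module the full list of collected test paths and runs only what comes back as `{ filtered: [{ test: path }] }`. Whether a file counts as a datasource test is decided by grepping its contents for a `datasourceDescribe(` call, which is what lets CI shard the server suite per database. A conceptual sketch of the contract (paths are made up):

// What jest does, conceptually, when invoked with --filter=<module>:
const filter = require("./datasource-tests")

const { filtered } = filter([
  "/repo/src/api/routes/tests/search.spec.ts", // contains datasourceDescribe(
  "/repo/src/utilities/tests/dates.spec.ts", // does not
])

// Only the first path survives the datasource filter:
// [{ test: "/repo/src/api/routes/tests/search.spec.ts" }]
console.log(filtered)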
@@ -3,7 +3,6 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
 import {
   cache,
   context,
-  features,
   HTTPError,
   objectStore,
   utils,

@@ -19,7 +18,6 @@ import {
   Table,
   User,
   ViewV2,
-  FeatureFlag,
 } from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {

@@ -423,7 +421,6 @@ export async function coreOutputProcessing(
 
   // remove null properties to match internal API
   const isExternal = isExternalTableID(table._id!)
-  if (isExternal || (await features.flags.isEnabled(FeatureFlag.SQS))) {
   for (const row of rows) {
     for (const key of Object.keys(row)) {
       if (row[key] === null) {

@@ -465,7 +462,6 @@ export async function coreOutputProcessing(
       }
     }
   }
-  }
 
   if (!isUserMetadataTable(table._id!)) {
     const protectedColumns = isExternal
@@ -8,7 +8,7 @@ import {
 } from "@budibase/types"
 import { outputProcessing } from ".."
 import { generator, structures } from "@budibase/backend-core/tests"
-import { features } from "@budibase/backend-core"
 import * as bbReferenceProcessor from "../bbReferenceProcessor"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"

@@ -21,7 +21,6 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
 
 describe("rowProcessor - outputProcessing", () => {
   const config = new TestConfiguration()
-  let cleanupFlags: () => void = () => {}
 
   beforeAll(async () => {
     await config.init()

@@ -33,11 +32,6 @@ describe("rowProcessor - outputProcessing", () => {
 
   beforeEach(() => {
     jest.resetAllMocks()
-    cleanupFlags = features.testutils.setFeatureFlags("*", { SQS: true })
-  })
-
-  afterEach(() => {
-    cleanupFlags()
   })
 
   const processOutputBBReferenceMock =
@@ -527,7 +527,12 @@ export function search<T extends Record<string, any>>(
 ): SearchResponse<T> {
   let result = runQuery(docs, query.query)
   if (query.sort) {
-    result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+    result = sort(
+      result,
+      query.sort,
+      query.sortOrder || SortOrder.ASCENDING,
+      query.sortType
+    )
   }
   const totalRows = result.length
   if (query.limit) {
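The extra `sortType` argument matters because an in-memory sort otherwise has no way to distinguish numeric from lexicographic ordering. A one-liner illustration of the failure mode it addresses:

const ages = ["9", "10", "1"]

// Lexicographic, what you get with no sort type:
console.log([...ages].sort()) // [ '1', '10', '9' ]

// Numeric, what a NUMBER sort type should select:
console.log([...ages].sort((a, b) => Number(a) - Number(b))) // [ '1', '9', '10' ]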
@@ -48,7 +48,7 @@ export function validate(
   cronExpression: string
 ): { valid: false; err: string[] } | { valid: true } {
   const result = cronValidate(cronExpression, {
-    preset: "npm-node-cron",
+    preset: "npm-cron-schedule",
     override: {
       useSeconds: false,
     },
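`cron-validate` presets encode which field counts and value ranges a given scheduler accepts, so switching presets changes which expressions pass validation; here it moves to the `npm-cron-schedule` preset. A standalone check, assuming the `cron-validate` package API:

import cronValidate from "cron-validate"

const result = cronValidate("0 */2 * * *", {
  preset: "npm-cron-schedule",
  override: { useSeconds: false },
})

if (result.isValid()) {
  console.log("cron expression accepted")
} else {
  console.log(result.getError()) // array of human-readable reasons
}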
@@ -12,7 +12,6 @@ import type PouchDB from "pouchdb-find"
 
 export enum SearchIndex {
   ROWS = "rows",
-  AUDIT = "audit",
   USER = "user",
 }

@@ -2,10 +2,9 @@ export enum FeatureFlag {
   PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
   PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
   AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
-  SQS = "SQS",
   AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
   DEFAULT_VALUES = "DEFAULT_VALUES",
-  ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",
   BUDIBASE_AI = "BUDIBASE_AI",
 }
@@ -48,6 +48,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
   request: BBRequest<RequestBody>
   body: ResponseBody
   userAgent: UserAgentContext["userAgent"]
+  state: { nonce?: string }
 }
 
 /**

@@ -56,6 +57,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
 export interface UserCtx<RequestBody = any, ResponseBody = any>
   extends Ctx<RequestBody, ResponseBody> {
   user: ContextUser
+  state: { nonce?: string }
   roleId?: string
   eventEmitter?: ContextEmitter
   loginMethod?: LoginMethod
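Typing `state: { nonce?: string }` on `Ctx` and `UserCtx` lets handlers read the CSP nonce set by the middleware without casting. A hypothetical consumer:

import { Ctx } from "@budibase/types"

// Hypothetical handler: inject the per-request nonce into inline markup
// so it satisfies a script-src 'nonce-...' policy.
export async function serveIndex(ctx: Ctx) {
  const nonce = ctx.state.nonce ?? ""
  ctx.body = `<script nonce="${nonce}">window.__APP__ = true</script>`
}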
@@ -1,6 +1,6 @@
-import { Ctx, MaintenanceType, FeatureFlag } from "@budibase/types"
+import { Ctx, MaintenanceType } from "@budibase/types"
 import env from "../../../environment"
-import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
+import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
 import nodeFetch from "node-fetch"
 import { helpers } from "@budibase/shared-core"

@@ -35,10 +35,7 @@ async function isSqsAvailable() {
 }
 
 async function isSqsMissing() {
-  return (
-    (await features.flags.isEnabled(FeatureFlag.SQS)) &&
-    !(await isSqsAvailable())
-  )
+  return !(await isSqsAvailable())
 }
 
 export const fetch = async (ctx: Ctx) => {

@@ -1,5 +1,5 @@
 import { mocks, structures } from "@budibase/backend-core/tests"
-import { context, events, features } from "@budibase/backend-core"
+import { context, events } from "@budibase/backend-core"
 import { Event, IdentityType } from "@budibase/types"
 import { TestConfiguration } from "../../../../tests"

@@ -12,19 +12,14 @@ const BASE_IDENTITY = {
 const USER_AUDIT_LOG_COUNT = 3
 const APP_ID = "app_1"
 
-describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
+describe("/api/global/auditlogs (%s)", () => {
   const config = new TestConfiguration()
-  let envCleanup: (() => void) | undefined
 
   beforeAll(async () => {
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: method === "sql",
-    })
     await config.beforeAll()
   })
 
   afterAll(async () => {
-    envCleanup?.()
     await config.afterAll()
   })
 
@@ -56,6 +56,9 @@ app.use(koaSession(app))
 app.use(middleware.correlation)
 app.use(middleware.pino)
 app.use(middleware.ip)
+if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
+  app.use(middleware.csp)
+}
 app.use(userAgent)
 
 // authentication