Merge branch 'master' into feature/deprecate-table-component
commit 41b3243ace
@@ -36,12 +36,14 @@
"files": ["**/*.ts"],
"excludedFiles": ["qa-core/**"],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended"],
"globals": {
"NodeJS": true
},
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"local-rules/no-budibase-imports": "error"
}
},
@@ -49,7 +51,7 @@
"files": ["**/*.spec.ts"],
"excludedFiles": ["qa-core/**"],
"parser": "@typescript-eslint/parser",
"plugins": ["jest"],
"plugins": ["jest", "@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:jest/recommended"],
"env": {
"jest/globals": true
@@ -59,6 +61,7 @@
},
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"local-rules/no-test-com": "error",
"local-rules/email-domain-example-com": "error",
"no-console": "warn",
@@ -1,5 +1,5 @@
{
"version": "2.22.5",
"version": "2.22.7",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -26,6 +26,7 @@
"svelte": "^4.2.10",
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.2.2",
"typescript-eslint": "^7.3.1",
"yargs": "^17.7.2"
},
"scripts": {
@@ -1 +1 @@
Subproject commit 6465dc9c2a38e1380b32204cad4ae0c1f33e065a
Subproject commit f5b467b6b1c55c48847545db41be7b1c035e167a
@@ -129,7 +129,7 @@ export default class BaseCache {
}
}

async bustCache(key: string, opts = { client: null }) {
async bustCache(key: string) {
const client = await this.getClient()
try {
await client.delete(generateTenantKey(key))
@@ -1,5 +1,5 @@
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
import { Duration } from "../utils"
import env from "../environment"
import { getTenantId } from "../context"
import * as redis from "../redis/init"
@@ -8,7 +8,7 @@ const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null

interface CacheItem<T extends Document> {
doc: any
doc: T
lastWrite: number
}
@@ -10,10 +10,6 @@ interface SearchResponse<T> {
totalRows: number
}

interface PaginatedSearchResponse<T> extends SearchResponse<T> {
hasNextPage: boolean
}

export type SearchParams<T> = {
tableId?: string
sort?: string
@@ -17,13 +17,8 @@ export function init(processors: ProcessorMap) {
// if not processing in this instance, kick it off
if (!processingPromise) {
processingPromise = asyncEventQueue.process(async job => {
const { event, identity, properties, timestamp } = job.data
await documentProcessor.processEvent(
event,
identity,
properties,
timestamp
)
const { event, identity, properties } = job.data
await documentProcessor.processEvent(event, identity, properties)
})
}
}
@@ -1,7 +1,6 @@
import {
Event,
Identity,
Group,
IdentityType,
AuditLogQueueEvent,
AuditLogFn,
@@ -79,11 +78,11 @@ export default class AuditLogsProcessor implements EventProcessor {
}
}

async identify(identity: Identity, timestamp?: string | number) {
async identify() {
// no-op
}

async identifyGroup(group: Group, timestamp?: string | number) {
async identifyGroup() {
// no-op
}
@@ -8,8 +8,7 @@ export default class LoggingProcessor implements EventProcessor {
async processEvent(
event: Event,
identity: Identity,
properties: any,
timestamp?: string
properties: any
): Promise<void> {
if (skipLogging) {
return
@@ -17,14 +16,14 @@ export default class LoggingProcessor implements EventProcessor {
console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)
}

async identify(identity: Identity, timestamp?: string | number) {
async identify(identity: Identity) {
if (skipLogging) {
return
}
console.log(`[audit] identified`, identity)
}

async identifyGroup(group: Group, timestamp?: string | number) {
async identifyGroup(group: Group) {
if (skipLogging) {
return
}
@@ -14,12 +14,7 @@ export default class DocumentUpdateProcessor implements EventProcessor {
this.processors = processors
}

async processEvent(
event: Event,
identity: Identity,
properties: any,
timestamp?: string | number
) {
async processEvent(event: Event, identity: Identity, properties: any) {
const tenantId = identity.realTenantId
const docId = getDocumentId(event, properties)
if (!tenantId || !docId) {
@@ -28,7 +28,7 @@ export const buildMatcherRegex = (
}

export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
return options.find(({ regex, method, route }) => {
return options.find(({ regex, method }) => {
const urlMatch = regex.test(ctx.request.url)
const methodMatch =
method === "ALL"
@@ -3,7 +3,7 @@ import { Cookie } from "../../../constants"
import * as configs from "../../../configs"
import * as cache from "../../../cache"
import * as utils from "../../../utils"
import { UserCtx, SSOProfile, DatasourceAuthCookie } from "@budibase/types"
import { UserCtx, SSOProfile } from "@budibase/types"
import { ssoSaveUserNoOp } from "../sso/sso"

const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
@@ -5,7 +5,6 @@ import * as context from "../../../context"
import fetch from "node-fetch"
import {
SaveSSOUserFunction,
SaveUserOpts,
SSOAuthDetails,
SSOUser,
User,
@@ -14,10 +13,8 @@ import {
// no-op function for user save
// - this allows datasource auth and access token refresh to work correctly
// - prefer no-op over an optional argument to ensure function is provided to login flows
export const ssoSaveUserNoOp: SaveSSOUserFunction = (
user: SSOUser,
opts: SaveUserOpts
) => Promise.resolve(user)
export const ssoSaveUserNoOp: SaveSSOUserFunction = (user: SSOUser) =>
Promise.resolve(user)

/**
* Common authentication logic for third parties. e.g. OAuth, OIDC.
@@ -45,10 +45,6 @@ export const runMigration = async (
options: MigrationOptions = {}
) => {
const migrationType = migration.type
let tenantId: string | undefined
if (migrationType !== MigrationType.INSTALLATION) {
tenantId = context.getTenantId()
}
const migrationName = migration.name
const silent = migration.silent
@@ -126,7 +126,7 @@ describe("app", () => {

it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
await testEnv.withTenant(() => {
const url = getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
@@ -136,7 +136,7 @@ describe("app", () => {

it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
await testEnv.withTenant(() => {
const url = getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
@@ -146,7 +146,7 @@ describe("app", () => {

it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
await testEnv.withTenant(() => {
const url = getAppFileUrl()
// omit rest of signed params
expect(
@@ -3,7 +3,7 @@ import { DBTestConfiguration } from "../../../tests/extra"
import * as tenants from "../tenants"

describe("tenants", () => {
const config = new DBTestConfiguration()
new DBTestConfiguration()

describe("addTenant", () => {
it("concurrently adds multiple tenants safely", async () => {
@@ -166,7 +166,7 @@ class InMemoryQueue implements Partial<Queue> {
return []
}

// eslint-disable-next-line no-unused-vars
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async removeJobs(pattern: string) {
// no-op
}
@@ -132,7 +132,7 @@ function logging(queue: Queue, jobQueue: JobQueue) {
// A Job is waiting to be processed as soon as a worker is idling.
console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))
})
.on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {
.on(BullEvent.ACTIVE, async (job: Job) => {
// A job has started. You can use `jobPromise.cancel()`` to abort it.
await doInJobContext(job, () => {
console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))
@@ -40,6 +40,7 @@ export async function shutdown() {
if (inviteClient) await inviteClient.finish()
if (passwordResetClient) await passwordResetClient.finish()
if (socketClient) await socketClient.finish()
if (docWritethroughClient) await docWritethroughClient.finish()
}

process.on("exit", async () => {
@@ -120,7 +120,7 @@ describe("redis", () => {

await redis.bulkStore(data, ttl)

for (const [key, value] of Object.entries(data)) {
for (const key of Object.keys(data)) {
expect(await redis.get(key)).toBe(null)
}
@@ -45,7 +45,7 @@ describe("Users", () => {
...{ _id: groupId, roles: { app1: "ADMIN" } },
}
const users: User[] = []
for (const _ of Array.from({ length: usersInGroup })) {
for (let i = 0; i < usersInGroup; i++) {
const userId = `us_${generator.guid()}`
const user: User = structures.users.user({
_id: userId,
@@ -39,19 +39,23 @@ const handleClick = event => {
return
}

if (handler.allowedType && event.type !== handler.allowedType) {
return
}

handler.callback?.(event)
})
}
document.documentElement.addEventListener("click", handleClick, true)
document.documentElement.addEventListener("contextmenu", handleClick, true)
document.documentElement.addEventListener("mousedown", handleClick, true)

/**
* Adds or updates a click handler
*/
const updateHandler = (id, element, anchor, callback) => {
const updateHandler = (id, element, anchor, callback, allowedType) => {
let existingHandler = clickHandlers.find(x => x.id === id)
if (!existingHandler) {
clickHandlers.push({ id, element, anchor, callback })
clickHandlers.push({ id, element, anchor, callback, allowedType })
} else {
existingHandler.callback = callback
}
@@ -77,7 +81,8 @@ export default (element, opts) => {
const update = newOpts => {
const callback = newOpts?.callback || newOpts
const anchor = newOpts?.anchor || element
updateHandler(id, element, anchor, callback)
const allowedType = newOpts?.allowedType || "click"
updateHandler(id, element, anchor, callback, allowedType)
}
update(opts)
return {
@@ -28,7 +28,6 @@
let deleteTableName

$: externalTable = table?.sourceType === DB_TYPE_EXTERNAL
$: allowDeletion = !externalTable || table?.created

function showDeleteModal() {
templateScreens = $screenStore.screens.filter(
@@ -56,7 +55,7 @@
$goto(`./datasource/${table.datasourceId}`)
}
} catch (error) {
notifications.error("Error deleting table")
notifications.error(`Error deleting table - ${error.message}`)
}
}
@@ -86,17 +85,15 @@
}
</script>

{#if allowDeletion}
<ActionMenu>
<div slot="control" class="icon">
<Icon s hoverable name="MoreSmallList" />
</div>
{#if !externalTable}
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
</ActionMenu>
{/if}
<ActionMenu>
<div slot="control" class="icon">
<Icon s hoverable name="MoreSmallList" />
</div>
{#if !externalTable}
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
</ActionMenu>

<Modal bind:this={editorModal} on:show={initForm}>
<ModalContent
@@ -129,10 +129,7 @@
filteredUsers = $usersFetch.rows
.filter(user => user.email !== $auth.user.email)
.map(user => {
const isAdminOrGlobalBuilder = sdk.users.isAdminOrGlobalBuilder(
user,
prodAppId
)
const isAdminOrGlobalBuilder = sdk.users.isAdminOrGlobalBuilder(user)
const isAppBuilder = user.builder?.apps?.includes(prodAppId)
let role
if (isAdminOrGlobalBuilder) {
@@ -291,7 +291,10 @@
<div
id="side-panel-container"
class:open={$sidePanelStore.open}
use:clickOutside={autoCloseSidePanel ? sidePanelStore.actions.close : null}
use:clickOutside={{
callback: autoCloseSidePanel ? sidePanelStore.actions.close : null,
allowedType: "mousedown",
}}
class:builder={$builderStore.inBuilder}
>
<div class="side-panel-header">
@@ -1,5 +1,6 @@
<script>
import { getContext } from "svelte"
import { get } from "svelte/store"
import { generate } from "shortid"
import Block from "components/Block.svelte"
import BlockComponent from "components/BlockComponent.svelte"
@@ -33,8 +34,9 @@
export let sidePanelDeleteLabel
export let notificationOverride

const { fetchDatasourceSchema, API } = getContext("sdk")
const { fetchDatasourceSchema, API, generateGoldenSample } = getContext("sdk")
const component = getContext("component")
const context = getContext("context")
const stateKey = `ID_${generate()}`

let formId
@@ -48,20 +50,6 @@
let schemaLoaded = false

$: deleteLabel = setDeleteLabel(sidePanelDeleteLabel, sidePanelShowDelete)

const setDeleteLabel = sidePanelDeleteLabel => {
// Accommodate old config to ensure delete button does not reappear
let labelText = sidePanelShowDelete === false ? "" : sidePanelDeleteLabel

// Empty text is considered hidden.
if (labelText?.trim() === "") {
return ""
}

// Default to "Delete" if the value is unset
return labelText || "Delete"
}

$: isDSPlus = dataSource?.type === "table" || dataSource?.type === "viewV2"
$: fetchSchema(dataSource)
$: enrichSearchColumns(searchColumns, schema).then(
@@ -105,6 +93,30 @@
},
]

// Provide additional data context for live binding eval
export const getAdditionalDataContext = () => {
const rows = get(context)[dataProviderId]?.rows
const goldenRow = generateGoldenSample(rows)
return {
eventContext: {
row: goldenRow,
},
}
}

const setDeleteLabel = sidePanelDeleteLabel => {
// Accommodate old config to ensure delete button does not reappear
let labelText = sidePanelShowDelete === false ? "" : sidePanelDeleteLabel

// Empty text is considered hidden.
if (labelText?.trim() === "") {
return ""
}

// Default to "Delete" if the value is unset
return labelText || "Delete"
}

// Load the datasource schema so we can determine column types
const fetchSchema = async dataSource => {
if (dataSource?.type === "table") {
@@ -40,16 +40,18 @@
}
}

// Handle certain key presses regardless of selection state
if (e.key === "Enter" && (e.ctrlKey || e.metaKey) && $config.canAddRows) {
e.preventDefault()
dispatch("add-row-inline")
return
}

// If nothing selected avoid processing further key presses
if (!$focusedCellId) {
if (e.key === "Tab" || e.key?.startsWith("Arrow")) {
e.preventDefault()
focusFirstCell()
} else if (e.key === "Enter" && (e.ctrlKey || e.metaKey)) {
if ($config.canAddRows) {
e.preventDefault()
dispatch("add-row-inline")
}
} else if (e.key === "Delete" || e.key === "Backspace") {
if (Object.keys($selectedRows).length && $config.canDeleteRows) {
dispatch("request-bulk-delete")
@@ -1 +1 @@
Subproject commit 8baf8586ec078951230c8466d5f13f9b6d5ed055
Subproject commit dd748e045ffdbc6662c5d2b76075f01d65a96a2f
@@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FirebaseMock {
const firebase: any = {}
@@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module SendgridMock {
class Email {
constructor() {
@@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module ArangoMock {
const arangodb: any = {}
@@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module MongoMock {
const mongodb: any = {}
@@ -1,17 +0,0 @@
module.exports = {
ConnectionPool: jest.fn(() => ({
connect: jest.fn(() => ({
request: jest.fn(() => ({
query: jest.fn(sql => ({ recordset: [sql] })),
})),
})),
})),
query: jest.fn(() => ({
recordset: [
{
a: "string",
b: 1,
},
],
})),
}
@@ -1,11 +0,0 @@
const client = {
connect: jest.fn(),
query: jest.fn((query, bindings, fn) => {
fn(null, [])
}),
}

module.exports = {
createConnection: jest.fn(() => client),
client,
}
@@ -1,17 +0,0 @@
module MySQLMock {
const mysql: any = {}

const client = {
connect: jest.fn(),
end: jest.fn(),
query: jest.fn(async () => {
return [[]]
}),
}

mysql.createConnection = jest.fn(async () => {
return client
})

module.exports = mysql
}
@@ -1,6 +1,7 @@
// @ts-ignore
import fs from "fs"

// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FetchMock {
// @ts-ignore
const fetch = jest.requireActual("node-fetch")
@@ -26,7 +26,6 @@ import {
env as envCore,
ErrorCode,
events,
HTTPError,
migrations,
objectStore,
roles,
@@ -116,7 +116,7 @@ export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
target: prodDb.name,
})
await replication.replicate({
filter: (doc: any, params: any) => {
filter: (doc: any) => {
return doc._id && doc._id.startsWith("role_")
},
})
@@ -7,13 +7,11 @@ import {
FilterType,
IncludeRelationship,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation,
PaginationJson,
RelationshipFieldMetadata,
RelationshipsJson,
RelationshipType,
Row,
SearchFilters,
SortJson,
@@ -1,4 +1,3 @@
import { quotas } from "@budibase/pro"
import {
UserCtx,
ViewV2,
@@ -61,9 +61,6 @@ export async function destroy(ctx: UserCtx) {
const tableToDelete: TableRequest = await sdk.tables.getTable(
ctx.params.tableId
)
if (!tableToDelete || !tableToDelete.created) {
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
}
const datasourceId = getDatasourceId(tableToDelete)
try {
const { datasource, table } = await sdk.tables.external.destroy(
@@ -1,6 +1,6 @@
import { generateUserFlagID, InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users"
import { cache, context } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import {
ContextUserMetadata,
Ctx,
@@ -24,7 +24,7 @@ async function parseSchema(view: CreateViewRequest) {
icon: schemaValue.icon,
}
Object.entries(fieldSchema)
.filter(([_, val]) => val === undefined)
.filter(([, val]) => val === undefined)
.forEach(([key]) => {
delete fieldSchema[key as keyof UIFieldMetadata]
})
@@ -33,7 +33,6 @@ export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"

const appBackupRoutes = pro.appBackups
const scheduleRoutes = pro.schedules
const environmentVariableRoutes = pro.environmentVariables

export const mainRoutes: Router[] = [
@@ -65,7 +64,6 @@ export const mainRoutes: Router[] = [
pluginRoutes,
opsRoutes,
debugRoutes,
scheduleRoutes,
environmentVariableRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
@@ -16,7 +16,7 @@ describe("/applications/:appId/import", () => {

it("should be able to perform import", async () => {
const appId = config.getAppId()
const res = await request
await request
.post(`/api/applications/${appId}/import`)
.field("encryptionPassword", PASSWORD)
.attach("appExport", path.join(__dirname, "assets", "export.tar.gz"))
@@ -2,7 +2,6 @@ import * as setup from "./utilities"
import { roles, db as dbCore } from "@budibase/backend-core"

describe("/api/applications/:appId/sync", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let app
@@ -369,7 +369,7 @@ describe("/applications", () => {
})

it("should reject with a known name", async () => {
const resp = await config.api.application.duplicateApp(
await config.api.application.duplicateApp(
app.appId,
{
name: app.name,
@@ -381,7 +381,7 @@ describe("/applications", () => {
})

it("should reject with a known url", async () => {
const resp = await config.api.application.duplicateApp(
await config.api.application.duplicateApp(
app.appId,
{
name: "this is fine",
@@ -1,13 +1,5 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")

import os from "os"

jest.mock("process", () => ({
arch: "arm64",
version: "v14.20.1",
platform: "darwin",
}))
import * as setup from "./utilities"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"

describe("/component", () => {
let request = setup.getRequest()
@@ -17,21 +9,6 @@ describe("/component", () => {

beforeAll(async () => {
await config.init()
os.cpus = () => [
{
model: "test",
speed: 12323,
times: {
user: 0,
nice: 0,
sys: 0,
idle: 0,
irq: 0,
},
},
]
os.uptime = () => 123123123123
os.totalmem = () => 10000000000
})

describe("/api/debug", () => {
@@ -43,14 +20,16 @@ describe("/component", () => {
.expect(200)
expect(res.body).toEqual({
budibaseVersion: "0.0.0+jest",
cpuArch: "arm64",
cpuCores: 1,
cpuInfo: "test",
cpuArch: expect.any(String),
cpuCores: expect.any(Number),
cpuInfo: expect.any(String),
hosting: "docker-compose",
nodeVersion: "v14.20.1",
platform: "darwin",
totalMemory: "9.313225746154785GB",
uptime: "1425036 day(s), 3 hour(s), 32 minute(s)",
nodeVersion: expect.stringMatching(/^v\d+\.\d+\.\d+$/),
platform: expect.any(String),
totalMemory: expect.stringMatching(/^[0-9\\.]+GB$/),
uptime: expect.stringMatching(
/^\d+ day\(s\), \d+ hour\(s\), \d+ minute\(s\)$/
),
})
})
@@ -156,7 +156,7 @@ describe("/permission", () => {
level: PermissionLevel.READ,
})

const response = await config.api.permission.revoke(
await config.api.permission.revoke(
{
roleId: STD_ROLE_ID,
resourceId: table._id,
@@ -0,0 +1,401 @@
|
|||
import { Datasource, Query, SourceName } from "@budibase/types"
|
||||
import * as setup from "../utilities"
|
||||
import { databaseTestProviders } from "../../../../integrations/tests/utils"
|
||||
import pg from "pg"
|
||||
import mysql from "mysql2/promise"
|
||||
import mssql from "mssql"
|
||||
|
||||
jest.unmock("pg")
|
||||
|
||||
const createTableSQL: Record<string, string> = {
|
||||
[SourceName.POSTGRES]: `
|
||||
CREATE TABLE test_table (
|
||||
id serial PRIMARY KEY,
|
||||
name VARCHAR ( 50 ) NOT NULL,
|
||||
birthday TIMESTAMP
|
||||
);`,
|
||||
[SourceName.MYSQL]: `
|
||||
CREATE TABLE test_table (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL,
|
||||
birthday TIMESTAMP
|
||||
);`,
|
||||
[SourceName.SQL_SERVER]: `
|
||||
CREATE TABLE test_table (
|
||||
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||
name NVARCHAR(50) NOT NULL,
|
||||
birthday DATETIME
|
||||
);`,
|
||||
}
|
||||
|
||||
const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
|
||||
const dropTableSQL = `DROP TABLE test_table;`
|
||||
|
||||
describe.each([
|
||||
["postgres", databaseTestProviders.postgres],
|
||||
["mysql", databaseTestProviders.mysql],
|
||||
["mssql", databaseTestProviders.mssql],
|
||||
["mariadb", databaseTestProviders.mariadb],
|
||||
])("queries (%s)", (__, dsProvider) => {
|
||||
const config = setup.getConfig()
|
||||
let datasource: Datasource
|
||||
|
||||
async function createQuery(query: Partial<Query>): Promise<Query> {
|
||||
const defaultQuery: Query = {
|
||||
datasourceId: datasource._id!,
|
||||
name: "New Query",
|
||||
parameters: [],
|
||||
fields: {},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.create({ ...defaultQuery, ...query })
|
||||
}
|
||||
|
||||
async function rawQuery(sql: string): Promise<any> {
|
||||
// We re-fetch the datasource here because the one returned by
|
||||
// config.api.datasource.create has the password field blanked out, and we
|
||||
// need the password to connect to the database.
|
||||
const ds = await dsProvider.datasource()
|
||||
switch (ds.source) {
|
||||
case SourceName.POSTGRES: {
|
||||
const client = new pg.Client(ds.config!)
|
||||
await client.connect()
|
||||
try {
|
||||
const { rows } = await client.query(sql)
|
||||
return rows
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
}
|
||||
case SourceName.MYSQL: {
|
||||
const con = await mysql.createConnection(ds.config!)
|
||||
try {
|
||||
const [rows] = await con.query(sql)
|
||||
return rows
|
||||
} finally {
|
||||
con.end()
|
||||
}
|
||||
}
|
||||
case SourceName.SQL_SERVER: {
|
||||
const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
|
||||
const client = await pool.connect()
|
||||
try {
|
||||
const { recordset } = await client.query(sql)
|
||||
return recordset
|
||||
} finally {
|
||||
await pool.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
datasource = await config.api.datasource.create(
|
||||
await dsProvider.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await rawQuery(createTableSQL[datasource.source])
|
||||
await rawQuery(insertSQL)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await rawQuery(dropTableSQL)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await dsProvider.stop()
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
describe("create", () => {
|
||||
it("should be able to insert with bindings", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "bar",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
foo: "baz",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
created: true,
|
||||
},
|
||||
])
|
||||
|
||||
const rows = await rawQuery("SELECT * FROM test_table WHERE name = 'baz'")
|
||||
expect(rows).toHaveLength(1)
|
||||
})
|
||||
|
||||
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
|
||||
"should coerce %s into a date",
|
||||
async datetimeStr => {
|
||||
const date = new Date(datetimeStr)
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`,
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "birthday",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { birthday: datetimeStr },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([{ created: true }])
|
||||
|
||||
const rows = await rawQuery(
|
||||
`SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
}
|
||||
)
|
||||
|
||||
it.each(["2021,02,05", "202205-1500"])(
|
||||
"should not coerce %s as a date",
|
||||
async notDateStr => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "INSERT INTO test_table (name) VALUES ({{ name }})",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
name: notDateStr,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([{ created: true }])
|
||||
|
||||
const rows = await rawQuery(
|
||||
`SELECT * FROM test_table WHERE name = '${notDateStr}'`
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe("read", () => {
|
||||
it("should execute a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table ORDER BY id",
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
name: "one",
|
||||
birthday: null,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "two",
|
||||
birthday: null,
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "three",
|
||||
birthday: null,
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "four",
|
||||
birthday: null,
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "five",
|
||||
birthday: null,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to transform a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = 1",
|
||||
},
|
||||
transformer: `
|
||||
data[0].id = data[0].id + 1;
|
||||
return data;
|
||||
`,
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 2,
|
||||
name: "one",
|
||||
birthday: null,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should coerce numeric bindings", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
name: "one",
|
||||
birthday: null,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("update", () => {
|
||||
it("should be able to update rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
default: "updated",
|
||||
},
|
||||
],
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
name: "foo",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
updated: true,
|
||||
},
|
||||
])
|
||||
|
||||
const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
|
||||
expect(rows).toEqual([{ id: 1, name: "foo", birthday: null }])
|
||||
})
|
||||
|
||||
it("should be able to execute an update that updates no rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
updated: true,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to execute a delete that deletes no rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "DELETE FROM test_table WHERE id = 100",
|
||||
},
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
deleted: true,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("delete", () => {
|
||||
it("should be able to delete rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "DELETE FROM test_table WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
deleted: true,
|
||||
},
|
||||
])
|
||||
|
||||
const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
|
||||
expect(rows).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -1,239 +0,0 @@
|
|||
import { Datasource, Query } from "@budibase/types"
|
||||
import * as setup from "../utilities"
|
||||
import { databaseTestProviders } from "../../../../integrations/tests/utils"
|
||||
import mysql from "mysql2/promise"
|
||||
|
||||
jest.unmock("mysql2")
|
||||
jest.unmock("mysql2/promise")
|
||||
|
||||
const createTableSQL = `
|
||||
CREATE TABLE test_table (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL
|
||||
)
|
||||
`
|
||||
|
||||
const insertSQL = `
|
||||
INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')
|
||||
`
|
||||
|
||||
const dropTableSQL = `
|
||||
DROP TABLE test_table
|
||||
`
|
||||
|
||||
describe("/queries", () => {
|
||||
let config = setup.getConfig()
|
||||
let datasource: Datasource
|
||||
|
||||
async function createQuery(query: Partial<Query>): Promise<Query> {
|
||||
const defaultQuery: Query = {
|
||||
datasourceId: datasource._id!,
|
||||
name: "New Query",
|
||||
parameters: [],
|
||||
fields: {},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.create({ ...defaultQuery, ...query })
|
||||
}
|
||||
|
||||
async function withConnection(
|
||||
callback: (client: mysql.Connection) => Promise<void>
|
||||
): Promise<void> {
|
||||
const ds = await databaseTestProviders.mysql.datasource()
|
||||
const con = await mysql.createConnection(ds.config!)
|
||||
try {
|
||||
await callback(con)
|
||||
} finally {
|
||||
con.end()
|
||||
}
|
||||
}
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.mysql.stop()
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
datasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await withConnection(async connection => {
|
||||
const resp = await connection.query(createTableSQL)
|
||||
await connection.query(insertSQL)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await withConnection(async connection => {
|
||||
await connection.query(dropTableSQL)
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table ORDER BY id",
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
name: "one",
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "two",
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "three",
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "four",
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "five",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to transform a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = 1",
|
||||
},
|
||||
transformer: `
|
||||
data[0].id = data[0].id + 1;
|
||||
return data;
|
||||
`,
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 2,
|
||||
name: "one",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to insert with bindings", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "bar",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
foo: "baz",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
created: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withConnection(async connection => {
|
||||
const [rows] = await connection.query(
|
||||
"SELECT * FROM test_table WHERE name = 'baz'"
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
default: "updated",
|
||||
},
|
||||
],
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
name: "foo",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
updated: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withConnection(async connection => {
|
||||
const [rows] = await connection.query(
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toEqual([{ id: 1, name: "foo" }])
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "DELETE FROM test_table WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
deleted: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withConnection(async connection => {
|
||||
const [rows] = await connection.query(
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -1,243 +0,0 @@
|
|||
import { Datasource, Query } from "@budibase/types"
|
||||
import * as setup from "../utilities"
|
||||
import { databaseTestProviders } from "../../../../integrations/tests/utils"
|
||||
import { Client } from "pg"
|
||||
|
||||
jest.unmock("pg")
|
||||
|
||||
const createTableSQL = `
|
||||
CREATE TABLE test_table (
|
||||
id serial PRIMARY KEY,
|
||||
name VARCHAR ( 50 ) NOT NULL
|
||||
);
|
||||
`
|
||||
|
||||
const insertSQL = `
|
||||
INSERT INTO test_table (name) VALUES ('one');
|
||||
INSERT INTO test_table (name) VALUES ('two');
|
||||
INSERT INTO test_table (name) VALUES ('three');
|
||||
INSERT INTO test_table (name) VALUES ('four');
|
||||
INSERT INTO test_table (name) VALUES ('five');
|
||||
`
|
||||
|
||||
const dropTableSQL = `
|
||||
DROP TABLE test_table;
|
||||
`
|
||||
|
||||
describe("/queries", () => {
|
||||
let config = setup.getConfig()
|
||||
let datasource: Datasource
|
||||
|
||||
async function createQuery(query: Partial<Query>): Promise<Query> {
|
||||
const defaultQuery: Query = {
|
||||
datasourceId: datasource._id!,
|
||||
name: "New Query",
|
||||
parameters: [],
|
||||
fields: {},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.create({ ...defaultQuery, ...query })
|
||||
}
|
||||
|
||||
async function withClient(
|
||||
callback: (client: Client) => Promise<void>
|
||||
): Promise<void> {
|
||||
const ds = await databaseTestProviders.postgres.datasource()
|
||||
const client = new Client(ds.config!)
|
||||
await client.connect()
|
||||
try {
|
||||
await callback(client)
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
}
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.postgres.stop()
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
datasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.postgres.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await withClient(async client => {
|
||||
await client.query(createTableSQL)
|
||||
await client.query(insertSQL)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await withClient(async client => {
|
||||
await client.query(dropTableSQL)
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table ORDER BY id",
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
name: "one",
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "two",
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "three",
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "four",
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "five",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to transform a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = 1",
|
||||
},
|
||||
transformer: `
|
||||
data[0].id = data[0].id + 1;
|
||||
return data;
|
||||
`,
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
id: 2,
|
||||
name: "one",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should be able to insert with bindings", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "bar",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
foo: "baz",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
created: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withClient(async client => {
|
||||
const { rows } = await client.query(
|
||||
"SELECT * FROM test_table WHERE name = 'baz'"
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
default: "updated",
|
||||
},
|
||||
],
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
name: "foo",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
updated: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withClient(async client => {
|
||||
const { rows } = await client.query(
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toEqual([{ id: 1, name: "foo" }])
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "DELETE FROM test_table WHERE id = {{ id }}",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "id",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: {
|
||||
id: "1",
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
deleted: true,
|
||||
},
|
||||
])
|
||||
|
||||
await withClient(async client => {
|
||||
const { rows } = await client.query(
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -3,7 +3,7 @@ import { databaseTestProviders } from "../../../integrations/tests/utils"
|
|||
import tk from "timekeeper"
|
||||
import { outputProcessing } from "../../../utilities/rowProcessor"
|
||||
import * as setup from "./utilities"
|
||||
import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
|
||||
import { context, InternalTable, tenancy } from "@budibase/backend-core"
|
||||
import { quotas } from "@budibase/pro"
|
||||
import {
|
||||
AutoFieldSubType,
|
||||
|
@@ -14,33 +14,21 @@
|
|||
FieldTypeSubtypes,
|
||||
FormulaType,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
PermissionLevel,
|
||||
QuotaUsageType,
|
||||
RelationshipType,
|
||||
Row,
|
||||
SaveTableRequest,
|
||||
SearchQueryOperators,
|
||||
SortOrder,
|
||||
SortType,
|
||||
StaticQuotaName,
|
||||
Table,
|
||||
TableSourceType,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
expectAnyExternalColsAttributes,
|
||||
expectAnyInternalColsAttributes,
|
||||
generator,
|
||||
mocks,
|
||||
} from "@budibase/backend-core/tests"
|
||||
import { generator, mocks } from "@budibase/backend-core/tests"
|
||||
import _, { merge } from "lodash"
|
||||
import * as uuid from "uuid"
|
||||
|
||||
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
|
||||
tk.freeze(timestamp)
|
||||
|
||||
jest.unmock("mysql2")
|
||||
jest.unmock("mysql2/promise")
|
||||
jest.unmock("mssql")
|
||||
jest.unmock("pg")
|
||||
|
||||
|
@@ -392,6 +380,23 @@ describe.each([
|
|||
expect(row.arrayFieldArrayStrKnown).toEqual(["One"])
|
||||
expect(row.optsFieldStrKnown).toEqual("Alpha")
|
||||
})
|
||||
|
||||
isInternal &&
|
||||
it("doesn't allow creating in user table", async () => {
|
||||
const userTableId = InternalTable.USER_METADATA
|
||||
const response = await config.api.row.save(
|
||||
userTableId,
|
||||
{
|
||||
tableId: userTableId,
|
||||
firstName: "Joe",
|
||||
lastName: "Joe",
|
||||
email: "joe@joe.com",
|
||||
roles: {},
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
expect(response.message).toBe("Cannot create new user entry.")
|
||||
})
|
||||
})
|
||||
|
||||
describe("get", () => {
|
||||
|
@@ -890,642 +895,6 @@
|
|||
})
|
||||
})
|
||||
|
||||
describe("view 2.0", () => {
|
||||
async function userTable(): Promise<Table> {
|
||||
return saveTableRequest({
|
||||
name: `users_${uuid.v4()}`,
|
||||
type: "table",
|
||||
schema: {
|
||||
name: {
|
||||
type: FieldType.STRING,
|
||||
name: "name",
|
||||
},
|
||||
surname: {
|
||||
type: FieldType.STRING,
|
||||
name: "surname",
|
||||
},
|
||||
age: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "age",
|
||||
},
|
||||
address: {
|
||||
type: FieldType.STRING,
|
||||
name: "address",
|
||||
},
|
||||
jobTitle: {
|
||||
type: FieldType.STRING,
|
||||
name: "jobTitle",
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const randomRowData = () => ({
|
||||
name: generator.first(),
|
||||
surname: generator.last(),
|
||||
age: generator.age(),
|
||||
address: generator.address(),
|
||||
jobTitle: generator.word(),
|
||||
})
|
||||
|
||||
describe("create", () => {
|
||||
it("should persist a new row with only the provided view fields", async () => {
|
||||
const table = await config.api.table.save(await userTable())
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
name: { visible: true },
|
||||
surname: { visible: true },
|
||||
address: { visible: true },
|
||||
},
|
||||
})
|
||||
|
||||
const data = randomRowData()
|
||||
const newRow = await config.api.row.save(view.id, {
|
||||
tableId: table!._id,
|
||||
_viewId: view.id,
|
||||
...data,
|
||||
})
|
||||
|
||||
const row = await config.api.row.get(table._id!, newRow._id!)
|
||||
expect(row).toEqual({
|
||||
name: data.name,
|
||||
surname: data.surname,
|
||||
address: data.address,
|
||||
tableId: table!._id,
|
||||
_id: newRow._id,
|
||||
_rev: newRow._rev,
|
||||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("patch", () => {
|
||||
it("should update only the view fields for a row", async () => {
|
||||
const table = await config.api.table.save(await userTable())
|
||||
const tableId = table._id!
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: tableId,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
name: { visible: true },
|
||||
address: { visible: true },
|
||||
},
|
||||
})
|
||||
|
||||
const newRow = await config.api.row.save(view.id, {
|
||||
tableId,
|
||||
_viewId: view.id,
|
||||
...randomRowData(),
|
||||
})
|
||||
const newData = randomRowData()
|
||||
await config.api.row.patch(view.id, {
|
||||
tableId,
|
||||
_viewId: view.id,
|
||||
_id: newRow._id!,
|
||||
_rev: newRow._rev!,
|
||||
...newData,
|
||||
})
|
||||
|
||||
const row = await config.api.row.get(tableId, newRow._id!)
|
||||
expect(row).toEqual({
|
||||
...newRow,
|
||||
name: newData.name,
|
||||
address: newData.address,
|
||||
_id: newRow._id,
|
||||
_rev: expect.any(String),
|
||||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("destroy", () => {
|
||||
it("should be able to delete a row", async () => {
|
||||
const table = await config.api.table.save(await userTable())
|
||||
const tableId = table._id!
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: tableId,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
name: { visible: true },
|
||||
address: { visible: true },
|
||||
},
|
||||
})
|
||||
|
||||
const createdRow = await config.api.row.save(table._id!, {})
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
|
||||
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
|
||||
await config.api.row.get(tableId, createdRow._id!, {
|
||||
status: 404,
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete multiple rows", async () => {
|
||||
const table = await config.api.table.save(await userTable())
|
||||
const tableId = table._id!
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: tableId,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
name: { visible: true },
|
||||
address: { visible: true },
|
||||
},
|
||||
})
|
||||
|
||||
const rows = await Promise.all([
|
||||
config.api.row.save(table._id!, {}),
|
||||
config.api.row.save(table._id!, {}),
|
||||
config.api.row.save(table._id!, {}),
|
||||
])
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
|
||||
|
||||
await assertRowUsage(rowUsage - 2)
|
||||
|
||||
await config.api.row.get(tableId, rows[0]._id!, {
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[2]._id!, {
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[1]._id!, { status: 200 })
|
||||
})
|
||||
})
|
||||
|
||||
describe("view search", () => {
|
||||
let table: Table
|
||||
const viewSchema = { age: { visible: true }, name: { visible: true } }
|
||||
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: `users_${uuid.v4()}`,
|
||||
schema: {
|
||||
name: {
|
||||
type: FieldType.STRING,
|
||||
name: "name",
|
||||
constraints: { type: "string" },
|
||||
},
|
||||
age: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "age",
|
||||
constraints: {},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it("returns empty rows from view when no schema is passed", async () => {
|
||||
const rows = await Promise.all(
|
||||
Array.from({ length: 10 }, () =>
|
||||
config.api.row.save(table._id!, { tableId: table._id })
|
||||
)
|
||||
)
|
||||
|
||||
const createViewResponse = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
})
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
_viewId: createViewResponse.id,
|
||||
tableId: table._id,
|
||||
_id: r._id,
|
||||
_rev: r._rev,
|
||||
...defaultRowFields,
|
||||
}))
|
||||
),
|
||||
...(isInternal
|
||||
? {}
|
||||
: {
|
||||
hasNextPage: false,
|
||||
bookmark: null,
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
  it("searching respects the view filters", async () => {
    await Promise.all(
      Array.from({ length: 10 }, () =>
        config.api.row.save(table._id!, {
          tableId: table._id,
          name: generator.name(),
          age: generator.integer({ min: 10, max: 30 }),
        })
      )
    )

    const expectedRows = await Promise.all(
      Array.from({ length: 5 }, () =>
        config.api.row.save(table._id!, {
          tableId: table._id,
          name: generator.name(),
          age: 40,
        })
      )
    )

    const createViewResponse = await config.api.viewV2.create({
      tableId: table._id!,
      name: generator.guid(),
      query: [
        { operator: SearchQueryOperators.EQUAL, field: "age", value: 40 },
      ],
      schema: viewSchema,
    })

    const response = await config.api.viewV2.search(createViewResponse.id)

    expect(response.rows).toHaveLength(5)
    expect(response).toEqual({
      rows: expect.arrayContaining(
        expectedRows.map(r => ({
          _viewId: createViewResponse.id,
          tableId: table._id,
          name: r.name,
          age: r.age,
          _id: r._id,
          _rev: r._rev,
          ...defaultRowFields,
        }))
      ),
      ...(isInternal
        ? {}
        : {
            hasNextPage: false,
            bookmark: null,
          }),
    })
  })

  const sortTestOptions: [
    {
      field: string
      order?: SortOrder
      type?: SortType
    },
    string[]
  ][] = [
    [
      {
        field: "name",
        order: SortOrder.ASCENDING,
        type: SortType.STRING,
      },
      ["Alice", "Bob", "Charly", "Danny"],
    ],
    [
      {
        field: "name",
      },
      ["Alice", "Bob", "Charly", "Danny"],
    ],
    [
      {
        field: "name",
        order: SortOrder.DESCENDING,
      },
      ["Danny", "Charly", "Bob", "Alice"],
    ],
    [
      {
        field: "name",
        order: SortOrder.DESCENDING,
        type: SortType.STRING,
      },
      ["Danny", "Charly", "Bob", "Alice"],
    ],
    [
      {
        field: "age",
        order: SortOrder.ASCENDING,
        type: SortType.number,
      },
      ["Danny", "Alice", "Charly", "Bob"],
    ],
    [
      {
        field: "age",
        order: SortOrder.ASCENDING,
      },
      ["Danny", "Alice", "Charly", "Bob"],
    ],
    [
      {
        field: "age",
        order: SortOrder.DESCENDING,
      },
      ["Bob", "Charly", "Alice", "Danny"],
    ],
    [
      {
        field: "age",
        order: SortOrder.DESCENDING,
        type: SortType.number,
      },
      ["Bob", "Charly", "Alice", "Danny"],
    ],
  ]

  describe("sorting", () => {
    let table: Table
    beforeAll(async () => {
      table = await config.api.table.save(await userTable())
      const users = [
        { name: "Alice", age: 25 },
        { name: "Bob", age: 30 },
        { name: "Charly", age: 27 },
        { name: "Danny", age: 15 },
      ]
      await Promise.all(
        users.map(u =>
          config.api.row.save(table._id!, {
            tableId: table._id,
            ...u,
          })
        )
      )
    })

    it.each(sortTestOptions)(
      "allow sorting (%s)",
      async (sortParams, expected) => {
        const createViewResponse = await config.api.viewV2.create({
          tableId: table._id!,
          name: generator.guid(),
          sort: sortParams,
          schema: viewSchema,
        })

        const response = await config.api.viewV2.search(
          createViewResponse.id
        )

        expect(response.rows).toHaveLength(4)
        expect(response.rows).toEqual(
          expected.map(name => expect.objectContaining({ name }))
        )
      }
    )

    it.each(sortTestOptions)(
      "allow override the default view sorting (%s)",
      async (sortParams, expected) => {
        const createViewResponse = await config.api.viewV2.create({
          tableId: table._id!,
          name: generator.guid(),
          sort: {
            field: "name",
            order: SortOrder.ASCENDING,
            type: SortType.STRING,
          },
          schema: viewSchema,
        })

        const response = await config.api.viewV2.search(
          createViewResponse.id,
          {
            sort: sortParams.field,
            sortOrder: sortParams.order,
            sortType: sortParams.type,
            query: {},
          }
        )

        expect(response.rows).toHaveLength(4)
        expect(response.rows).toEqual(
          expected.map(name => expect.objectContaining({ name }))
        )
      }
    )
  })

  it("when schema is defined, defined columns and row attributes are returned", async () => {
    const table = await config.api.table.save(await userTable())
    const rows = await Promise.all(
      Array.from({ length: 10 }, () =>
        config.api.row.save(table._id!, {
          tableId: table._id,
          name: generator.name(),
          age: generator.age(),
        })
      )
    )

    const view = await config.api.viewV2.create({
      tableId: table._id!,
      name: generator.guid(),
      schema: { name: { visible: true } },
    })
    const response = await config.api.viewV2.search(view.id)

    expect(response.rows).toHaveLength(10)
    expect(response.rows).toEqual(
      expect.arrayContaining(
        rows.map(r => ({
          ...(isInternal
            ? expectAnyInternalColsAttributes
            : expectAnyExternalColsAttributes),
          _viewId: view.id,
          name: r.name,
        }))
      )
    )
  })

  it("views without data can be returned", async () => {
    const table = await config.api.table.save(await userTable())
    const createViewResponse = await config.api.viewV2.create({
      tableId: table._id!,
      name: generator.guid(),
    })
    const response = await config.api.viewV2.search(createViewResponse.id)
    expect(response.rows).toHaveLength(0)
  })

  it("respects the limit parameter", async () => {
    const table = await config.api.table.save(await userTable())
    await Promise.all(
      Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
    )

    const limit = generator.integer({ min: 1, max: 8 })

    const createViewResponse = await config.api.viewV2.create({
      tableId: table._id!,
      name: generator.guid(),
    })
    const response = await config.api.viewV2.search(createViewResponse.id, {
      limit,
      query: {},
    })

    expect(response.rows).toHaveLength(limit)
  })

  it("can handle pagination", async () => {
    const table = await config.api.table.save(await userTable())
    await Promise.all(
      Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
    )
    const view = await config.api.viewV2.create({
      tableId: table._id!,
      name: generator.guid(),
    })
    const rows = (await config.api.viewV2.search(view.id)).rows

    const page1 = await config.api.viewV2.search(view.id, {
      paginate: true,
      limit: 4,
      query: {},
    })
    expect(page1).toEqual({
      rows: expect.arrayContaining(rows.slice(0, 4)),
      totalRows: isInternal ? 10 : undefined,
      hasNextPage: true,
      bookmark: expect.anything(),
    })

    const page2 = await config.api.viewV2.search(view.id, {
      paginate: true,
      limit: 4,
      bookmark: page1.bookmark,
      query: {},
    })
    expect(page2).toEqual({
      rows: expect.arrayContaining(rows.slice(4, 8)),
      totalRows: isInternal ? 10 : undefined,
      hasNextPage: true,
      bookmark: expect.anything(),
    })

    const page3 = await config.api.viewV2.search(view.id, {
      paginate: true,
      limit: 4,
      bookmark: page2.bookmark,
      query: {},
    })
    expect(page3).toEqual({
      rows: expect.arrayContaining(rows.slice(8)),
      totalRows: isInternal ? 10 : undefined,
      hasNextPage: false,
      bookmark: expect.anything(),
    })
  })

  isInternal &&
    it("doesn't allow creating in user table", async () => {
      const userTableId = InternalTable.USER_METADATA
      const response = await config.api.row.save(
        userTableId,
        {
          tableId: userTableId,
          firstName: "Joe",
          lastName: "Joe",
          email: "joe@joe.com",
          roles: {},
        },
        { status: 400 }
      )
      expect(response.message).toBe("Cannot create new user entry.")
    })

  describe("permissions", () => {
    let table: Table
    let view: ViewV2

    beforeAll(async () => {
      table = await config.api.table.save(await userTable())
      await Promise.all(
        Array.from({ length: 10 }, () =>
          config.api.row.save(table._id!, {})
        )
      )

      view = await config.api.viewV2.create({
        tableId: table._id!,
        name: generator.guid(),
      })
    })

    beforeEach(() => {
      mocks.licenses.useViewPermissions()
    })

    it("does not allow public users to fetch by default", async () => {
      await config.publish()
      await config.api.viewV2.publicSearch(view.id, undefined, {
        status: 403,
      })
    })

    it("allow public users to fetch when permissions are explicit", async () => {
      await config.api.permission.add({
        roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
        level: PermissionLevel.READ,
        resourceId: view.id,
      })
      await config.publish()

      const response = await config.api.viewV2.publicSearch(view.id)

      expect(response.rows).toHaveLength(10)
    })

    it("allow public users to fetch when permissions are inherited", async () => {
      await config.api.permission.add({
        roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
        level: PermissionLevel.READ,
        resourceId: table._id!,
      })
      await config.publish()

      const response = await config.api.viewV2.publicSearch(view.id)

      expect(response.rows).toHaveLength(10)
    })

    it("respects inherited permissions, not allowing not public views from public tables", async () => {
      await config.api.permission.add({
        roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
        level: PermissionLevel.READ,
        resourceId: table._id!,
      })
      await config.api.permission.add({
        roleId: roles.BUILTIN_ROLE_IDS.POWER,
        level: PermissionLevel.READ,
        resourceId: view.id,
      })
      await config.publish()

      await config.api.viewV2.publicSearch(view.id, undefined, {
        status: 403,
      })
    })
  })
})
})

let o2mTable: Table
|
||||
let m2mTable: Table
|
||||
beforeAll(async () => {
|
||||
|
|
|
@@ -74,7 +74,7 @@ describe("/views", () => {
|
|||
|
||||
describe("create", () => {
|
||||
it("returns a success message when the view is successfully created", async () => {
|
||||
const res = await saveView()
|
||||
await saveView()
|
||||
expect(events.view.created).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
|
|
|
@ -5,23 +5,26 @@ import {
|
|||
FieldSchema,
|
||||
FieldType,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
PermissionLevel,
|
||||
QuotaUsageType,
|
||||
SaveTableRequest,
|
||||
SearchQueryOperators,
|
||||
SortOrder,
|
||||
SortType,
|
||||
StaticQuotaName,
|
||||
Table,
|
||||
TableSourceType,
|
||||
UIFieldMetadata,
|
||||
UpdateViewRequest,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import { generator, mocks } from "@budibase/backend-core/tests"
|
||||
import * as uuid from "uuid"
|
||||
import { databaseTestProviders } from "../../../integrations/tests/utils"
|
||||
import merge from "lodash/merge"
|
||||
import { quotas } from "@budibase/pro"
|
||||
import { roles } from "@budibase/backend-core"
|
||||
|
||||
jest.unmock("mysql2")
|
||||
jest.unmock("mysql2/promise")
|
||||
jest.unmock("mssql")
|
||||
jest.unmock("pg")
|
||||
|
||||
|
@ -33,6 +36,7 @@ describe.each([
|
|||
["mariadb", databaseTestProviders.mariadb],
|
||||
])("/v2/views (%s)", (_, dsProvider) => {
|
||||
const config = setup.getConfig()
|
||||
const isInternal = !dsProvider
|
||||
|
||||
let table: Table
|
||||
let datasource: Datasource
|
||||
|
@ -99,6 +103,18 @@ describe.each([
|
|||
setup.afterAll()
|
||||
})
|
||||
|
||||
const getRowUsage = async () => {
|
||||
const { total } = await config.doInContext(undefined, () =>
|
||||
quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
|
||||
)
|
||||
return total
|
||||
}
|
||||
|
||||
const assertRowUsage = async (expected: number) => {
|
||||
const usage = await getRowUsage()
|
||||
expect(usage).toBe(expected)
|
||||
}
|
||||
|
||||
describe("create", () => {
|
||||
it("persist the view when the view is successfully created", async () => {
|
||||
const newView: CreateViewRequest = {
|
||||
|
@ -525,4 +541,468 @@ describe.each([
|
|||
expect(row.Country).toEqual("Aussy")
|
||||
})
|
||||
})
|
||||
|
||||
describe("row operations", () => {
|
||||
let table: Table, view: ViewV2
|
||||
beforeEach(async () => {
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
schema: {
|
||||
one: { type: FieldType.STRING, name: "one" },
|
||||
two: { type: FieldType.STRING, name: "two" },
|
||||
},
|
||||
})
|
||||
)
|
||||
view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
two: { visible: true },
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe("create", () => {
|
||||
it("should persist a new row with only the provided view fields", async () => {
|
||||
const newRow = await config.api.row.save(view.id, {
|
||||
tableId: table!._id,
|
||||
_viewId: view.id,
|
||||
one: "foo",
|
||||
two: "bar",
|
||||
})
|
||||
|
||||
const row = await config.api.row.get(table._id!, newRow._id!)
|
||||
expect(row.one).toBeUndefined()
|
||||
expect(row.two).toEqual("bar")
|
||||
})
|
||||
})
|
||||
|
||||
describe("patch", () => {
|
||||
it("should update only the view fields for a row", async () => {
|
||||
const newRow = await config.api.row.save(table._id!, {
|
||||
one: "foo",
|
||||
two: "bar",
|
||||
})
|
||||
await config.api.row.patch(view.id, {
|
||||
tableId: table._id!,
|
||||
_id: newRow._id!,
|
||||
_rev: newRow._rev!,
|
||||
one: "newFoo",
|
||||
two: "newBar",
|
||||
})
|
||||
|
||||
const row = await config.api.row.get(table._id!, newRow._id!)
|
||||
expect(row.one).toEqual("foo")
|
||||
expect(row.two).toEqual("newBar")
|
||||
})
|
||||
})
|
||||
|
||||
describe("destroy", () => {
|
||||
it("should be able to delete a row", async () => {
|
||||
const createdRow = await config.api.row.save(table._id!, {})
|
||||
const rowUsage = await getRowUsage()
|
||||
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
await config.api.row.get(table._id!, createdRow._id!, {
|
||||
status: 404,
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete multiple rows", async () => {
|
||||
const rows = await Promise.all([
|
||||
config.api.row.save(table._id!, {}),
|
||||
config.api.row.save(table._id!, {}),
|
||||
config.api.row.save(table._id!, {}),
|
||||
])
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
|
||||
|
||||
await assertRowUsage(rowUsage - 2)
|
||||
|
||||
await config.api.row.get(table._id!, rows[0]._id!, {
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(table._id!, rows[2]._id!, {
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(table._id!, rows[1]._id!, { status: 200 })
|
||||
})
|
||||
})
|
||||
|
||||
describe("search", () => {
|
||||
it("returns empty rows from view when no schema is passed", async () => {
|
||||
const rows = await Promise.all(
|
||||
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
|
||||
)
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
_viewId: view.id,
|
||||
tableId: table._id,
|
||||
_id: r._id,
|
||||
_rev: r._rev,
|
||||
...(isInternal
|
||||
? {
|
||||
type: "row",
|
||||
updatedAt: expect.any(String),
|
||||
createdAt: expect.any(String),
|
||||
}
|
||||
: {}),
|
||||
}))
|
||||
),
|
||||
...(isInternal
|
||||
? {}
|
||||
: {
|
||||
hasNextPage: false,
|
||||
bookmark: null,
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
it("searching respects the view filters", async () => {
|
||||
await config.api.row.save(table._id!, {
|
||||
one: "foo",
|
||||
two: "bar",
|
||||
})
|
||||
const two = await config.api.row.save(table._id!, {
|
||||
one: "foo2",
|
||||
two: "bar2",
|
||||
})
|
||||
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
field: "two",
|
||||
value: "bar2",
|
||||
},
|
||||
],
|
||||
schema: {
|
||||
two: { visible: true },
|
||||
},
|
||||
})
|
||||
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
expect(response.rows).toHaveLength(1)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining([
|
||||
{
|
||||
_viewId: view.id,
|
||||
tableId: table._id,
|
||||
two: two.two,
|
||||
_id: two._id,
|
||||
_rev: two._rev,
|
||||
...(isInternal
|
||||
? {
|
||||
type: "row",
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
]),
|
||||
...(isInternal
|
||||
? {}
|
||||
: {
|
||||
hasNextPage: false,
|
||||
bookmark: null,
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
it("views without data can be returned", async () => {
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
expect(response.rows).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("respects the limit parameter", async () => {
|
||||
await Promise.all(
|
||||
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
|
||||
)
|
||||
const limit = generator.integer({ min: 1, max: 8 })
|
||||
const response = await config.api.viewV2.search(view.id, {
|
||||
limit,
|
||||
query: {},
|
||||
})
|
||||
expect(response.rows).toHaveLength(limit)
|
||||
})
|
||||
|
||||
it("can handle pagination", async () => {
|
||||
await Promise.all(
|
||||
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
|
||||
)
|
||||
const rows = (await config.api.viewV2.search(view.id)).rows
|
||||
|
||||
const page1 = await config.api.viewV2.search(view.id, {
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
query: {},
|
||||
})
|
||||
expect(page1).toEqual({
|
||||
rows: expect.arrayContaining(rows.slice(0, 4)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
bookmark: expect.anything(),
|
||||
})
|
||||
|
||||
const page2 = await config.api.viewV2.search(view.id, {
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: page1.bookmark,
|
||||
query: {},
|
||||
})
|
||||
expect(page2).toEqual({
|
||||
rows: expect.arrayContaining(rows.slice(4, 8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
bookmark: expect.anything(),
|
||||
})
|
||||
|
||||
const page3 = await config.api.viewV2.search(view.id, {
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: page2.bookmark,
|
||||
query: {},
|
||||
})
|
||||
expect(page3).toEqual({
|
||||
rows: expect.arrayContaining(rows.slice(8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: false,
|
||||
bookmark: expect.anything(),
|
||||
})
|
||||
})
|
||||
|
||||
const sortTestOptions: [
|
||||
{
|
||||
field: string
|
||||
order?: SortOrder
|
||||
type?: SortType
|
||||
},
|
||||
string[]
|
||||
][] = [
|
||||
[
|
||||
{
|
||||
field: "name",
|
||||
order: SortOrder.ASCENDING,
|
||||
type: SortType.STRING,
|
||||
},
|
||||
["Alice", "Bob", "Charly", "Danny"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "name",
|
||||
},
|
||||
["Alice", "Bob", "Charly", "Danny"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "name",
|
||||
order: SortOrder.DESCENDING,
|
||||
},
|
||||
["Danny", "Charly", "Bob", "Alice"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "name",
|
||||
order: SortOrder.DESCENDING,
|
||||
type: SortType.STRING,
|
||||
},
|
||||
["Danny", "Charly", "Bob", "Alice"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "age",
|
||||
order: SortOrder.ASCENDING,
|
||||
type: SortType.number,
|
||||
},
|
||||
["Danny", "Alice", "Charly", "Bob"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "age",
|
||||
order: SortOrder.ASCENDING,
|
||||
},
|
||||
["Danny", "Alice", "Charly", "Bob"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "age",
|
||||
order: SortOrder.DESCENDING,
|
||||
},
|
||||
["Bob", "Charly", "Alice", "Danny"],
|
||||
],
|
||||
[
|
||||
{
|
||||
field: "age",
|
||||
order: SortOrder.DESCENDING,
|
||||
type: SortType.number,
|
||||
},
|
||||
["Bob", "Charly", "Alice", "Danny"],
|
||||
],
|
||||
]
|
||||
|
||||
describe("sorting", () => {
|
||||
let table: Table
|
||||
const viewSchema = { age: { visible: true }, name: { visible: true } }
|
||||
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: `users_${uuid.v4()}`,
|
||||
type: "table",
|
||||
schema: {
|
||||
name: {
|
||||
type: FieldType.STRING,
|
||||
name: "name",
|
||||
},
|
||||
surname: {
|
||||
type: FieldType.STRING,
|
||||
name: "surname",
|
||||
},
|
||||
age: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "age",
|
||||
},
|
||||
address: {
|
||||
type: FieldType.STRING,
|
||||
name: "address",
|
||||
},
|
||||
jobTitle: {
|
||||
type: FieldType.STRING,
|
||||
name: "jobTitle",
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
const users = [
|
||||
{ name: "Alice", age: 25 },
|
||||
{ name: "Bob", age: 30 },
|
||||
{ name: "Charly", age: 27 },
|
||||
{ name: "Danny", age: 15 },
|
||||
]
|
||||
await Promise.all(
|
||||
users.map(u =>
|
||||
config.api.row.save(table._id!, {
|
||||
tableId: table._id,
|
||||
...u,
|
||||
})
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
it.each(sortTestOptions)(
|
||||
"allow sorting (%s)",
|
||||
async (sortParams, expected) => {
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
sort: sortParams,
|
||||
schema: viewSchema,
|
||||
})
|
||||
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
it.each(sortTestOptions)(
|
||||
"allow override the default view sorting (%s)",
|
||||
async (sortParams, expected) => {
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
sort: {
|
||||
field: "name",
|
||||
order: SortOrder.ASCENDING,
|
||||
type: SortType.STRING,
|
||||
},
|
||||
schema: viewSchema,
|
||||
})
|
||||
|
||||
const response = await config.api.viewV2.search(view.id, {
|
||||
sort: sortParams.field,
|
||||
sortOrder: sortParams.order,
|
||||
sortType: sortParams.type,
|
||||
query: {},
|
||||
})
|
||||
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("permissions", () => {
|
||||
beforeEach(async () => {
|
||||
mocks.licenses.useViewPermissions()
|
||||
await Promise.all(
|
||||
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
|
||||
)
|
||||
})
|
||||
|
||||
it("does not allow public users to fetch by default", async () => {
|
||||
await config.publish()
|
||||
await config.api.viewV2.publicSearch(view.id, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are explicit", async () => {
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: view.id,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.publicSearch(view.id)
|
||||
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are inherited", async () => {
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: table._id!,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.publicSearch(view.id)
|
||||
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("respects inherited permissions, not allowing not public views from public tables", async () => {
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: table._id!,
|
||||
})
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.POWER,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: view.id,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
await config.api.viewV2.publicSearch(view.id, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -5,7 +5,7 @@ import {
|
|||
} from "@budibase/string-templates"
|
||||
import sdk from "../sdk"
|
||||
import { Row } from "@budibase/types"
|
||||
import { LoopInput, LoopStep, LoopStepType } from "../definitions/automations"
|
||||
import { LoopInput, LoopStepType } from "../definitions/automations"
|
||||
|
||||
/**
|
||||
* When values are input to the system generally they will be of type string as this is required for template strings.
|
||||
|
|
|
@ -4,7 +4,6 @@ import {
|
|||
AutomationStepInput,
|
||||
AutomationStepType,
|
||||
AutomationIOType,
|
||||
AutomationFeature,
|
||||
} from "@budibase/types"
|
||||
|
||||
export const definition: AutomationStepSchema = {
|
||||
|
|
|
@ -10,8 +10,6 @@ import {
|
|||
AutomationStepSchema,
|
||||
AutomationStepType,
|
||||
} from "@budibase/types"
|
||||
import { utils } from "@budibase/backend-core"
|
||||
import env from "../../environment"
|
||||
|
||||
export const definition: AutomationStepSchema = {
|
||||
name: "External Data Connector",
|
||||
|
|
|
@ -58,7 +58,7 @@ export const definition: AutomationStepSchema = {
|
|||
},
|
||||
}
|
||||
|
||||
export async function run({ inputs, context }: AutomationStepInput) {
|
||||
export async function run({ inputs }: AutomationStepInput) {
|
||||
if (!environment.OPENAI_API_KEY) {
|
||||
return {
|
||||
success: false,
|
||||
|
|
|
@ -62,6 +62,7 @@ export const definition: AutomationStepSchema = {
|
|||
}
|
||||
|
||||
export async function run({ inputs }: AutomationStepInput) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { automationId, ...fieldParams } = inputs.automation
|
||||
|
||||
if (await features.isTriggerAutomationRunEnabled()) {
|
||||
|
|
|
@ -3,19 +3,18 @@ import * as triggers from "../triggers"
|
|||
import { loopAutomation } from "../../tests/utilities/structures"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import * as setup from "./utilities"
|
||||
import { Row, Table } from "@budibase/types"
|
||||
import { Table } from "@budibase/types"
|
||||
import { LoopInput, LoopStepType } from "../../definitions/automations"
|
||||
|
||||
describe("Attempt to run a basic loop automation", () => {
|
||||
let config = setup.getConfig(),
|
||||
table: Table,
|
||||
row: Row
|
||||
table: Table
|
||||
|
||||
beforeEach(async () => {
|
||||
await automation.init()
|
||||
await config.init()
|
||||
table = await config.createTable()
|
||||
row = await config.createRow()
|
||||
await config.createRow()
|
||||
})
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { LoopStep, LoopStepType } from "../../definitions/automations"
|
||||
import { LoopStepType } from "../../definitions/automations"
|
||||
import {
|
||||
typecastForLooping,
|
||||
cleanInputValues,
|
||||
|
|
|
@ -6,6 +6,10 @@ import {
|
|||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
|
||||
import env from "../environment"
|
||||
|
||||
export const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
|
||||
|
||||
export enum FilterTypes {
|
||||
STRING = "string",
|
||||
FUZZY = "fuzzy",
|
||||
|
|
|
@ -1,147 +0,0 @@
|
|||
import merge from "lodash/merge"
|
||||
import env from "../environment"
|
||||
|
||||
export const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
|
||||
|
||||
const TableInfo = {
|
||||
API_KEYS: {
|
||||
name: "beta-api-key-table",
|
||||
primary: "pk",
|
||||
},
|
||||
USERS: {
|
||||
name: "prod-budi-table",
|
||||
primary: "pk",
|
||||
sort: "sk",
|
||||
},
|
||||
}
|
||||
|
||||
let docClient: any = null
|
||||
|
||||
type GetOpts = {
|
||||
primary: string
|
||||
sort?: string
|
||||
otherProps?: any
|
||||
}
|
||||
|
||||
type UpdateOpts = {
|
||||
primary: string
|
||||
sort?: string
|
||||
expression?: string
|
||||
condition?: string
|
||||
names?: string[]
|
||||
values?: any[]
|
||||
exists?: boolean
|
||||
otherProps?: any
|
||||
}
|
||||
|
||||
type PutOpts = {
|
||||
item: any
|
||||
otherProps?: any
|
||||
}
|
||||
|
||||
class Table {
|
||||
_name: string
|
||||
_primary: string
|
||||
_sort?: string
|
||||
|
||||
constructor(tableInfo: { name: string; primary: string; sort?: string }) {
|
||||
if (!tableInfo.name || !tableInfo.primary) {
|
||||
throw "Table info must specify a name and a primary key"
|
||||
}
|
||||
this._name = tableInfo.name
|
||||
this._primary = tableInfo.primary
|
||||
this._sort = tableInfo.sort
|
||||
}
|
||||
|
||||
async get({ primary, sort, otherProps }: GetOpts) {
|
||||
let params = {
|
||||
TableName: this._name,
|
||||
Key: {
|
||||
[this._primary]: primary,
|
||||
},
|
||||
}
|
||||
if (this._sort && sort) {
|
||||
params.Key[this._sort] = sort
|
||||
}
|
||||
if (otherProps) {
|
||||
params = merge(params, otherProps)
|
||||
}
|
||||
let response = await docClient.get(params).promise()
|
||||
return response.Item
|
||||
}
|
||||
|
||||
async update({
|
||||
primary,
|
||||
sort,
|
||||
expression,
|
||||
condition,
|
||||
names,
|
||||
values,
|
||||
exists,
|
||||
otherProps,
|
||||
}: UpdateOpts) {
|
||||
let params: any = {
|
||||
TableName: this._name,
|
||||
Key: {
|
||||
[this._primary]: primary,
|
||||
},
|
||||
ExpressionAttributeNames: names,
|
||||
ExpressionAttributeValues: values,
|
||||
UpdateExpression: expression,
|
||||
}
|
||||
if (condition) {
|
||||
params.ConditionExpression = condition
|
||||
}
|
||||
if (this._sort && sort) {
|
||||
params.Key[this._sort] = sort
|
||||
}
|
||||
if (exists) {
|
||||
params.ExpressionAttributeNames["#PRIMARY"] = this._primary
|
||||
if (params.ConditionExpression) {
|
||||
params.ConditionExpression += " AND "
|
||||
}
|
||||
params.ConditionExpression += "attribute_exists(#PRIMARY)"
|
||||
}
|
||||
if (otherProps) {
|
||||
params = merge(params, otherProps)
|
||||
}
|
||||
return docClient.update(params).promise()
|
||||
}
|
||||
|
||||
async put({ item, otherProps }: PutOpts) {
|
||||
if (
|
||||
item[this._primary] == null ||
|
||||
(this._sort && item[this._sort] == null)
|
||||
) {
|
||||
throw "Cannot put item without primary and sort key (if required)"
|
||||
}
|
||||
let params = {
|
||||
TableName: this._name,
|
||||
Item: item,
|
||||
}
|
||||
if (otherProps) {
|
||||
params = merge(params, otherProps)
|
||||
}
|
||||
return docClient.put(params).promise()
|
||||
}
|
||||
}
|
||||
|
||||
export function init(endpoint: string) {
|
||||
let AWS = require("aws-sdk")
|
||||
let docClientParams: any = {
|
||||
correctClockSkew: true,
|
||||
region: AWS_REGION,
|
||||
}
|
||||
if (endpoint) {
|
||||
docClientParams.endpoint = endpoint
|
||||
} else if (env.DYNAMO_ENDPOINT) {
|
||||
docClientParams.endpoint = env.DYNAMO_ENDPOINT
|
||||
}
|
||||
docClient = new AWS.DynamoDB.DocumentClient(docClientParams)
|
||||
}
|
||||
|
||||
if (!env.isProd() && !env.isJest()) {
|
||||
env._set("AWS_ACCESS_KEY_ID", "KEY_ID")
|
||||
env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY")
|
||||
init("http://localhost:8333")
|
||||
}
|
|
@ -18,7 +18,6 @@ import {
|
|||
Row,
|
||||
LinkDocumentValue,
|
||||
FieldType,
|
||||
LinkDocument,
|
||||
ContextUser,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import { features } from "@budibase/backend-core"
|
||||
import env from "./environment"
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
enum AppFeature {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
API = "api",
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
AUTOMATIONS = "automations",
|
||||
}
|
||||
|
||||
|
|
|
@ -12,7 +12,6 @@ import {
|
|||
TableRequest,
|
||||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
import _ from "lodash"
|
||||
import { databaseTestProviders } from "../integrations/tests/utils"
|
||||
import mysql from "mysql2/promise"
|
||||
import { builderSocket } from "../websockets"
|
||||
|
@ -21,7 +20,6 @@ fetch.mockSearch()
|
|||
|
||||
const config = setup.getConfig()!
|
||||
|
||||
jest.unmock("mysql2/promise")
|
||||
jest.mock("../websockets", () => ({
|
||||
clientAppSocket: jest.fn(),
|
||||
gridAppSocket: jest.fn(),
|
||||
|
|
|
@ -8,7 +8,7 @@ import {
|
|||
} from "@budibase/types"
|
||||
|
||||
import AWS from "aws-sdk"
|
||||
import { AWS_REGION } from "../db/dynamoClient"
|
||||
import { AWS_REGION } from "../constants"
|
||||
import { DocumentClient } from "aws-sdk/clients/dynamodb"
|
||||
|
||||
interface DynamoDBConfig {
|
||||
|
|
|
@ -168,6 +168,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
|
|||
return ""
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
getStringConcat(parts: string[]) {
|
||||
return ""
|
||||
}
|
||||
|
|
|
@ -14,8 +14,6 @@ import {
|
|||
Schema,
|
||||
TableSourceType,
|
||||
DatasourcePlusQueryResponse,
|
||||
FieldType,
|
||||
FieldSubtype,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
getSqlQuery,
|
||||
|
|
|
@ -13,8 +13,6 @@ import {
|
|||
Schema,
|
||||
TableSourceType,
|
||||
DatasourcePlusQueryResponse,
|
||||
FieldType,
|
||||
FieldSubtype,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
getSqlQuery,
|
||||
|
|
|
@ -28,7 +28,7 @@ describe("Airtable Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the create method with the correct params", async () => {
|
||||
const response = await config.integration.create({
|
||||
await config.integration.create({
|
||||
table: "test",
|
||||
json: {},
|
||||
})
|
||||
|
@ -40,7 +40,7 @@ describe("Airtable Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the read method with the correct params", async () => {
|
||||
const response = await config.integration.read({
|
||||
await config.integration.read({
|
||||
table: "test",
|
||||
view: "Grid view",
|
||||
})
|
||||
|
@ -51,7 +51,7 @@ describe("Airtable Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the update method with the correct params", async () => {
|
||||
const response = await config.integration.update({
|
||||
await config.integration.update({
|
||||
table: "table",
|
||||
id: "123",
|
||||
json: {
|
||||
|
@ -68,7 +68,7 @@ describe("Airtable Integration", () => {
|
|||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const ids = [1, 2, 3, 4]
|
||||
const response = await config.integration.delete({
|
||||
await config.integration.delete({
|
||||
ids,
|
||||
})
|
||||
expect(config.client.destroy).toHaveBeenCalledWith(ids)
|
||||
|
|
|
@ -12,7 +12,6 @@ class TestConfiguration {
|
|||
|
||||
describe("ArangoDB Integration", () => {
|
||||
let config: any
|
||||
let indexName = "Users"
|
||||
|
||||
beforeEach(() => {
|
||||
config = new TestConfiguration()
|
||||
|
@ -23,7 +22,7 @@ describe("ArangoDB Integration", () => {
|
|||
json: "Hello",
|
||||
}
|
||||
|
||||
const response = await config.integration.create(body)
|
||||
await config.integration.create(body)
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(
|
||||
`INSERT Hello INTO collection RETURN NEW`
|
||||
)
|
||||
|
@ -33,7 +32,7 @@ describe("ArangoDB Integration", () => {
|
|||
const query = {
|
||||
sql: `test`,
|
||||
}
|
||||
const response = await config.integration.read(query)
|
||||
await config.integration.read(query)
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(query.sql)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -79,7 +79,7 @@ describe("CouchDB Integration", () => {
|
|||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const id = "1234"
|
||||
const response = await config.integration.delete({ id })
|
||||
await config.integration.delete({ id })
|
||||
expect(config.integration.client.get).toHaveBeenCalledWith(id)
|
||||
expect(config.integration.client.remove).toHaveBeenCalled()
|
||||
})
|
||||
|
|
|
@ -19,7 +19,7 @@ describe("DynamoDB Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the create method with the correct params", async () => {
|
||||
const response = await config.integration.create({
|
||||
await config.integration.create({
|
||||
table: tableName,
|
||||
json: {
|
||||
Name: "John",
|
||||
|
@ -66,7 +66,7 @@ describe("DynamoDB Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the get method with the correct params", async () => {
|
||||
const response = await config.integration.get({
|
||||
await config.integration.get({
|
||||
table: tableName,
|
||||
json: {
|
||||
Id: 123,
|
||||
|
@ -80,7 +80,7 @@ describe("DynamoDB Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the update method with the correct params", async () => {
|
||||
const response = await config.integration.update({
|
||||
await config.integration.update({
|
||||
table: tableName,
|
||||
json: {
|
||||
Name: "John",
|
||||
|
@ -93,7 +93,7 @@ describe("DynamoDB Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const response = await config.integration.delete({
|
||||
await config.integration.delete({
|
||||
table: tableName,
|
||||
json: {
|
||||
Name: "John",
|
||||
|
|
|
@ -22,7 +22,7 @@ describe("Elasticsearch Integration", () => {
|
|||
const body = {
|
||||
name: "Hello",
|
||||
}
|
||||
const response = await config.integration.create({
|
||||
await config.integration.create({
|
||||
index: indexName,
|
||||
json: body,
|
||||
})
|
||||
|
|
|
@ -81,7 +81,7 @@ describe("Firebase Integration", () => {
|
|||
})
|
||||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const response = await config.integration.delete({
|
||||
await config.integration.delete({
|
||||
table: tableName,
|
||||
json: {
|
||||
id: "test",
|
||||
|
|
|
@ -1,57 +0,0 @@
|
|||
import { default as MSSQLIntegration } from "../microsoftSqlServer"
|
||||
|
||||
jest.mock("mssql")
|
||||
|
||||
class TestConfiguration {
|
||||
integration: any
|
||||
|
||||
constructor(config: any = {}) {
|
||||
this.integration = new MSSQLIntegration.integration(config)
|
||||
}
|
||||
}
|
||||
|
||||
describe("MS SQL Server Integration", () => {
|
||||
let config: any
|
||||
|
||||
beforeEach(async () => {
|
||||
config = new TestConfiguration()
|
||||
})
|
||||
|
||||
describe("check sql used", () => {
|
||||
beforeEach(async () => {
|
||||
await config.integration.connect()
|
||||
})
|
||||
|
||||
it("calls the create method with the correct params", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
const response = await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.request).toHaveBeenCalledWith()
|
||||
expect(response[0]).toEqual(sql)
|
||||
})
|
||||
|
||||
it("calls the read method with the correct params", async () => {
|
||||
const sql = "select * from users;"
|
||||
const response = await config.integration.read({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.request).toHaveBeenCalledWith()
|
||||
expect(response[0]).toEqual(sql)
|
||||
})
|
||||
})
|
||||
|
||||
describe("no rows returned", () => {
|
||||
beforeEach(async () => {
|
||||
await config.integration.connect()
|
||||
})
|
||||
|
||||
it("returns the correct response when the create response has no rows", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
const response = await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(response[0]).toEqual(sql)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,152 +0,0 @@
|
|||
import { default as MySQLIntegration, bindingTypeCoerce } from "../mysql"
|
||||
|
||||
jest.mock("mysql2")
|
||||
|
||||
class TestConfiguration {
|
||||
integration: any
|
||||
|
||||
constructor(config: any = { ssl: {} }) {
|
||||
this.integration = new MySQLIntegration.integration(config)
|
||||
}
|
||||
}
|
||||
|
||||
describe("MySQL Integration", () => {
|
||||
let config: any
|
||||
|
||||
beforeEach(() => {
|
||||
config = new TestConfiguration()
|
||||
})
|
||||
|
||||
it("calls the create method with the correct params", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the read method with the correct params", async () => {
|
||||
const sql = "select * from users;"
|
||||
await config.integration.read({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the update method with the correct params", async () => {
|
||||
const sql = "update table users set name = 'test';"
|
||||
await config.integration.update({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const sql = "delete from users where name = 'todelete';"
|
||||
await config.integration.delete({
|
||||
sql,
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
describe("no rows returned", () => {
|
||||
it("returns the correct response when the create response has no rows", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
const response = await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ created: true }])
|
||||
})
|
||||
|
||||
it("returns the correct response when the update response has no rows", async () => {
|
||||
const sql = "update table users set name = 'test';"
|
||||
const response = await config.integration.update({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ updated: true }])
|
||||
})
|
||||
|
||||
it("returns the correct response when the delete response has no rows", async () => {
|
||||
const sql = "delete from users where name = 'todelete';"
|
||||
const response = await config.integration.delete({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ deleted: true }])
|
||||
})
|
||||
})
|
||||
|
||||
describe("binding type coerce", () => {
|
||||
it("ignores non-string types", async () => {
|
||||
const sql = "select * from users;"
|
||||
const date = new Date()
|
||||
await config.integration.read({
|
||||
sql,
|
||||
bindings: [11, date, ["a", "b", "c"], { id: 1 }],
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [
|
||||
11,
|
||||
date,
|
||||
["a", "b", "c"],
|
||||
{ id: 1 },
|
||||
])
|
||||
})
|
||||
|
||||
it("parses strings matching a number regex", async () => {
|
||||
const sql = "select * from users;"
|
||||
await config.integration.read({
|
||||
sql,
|
||||
bindings: ["101", "3.14"],
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(
|
||||
sql,
|
||||
[101, 3.14]
|
||||
)
|
||||
})
|
||||
|
||||
it("parses strings matching a valid date format", async () => {
|
||||
const sql = "select * from users;"
|
||||
await config.integration.read({
|
||||
sql,
|
||||
bindings: [
|
||||
"2001-10-30",
|
||||
"2010-09-01T13:30:59.123Z",
|
||||
"2021-02-05 12:01 PM",
|
||||
],
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [
|
||||
new Date("2001-10-30T00:00:00.000Z"),
|
||||
new Date("2010-09-01T13:30:59.123Z"),
|
||||
new Date("2021-02-05T12:01:00.000Z"),
|
||||
])
|
||||
})
|
||||
|
||||
it("does not parse string matching a valid array of numbers as date", async () => {
|
||||
const sql = "select * from users;"
|
||||
await config.integration.read({
|
||||
sql,
|
||||
bindings: ["1,2,2017"],
|
||||
})
|
||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, [
|
||||
"1,2,2017",
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("bindingTypeCoercion", () => {
|
||||
it("shouldn't coerce something that looks like a date", () => {
|
||||
const response = bindingTypeCoerce(["202205-1500"])
|
||||
expect(response[0]).toBe("202205-1500")
|
||||
})
|
||||
|
||||
it("should coerce an actual date", () => {
|
||||
const date = new Date("2023-06-13T14:24:22.620Z")
|
||||
const response = bindingTypeCoerce(["2023-06-13T14:24:22.620Z"])
|
||||
expect(response[0]).toEqual(date)
|
||||
})
|
||||
|
||||
it("should coerce numbers", () => {
|
||||
const response = bindingTypeCoerce(["0"])
|
||||
expect(response[0]).toEqual(0)
|
||||
})
|
||||
})
|
|
@ -44,7 +44,7 @@ describe("Oracle Integration", () => {
|
|||
|
||||
it("calls the update method with the correct params", async () => {
|
||||
const sql = "update table users set name = 'test';"
|
||||
const response = await config.integration.update({
|
||||
await config.integration.update({
|
||||
sql,
|
||||
})
|
||||
expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
|
||||
|
|
|
@ -1,83 +0,0 @@
|
|||
const pg = require("pg")
|
||||
|
||||
import { default as PostgresIntegration } from "../postgres"
|
||||
|
||||
jest.mock("pg")
|
||||
|
||||
class TestConfiguration {
|
||||
integration: any
|
||||
|
||||
constructor(config: any = {}) {
|
||||
this.integration = new PostgresIntegration.integration(config)
|
||||
}
|
||||
}
|
||||
|
||||
describe("Postgres Integration", () => {
|
||||
let config: any
|
||||
|
||||
beforeEach(() => {
|
||||
config = new TestConfiguration()
|
||||
})
|
||||
|
||||
it("calls the create method with the correct params", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the read method with the correct params", async () => {
|
||||
const sql = "select * from users;"
|
||||
await config.integration.read({
|
||||
sql,
|
||||
})
|
||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the update method with the correct params", async () => {
|
||||
const sql = "update table users set name = 'test';"
|
||||
const response = await config.integration.update({
|
||||
sql,
|
||||
})
|
||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
it("calls the delete method with the correct params", async () => {
|
||||
const sql = "delete from users where name = 'todelete';"
|
||||
await config.integration.delete({
|
||||
sql,
|
||||
})
|
||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, [])
|
||||
})
|
||||
|
||||
describe("no rows returned", () => {
|
||||
beforeEach(() => {
|
||||
pg.queryMock.mockImplementation(() => ({ rows: [] }))
|
||||
})
|
||||
|
||||
it("returns the correct response when the create response has no rows", async () => {
|
||||
const sql = "insert into users (name, age) values ('Joe', 123);"
|
||||
const response = await config.integration.create({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ created: true }])
|
||||
})
|
||||
|
||||
it("returns the correct response when the update response has no rows", async () => {
|
||||
const sql = "update table users set name = 'test';"
|
||||
const response = await config.integration.update({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ updated: true }])
|
||||
})
|
||||
|
||||
it("returns the correct response when the delete response has no rows", async () => {
|
||||
const sql = "delete from users where name = 'todelete';"
|
||||
const response = await config.integration.delete({
|
||||
sql,
|
||||
})
|
||||
expect(response).toEqual([{ deleted: true }])
|
||||
})
|
||||
})
|
||||
})
|
|
@ -70,7 +70,7 @@ describe("REST Integration", () => {
|
|||
Accept: "text/html",
|
||||
},
|
||||
}
|
||||
const response = await config.integration.read(query)
|
||||
await config.integration.read(query)
|
||||
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
|
||||
headers: {
|
||||
Accept: "text/html",
|
||||
|
@ -91,7 +91,7 @@ describe("REST Integration", () => {
|
|||
name: "test",
|
||||
}),
|
||||
}
|
||||
const response = await config.integration.update(query)
|
||||
await config.integration.update(query)
|
||||
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
|
||||
method: "PUT",
|
||||
body: '{"name":"test"}',
|
||||
|
@ -111,7 +111,7 @@ describe("REST Integration", () => {
|
|||
name: "test",
|
||||
}),
|
||||
}
|
||||
const response = await config.integration.delete(query)
|
||||
await config.integration.delete(query)
|
||||
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
|
||||
method: "DELETE",
|
||||
headers: HEADERS,
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
const AWS = require("aws-sdk")
|
||||
|
||||
import { default as S3Integration } from "../s3"
|
||||
|
||||
jest.mock("aws-sdk")
|
||||
|
|
|
@ -41,6 +41,9 @@ export async function datasource(): Promise<Datasource> {
|
|||
port,
|
||||
user: "sa",
|
||||
password: "Password_123",
|
||||
options: {
|
||||
encrypt: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
import { utils } from "@budibase/shared-core"
|
||||
import environment from "../../environment"
|
||||
import fs from "fs"
|
||||
|
||||
export const enum BundleType {
|
||||
|
|
|
@ -8,11 +8,10 @@ import {
|
|||
import { context, logging } from "@budibase/backend-core"
|
||||
import tracer from "dd-trace"
|
||||
import { IsolatedVM } from "./vm"
|
||||
import type { VM } from "@budibase/types"
|
||||
|
||||
export function init() {
|
||||
setJSRunner((js: string, ctx: Record<string, any>) => {
|
||||
return tracer.trace("runJS", {}, span => {
|
||||
return tracer.trace("runJS", {}, () => {
|
||||
try {
|
||||
// Reuse an existing isolate from context, or make a new one
|
||||
const bbCtx = context.getCurrentContext()
|
||||
|
@ -36,6 +35,7 @@ export function init() {
|
|||
// Because we can't pass functions into an Isolate, we remove them from
|
||||
// the passed context and rely on the withHelpers() method to add them
|
||||
// back in.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { helpers, snippets, ...rest } = ctx
|
||||
return vm.withContext(rest, () => vm.execute(js))
|
||||
} catch (error: any) {
|
||||
|
|
|
@ -13,7 +13,7 @@ export default async (ctx: Ctx, next: any) => {
|
|||
let errors = []
|
||||
for (let fn of current.cleanup) {
|
||||
try {
|
||||
await tracer.trace("cleanup", async span => {
|
||||
await tracer.trace("cleanup", async () => {
|
||||
await fn()
|
||||
})
|
||||
} catch (e) {
|
||||
|
|
|
@ -11,7 +11,6 @@ import {
|
|||
|
||||
import authorizedMiddleware from "../authorized"
|
||||
import env from "../../environment"
|
||||
import { generateTableID, generateViewID } from "../../db/utils"
|
||||
import { generator, mocks } from "@budibase/backend-core/tests"
|
||||
import { initProMocks } from "../../tests/utilities/mocks/pro"
|
||||
import { getResourcePerms } from "../../sdk/app/permissions"
|
||||
|
|
|
@ -32,10 +32,7 @@ export default async (ctx: Ctx<Row>, next: Next) => {
|
|||
}
|
||||
|
||||
// have to mutate the koa context, can't return
|
||||
export async function trimViewFields<T extends Row>(
|
||||
body: Row,
|
||||
viewId: string
|
||||
): Promise<void> {
|
||||
export async function trimViewFields(body: Row, viewId: string): Promise<void> {
|
||||
const view = await sdk.views.get(viewId)
|
||||
const allowedKeys = sdk.views.allowedFields(view)
|
||||
// have to mutate the context, can't update reference
|
||||
|
|
|
@ -43,7 +43,7 @@ export const backfill = async (
|
|||
}
|
||||
|
||||
if (user.roles) {
|
||||
for (const [appId, role] of Object.entries(user.roles)) {
|
||||
for (const [, role] of Object.entries(user.roles)) {
|
||||
await events.role.assigned(user, role, timestamp)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,6 @@ import env from "../environment"
|
|||
// migration functions
|
||||
import * as userEmailViewCasing from "./functions/userEmailViewCasing"
|
||||
import * as syncQuotas from "./functions/syncQuotas"
|
||||
import * as syncUsers from "./functions/usageQuotas/syncUsers"
|
||||
import * as appUrls from "./functions/appUrls"
|
||||
import * as tableSettings from "./functions/tableSettings"
|
||||
import * as backfill from "./functions/backfill"
|
||||
|
|
|
@ -3,11 +3,7 @@ import { db as dbCore, context, logging, roles } from "@budibase/backend-core"
|
|||
import { User, ContextUser, UserGroup } from "@budibase/types"
|
||||
import { sdk as proSdk } from "@budibase/pro"
|
||||
import sdk from "../../"
|
||||
import {
|
||||
getGlobalUsers,
|
||||
getRawGlobalUsers,
|
||||
processUser,
|
||||
} from "../../../utilities/global"
|
||||
import { getRawGlobalUsers, processUser } from "../../../utilities/global"
|
||||
import { generateUserMetadataID, InternalTables } from "../../../db/utils"
|
||||
|
||||
type DeletedUser = { _id: string; deleted: boolean }
|
||||
|
|
|
@ -6,7 +6,7 @@ import EventEmitter from "events"
|
|||
import { UserGroup, UserMetadata, UserRoles, User } from "@budibase/types"
|
||||
|
||||
const config = new TestConfiguration()
|
||||
let app, group: UserGroup, groupUser: User
|
||||
let group: UserGroup, groupUser: User
|
||||
const ROLE_ID = roles.BUILTIN_ROLE_IDS.BASIC
|
||||
|
||||
const emitter = new EventEmitter()
|
||||
|
@ -36,7 +36,7 @@ function waitForUpdate(opts: { group?: boolean }) {
|
|||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
app = await config.init("syncApp")
|
||||
await config.init("syncApp")
|
||||
})
|
||||
|
||||
async function createUser(email: string, roles: UserRoles, builder?: boolean) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { db, env, roles } from "@budibase/backend-core"
|
||||
import { db, roles } from "@budibase/backend-core"
|
||||
import { features } from "@budibase/pro"
|
||||
import {
|
||||
DocumentType,
|
||||
|
@ -133,7 +133,7 @@ export async function getDependantResources(
|
|||
}
|
||||
|
||||
const permissions = await getResourcePerms(view.id)
|
||||
for (const [level, roleInfo] of Object.entries(permissions)) {
|
||||
for (const [, roleInfo] of Object.entries(permissions)) {
|
||||
if (roleInfo.type === PermissionSource.INHERITED) {
|
||||
dependants[VirtualDocumentType.VIEW] ??= new Set()
|
||||
dependants[VirtualDocumentType.VIEW].add(view.id)
|
||||
|
|
|
@ -17,8 +17,6 @@ import {
|
|||
generator,
|
||||
} from "@budibase/backend-core/tests"
|
||||
|
||||
jest.unmock("mysql2/promise")
|
||||
|
||||
jest.setTimeout(30000)
|
||||
|
||||
describe("external search", () => {
|
||||
|
|
|
@ -351,6 +351,7 @@ describe("table sdk", () => {
|
|||
const view: ViewV2 = {
|
||||
...basicView,
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { name, description, ...newTableSchema } = basicTable.schema
|
||||
|
||||
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
|
||||
|
@ -364,6 +365,7 @@ describe("table sdk", () => {
|
|||
const view: ViewV2 = {
|
||||
...basicView,
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { description, ...newTableSchema } = {
|
||||
...basicTable.schema,
|
||||
updatedDescription: {
|
||||
|
@ -448,6 +450,7 @@ describe("table sdk", () => {
|
|||
hiddenField: { visible: false },
|
||||
},
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { name, description, ...newTableSchema } = basicTable.schema
|
||||
|
||||
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
|
||||
|
@ -471,6 +474,7 @@ describe("table sdk", () => {
|
|||
hiddenField: { visible: false },
|
||||
},
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { name, description, ...newTableSchema } = {
|
||||
...basicTable.schema,
|
||||
newField1: {
|
||||
|
@ -502,6 +506,7 @@ describe("table sdk", () => {
|
|||
hiddenField: { visible: false },
|
||||
},
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { description, ...newTableSchema } = {
|
||||
...basicTable.schema,
|
||||
updatedDescription: {
|
||||
|
|
|
@ -49,7 +49,6 @@ import {
|
|||
AuthToken,
|
||||
Automation,
|
||||
CreateViewRequest,
|
||||
Ctx,
|
||||
Datasource,
|
||||
FieldType,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
|
|
|
@ -6,7 +6,6 @@ import {
|
|||
PaginatedSearchRowResponse,
|
||||
} from "@budibase/types"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
export class ViewV2API extends TestAPI {
|
||||
|
|