Merge remote-tracking branch 'origin/master' into feat/pc-ts-conversions-2
commit 86249e0e1d
@@ -165,6 +165,7 @@ jobs:
oracle,
sqs,
elasticsearch,
dynamodb,
none,
]
steps:
@@ -205,6 +206,8 @@ jobs:
docker pull postgres:9.5.25
elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
elif [ "${{ matrix.datasource }}" == "dynamodb" ]; then
docker pull amazon/dynamodb-local@${{ steps.dotenv.outputs.DYNAMODB_SHA }}
fi
docker pull minio/minio &
docker pull redis &

@@ -88,6 +88,16 @@ export default async function setup() {
content: `
[log]
level = warn

[httpd]
socket_options = [{nodelay, true}]

[couchdb]
single_node = true

[cluster]
n = 1
q = 1
`,
target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
},

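For context: the { content, target } pair above matches the shape accepted by testcontainers' withCopyContentToContainer, which is presumably how this setup writes the ini into the CouchDB container before it boots. A minimal sketch under that assumption - the image name and port are illustrative, not taken from the diff:

    import { GenericContainer, StartedTestContainer } from "testcontainers"

    // Sketch: start a throwaway CouchDB with the same single-node ini the
    // diff writes to /opt/couchdb/etc/local.d/test-couchdb.ini.
    async function startTestCouch(): Promise<StartedTestContainer> {
      const ini = [
        "[couchdb]",
        "single_node = true",
        "",
        "[cluster]",
        "n = 1",
        "q = 1",
      ].join("\n")

      return new GenericContainer("budibase/couchdb") // image name is an assumption
        .withCopyContentToContainer([
          { content: ini, target: "/opt/couchdb/etc/local.d/test-couchdb.ini" },
        ])
        .withExposedPorts(5984)
        .start()
    }
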
@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
-"version": "3.4.22",
+"version": "3.5.0",
"npmClient": "yarn",
"concurrency": 20,
"command": {

@@ -18,7 +18,6 @@
"eslint-plugin-jest": "28.9.0",
"eslint-plugin-local-rules": "3.0.2",
"eslint-plugin-svelte": "2.46.1",
-"svelte-preprocess": "^6.0.3",
"husky": "^8.0.3",
"kill-port": "^1.6.1",
"lerna": "7.4.2",
@@ -29,7 +28,9 @@
"prettier-plugin-svelte": "^2.3.0",
"proper-lockfile": "^4.1.2",
"svelte": "4.2.19",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "0.43.0",
"svelte-preprocess": "^6.0.3",
"typescript": "5.7.2",
"typescript-eslint": "8.17.0",
"yargs": "^17.7.2"

@@ -222,9 +222,12 @@ export class DatabaseImpl implements Database {
}

async getMultiple<T extends Document>(
-ids: string[],
+ids?: string[],
opts?: { allowMissing?: boolean; excludeDocs?: boolean }
): Promise<T[]> {
+if (!ids || ids.length === 0) {
+return []
+}
// get unique
ids = [...new Set(ids)]
const includeDocs = !opts?.excludeDocs
@@ -249,7 +252,7 @@ export class DatabaseImpl implements Database {
if (!opts?.allowMissing && someMissing) {
const missing = response.rows.filter(row => rowUnavailable(row))
const missingIds = missing.map(row => row.key).join(", ")
-throw new Error(`Unable to get documents: ${missingIds}`)
+throw new Error(`Unable to get bulk documents: ${missingIds}`)
}
return rows.map(row => (includeDocs ? row.doc! : row.value))
}

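The two hunks above make getMultiple tolerate a missing or empty id list and reword its error. A hedged usage sketch of the new contract - the caller and its typing are illustrative, not part of this commit:

    import type { Database, Document } from "@budibase/types"

    // Sketch: fetch referenced docs where the id list may be undefined and
    // some ids may point at deleted docs. With no ids the call now resolves
    // to [] without hitting CouchDB; with allowMissing: true the
    // "Unable to get bulk documents" error is not thrown for missing ids.
    async function fetchExisting<T extends Document>(
      db: Database,
      ids?: string[]
    ): Promise<T[]> {
      return db.getMultiple<T>(ids, { allowMissing: true })
    }
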
@@ -52,13 +52,13 @@ export class DDInstrumentedDatabase implements Database {
}

getMultiple<T extends Document>(
-ids: string[],
+ids?: string[],
opts?: { allowMissing?: boolean | undefined } | undefined
): Promise<T[]> {
return tracer.trace("db.getMultiple", async span => {
span.addTags({
db_name: this.name,
-num_docs: ids.length,
+num_docs: ids?.length || 0,
allow_missing: opts?.allowMissing,
})
const docs = await this.db.getMultiple<T>(ids, opts)

@@ -3,7 +3,6 @@ import { newid } from "../utils"
import { Queue, QueueOptions, JobOptions } from "./queue"
import { helpers } from "@budibase/shared-core"
import { Job, JobId, JobInformation } from "bull"
-import { cloneDeep } from "lodash"

function jobToJobInformation(job: Job): JobInformation {
let cron = ""
@@ -88,9 +87,7 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
*/
async process(concurrencyOrFunc: number | any, func?: any) {
func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
-this._emitter.on("message", async msg => {
-const message = cloneDeep(msg)
-
+this._emitter.on("message", async message => {
// For the purpose of testing, don't trigger cron jobs immediately.
// Require the test to trigger them manually with timestamps.
if (!message.manualTrigger && message.opts?.repeat != null) {
@@ -165,6 +162,9 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
opts,
}
this._messages.push(message)
+if (this._messages.length > 1000) {
+this._messages.shift()
+}
this._addCount++
this._emitter.emit("message", message)
}

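The new guard above caps the in-memory test queue's message history at 1000 entries so long test runs don't grow without bound. A small stand-alone sketch of the same bounded-buffer idea, with illustrative names:

    import { EventEmitter } from "events"

    // Sketch: an emitter-backed queue that keeps only the most recent N messages.
    class BoundedMessageLog<T> {
      private emitter = new EventEmitter()
      private messages: T[] = []

      constructor(private readonly limit = 1000) {}

      add(message: T) {
        this.messages.push(message)
        // Drop the oldest entry once the cap is exceeded, FIFO-style.
        if (this.messages.length > this.limit) {
          this.messages.shift()
        }
        this.emitter.emit("message", message)
      }

      onMessage(handler: (message: T) => void) {
        this.emitter.on("message", handler)
      }
    }
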
@@ -26,8 +26,9 @@ import {
import {
getAccountHolderFromUsers,
isAdmin,
-isCreator,
+creatorsInList,
validateUniqueUser,
+isCreatorAsync,
} from "./utils"
import {
getFirstPlatformUser,
@@ -261,8 +262,16 @@ export class UserDB {
}

const change = dbUser ? 0 : 1 // no change if there is existing user
-const creatorsChange =
-(await isCreator(dbUser)) !== (await isCreator(user)) ? 1 : 0

+let creatorsChange = 0
+if (dbUser) {
+const [isDbUserCreator, isUserCreator] = await creatorsInList([
+dbUser,
+user,
+])
+creatorsChange = isDbUserCreator !== isUserCreator ? 1 : 0
+}

return UserDB.quotas.addUsers(change, creatorsChange, async () => {
if (!opts.isAccountHolder) {
await validateUniqueUser(email, tenantId)
@@ -353,7 +362,7 @@ export class UserDB {
}
newUser.userGroups = groups || []
newUsers.push(newUser)
-if (await isCreator(newUser)) {
+if (await isCreatorAsync(newUser)) {
newCreators.push(newUser)
}
}
@@ -453,10 +462,8 @@ export class UserDB {
}))
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)

-const creatorsEval = await Promise.all(usersToDelete.map(isCreator))
-const creatorsToDeleteCount = creatorsEval.filter(
-creator => !!creator
-).length
+const creatorsEval = await creatorsInList(usersToDelete)
+const creatorsToDeleteCount = creatorsEval.filter(creator => creator).length

const ssoUsersToDelete: AnyDocument[] = []
for (let user of usersToDelete) {
@@ -533,7 +540,7 @@ export class UserDB {

await db.remove(userId, dbUser._rev!)

-const creatorsToDelete = (await isCreator(dbUser)) ? 1 : 0
+const creatorsToDelete = (await isCreatorAsync(dbUser)) ? 1 : 0
await UserDB.quotas.removeUsers(1, creatorsToDelete)
await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId)

@@ -2,39 +2,39 @@ import { User, UserGroup } from "@budibase/types"
import { generator, structures } from "../../../tests"
import { DBTestConfiguration } from "../../../tests/extra"
import { getGlobalDB } from "../../context"
-import { isCreator } from "../utils"
+import { isCreatorSync, creatorsInList } from "../utils"

const config = new DBTestConfiguration()

describe("Users", () => {
-it("User is a creator if it is configured as a global builder", async () => {
+it("User is a creator if it is configured as a global builder", () => {
const user: User = structures.users.user({ builder: { global: true } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

-it("User is a creator if it is configured as a global admin", async () => {
+it("User is a creator if it is configured as a global admin", () => {
const user: User = structures.users.user({ admin: { global: true } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

-it("User is a creator if it is configured with creator permission", async () => {
+it("User is a creator if it is configured with creator permission", () => {
const user: User = structures.users.user({ builder: { creator: true } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

-it("User is a creator if it is a builder in some application", async () => {
+it("User is a creator if it is a builder in some application", () => {
const user: User = structures.users.user({ builder: { apps: ["app1"] } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

-it("User is a creator if it has CREATOR permission in some application", async () => {
+it("User is a creator if it has CREATOR permission in some application", () => {
const user: User = structures.users.user({ roles: { app1: "CREATOR" } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

-it("User is a creator if it has ADMIN permission in some application", async () => {
+it("User is a creator if it has ADMIN permission in some application", () => {
const user: User = structures.users.user({ roles: { app1: "ADMIN" } })
-expect(await isCreator(user)).toBe(true)
+expect(isCreatorSync(user, [])).toBe(true)
})

it("User is a creator if it remains to a group with ADMIN permissions", async () => {
@@ -59,7 +59,7 @@ describe("Users", () => {
await db.put(group)
for (let user of users) {
await db.put(user)
-const creator = await isCreator(user)
+const creator = (await creatorsInList([user]))[0]
expect(creator).toBe(true)
}
})

@@ -22,7 +22,7 @@ import {
} from "@budibase/types"
import * as context from "../context"
import { getGlobalDB } from "../context"
-import { isCreator } from "./utils"
+import { creatorsInList } from "./utils"
import { UserDB } from "./db"
import { dataFilters } from "@budibase/shared-core"

@@ -305,8 +305,8 @@ export async function getCreatorCount() {
let creators = 0
async function iterate(startPage?: string) {
const page = await paginatedUsers({ bookmark: startPage })
-const creatorsEval = await Promise.all(page.data.map(isCreator))
-creators += creatorsEval.filter(creator => !!creator).length
+const creatorsEval = await creatorsInList(page.data)
+creators += creatorsEval.filter(creator => creator).length
if (page.hasNextPage) {
await iterate(page.nextPage)
}

@@ -16,30 +16,47 @@ export const hasAdminPermissions = sdk.users.hasAdminPermissions
export const hasBuilderPermissions = sdk.users.hasBuilderPermissions
export const hasAppBuilderPermissions = sdk.users.hasAppBuilderPermissions

-export async function isCreator(user?: User | ContextUser) {
+export async function creatorsInList(
+users: (User | ContextUser)[],
+groups?: UserGroup[]
+) {
+const groupIds = [
+...new Set(
+users.filter(user => user.userGroups).flatMap(user => user.userGroups!)
+),
+]
+const db = context.getGlobalDB()
+groups = await db.getMultiple<UserGroup>(groupIds, { allowMissing: true })
+return users.map(user => isCreatorSync(user, groups))
+}
+
+// fetches groups if no provided, but is async and shouldn't be looped with
+export async function isCreatorAsync(user: User | ContextUser) {
+let groups: UserGroup[] = []
+if (user.userGroups) {
+const db = context.getGlobalDB()
+groups = await db.getMultiple<UserGroup>(user.userGroups)
+}
+return isCreatorSync(user, groups)
+}
+
+export function isCreatorSync(user: User | ContextUser, groups?: UserGroup[]) {
const isCreatorByUserDefinition = sdk.users.isCreator(user)
if (!isCreatorByUserDefinition && user) {
-return await isCreatorByGroupMembership(user)
+return isCreatorByGroupMembership(user, groups)
}
return isCreatorByUserDefinition
}

-async function isCreatorByGroupMembership(user?: User | ContextUser) {
-const userGroups = user?.userGroups || []
-if (userGroups.length > 0) {
-const db = context.getGlobalDB()
-const groups: UserGroup[] = []
-for (let groupId of userGroups) {
-try {
-const group = await db.get<UserGroup>(groupId)
-groups.push(group)
-} catch (e: any) {
-if (e.error !== "not_found") {
-throw e
-}
-}
-}
-return groups.some(group =>
+function isCreatorByGroupMembership(
+user: User | ContextUser,
+groups?: UserGroup[]
+) {
+const userGroups = groups?.filter(
+group => user.userGroups?.indexOf(group._id!) !== -1
+)
+if (userGroups && userGroups.length > 0) {
+return userGroups.some(group =>
Object.values(group.roles || {}).includes(BUILTIN_ROLE_IDS.ADMIN)
)
}

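This is the core of the refactor: instead of resolving each user's groups one db.get at a time inside a Promise.all, the unique group ids are collected once, fetched with the new bulk getMultiple, and each user is then evaluated synchronously. A condensed sketch of the pattern using only shapes visible in this diff - the trimmed UserLike/GroupLike interfaces, the builder.global shortcut, and the literal "ADMIN" role id stand in for the real Budibase types and sdk.users.isCreator check:

    // Sketch of the batched "is creator" check: one bulk read instead of
    // users x groups individual reads.
    interface UserLike {
      userGroups?: string[]
      builder?: { global?: boolean } // stand-in for the full user-definition check
    }
    interface GroupLike {
      _id: string
      roles?: Record<string, string>
    }
    interface BulkDb {
      getMultiple(ids: string[], opts?: { allowMissing?: boolean }): Promise<GroupLike[]>
    }

    async function creatorsInListSketch(db: BulkDb, users: UserLike[]): Promise<boolean[]> {
      // Union of every referenced group id, deduplicated.
      const groupIds = [...new Set(users.flatMap(u => u.userGroups || []))]
      const groups = groupIds.length
        ? await db.getMultiple(groupIds, { allowMissing: true })
        : []
      return users.map(user => {
        if (user.builder?.global) return true
        const memberGroups = groups.filter(g => user.userGroups?.includes(g._id))
        // "ADMIN" stands in for BUILTIN_ROLE_IDS.ADMIN.
        return memberGroups.some(g =>
          Object.values(g.roles || {}).includes("ADMIN")
        )
      })
    }
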
@@ -1,52 +0,0 @@
import { range } from "lodash/fp"
import { structures } from "../.."

jest.mock("../../../src/context")
jest.mock("../../../src/db")

import * as context from "../../../src/context"
import * as db from "../../../src/db"

import { getCreatorCount } from "../../../src/users/users"

describe("Users", () => {
let getGlobalDBMock: jest.SpyInstance
let paginationMock: jest.SpyInstance

beforeEach(() => {
jest.resetAllMocks()

getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
paginationMock = jest.spyOn(db, "pagination")

jest.spyOn(db, "getGlobalUserParams")
})

it("retrieves the number of creators", async () => {
const getUsers = (offset: number, limit: number, creators = false) => {
const opts = creators ? { builder: { global: true } } : undefined
return range(offset, limit).map(() => structures.users.user(opts))
}
const page1Data = getUsers(0, 8)
const page2Data = getUsers(8, 12, true)
getGlobalDBMock.mockImplementation(() => ({
name: "fake-db",
allDocs: () => ({
rows: [...page1Data, ...page2Data],
}),
}))
paginationMock.mockImplementationOnce(() => ({
data: page1Data,
hasNextPage: true,
nextPage: "1",
}))
paginationMock.mockImplementation(() => ({
data: page2Data,
hasNextPage: false,
nextPage: undefined,
}))
const creatorsCount = await getCreatorCount()
expect(creatorsCount).toBe(4)
expect(paginationMock).toHaveBeenCalledTimes(2)
})
})

@ -1,22 +1,26 @@
|
|||
<script>
|
||||
<script lang="ts" context="module">
|
||||
type Option = any
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import Picker from "./Picker.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
export let value = []
|
||||
export let id = null
|
||||
export let placeholder = null
|
||||
export let disabled = false
|
||||
export let options = []
|
||||
export let getOptionLabel = option => option
|
||||
export let getOptionValue = option => option
|
||||
export let readonly = false
|
||||
export let autocomplete = false
|
||||
export let sort = false
|
||||
export let autoWidth = false
|
||||
export let searchTerm = null
|
||||
export let customPopoverHeight = undefined
|
||||
export let open = false
|
||||
export let loading
|
||||
export let value: string[] = []
|
||||
export let id: string | undefined = undefined
|
||||
export let placeholder: string | null = null
|
||||
export let disabled: boolean = false
|
||||
export let options: Option[] = []
|
||||
export let getOptionLabel = (option: Option, _index?: number) => option
|
||||
export let getOptionValue = (option: Option, _index?: number) => option
|
||||
export let readonly: boolean = false
|
||||
export let autocomplete: boolean = false
|
||||
export let sort: boolean = false
|
||||
export let autoWidth: boolean = false
|
||||
export let searchTerm: string | null = null
|
||||
export let customPopoverHeight: string | undefined = undefined
|
||||
export let open: boolean = false
|
||||
export let loading: boolean
|
||||
export let onOptionMouseenter = () => {}
|
||||
export let onOptionMouseleave = () => {}
|
||||
|
||||
|
@ -27,10 +31,15 @@
|
|||
$: optionLookupMap = getOptionLookupMap(options)
|
||||
|
||||
$: fieldText = getFieldText(arrayValue, optionLookupMap, placeholder)
|
||||
$: isOptionSelected = optionValue => selectedLookupMap[optionValue] === true
|
||||
$: isOptionSelected = (optionValue: string) =>
|
||||
selectedLookupMap[optionValue] === true
|
||||
$: toggleOption = makeToggleOption(selectedLookupMap, arrayValue)
|
||||
|
||||
const getFieldText = (value, map, placeholder) => {
|
||||
const getFieldText = (
|
||||
value: string[],
|
||||
map: Record<string, any> | null,
|
||||
placeholder: string | null
|
||||
) => {
|
||||
if (Array.isArray(value) && value.length > 0) {
|
||||
if (!map) {
|
||||
return ""
|
||||
|
@ -42,8 +51,8 @@
|
|||
}
|
||||
}
|
||||
|
||||
const getSelectedLookupMap = value => {
|
||||
let map = {}
|
||||
const getSelectedLookupMap = (value: string[]) => {
|
||||
const map: Record<string, boolean> = {}
|
||||
if (Array.isArray(value) && value.length > 0) {
|
||||
value.forEach(option => {
|
||||
if (option) {
|
||||
|
@ -54,22 +63,23 @@
|
|||
return map
|
||||
}
|
||||
|
||||
const getOptionLookupMap = options => {
|
||||
let map = null
|
||||
if (options?.length) {
|
||||
map = {}
|
||||
options.forEach((option, idx) => {
|
||||
const optionValue = getOptionValue(option, idx)
|
||||
if (optionValue != null) {
|
||||
map[optionValue] = getOptionLabel(option, idx) || ""
|
||||
}
|
||||
})
|
||||
const getOptionLookupMap = (options: Option[]) => {
|
||||
if (!options?.length) {
|
||||
return null
|
||||
}
|
||||
|
||||
const map: Record<string, any> = {}
|
||||
options.forEach((option, idx) => {
|
||||
const optionValue = getOptionValue(option, idx)
|
||||
if (optionValue != null) {
|
||||
map[optionValue] = getOptionLabel(option, idx) || ""
|
||||
}
|
||||
})
|
||||
return map
|
||||
}
|
||||
|
||||
const makeToggleOption = (map, value) => {
|
||||
return optionValue => {
|
||||
const makeToggleOption = (map: Record<string, boolean>, value: string[]) => {
|
||||
return (optionValue: string) => {
|
||||
if (map[optionValue]) {
|
||||
const filtered = value.filter(option => option !== optionValue)
|
||||
dispatch("change", filtered)
|
||||
|
|
|
@ -1,14 +1,14 @@
|
|||
<script>
|
||||
<script lang="ts">
|
||||
import "@spectrum-css/textfield/dist/index-vars.css"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
export let value = ""
|
||||
export let placeholder = null
|
||||
export let placeholder: string | undefined = undefined
|
||||
export let disabled = false
|
||||
export let readonly = false
|
||||
export let id = null
|
||||
export let height = null
|
||||
export let minHeight = null
|
||||
export let id: string | undefined = undefined
|
||||
export let height: string | number | undefined = undefined
|
||||
export let minHeight: string | number | undefined = undefined
|
||||
export const getCaretPosition = () => ({
|
||||
start: textarea.selectionStart,
|
||||
end: textarea.selectionEnd,
|
||||
|
@ -16,18 +16,21 @@
|
|||
export let align = null
|
||||
|
||||
let focus = false
|
||||
let textarea
|
||||
let textarea: any
|
||||
const dispatch = createEventDispatcher()
|
||||
const onChange = event => {
|
||||
const onChange = (event: any) => {
|
||||
dispatch("change", event.target.value)
|
||||
focus = false
|
||||
}
|
||||
|
||||
const getStyleString = (attribute, value) => {
|
||||
const getStyleString = (
|
||||
attribute: string,
|
||||
value: string | number | undefined
|
||||
) => {
|
||||
if (!attribute || value == null) {
|
||||
return ""
|
||||
}
|
||||
if (isNaN(value)) {
|
||||
if (typeof value === "number" && isNaN(value)) {
|
||||
return `${attribute}:${value};`
|
||||
}
|
||||
return `${attribute}:${value}px;`
|
||||
|
|
|
@ -1,21 +1,21 @@
|
|||
<script>
|
||||
<script lang="ts">
|
||||
import Field from "./Field.svelte"
|
||||
import TextArea from "./Core/TextArea.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
export let value = null
|
||||
export let label = null
|
||||
export let labelPosition = "above"
|
||||
export let placeholder = null
|
||||
export let value: string | undefined = undefined
|
||||
export let label: string | undefined = undefined
|
||||
export let labelPosition: string = "above"
|
||||
export let placeholder: string | undefined = undefined
|
||||
export let disabled = false
|
||||
export let error = null
|
||||
export let getCaretPosition = null
|
||||
export let height = null
|
||||
export let minHeight = null
|
||||
export let helpText = null
|
||||
export let error: string | undefined = undefined
|
||||
export let getCaretPosition: any = undefined
|
||||
export let height: string | number | undefined = undefined
|
||||
export let minHeight: string | number | undefined = undefined
|
||||
export let helpText: string | undefined = undefined
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const onChange = e => {
|
||||
const onChange = (e: any) => {
|
||||
value = e.detail
|
||||
dispatch("change", e.detail)
|
||||
}
|
||||
|
@ -24,7 +24,6 @@
|
|||
<Field {helpText} {label} {labelPosition} {error}>
|
||||
<TextArea
|
||||
bind:getCaretPosition
|
||||
{error}
|
||||
{disabled}
|
||||
{value}
|
||||
{placeholder}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
<script lang="ts">
|
||||
import "@spectrum-css/inlinealert/dist/index-vars.css"
|
||||
import Button from "../Button/Button.svelte"
|
||||
import Icon from "../Icon/Icon.svelte"
|
||||
|
||||
export let type: "info" | "error" | "success" | "help" | "negative" = "info"
|
||||
export let header: string = ""
|
||||
|
@ -8,6 +9,8 @@
|
|||
export let onConfirm: (() => void) | undefined = undefined
|
||||
export let buttonText: string = ""
|
||||
export let cta: boolean = false
|
||||
export let link: string = ""
|
||||
export let linkText: string = ""
|
||||
|
||||
$: icon = selectIcon(type)
|
||||
// if newlines used, convert them to different elements
|
||||
|
@ -49,6 +52,19 @@
|
|||
>
|
||||
</div>
|
||||
{/if}
|
||||
{#if link && linkText}
|
||||
<div id="docs-link">
|
||||
<a
|
||||
href={link}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
class="docs-link"
|
||||
>
|
||||
{linkText}
|
||||
<Icon name="LinkOut" size="XS" />
|
||||
</a>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
|
@ -64,4 +80,21 @@
|
|||
margin: 0;
|
||||
border-width: 1px;
|
||||
}
|
||||
|
||||
a {
|
||||
color: white;
|
||||
}
|
||||
|
||||
#docs-link {
|
||||
padding-top: 10px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
#docs-link > * {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
export let title
|
||||
export let icon = ""
|
||||
export let id
|
||||
export let id = undefined
|
||||
export let href = "#"
|
||||
export let link = false
|
||||
|
||||
|
|
|
@ -8,11 +8,13 @@
|
|||
export let invalid: boolean = false
|
||||
export let disabled: boolean = false
|
||||
export let closable: boolean = false
|
||||
export let emphasized: boolean = false
|
||||
</script>
|
||||
|
||||
<div
|
||||
class:is-invalid={invalid}
|
||||
class:is-disabled={disabled}
|
||||
class:is-emphasized={emphasized}
|
||||
class="spectrum-Tags-item"
|
||||
role="listitem"
|
||||
>
|
||||
|
@ -40,4 +42,9 @@
|
|||
margin-bottom: 0;
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.is-emphasized {
|
||||
border-color: var(--spectrum-global-color-blue-700);
|
||||
color: var(--spectrum-global-color-blue-700);
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -100,7 +100,6 @@
|
|||
"jest": "29.7.0",
|
||||
"jsdom": "^21.1.1",
|
||||
"resize-observer-polyfill": "^1.5.1",
|
||||
"svelte-check": "^4.1.0",
|
||||
"svelte-jester": "^1.3.2",
|
||||
"vite": "^4.5.0",
|
||||
"vite-plugin-static-copy": "^0.17.0",
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
import posthog from "posthog-js"
|
||||
import { Events } from "./constants"
|
||||
|
||||
export default class PosthogClient {
|
||||
constructor(token) {
|
||||
token: string
|
||||
initialised: boolean
|
||||
|
||||
constructor(token: string) {
|
||||
this.token = token
|
||||
this.initialised = false
|
||||
}
|
||||
|
||||
init() {
|
||||
|
@ -12,6 +15,8 @@ export default class PosthogClient {
|
|||
posthog.init(this.token, {
|
||||
autocapture: false,
|
||||
capture_pageview: false,
|
||||
// disable by default
|
||||
disable_session_recording: true,
|
||||
})
|
||||
posthog.set_config({ persistence: "cookie" })
|
||||
|
||||
|
@ -22,7 +27,7 @@ export default class PosthogClient {
|
|||
* Set the posthog context to the current user
|
||||
* @param {String} id - unique user id
|
||||
*/
|
||||
identify(id) {
|
||||
identify(id: string) {
|
||||
if (!this.initialised) return
|
||||
|
||||
posthog.identify(id)
|
||||
|
@ -32,7 +37,7 @@ export default class PosthogClient {
|
|||
* Update user metadata associated with current user in posthog
|
||||
* @param {Object} meta - user fields
|
||||
*/
|
||||
updateUser(meta) {
|
||||
updateUser(meta: Record<string, any>) {
|
||||
if (!this.initialised) return
|
||||
|
||||
posthog.people.set(meta)
|
||||
|
@ -43,28 +48,22 @@ export default class PosthogClient {
|
|||
* @param {String} event - event identifier
|
||||
* @param {Object} props - properties for the event
|
||||
*/
|
||||
captureEvent(eventName, props) {
|
||||
if (!this.initialised) return
|
||||
|
||||
props.sourceApp = "builder"
|
||||
posthog.capture(eventName, props)
|
||||
}
|
||||
|
||||
/**
|
||||
* Submit NPS feedback to posthog.
|
||||
* @param {Object} values - NPS Values
|
||||
*/
|
||||
npsFeedback(values) {
|
||||
if (!this.initialised) return
|
||||
|
||||
localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
|
||||
|
||||
const prefixedFeedback = {}
|
||||
for (let key in values) {
|
||||
prefixedFeedback[`feedback_${key}`] = values[key]
|
||||
captureEvent(event: string, props: Record<string, any>) {
|
||||
if (!this.initialised) {
|
||||
return
|
||||
}
|
||||
|
||||
posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
|
||||
props.sourceApp = "builder"
|
||||
posthog.capture(event, props)
|
||||
}
|
||||
|
||||
enableSessionRecording() {
|
||||
if (!this.initialised) {
|
||||
return
|
||||
}
|
||||
posthog.set_config({
|
||||
disable_session_recording: false,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
|
@ -31,6 +31,10 @@ class AnalyticsHub {
|
|||
posthog.captureEvent(eventName, props)
|
||||
}
|
||||
|
||||
enableSessionRecording() {
|
||||
posthog.enableSessionRecording()
|
||||
}
|
||||
|
||||
async logout() {
|
||||
posthog.logout()
|
||||
}
|
||||
|
|
|
@ -23,9 +23,8 @@
|
|||
let collectBlockAllowedSteps = [TriggerStepID.APP, TriggerStepID.WEBHOOK]
|
||||
let selectedAction
|
||||
let actions = Object.entries($automationStore.blockDefinitions.ACTION).filter(
|
||||
entry => {
|
||||
const [key] = entry
|
||||
return key !== AutomationActionStepId.BRANCH
|
||||
([key, action]) => {
|
||||
return key !== AutomationActionStepId.BRANCH && action.deprecated !== true
|
||||
}
|
||||
)
|
||||
let lockedFeatures = [
|
||||
|
@ -186,6 +185,10 @@
|
|||
</div>
|
||||
{:else if isDisabled}
|
||||
<Icon name="Help" tooltip={disabled()[idx].message} />
|
||||
{:else if action.new}
|
||||
<Tags>
|
||||
<Tag emphasized>New</Tag>
|
||||
</Tags>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
@ -227,6 +230,10 @@
|
|||
grid-gap: var(--spectrum-alias-grid-baseline);
|
||||
}
|
||||
|
||||
.item :global(.spectrum-Tags-itemLabel) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.item {
|
||||
cursor: pointer;
|
||||
grid-gap: var(--spectrum-alias-grid-margin-xsmall);
|
||||
|
@ -237,6 +244,8 @@
|
|||
border-radius: 5px;
|
||||
box-sizing: border-box;
|
||||
border-width: 2px;
|
||||
min-height: 3.5rem;
|
||||
display: flex;
|
||||
}
|
||||
.item:not(.disabled):hover,
|
||||
.selected {
|
||||
|
|
|
@ -18,8 +18,12 @@
|
|||
import AutomationBindingPanel from "@/components/common/bindings/ServerBindingPanel.svelte"
|
||||
import FlowItemHeader from "./FlowItemHeader.svelte"
|
||||
import FlowItemActions from "./FlowItemActions.svelte"
|
||||
import { automationStore, selectedAutomation } from "@/stores/builder"
|
||||
import { QueryUtils, Utils } from "@budibase/frontend-core"
|
||||
import {
|
||||
automationStore,
|
||||
selectedAutomation,
|
||||
evaluationContext,
|
||||
} from "@/stores/builder"
|
||||
import { QueryUtils, Utils, memo } from "@budibase/frontend-core"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { createEventDispatcher, getContext } from "svelte"
|
||||
import DragZone from "./DragZone.svelte"
|
||||
|
@ -34,11 +38,14 @@
|
|||
export let automation
|
||||
|
||||
const view = getContext("draggableView")
|
||||
const memoContext = memo({})
|
||||
|
||||
let drawer
|
||||
let open = true
|
||||
let confirmDeleteModal
|
||||
|
||||
$: memoContext.set($evaluationContext)
|
||||
|
||||
$: branch = step.inputs?.branches?.[branchIdx]
|
||||
$: editableConditionUI = branch.conditionUI || {}
|
||||
|
||||
|
@ -100,6 +107,7 @@
|
|||
allowOnEmpty={false}
|
||||
builderType={"condition"}
|
||||
docsURL={null}
|
||||
evaluationContext={$memoContext}
|
||||
/>
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
|
|
|
@ -1,6 +1,13 @@
|
|||
<script>
|
||||
import { automationStore, selectedAutomation } from "@/stores/builder"
|
||||
import { Icon, Body, AbsTooltip, StatusLight } from "@budibase/bbui"
|
||||
import {
|
||||
Icon,
|
||||
Body,
|
||||
AbsTooltip,
|
||||
StatusLight,
|
||||
Tags,
|
||||
Tag,
|
||||
} from "@budibase/bbui"
|
||||
import { externalActions } from "./ExternalActions"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { Features } from "@/constants/backend/automations"
|
||||
|
@ -24,6 +31,7 @@
|
|||
$: blockRefs = $selectedAutomation?.blockRefs || {}
|
||||
$: stepNames = automation?.definition.stepNames || {}
|
||||
$: allSteps = automation?.definition.steps || []
|
||||
$: blockDefinition = $automationStore.blockDefinitions.ACTION[block.stepId]
|
||||
$: automationName = itemName || stepNames?.[block.id] || block?.name || ""
|
||||
$: automationNameError = getAutomationNameError(automationName)
|
||||
$: status = updateStatus(testResult)
|
||||
|
@ -135,7 +143,16 @@
|
|||
{#if isHeaderTrigger}
|
||||
<Body size="XS"><b>Trigger</b></Body>
|
||||
{:else}
|
||||
<Body size="XS"><b>{isBranch ? "Branch" : "Step"}</b></Body>
|
||||
<Body size="XS">
|
||||
<div style="display: flex; gap: 0.5rem; align-items: center;">
|
||||
<b>{isBranch ? "Branch" : "Step"}</b>
|
||||
{#if blockDefinition.deprecated}
|
||||
<Tags>
|
||||
<Tag invalid>Deprecated</Tag>
|
||||
</Tags>
|
||||
{/if}
|
||||
</div>
|
||||
</Body>
|
||||
{/if}
|
||||
|
||||
{#if enableNaming}
|
||||
|
|
|
@ -102,6 +102,10 @@
|
|||
if (rowTriggers.includes(trigger?.event)) {
|
||||
const tableId = trigger?.inputs?.tableId
|
||||
|
||||
if (!jsonUpdate.row) {
|
||||
jsonUpdate.row = {}
|
||||
}
|
||||
|
||||
// Reset the tableId as it must match the trigger
|
||||
if (jsonUpdate?.row?.tableId !== tableId) {
|
||||
jsonUpdate.row.tableId = tableId
|
||||
|
@ -161,7 +165,7 @@
|
|||
block={trigger}
|
||||
on:update={e => {
|
||||
const { testData: updatedTestData } = e.detail
|
||||
testData = updatedTestData
|
||||
testData = parseTestData(updatedTestData)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
})
|
||||
|
||||
$: groupedAutomations = groupAutomations(filteredAutomations)
|
||||
|
||||
$: showNoResults = searchString && !filteredAutomations.length
|
||||
|
||||
const groupAutomations = automations => {
|
||||
|
@ -41,7 +40,6 @@
|
|||
for (let auto of automations) {
|
||||
let category = null
|
||||
let dataTrigger = false
|
||||
|
||||
// Group by datasource if possible
|
||||
if (dsTriggers.includes(auto.definition?.trigger?.stepId)) {
|
||||
if (auto.definition.trigger.inputs?.tableId) {
|
||||
|
@ -97,7 +95,10 @@
|
|||
{triggerGroup?.name}
|
||||
</div>
|
||||
{#each triggerGroup.entries as automation}
|
||||
<AutomationNavItem {automation} icon={triggerGroup.icon} />
|
||||
<AutomationNavItem
|
||||
{automation}
|
||||
icon={automation?.definition?.trigger?.icon}
|
||||
/>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
|
|
|
@ -18,10 +18,11 @@
|
|||
Toggle,
|
||||
Divider,
|
||||
Icon,
|
||||
CoreSelect,
|
||||
} from "@budibase/bbui"
|
||||
|
||||
import CreateWebhookModal from "@/components/automation/Shared/CreateWebhookModal.svelte"
|
||||
import { automationStore, tables } from "@/stores/builder"
|
||||
import { automationStore, tables, evaluationContext } from "@/stores/builder"
|
||||
import { environment } from "@/stores/portal"
|
||||
import WebhookDisplay from "../Shared/WebhookDisplay.svelte"
|
||||
import {
|
||||
|
@ -48,7 +49,13 @@
|
|||
EditorModes,
|
||||
} from "@/components/common/CodeEditor"
|
||||
import FilterBuilder from "@/components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
|
||||
import { QueryUtils, Utils, search, memo } from "@budibase/frontend-core"
|
||||
import {
|
||||
QueryUtils,
|
||||
Utils,
|
||||
search,
|
||||
memo,
|
||||
fetchData,
|
||||
} from "@budibase/frontend-core"
|
||||
import { getSchemaForDatasourcePlus } from "@/dataBinding"
|
||||
import { TriggerStepID, ActionStepID } from "@/constants/backend/automations"
|
||||
import { onMount, createEventDispatcher } from "svelte"
|
||||
|
@ -59,9 +66,13 @@
|
|||
AutomationStepType,
|
||||
AutomationActionStepId,
|
||||
AutomationCustomIOType,
|
||||
SortOrder,
|
||||
} from "@budibase/types"
|
||||
import PropField from "./PropField.svelte"
|
||||
import { utils } from "@budibase/shared-core"
|
||||
import DrawerBindableCodeEditorField from "@/components/common/bindings/DrawerBindableCodeEditorField.svelte"
|
||||
import { API } from "@/api"
|
||||
import InfoDisplay from "@/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/InfoDisplay.svelte"
|
||||
|
||||
export let automation
|
||||
export let block
|
||||
|
@ -74,6 +85,7 @@
|
|||
|
||||
// Stop unnecessary rendering
|
||||
const memoBlock = memo(block)
|
||||
const memoContext = memo({})
|
||||
|
||||
const rowTriggers = [
|
||||
TriggerStepID.ROW_UPDATED,
|
||||
|
@ -95,8 +107,11 @@
|
|||
let inputData
|
||||
let insertAtPos, getCaretPosition
|
||||
let stepLayouts = {}
|
||||
let rowSearchTerm = ""
|
||||
let selectedRow
|
||||
|
||||
$: memoBlock.set(block)
|
||||
$: memoContext.set($evaluationContext)
|
||||
|
||||
$: filters = lookForFilters(schemaProperties)
|
||||
$: filterCount =
|
||||
|
@ -109,9 +124,13 @@
|
|||
$: stepId = $memoBlock.stepId
|
||||
|
||||
$: getInputData(testData, $memoBlock.inputs)
|
||||
$: tableId = inputData ? inputData.tableId : null
|
||||
$: tableId =
|
||||
inputData?.row?.tableId ||
|
||||
testData?.row?.tableId ||
|
||||
inputData?.tableId ||
|
||||
null
|
||||
$: table = tableId
|
||||
? $tables.list.find(table => table._id === inputData.tableId)
|
||||
? $tables.list.find(table => table._id === tableId)
|
||||
: { schema: {} }
|
||||
$: schema = getSchemaForDatasourcePlus(tableId, {
|
||||
searchableSchema: true,
|
||||
|
@ -140,6 +159,40 @@
|
|||
? [hbAutocomplete([...bindingsToCompletions(bindings, codeMode)])]
|
||||
: []
|
||||
|
||||
$: fetch = createFetch({ type: "table", tableId })
|
||||
$: fetchedRows = $fetch?.rows
|
||||
$: fetch?.update({
|
||||
query: {
|
||||
fuzzy: {
|
||||
[primaryDisplay]: rowSearchTerm || "",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
$: fetchLoading = $fetch?.loading
|
||||
$: primaryDisplay = table?.primaryDisplay
|
||||
|
||||
const createFetch = datasource => {
|
||||
if (!datasource) {
|
||||
return
|
||||
}
|
||||
|
||||
return fetchData({
|
||||
API,
|
||||
datasource,
|
||||
options: {
|
||||
sortColumn: primaryDisplay,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
query: {
|
||||
fuzzy: {
|
||||
[primaryDisplay]: rowSearchTerm || "",
|
||||
},
|
||||
},
|
||||
limit: 20,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const getInputData = (testData, blockInputs) => {
|
||||
// Test data is not cloned for reactivity
|
||||
let newInputData = testData || cloneDeep(blockInputs)
|
||||
|
@ -167,7 +220,7 @@
|
|||
const stepStore = writable({})
|
||||
$: stepState = $stepStore?.[block.id]
|
||||
|
||||
$: customStepLayouts($memoBlock, schemaProperties, stepState)
|
||||
$: customStepLayouts($memoBlock, schemaProperties, stepState, fetchedRows)
|
||||
|
||||
const customStepLayouts = block => {
|
||||
if (
|
||||
|
@ -200,7 +253,6 @@
|
|||
onChange({ ["revision"]: e.detail })
|
||||
},
|
||||
updateOnChange: false,
|
||||
forceModal: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
@ -228,7 +280,6 @@
|
|||
onChange({ [rowIdentifier]: e.detail })
|
||||
},
|
||||
updateOnChange: false,
|
||||
forceModal: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
@ -362,6 +413,49 @@
|
|||
disabled: isTestModal,
|
||||
},
|
||||
},
|
||||
{
|
||||
type: CoreSelect,
|
||||
title: "Row",
|
||||
props: {
|
||||
disabled: !table,
|
||||
placeholder: "Select a row",
|
||||
options: fetchedRows,
|
||||
loading: fetchLoading,
|
||||
value: selectedRow,
|
||||
autocomplete: true,
|
||||
filter: false,
|
||||
getOptionLabel: row => row?.[primaryDisplay] || "",
|
||||
compare: (a, b) => a?.[primaryDisplay] === b?.[primaryDisplay],
|
||||
onChange: e => {
|
||||
if (isTestModal) {
|
||||
onChange({
|
||||
id: e.detail?._id,
|
||||
revision: e.detail?._rev,
|
||||
row: e.detail,
|
||||
oldRow: e.detail,
|
||||
meta: {
|
||||
fields: inputData["meta"]?.fields || {},
|
||||
oldFields: e.detail?.meta?.fields || {},
|
||||
},
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
type: InfoDisplay,
|
||||
props: {
|
||||
warning: true,
|
||||
icon: "AlertCircleFilled",
|
||||
body: `Be careful when testing this automation because your data may be modified or deleted.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
type: Divider,
|
||||
props: {
|
||||
noMargin: true,
|
||||
},
|
||||
},
|
||||
...getIdConfig(),
|
||||
...getRevConfig(),
|
||||
...getRowTypeConfig(),
|
||||
|
@ -476,6 +570,10 @@
|
|||
...update,
|
||||
})
|
||||
|
||||
if (!updatedAutomation) {
|
||||
return
|
||||
}
|
||||
|
||||
// Exclude default or invalid data from the test data
|
||||
let updatedFields = {}
|
||||
for (const key of Object.keys(block?.inputs?.fields || {})) {
|
||||
|
@ -547,7 +645,7 @@
|
|||
...newTestData,
|
||||
body: {
|
||||
...update,
|
||||
...automation.testData?.body,
|
||||
...(automation?.testData?.body || {}),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -556,6 +654,15 @@
|
|||
...request,
|
||||
}
|
||||
|
||||
if (
|
||||
newTestData?.row == null ||
|
||||
Object.keys(newTestData?.row).length === 0
|
||||
) {
|
||||
selectedRow = null
|
||||
} else {
|
||||
selectedRow = newTestData.row
|
||||
}
|
||||
|
||||
const updatedAuto =
|
||||
automationStore.actions.addTestDataToAutomation(newTestData)
|
||||
|
||||
|
@ -668,6 +775,8 @@
|
|||
{...config.props}
|
||||
{bindings}
|
||||
on:change={config.props.onChange}
|
||||
context={$memoContext}
|
||||
bind:searchTerm={rowSearchTerm}
|
||||
/>
|
||||
</PropField>
|
||||
{:else}
|
||||
|
@ -676,6 +785,7 @@
|
|||
{...config.props}
|
||||
{bindings}
|
||||
on:change={config.props.onChange}
|
||||
context={$memoContext}
|
||||
/>
|
||||
{/if}
|
||||
{/each}
|
||||
|
@ -800,6 +910,7 @@
|
|||
: "Add signature"}
|
||||
keyPlaceholder={"URL"}
|
||||
valuePlaceholder={"Filename"}
|
||||
context={$memoContext}
|
||||
/>
|
||||
{:else if isTestModal}
|
||||
<ModalBindableInput
|
||||
|
@ -824,6 +935,7 @@
|
|||
? queryLimit
|
||||
: ""}
|
||||
drawerLeft="260px"
|
||||
context={$memoContext}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
|
@ -853,6 +965,7 @@
|
|||
panel={AutomationBindingPanel}
|
||||
showFilterEmptyDropdown={!rowTriggers.includes(stepId)}
|
||||
on:change={e => (tempFilters = e.detail)}
|
||||
evaluationContext={$memoContext}
|
||||
/>
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
|
@ -895,7 +1008,19 @@
|
|||
on:change={e => onChange({ [key]: e.detail })}
|
||||
value={inputData[key]}
|
||||
/>
|
||||
{:else if value.customType === "code"}
|
||||
{:else if value.customType === "code" && stepId === ActionStepID.EXECUTE_SCRIPT_V2}
|
||||
<div class="scriptv2-wrapper">
|
||||
<DrawerBindableCodeEditorField
|
||||
{bindings}
|
||||
{schema}
|
||||
panel={AutomationBindingPanel}
|
||||
on:change={e => onChange({ [key]: e.detail })}
|
||||
context={$memoContext}
|
||||
value={inputData[key]}
|
||||
/>
|
||||
</div>
|
||||
{:else if value.customType === "code" && stepId === ActionStepID.EXECUTE_SCRIPT}
|
||||
<!-- DEPRECATED -->
|
||||
<CodeEditorModal
|
||||
on:hide={() => {
|
||||
// Push any pending changes when the window closes
|
||||
|
@ -977,6 +1102,7 @@
|
|||
? queryLimit
|
||||
: ""}
|
||||
drawerLeft="260px"
|
||||
context={$memoContext}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
export let meta
|
||||
export let bindings
|
||||
export let isTestModal
|
||||
export let context = {}
|
||||
|
||||
const typeToField = Object.values(FIELDS).reduce((acc, field) => {
|
||||
acc[field.type] = field
|
||||
|
@ -58,7 +59,7 @@
|
|||
|
||||
$: parsedBindings = bindings.map(binding => {
|
||||
let clone = Object.assign({}, binding)
|
||||
clone.icon = "ShareAndroid"
|
||||
clone.icon = clone.icon ?? "ShareAndroid"
|
||||
return clone
|
||||
})
|
||||
|
||||
|
@ -258,6 +259,7 @@
|
|||
fields: editableFields,
|
||||
}}
|
||||
{onChange}
|
||||
{context}
|
||||
/>
|
||||
{:else}
|
||||
<DrawerBindableSlot
|
||||
|
@ -276,6 +278,7 @@
|
|||
allowJS={true}
|
||||
updateOnChange={false}
|
||||
drawerLeft="260px"
|
||||
{context}
|
||||
>
|
||||
<RowSelectorTypes
|
||||
{isTestModal}
|
||||
|
@ -286,6 +289,7 @@
|
|||
meta={{
|
||||
fields: editableFields,
|
||||
}}
|
||||
{context}
|
||||
onChange={change => onChange(change)}
|
||||
/>
|
||||
</DrawerBindableSlot>
|
||||
|
@ -303,13 +307,22 @@
|
|||
>
|
||||
<ActionButton
|
||||
icon="Add"
|
||||
fullWidth
|
||||
on:click={() => {
|
||||
customPopover.show()
|
||||
}}
|
||||
disabled={!schemaFields}
|
||||
>Add fields
|
||||
</ActionButton>
|
||||
<ActionButton
|
||||
icon="Remove"
|
||||
on:click={() => {
|
||||
dispatch("change", {
|
||||
meta: { fields: {} },
|
||||
row: {},
|
||||
})
|
||||
}}
|
||||
>Clear
|
||||
</ActionButton>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
|
@ -375,4 +388,11 @@
|
|||
.prop-control-wrap :global(.icon.json-slot-icon) {
|
||||
right: 1px !important;
|
||||
}
|
||||
|
||||
.add-fields-btn {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: center;
|
||||
gap: var(--spacing-s);
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -25,12 +25,13 @@
|
|||
export let meta
|
||||
export let bindings
|
||||
export let isTestModal
|
||||
export let context
|
||||
|
||||
$: fieldData = value[field]
|
||||
|
||||
$: parsedBindings = bindings.map(binding => {
|
||||
let clone = Object.assign({}, binding)
|
||||
clone.icon = "ShareAndroid"
|
||||
clone.icon = clone.icon ?? "ShareAndroid"
|
||||
return clone
|
||||
})
|
||||
|
||||
|
@ -232,6 +233,7 @@
|
|||
actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
|
||||
schema.type === FieldType.SIGNATURE_SINGLE) &&
|
||||
fieldData}
|
||||
{context}
|
||||
/>
|
||||
</div>
|
||||
{:else}
|
||||
|
|
|
@ -1,18 +1,11 @@
|
|||
<script>
|
||||
import { Input, Select, Button } from "@budibase/bbui"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
import { memo } from "@budibase/frontend-core"
|
||||
import { generate } from "shortid"
|
||||
|
||||
export let value = {}
|
||||
|
||||
$: fieldsArray = value
|
||||
? Object.entries(value).map(([name, type]) => ({
|
||||
name,
|
||||
type,
|
||||
}))
|
||||
: []
|
||||
|
||||
const typeOptions = [
|
||||
{
|
||||
label: "Text",
|
||||
|
@ -36,16 +29,42 @@
|
|||
},
|
||||
]
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const memoValue = memo({ data: {} })
|
||||
|
||||
$: memoValue.set({ data: value })
|
||||
|
||||
$: fieldsArray = $memoValue.data
|
||||
? Object.entries($memoValue.data).map(([name, type]) => ({
|
||||
name,
|
||||
type,
|
||||
id: generate(),
|
||||
}))
|
||||
: []
|
||||
|
||||
function addField() {
|
||||
const newValue = { ...value }
|
||||
const newValue = { ...$memoValue.data }
|
||||
newValue[""] = "string"
|
||||
dispatch("change", newValue)
|
||||
fieldsArray = [...fieldsArray, { name: "", type: "string", id: generate() }]
|
||||
}
|
||||
|
||||
function removeField(name) {
|
||||
const newValues = { ...value }
|
||||
delete newValues[name]
|
||||
dispatch("change", newValues)
|
||||
function removeField(idx) {
|
||||
const entries = [...fieldsArray]
|
||||
|
||||
// Remove empty field
|
||||
if (!entries[idx]?.name) {
|
||||
fieldsArray.splice(idx, 1)
|
||||
fieldsArray = [...fieldsArray]
|
||||
return
|
||||
}
|
||||
|
||||
entries.splice(idx, 1)
|
||||
|
||||
const update = entries.reduce((newVals, current) => {
|
||||
newVals[current.name.trim()] = current.type
|
||||
return newVals
|
||||
}, {})
|
||||
dispatch("change", update)
|
||||
}
|
||||
|
||||
const fieldNameChanged = originalName => e => {
|
||||
|
@ -57,11 +76,16 @@
|
|||
} else {
|
||||
entries = entries.filter(f => f.name !== originalName)
|
||||
}
|
||||
value = entries.reduce((newVals, current) => {
|
||||
newVals[current.name.trim()] = current.type
|
||||
return newVals
|
||||
}, {})
|
||||
dispatch("change", value)
|
||||
|
||||
const update = entries
|
||||
.filter(entry => entry.name)
|
||||
.reduce((newVals, current) => {
|
||||
newVals[current.name.trim()] = current.type
|
||||
return newVals
|
||||
}, {})
|
||||
if (Object.keys(update).length) {
|
||||
dispatch("change", update)
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
|
@ -69,7 +93,7 @@
|
|||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<div class="root">
|
||||
<div class="spacer" />
|
||||
{#each fieldsArray as field}
|
||||
{#each fieldsArray as field, idx (field.id)}
|
||||
<div class="field">
|
||||
<Input
|
||||
value={field.name}
|
||||
|
@ -88,7 +112,9 @@
|
|||
/>
|
||||
<i
|
||||
class="remove-field ri-delete-bin-line"
|
||||
on:click={() => removeField(field.name)}
|
||||
on:click={() => {
|
||||
removeField(idx)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
{/each}
|
||||
|
@ -115,4 +141,12 @@
|
|||
align-items: center;
|
||||
gap: var(--spacing-m);
|
||||
}
|
||||
|
||||
.remove-field {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.remove-field:hover {
|
||||
color: var(--spectrum-global-color-gray-900);
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -1,132 +0,0 @@
|
|||
<script>
|
||||
import { goto } from "@roxi/routify"
|
||||
import {
|
||||
keepOpen,
|
||||
ModalContent,
|
||||
notifications,
|
||||
Body,
|
||||
Layout,
|
||||
Tabs,
|
||||
Tab,
|
||||
Heading,
|
||||
TextArea,
|
||||
Dropzone,
|
||||
} from "@budibase/bbui"
|
||||
import { datasources, queries } from "@/stores/builder"
|
||||
import { writable } from "svelte/store"
|
||||
|
||||
export let navigateDatasource = false
|
||||
export let datasourceId
|
||||
export let createDatasource = false
|
||||
export let onCancel
|
||||
|
||||
const data = writable({
|
||||
url: "",
|
||||
raw: "",
|
||||
file: undefined,
|
||||
})
|
||||
|
||||
let lastTouched = "url"
|
||||
|
||||
const getData = async () => {
|
||||
let dataString
|
||||
|
||||
// parse the file into memory and send as string
|
||||
if (lastTouched === "file") {
|
||||
dataString = await $data.file.text()
|
||||
} else if (lastTouched === "url") {
|
||||
const response = await fetch($data.url)
|
||||
dataString = await response.text()
|
||||
} else if (lastTouched === "raw") {
|
||||
dataString = $data.raw
|
||||
}
|
||||
|
||||
return dataString
|
||||
}
|
||||
|
||||
async function importQueries() {
|
||||
try {
|
||||
const dataString = await getData()
|
||||
|
||||
if (!datasourceId && !createDatasource) {
|
||||
throw new Error("No datasource id")
|
||||
}
|
||||
|
||||
const body = {
|
||||
data: dataString,
|
||||
datasourceId,
|
||||
}
|
||||
|
||||
const importResult = await queries.import(body)
|
||||
if (!datasourceId) {
|
||||
datasourceId = importResult.datasourceId
|
||||
}
|
||||
|
||||
// reload
|
||||
await datasources.fetch()
|
||||
await queries.fetch()
|
||||
|
||||
if (navigateDatasource) {
|
||||
$goto(`./datasource/${datasourceId}`)
|
||||
}
|
||||
|
||||
notifications.success(`Imported successfully.`)
|
||||
} catch (error) {
|
||||
notifications.error("Error importing queries")
|
||||
return keepOpen
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
onConfirm={() => importQueries()}
|
||||
{onCancel}
|
||||
confirmText={"Import"}
|
||||
cancelText="Back"
|
||||
size="L"
|
||||
>
|
||||
<Layout noPadding>
|
||||
<Heading size="S">Import</Heading>
|
||||
<Body size="XS"
|
||||
>Import your rest collection using one of the options below</Body
|
||||
>
|
||||
<Tabs selected="File">
|
||||
<!-- Commenting until nginx csp issue resolved -->
|
||||
<!-- <Tab title="Link">
|
||||
<Input
|
||||
bind:value={$data.url}
|
||||
on:change={() => (lastTouched = "url")}
|
||||
label="Enter a URL"
|
||||
placeholder="e.g. https://petstore.swagger.io/v2/swagger.json"
|
||||
/>
|
||||
</Tab> -->
|
||||
<Tab title="File">
|
||||
<Dropzone
|
||||
gallery={false}
|
||||
value={$data.file ? [$data.file] : []}
|
||||
on:change={e => {
|
||||
$data.file = e.detail?.[0]
|
||||
lastTouched = "file"
|
||||
}}
|
||||
fileTags={[
|
||||
"OpenAPI 3.0",
|
||||
"OpenAPI 2.0",
|
||||
"Swagger 2.0",
|
||||
"cURL",
|
||||
"YAML",
|
||||
"JSON",
|
||||
]}
|
||||
maximum={1}
|
||||
/>
|
||||
</Tab>
|
||||
<Tab title="Raw Text">
|
||||
<TextArea
|
||||
bind:value={$data.raw}
|
||||
on:change={() => (lastTouched = "raw")}
|
||||
label={"Paste raw text"}
|
||||
placeholder={'e.g. curl --location --request GET "https://example.com"'}
|
||||
/>
|
||||
</Tab>
|
||||
</Tabs>
|
||||
</Layout>
|
||||
</ModalContent>
|
|
@ -43,7 +43,7 @@
|
|||
|
||||
const validateDescription = description => {
|
||||
if (!description?.length) {
|
||||
return "Please enter a name"
|
||||
return "Please enter a description"
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
<script context="module" lang="ts">
|
||||
export const DropdownPosition = {
|
||||
Relative: "top",
|
||||
Absolute: "right",
|
||||
}
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import { Label } from "@budibase/bbui"
|
||||
import { onMount, createEventDispatcher, onDestroy } from "svelte"
|
||||
|
@ -47,6 +54,7 @@
|
|||
import { EditorModes } from "./"
|
||||
import { themeStore } from "@/stores/portal"
|
||||
import type { EditorMode } from "@budibase/types"
|
||||
import { tooltips } from "@codemirror/view"
|
||||
import type { BindingCompletion, CodeValidator } from "@/types"
|
||||
import { validateHbsTemplate } from "./validator/hbs"
|
||||
import { validateJsTemplate } from "./validator/js"
|
||||
|
@ -62,11 +70,13 @@
|
|||
export let jsBindingWrapping = true
|
||||
export let readonly = false
|
||||
export let readonlyLineNumbers = false
|
||||
export let dropdown = DropdownPosition.Relative
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
let textarea: HTMLDivElement
|
||||
let editor: EditorView
|
||||
let editorEle: HTMLDivElement
|
||||
let mounted = false
|
||||
let isEditorInitialised = false
|
||||
let queuedRefresh = false
|
||||
|
@ -117,7 +127,6 @@
|
|||
queuedRefresh = true
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
editor &&
|
||||
value &&
|
||||
|
@ -271,16 +280,15 @@
|
|||
EditorView.inputHandler.of((view, from, to, insert) => {
|
||||
if (jsBindingWrapping && insert === "$") {
|
||||
let { text } = view.state.doc.lineAt(from)
|
||||
|
||||
const left = from ? text.substring(0, from) : ""
|
||||
const right = to ? text.substring(to) : ""
|
||||
const wrap = !left.includes('$("') || !right.includes('")')
|
||||
const wrap =
|
||||
(!left.includes('$("') || !right.includes('")')) &&
|
||||
!(left.includes("`") && right.includes("`"))
|
||||
const anchor = from + (wrap ? 3 : 1)
|
||||
const tr = view.state.update(
|
||||
{
|
||||
changes: [{ from, insert: wrap ? '$("")' : "$" }],
|
||||
selection: {
|
||||
anchor: from + (wrap ? 3 : 1),
|
||||
},
|
||||
},
|
||||
{
|
||||
scrollIntoView: true,
|
||||
|
@ -288,6 +296,19 @@
|
|||
}
|
||||
)
|
||||
view.dispatch(tr)
|
||||
// the selection needs to fired after the dispatch - this seems
|
||||
// to fix an issue with the cursor not moving when the editor is
|
||||
// first loaded, the first usage of the editor is not ready
|
||||
// for the anchor to move as well as perform a change
|
||||
setTimeout(() => {
|
||||
view.dispatch(
|
||||
view.state.update({
|
||||
selection: {
|
||||
anchor,
|
||||
},
|
||||
})
|
||||
)
|
||||
}, 1)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
|
@ -369,14 +390,25 @@
|
|||
const baseExtensions = buildBaseExtensions()
|
||||
|
||||
editor = new EditorView({
|
||||
doc: value?.toString(),
|
||||
extensions: buildExtensions(baseExtensions),
|
||||
doc: String(value),
|
||||
extensions: buildExtensions([
|
||||
...baseExtensions,
|
||||
dropdown == DropdownPosition.Absolute
|
||||
? tooltips({
|
||||
position: "absolute",
|
||||
})
|
||||
: [],
|
||||
]),
|
||||
parent: textarea,
|
||||
})
|
||||
}
|
||||
|
||||
onMount(async () => {
|
||||
mounted = true
|
||||
// Capture scrolling
|
||||
editorEle.addEventListener("wheel", e => {
|
||||
e.stopPropagation()
|
||||
})
|
||||
})
|
||||
|
||||
onDestroy(() => {
|
||||
|
@ -391,7 +423,8 @@
|
|||
<Label size="S">{label}</Label>
|
||||
</div>
|
||||
{/if}
|
||||
<div class={`code-editor ${mode?.name || ""}`}>
|
||||
|
||||
<div class={`code-editor ${mode?.name || ""}`} bind:this={editorEle}>
|
||||
<div tabindex="-1" bind:this={textarea} />
|
||||
</div>
|
||||
|
||||
|
@ -400,6 +433,7 @@
|
|||
.code-editor {
|
||||
font-size: 12px;
|
||||
height: 100%;
|
||||
cursor: text;
|
||||
}
|
||||
.code-editor :global(.cm-editor) {
|
||||
height: 100%;
|
||||
|
@ -559,12 +593,11 @@
|
|||
|
||||
/* Live binding value / helper container */
|
||||
.code-editor :global(.cm-completionInfo) {
|
||||
margin-left: var(--spacing-s);
|
||||
margin: 0px var(--spacing-s);
|
||||
border: 1px solid var(--spectrum-global-color-gray-300);
|
||||
border-radius: var(--border-radius-s);
|
||||
background-color: var(--spectrum-global-color-gray-50);
|
||||
padding: var(--spacing-m);
|
||||
margin-top: -2px;
|
||||
}
|
||||
|
||||
/* Wrapper around helpers */
|
||||
|
@ -589,6 +622,7 @@
|
|||
white-space: pre;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
overflow-y: auto;
|
||||
max-height: 480px;
|
||||
}
|
||||
.code-editor :global(.binding__example.helper) {
|
||||
|
|
|
@ -354,7 +354,7 @@
|
|||
{#if mode === BindingMode.Text}
|
||||
{#key completions}
|
||||
<CodeEditor
|
||||
value={hbsValue}
|
||||
value={hbsValue || ""}
|
||||
on:change={onChangeHBSValue}
|
||||
bind:getCaretPosition
|
||||
bind:insertAtPos
|
||||
|
@ -369,7 +369,7 @@
|
|||
{:else if mode === BindingMode.JavaScript}
|
||||
{#key completions}
|
||||
<CodeEditor
|
||||
value={jsValue ? decodeJSBinding(jsValue) : jsValue}
|
||||
value={jsValue ? decodeJSBinding(jsValue) : ""}
|
||||
on:change={onChangeJSValue}
|
||||
{completions}
|
||||
{validations}
|
||||
|
|
|
@ -145,9 +145,11 @@
|
|||
return
|
||||
}
|
||||
popoverAnchor = target
|
||||
|
||||
const doc = new DOMParser().parseFromString(helper.description, "text/html")
|
||||
hoverTarget = {
|
||||
type: "helper",
|
||||
description: helper.description,
|
||||
description: doc.body.textContent || "",
|
||||
code: getHelperExample(helper, mode === BindingMode.JavaScript),
|
||||
}
|
||||
popover.show()
|
||||
|
@ -241,20 +243,19 @@
|
|||
>
|
||||
{#if hoverTarget.description}
|
||||
<div>
|
||||
<!-- eslint-disable-next-line svelte/no-at-html-tags-->
|
||||
{@html hoverTarget.description}
|
||||
{hoverTarget.description}
|
||||
</div>
|
||||
{/if}
|
||||
{#if hoverTarget.code}
|
||||
{#if mode === BindingMode.JavaScript}
|
||||
{#if mode === BindingMode.Text || (mode === BindingMode.JavaScript && hoverTarget.type === "binding")}
|
||||
<!-- eslint-disable-next-line svelte/no-at-html-tags-->
|
||||
<pre>{@html hoverTarget.code}</pre>
|
||||
{:else}
|
||||
<CodeEditor
|
||||
value={hoverTarget.code?.trim()}
|
||||
mode={EditorModes.JS}
|
||||
readonly
|
||||
/>
|
||||
{:else if mode === BindingMode.Text}
|
||||
<!-- eslint-disable-next-line svelte/no-at-html-tags-->
|
||||
<pre>{@html hoverTarget.code}</pre>
|
||||
{/if}
|
||||
{/if}
|
||||
</div>
|
||||
|
|
|
@ -0,0 +1,173 @@
|
|||
<script lang="ts">
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import {
|
||||
decodeJSBinding,
|
||||
encodeJSBinding,
|
||||
processObjectSync,
|
||||
} from "@budibase/string-templates"
|
||||
import { runtimeToReadableBinding } from "@/dataBinding"
|
||||
import CodeEditor, { DropdownPosition } from "../CodeEditor/CodeEditor.svelte"
|
||||
import {
|
||||
getHelperCompletions,
|
||||
jsAutocomplete,
|
||||
snippetAutoComplete,
|
||||
EditorModes,
|
||||
bindingsToCompletions,
|
||||
jsHelperAutocomplete,
|
||||
} from "../CodeEditor"
|
||||
import { JsonFormatter } from "@budibase/frontend-core"
|
||||
import { licensing } from "@/stores/portal"
|
||||
import type {
|
||||
EnrichedBinding,
|
||||
Snippet,
|
||||
CaretPositionFn,
|
||||
InsertAtPositionFn,
|
||||
JSONValue,
|
||||
} from "@budibase/types"
|
||||
import type { BindingCompletion, BindingCompletionOption } from "@/types"
|
||||
import { snippets } from "@/stores/builder"
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
export let bindings: EnrichedBinding[] = []
|
||||
export let value: string = ""
|
||||
export let allowHelpers = true
|
||||
export let allowSnippets = true
|
||||
export let context = null
|
||||
export let autofocusEditor = false
|
||||
export let placeholder = null
|
||||
export let height = 180
|
||||
|
||||
let getCaretPosition: CaretPositionFn | undefined
|
||||
let insertAtPos: InsertAtPositionFn | undefined
|
||||
|
||||
$: readable = runtimeToReadableBinding(bindings, value || "")
|
||||
$: jsValue = decodeJSBinding(readable)
|
||||
|
||||
$: useSnippets = allowSnippets && !$licensing.isFreePlan
|
||||
$: enrichedBindings = enrichBindings(bindings, context, $snippets)
|
||||
$: editorMode = EditorModes.JS
|
||||
$: bindingCompletions = bindingsToCompletions(enrichedBindings, editorMode)
|
||||
$: jsCompletions = getJSCompletions(bindingCompletions, $snippets, {
|
||||
useHelpers: allowHelpers,
|
||||
useSnippets,
|
||||
})
|
||||
|
||||
const getJSCompletions = (
|
||||
bindingCompletions: BindingCompletionOption[],
|
||||
snippets: Snippet[] | null,
|
||||
config: {
|
||||
useHelpers: boolean
|
||||
useSnippets: boolean
|
||||
}
|
||||
) => {
|
||||
const completions: BindingCompletion[] = []
|
||||
if (bindingCompletions.length) {
|
||||
completions.push(jsAutocomplete([...bindingCompletions]))
|
||||
}
|
||||
if (config.useHelpers) {
|
||||
completions.push(
|
||||
jsHelperAutocomplete([...getHelperCompletions(EditorModes.JS)])
|
||||
)
|
||||
}
|
||||
if (config.useSnippets && snippets) {
|
||||
completions.push(snippetAutoComplete(snippets))
|
||||
}
|
||||
return completions
|
||||
}
|
||||
|
||||
const highlightJSON = (json: JSONValue) => {
|
||||
return JsonFormatter.format(json, {
|
||||
keyColor: "#e06c75",
|
||||
numberColor: "#e5c07b",
|
||||
stringColor: "#98c379",
|
||||
trueColor: "#d19a66",
|
||||
falseColor: "#d19a66",
|
||||
nullColor: "#c678dd",
|
||||
})
|
||||
}
|
||||
|
||||
const enrichBindings = (
|
||||
bindings: EnrichedBinding[],
|
||||
context: any,
|
||||
snippets: Snippet[] | null
|
||||
) => {
|
||||
// Create a single big array to enrich in one go
|
||||
const bindingStrings = bindings.map(binding => {
|
||||
if (binding.runtimeBinding.startsWith('trim "')) {
|
||||
// Account for nasty hardcoded HBS bindings for roles, for legacy
|
||||
// compatibility
|
||||
return `{{ ${binding.runtimeBinding} }}`
|
||||
} else {
|
||||
return `{{ literal ${binding.runtimeBinding} }}`
|
||||
}
|
||||
})
|
||||
const bindingEvaluations = processObjectSync(bindingStrings, {
|
||||
...context,
|
||||
snippets,
|
||||
})
|
||||
|
||||
// Enrich bindings with evaluations and highlighted HTML
|
||||
return bindings.map((binding, idx) => {
|
||||
if (!context || typeof bindingEvaluations !== "object") {
|
||||
return binding
|
||||
}
|
||||
const evalObj: Record<any, any> = bindingEvaluations
|
||||
const value = JSON.stringify(evalObj[idx], null, 2)
|
||||
return {
|
||||
...binding,
|
||||
value,
|
||||
valueHTML: highlightJSON(value),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const updateValue = (val: any) => {
|
||||
dispatch("change", val)
|
||||
}
|
||||
|
||||
const onChangeJSValue = (e: { detail: string }) => {
|
||||
if (!e.detail?.trim()) {
|
||||
// Don't bother saving empty values as JS
|
||||
updateValue(null)
|
||||
} else {
|
||||
updateValue(encodeJSBinding(e.detail))
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="code-panel" style="height:{height}px;">
|
||||
<div class="editor">
|
||||
{#key jsCompletions}
|
||||
<CodeEditor
|
||||
value={jsValue || ""}
|
||||
on:change={onChangeJSValue}
|
||||
on:blur
|
||||
completions={jsCompletions}
|
||||
mode={EditorModes.JS}
|
||||
bind:getCaretPosition
|
||||
bind:insertAtPos
|
||||
autofocus={autofocusEditor}
|
||||
placeholder={placeholder ||
|
||||
"Add bindings by typing $ or use the menu on the right"}
|
||||
jsBindingWrapping
|
||||
dropdown={DropdownPosition.Absolute}
|
||||
/>
|
||||
{/key}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.code-panel {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
/* Editor */
|
||||
.editor {
|
||||
flex: 1 1 auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
align-items: stretch;
|
||||
}
|
||||
</style>
|
|
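The enrichBindings helper in the new panel above evaluates a whole batch of `{{ literal ... }}` templates in a single processObjectSync call and then pairs each result back up with its binding by index. A cut-down sketch of that evaluation step, using an illustrative context and binding list (the return value is assumed to be indexable the same way the component above indexes it):

import { processObjectSync } from "@budibase/string-templates"

// Illustrative runtime bindings and context, mirroring the shapes used above
const runtimeBindings = ["trigger.row.name", "trigger.row.age"]
const templates = runtimeBindings.map(binding => `{{ literal ${binding} }}`)
const context = { trigger: { row: { name: "Alice", age: 30 } } }

// Evaluate every template against the context in one pass
const evaluated = processObjectSync(templates, context) as Record<number, unknown>

// Pair each evaluation back up with its binding by index
runtimeBindings.forEach((binding, idx) => {
  console.log(binding, "=>", evaluated[idx])
})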
@ -0,0 +1,68 @@
|
|||
<script>
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import {
|
||||
ClientBindingPanel,
|
||||
DrawerBindableSlot,
|
||||
} from "@/components/common/bindings"
|
||||
import CodeEditorField from "@/components/common/bindings/CodeEditorField.svelte"
|
||||
|
||||
export let value = ""
|
||||
export let panel = ClientBindingPanel
|
||||
export let schema = null
|
||||
export let bindings = []
|
||||
export let context = {}
|
||||
export let height = 180
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
</script>
|
||||
|
||||
<div class="wrapper">
|
||||
<DrawerBindableSlot
|
||||
{panel}
|
||||
{schema}
|
||||
{value}
|
||||
{bindings}
|
||||
{context}
|
||||
title="Edit Code"
|
||||
type="longform"
|
||||
allowJS={true}
|
||||
allowHBS={false}
|
||||
updateOnChange={false}
|
||||
on:change={e => {
|
||||
value = e.detail
|
||||
dispatch("change", value)
|
||||
}}
|
||||
>
|
||||
<div class="code-editor-wrapper">
|
||||
<CodeEditorField
|
||||
{value}
|
||||
{bindings}
|
||||
{context}
|
||||
{height}
|
||||
allowHBS={false}
|
||||
allowJS
|
||||
placeholder={"Add bindings by typing $"}
|
||||
on:change={e => (value = e.detail)}
|
||||
on:blur={() => dispatch("change", value)}
|
||||
/>
|
||||
</div>
|
||||
</DrawerBindableSlot>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.wrapper :global(.icon.slot-icon) {
|
||||
top: 1px;
|
||||
border-radius: 0 4px 0 4px;
|
||||
border-right: 0;
|
||||
border-bottom: 1px solid var(--spectrum-alias-border-color);
|
||||
}
|
||||
.wrapper :global(.cm-editor),
|
||||
.wrapper :global(.cm-scroller) {
|
||||
border-radius: 4px;
|
||||
}
|
||||
.code-editor-wrapper {
|
||||
box-sizing: border-box;
|
||||
border: 1px solid var(--spectrum-global-color-gray-400);
|
||||
border-radius: 4px;
|
||||
}
|
||||
</style>
|
|
@ -22,6 +22,8 @@
|
|||
export let updateOnChange = true
|
||||
export let type
|
||||
export let schema
|
||||
export let allowHBS = true
|
||||
export let context = {}
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
let bindingDrawer
|
||||
|
@ -147,7 +149,7 @@
|
|||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<div class="control" class:disabled>
|
||||
{#if !isValid(value)}
|
||||
{#if !isValid(value) && !$$slots.default}
|
||||
<Input
|
||||
{label}
|
||||
{disabled}
|
||||
|
@ -171,7 +173,7 @@
|
|||
{:else}
|
||||
<slot />
|
||||
{/if}
|
||||
{#if !disabled && type !== "formula" && !disabled && !attachmentTypes.includes(type)}
|
||||
{#if !disabled && type !== "formula" && !attachmentTypes.includes(type)}
|
||||
<div
|
||||
class={`icon ${getIconClass(value, type)}`}
|
||||
on:click={() => {
|
||||
|
@ -187,7 +189,6 @@
|
|||
on:drawerShow
|
||||
bind:this={bindingDrawer}
|
||||
title={title ?? placeholder ?? "Bindings"}
|
||||
forceModal={true}
|
||||
>
|
||||
<Button cta slot="buttons" on:click={saveBinding}>Save</Button>
|
||||
<svelte:component
|
||||
|
@ -197,7 +198,9 @@
|
|||
on:change={event => (tempValue = event.detail)}
|
||||
{bindings}
|
||||
{allowJS}
|
||||
{allowHBS}
|
||||
{allowHelpers}
|
||||
{context}
|
||||
/>
|
||||
</Drawer>
|
||||
|
||||
|
@ -208,22 +211,22 @@
|
|||
}
|
||||
|
||||
.slot-icon {
|
||||
right: 31px !important;
|
||||
right: 31px;
|
||||
border-right: 1px solid var(--spectrum-alias-border-color);
|
||||
border-top-right-radius: 0px !important;
|
||||
border-bottom-right-radius: 0px !important;
|
||||
border-top-right-radius: 0px;
|
||||
border-bottom-right-radius: 0px;
|
||||
}
|
||||
|
||||
.text-area-slot-icon {
|
||||
border-bottom: 1px solid var(--spectrum-alias-border-color);
|
||||
border-bottom-right-radius: 0px !important;
|
||||
top: 1px !important;
|
||||
border-bottom-right-radius: 0px;
|
||||
top: 1px;
|
||||
}
|
||||
.json-slot-icon {
|
||||
border-bottom: 1px solid var(--spectrum-alias-border-color);
|
||||
border-bottom-right-radius: 0px !important;
|
||||
top: 1px !important;
|
||||
right: 0px !important;
|
||||
border-bottom-right-radius: 0px;
|
||||
top: 1px;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
.icon {
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
export let bindings = []
|
||||
export let value = ""
|
||||
export let allowJS = false
|
||||
export let allowHBS = true
|
||||
export let context = null
|
||||
|
||||
$: enrichedBindings = enrichBindings(bindings)
|
||||
|
@ -22,8 +23,10 @@
|
|||
<BindingPanel
|
||||
bindings={enrichedBindings}
|
||||
snippets={$snippets}
|
||||
allowHelpers
|
||||
{value}
|
||||
{allowJS}
|
||||
{allowHBS}
|
||||
{context}
|
||||
on:change
|
||||
/>
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
import { Label, Select, Body } from "@budibase/bbui"
|
||||
import { findAllMatchingComponents } from "@/helpers/components"
|
||||
import { selectedScreen } from "@/stores/builder"
|
||||
import { InlineAlert } from "@budibase/bbui"
|
||||
|
||||
export let parameters
|
||||
|
||||
|
@ -27,6 +28,12 @@
|
|||
<Label small>Table</Label>
|
||||
<Select bind:value={parameters.componentId} options={componentOptions} />
|
||||
</div>
|
||||
<InlineAlert
|
||||
header="Legacy action"
|
||||
message="This action is only compatible with the (deprecated) Table Block. Please see the documentation for further info."
|
||||
link="https://docs.budibase.com/docs/data-actions#clear-row-selection"
|
||||
linkText="Budibase Documentation"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
|
|
|
@ -0,0 +1,41 @@
|
|||
<script>
|
||||
import { Label, Checkbox } from "@budibase/bbui"
|
||||
import DrawerBindableInput from "@/components/common/bindings/DrawerBindableInput.svelte"
|
||||
|
||||
export let parameters
|
||||
export let bindings = []
|
||||
</script>
|
||||
|
||||
<div class="root">
|
||||
<Label>Text to copy</Label>
|
||||
<DrawerBindableInput
|
||||
title="Text to copy"
|
||||
{bindings}
|
||||
value={parameters.textToCopy}
|
||||
on:change={e => (parameters.textToCopy = e.detail)}
|
||||
/>
|
||||
<Label />
|
||||
<Checkbox text="Show notification" bind:value={parameters.showNotification} />
|
||||
{#if parameters.showNotification}
|
||||
<Label>Notification message</Label>
|
||||
<DrawerBindableInput
|
||||
title="Notification message"
|
||||
{bindings}
|
||||
value={parameters.notificationMessage}
|
||||
placeholder="Copied to clipboard"
|
||||
on:change={e => (parameters.notificationMessage = e.detail)}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.root {
|
||||
display: grid;
|
||||
column-gap: var(--spacing-l);
|
||||
row-gap: var(--spacing-s);
|
||||
grid-template-columns: 120px 1fr;
|
||||
align-items: center;
|
||||
max-width: 400px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
</style>
|
|
@ -26,3 +26,4 @@ export { default as CloseModal } from "./CloseModal.svelte"
|
|||
export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
|
||||
export { default as DownloadFile } from "./DownloadFile.svelte"
|
||||
export { default as RowAction } from "./RowAction.svelte"
|
||||
export { default as CopyToClipboard } from "./CopyToClipboard.svelte"
|
||||
|
|
|
@ -183,6 +183,17 @@
|
|||
"name": "Row Action",
|
||||
"type": "data",
|
||||
"component": "RowAction"
|
||||
},
|
||||
{
|
||||
"name": "Copy To Clipboard",
|
||||
"type": "data",
|
||||
"component": "CopyToClipboard",
|
||||
"context": [
|
||||
{
|
||||
"label": "Copied text",
|
||||
"value": "copied"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
export let datasource
|
||||
export let builderType
|
||||
export let docsURL
|
||||
export let evaluationContext = {}
|
||||
</script>
|
||||
|
||||
<CoreFilterBuilder
|
||||
|
@ -32,5 +33,6 @@
|
|||
{allowOnEmpty}
|
||||
{builderType}
|
||||
{docsURL}
|
||||
{evaluationContext}
|
||||
on:change
|
||||
/>
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
export let allowJS = false
|
||||
export let actionButtonDisabled = false
|
||||
export let compare = (option, value) => option === value
|
||||
export let context = null
|
||||
|
||||
let fields = Object.entries(object || {}).map(([name, value]) => ({
|
||||
name,
|
||||
|
@ -132,6 +133,7 @@
|
|||
{allowJS}
|
||||
{allowHelpers}
|
||||
drawerLeft={bindingDrawerLeft}
|
||||
{context}
|
||||
/>
|
||||
{:else}
|
||||
<Input readonly={readOnly} bind:value={field.name} on:blur={changed} />
|
||||
|
@ -158,6 +160,7 @@
|
|||
{allowJS}
|
||||
{allowHelpers}
|
||||
drawerLeft={bindingDrawerLeft}
|
||||
{context}
|
||||
/>
|
||||
{:else}
|
||||
<Input
|
||||
|
|
|
@ -15,6 +15,7 @@ export const ActionStepID = {
|
|||
DELETE_ROW: "DELETE_ROW",
|
||||
OUTGOING_WEBHOOK: "OUTGOING_WEBHOOK",
|
||||
EXECUTE_SCRIPT: "EXECUTE_SCRIPT",
|
||||
EXECUTE_SCRIPT_V2: "EXECUTE_SCRIPT_V2",
|
||||
EXECUTE_QUERY: "EXECUTE_QUERY",
|
||||
SERVER_LOG: "SERVER_LOG",
|
||||
DELAY: "DELAY",
|
||||
|
|
|
@ -29,7 +29,12 @@
|
|||
let modal
|
||||
let webhookModal
|
||||
|
||||
onMount(() => {
|
||||
onMount(async () => {
|
||||
await automationStore.actions.initAppSelf()
|
||||
|
||||
// Init the binding evaluation context
|
||||
automationStore.actions.initContext()
|
||||
|
||||
$automationStore.showTestPanel = false
|
||||
})
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
<script>
|
||||
<script lang="ts">
|
||||
import { goto } from "@roxi/routify"
|
||||
import {
|
||||
keepOpen,
|
||||
|
@ -14,13 +14,14 @@
|
|||
} from "@budibase/bbui"
|
||||
import { datasources, queries } from "@/stores/builder"
|
||||
import { writable } from "svelte/store"
|
||||
import type { Datasource } from "@budibase/types"
|
||||
|
||||
export let navigateDatasource = false
|
||||
export let datasourceId
|
||||
export let datasourceId: string | undefined = undefined
|
||||
export let createDatasource = false
|
||||
export let onCancel
|
||||
export let onCancel: (() => void) | undefined = undefined
|
||||
|
||||
const data = writable({
|
||||
const data = writable<{ url: string; raw: string; file?: any }>({
|
||||
url: "",
|
||||
raw: "",
|
||||
file: undefined,
|
||||
|
@ -28,12 +29,14 @@
|
|||
|
||||
let lastTouched = "url"
|
||||
|
||||
const getData = async () => {
|
||||
$: datasource = $datasources.selected as Datasource
|
||||
|
||||
const getData = async (): Promise<string> => {
|
||||
let dataString
|
||||
|
||||
// parse the file into memory and send as string
|
||||
if (lastTouched === "file") {
|
||||
dataString = await $data.file.text()
|
||||
dataString = await $data.file?.text()
|
||||
} else if (lastTouched === "url") {
|
||||
const response = await fetch($data.url)
|
||||
dataString = await response.text()
|
||||
|
@ -55,9 +58,9 @@
|
|||
const body = {
|
||||
data: dataString,
|
||||
datasourceId,
|
||||
datasource,
|
||||
}
|
||||
|
||||
const importResult = await queries.import(body)
|
||||
const importResult = await queries.importQueries(body)
|
||||
if (!datasourceId) {
|
||||
datasourceId = importResult.datasourceId
|
||||
}
|
||||
|
@ -71,8 +74,8 @@
|
|||
}
|
||||
|
||||
notifications.success("Imported successfully")
|
||||
} catch (error) {
|
||||
notifications.error("Error importing queries")
|
||||
} catch (error: any) {
|
||||
notifications.error(`Error importing queries - ${error.message}`)
|
||||
|
||||
return keepOpen
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import { derived, get } from "svelte/store"
|
||||
import { derived, get, readable, Readable } from "svelte/store"
|
||||
import { API } from "@/api"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { generate } from "shortid"
|
||||
import { createHistoryStore, HistoryStore } from "@/stores/builder/history"
|
||||
import { licensing } from "@/stores/portal"
|
||||
import { licensing, organisation, environment } from "@/stores/portal"
|
||||
import { tables, appStore } from "@/stores/builder"
|
||||
import { notifications } from "@budibase/bbui"
|
||||
import {
|
||||
|
@ -33,9 +33,20 @@ import {
|
|||
isRowSaveTrigger,
|
||||
isAppTrigger,
|
||||
BranchStep,
|
||||
GetAutomationTriggerDefinitionsResponse,
|
||||
GetAutomationActionDefinitionsResponse,
|
||||
AppSelfResponse,
|
||||
TestAutomationResponse,
|
||||
isAutomationResults,
|
||||
RowActionTriggerOutputs,
|
||||
WebhookTriggerOutputs,
|
||||
AutomationCustomIOType,
|
||||
AutomationTriggerResultOutputs,
|
||||
AutomationTriggerResult,
|
||||
AutomationStepType,
|
||||
} from "@budibase/types"
|
||||
import { ActionStepID } from "@/constants/backend/automations"
|
||||
import { FIELDS } from "@/constants/backend"
|
||||
import { ActionStepID, TriggerStepID } from "@/constants/backend/automations"
|
||||
import { FIELDS as COLUMNS } from "@/constants/backend"
|
||||
import { sdk } from "@budibase/shared-core"
|
||||
import { rowActions } from "./rowActions"
|
||||
import { getNewStepName } from "@/helpers/automations/nameHelpers"
|
||||
|
@ -44,10 +55,11 @@ import { BudiStore, DerivedBudiStore } from "@/stores/BudiStore"
|
|||
|
||||
interface AutomationState {
|
||||
automations: Automation[]
|
||||
testResults: any | null
|
||||
testResults?: TestAutomationResponse
|
||||
showTestPanel: boolean
|
||||
blockDefinitions: BlockDefinitions
|
||||
selectedAutomationId: string | null
|
||||
appSelf?: AppSelfResponse
|
||||
}
|
||||
|
||||
interface DerivedAutomationState extends AutomationState {
|
||||
|
@ -57,7 +69,6 @@ interface DerivedAutomationState extends AutomationState {
|
|||
|
||||
const initialAutomationState: AutomationState = {
|
||||
automations: [],
|
||||
testResults: null,
|
||||
showTestPanel: false,
|
||||
blockDefinitions: {
|
||||
TRIGGER: {},
|
||||
|
@ -68,16 +79,19 @@ const initialAutomationState: AutomationState = {
|
|||
}
|
||||
|
||||
const getFinalDefinitions = (
|
||||
triggers: Record<string, any>,
|
||||
actions: Record<string, any>
|
||||
triggers: GetAutomationTriggerDefinitionsResponse,
|
||||
actions: GetAutomationActionDefinitionsResponse
|
||||
): BlockDefinitions => {
|
||||
const creatable: Record<string, any> = {}
|
||||
Object.entries(triggers).forEach(entry => {
|
||||
if (entry[0] === AutomationTriggerStepId.ROW_ACTION) {
|
||||
return
|
||||
const creatable: Partial<GetAutomationTriggerDefinitionsResponse> = {}
|
||||
for (const [key, trigger] of Object.entries(triggers)) {
|
||||
if (key === AutomationTriggerStepId.ROW_ACTION) {
|
||||
continue
|
||||
}
|
||||
creatable[entry[0]] = entry[1]
|
||||
})
|
||||
if (trigger.deprecated === true) {
|
||||
continue
|
||||
}
|
||||
creatable[key as keyof GetAutomationTriggerDefinitionsResponse] = trigger
|
||||
}
|
||||
return {
|
||||
TRIGGER: triggers,
|
||||
CREATABLE_TRIGGER: creatable,
|
||||
|
@ -86,6 +100,116 @@ const getFinalDefinitions = (
|
|||
}
|
||||
|
||||
const automationActions = (store: AutomationStore) => ({
|
||||
/**
|
||||
* Generates a derived store acting as an evaluation context
|
||||
* for bindings in automations
|
||||
*
|
||||
* @returns {Readable<AutomationContext>}
|
||||
*/
|
||||
generateContext: (): Readable<AutomationContext> => {
|
||||
return derived(
|
||||
[organisation, store.selected, environment, tables],
|
||||
([$organisation, $selectedAutomation, $env, $tables]) => {
|
||||
const { platformUrl: url, company, logoUrl: logo } = $organisation
|
||||
|
||||
const results: TestAutomationResponse | undefined =
|
||||
$selectedAutomation?.testResults
|
||||
|
||||
const testData: AutomationTriggerResultOutputs | undefined =
|
||||
$selectedAutomation.data?.testData
|
||||
const triggerDef = $selectedAutomation.data?.definition?.trigger
|
||||
|
||||
const isWebhook = triggerDef?.stepId === TriggerStepID.WEBHOOK
|
||||
const isRowAction = triggerDef?.stepId === TriggerStepID.ROW_ACTION
|
||||
const rowActionTableId = triggerDef?.inputs?.tableId
|
||||
const rowActionTable = rowActionTableId
|
||||
? $tables.list.find(table => table._id === rowActionTableId)
|
||||
: undefined
|
||||
|
||||
let triggerData: AutomationTriggerResultOutputs | undefined
|
||||
|
||||
if (results && isAutomationResults(results)) {
|
||||
const automationTrigger: AutomationTriggerResult | undefined =
|
||||
results?.trigger
|
||||
|
||||
const outputs: AutomationTriggerResultOutputs | undefined =
|
||||
automationTrigger?.outputs
|
||||
triggerData = outputs ? outputs : undefined
|
||||
|
||||
if (triggerData) {
|
||||
if (isRowAction && rowActionTable) {
|
||||
const rowTrigger = triggerData as RowActionTriggerOutputs
|
||||
// Row action table must always be retrieved as it is never
|
||||
// returned in the test results
|
||||
rowTrigger.table = rowActionTable
|
||||
} else if (isWebhook) {
|
||||
const webhookTrigger = triggerData as WebhookTriggerOutputs
|
||||
// Ensure it displays in the event that the configuration has been skipped
|
||||
webhookTrigger.body = webhookTrigger.body ?? {}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up unnecessary data from the context
|
||||
// Meta contains UI/UX config data and is not bindable
|
||||
delete triggerData?.meta
|
||||
} else {
|
||||
// Substitute test data in place of the trigger data if the test hasn't been run
|
||||
triggerData = testData
|
||||
}
|
||||
|
||||
// AppSelf context required to mirror server user context
|
||||
const userContext = $selectedAutomation.appSelf || {}
|
||||
|
||||
// Extract step results from a valid response
|
||||
const stepResults =
|
||||
results && isAutomationResults(results) ? results?.steps : []
|
||||
|
||||
return {
|
||||
user: userContext,
|
||||
// Merge in the trigger data.
|
||||
...(triggerData ? { trigger: { ...triggerData } } : {}),
|
||||
// This will initially be empty for each step but will populate
|
||||
// upon running the test.
|
||||
steps: stepResults.reduce(
|
||||
(acc: Record<string, any>, res: Record<string, any>) => {
|
||||
acc[res.id] = res.outputs
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
),
|
||||
env: ($env?.variables || []).reduce(
|
||||
(acc: Record<string, any>, variable: Record<string, any>) => {
|
||||
acc[variable.name] = ""
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
),
|
||||
settings: { url, company, logo },
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
/**
|
||||
* Initialise the automation evaluation context
|
||||
*/
|
||||
initContext: () => {
|
||||
store.context = store.actions.generateContext()
|
||||
},
|
||||
/**
|
||||
* Fetches the app user context used for live evaluation
|
||||
* This matches the context used on the server
|
||||
* @returns {AppSelfResponse | null}
|
||||
*/
|
||||
initAppSelf: async (): Promise<AppSelfResponse | null> => {
|
||||
// Fetch and update the app self if it hasn't been set
|
||||
const appSelfResponse = await API.fetchSelf()
|
||||
store.update(state => ({
|
||||
...state,
|
||||
...(appSelfResponse ? { appSelf: appSelfResponse } : {}),
|
||||
}))
|
||||
return appSelfResponse
|
||||
},
|
||||
/**
|
||||
* Move a given block from one location on the tree to another.
|
||||
*
|
||||
|
@ -282,9 +406,12 @@ const automationActions = (store: AutomationStore) => ({
|
|||
* Build a sequential list of all steps on the step path provided
|
||||
*
|
||||
* @param {Array<Object>} pathWay e.g. [{stepIdx:2},{branchIdx:0, stepIdx:2},...]
|
||||
* @returns {Array<Object>} all steps encountered on the provided path
|
||||
* @returns {Array<AutomationStep | AutomationTrigger>} all steps encountered on the provided path
|
||||
*/
|
||||
getPathSteps: (pathWay: Array<BranchPath>, automation: Automation) => {
|
||||
getPathSteps: (
|
||||
pathWay: Array<BranchPath>,
|
||||
automation: Automation
|
||||
): Array<AutomationStep | AutomationTrigger> => {
|
||||
// Base Steps, including trigger
|
||||
const steps = [
|
||||
automation.definition.trigger,
|
||||
|
@ -531,41 +658,72 @@ const automationActions = (store: AutomationStore) => ({
|
|||
let bindings: any[] = []
|
||||
const addBinding = (
|
||||
name: string,
|
||||
value: any,
|
||||
schema: any,
|
||||
icon: string,
|
||||
idx: number,
|
||||
isLoopBlock: boolean,
|
||||
bindingName?: string
|
||||
pathBlock: AutomationStep | AutomationTrigger,
|
||||
bindingName: string
|
||||
) => {
|
||||
if (!name) return
|
||||
|
||||
const runtimeBinding = store.actions.determineRuntimeBinding(
|
||||
name,
|
||||
idx,
|
||||
isLoopBlock,
|
||||
bindingName,
|
||||
automation,
|
||||
currentBlock,
|
||||
pathSteps
|
||||
)
|
||||
|
||||
// Skip the binding if it's invalid
|
||||
if (!runtimeBinding) {
|
||||
return
|
||||
}
|
||||
|
||||
const readableBinding = store.actions.determineReadableBinding(
|
||||
name,
|
||||
pathBlock
|
||||
)
|
||||
|
||||
const categoryName = store.actions.determineCategoryName(
|
||||
idx,
|
||||
isLoopBlock,
|
||||
bindingName,
|
||||
loopBlockCount
|
||||
)
|
||||
bindings.push(
|
||||
store.actions.createBindingObject(
|
||||
name,
|
||||
value,
|
||||
icon,
|
||||
idx,
|
||||
loopBlockCount,
|
||||
isLoopBlock,
|
||||
runtimeBinding,
|
||||
categoryName,
|
||||
bindingName
|
||||
)
|
||||
|
||||
const isStep = !isLoopBlock && idx !== 0
|
||||
const defaultReadable =
|
||||
bindingName && isStep ? `steps.${bindingName}.${name}` : runtimeBinding
|
||||
|
||||
// Check if the schema matches any column types.
|
||||
const column = Object.values(COLUMNS).find(
|
||||
col =>
|
||||
col.type === schema.type &&
|
||||
("subtype" in col ? col.subtype === schema.subtype : true)
|
||||
)
|
||||
|
||||
// Automation types and column types can collide e.g. "array"
|
||||
// Exclude where necessary
|
||||
const ignoreColumnType = schema.customType === AutomationCustomIOType.ROWS
|
||||
|
||||
// Shown in the bindable menus
|
||||
const displayType = ignoreColumnType ? schema.type : column?.name
|
||||
|
||||
bindings.push({
|
||||
readableBinding: readableBinding || defaultReadable,
|
||||
runtimeBinding,
|
||||
type: schema.type,
|
||||
description: schema.description,
|
||||
icon,
|
||||
category: categoryName,
|
||||
display: {
|
||||
type: displayType,
|
||||
name,
|
||||
rank: isLoopBlock ? idx + 1 : idx - loopBlockCount,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
let loopBlockCount = 0
|
||||
|
@ -633,8 +791,17 @@ const automationActions = (store: AutomationStore) => ({
|
|||
console.error("Loop block missing.")
|
||||
}
|
||||
}
|
||||
|
||||
Object.entries(schema).forEach(([name, value]) => {
|
||||
addBinding(name, value, icon, blockIdx, isLoopBlock, bindingName)
|
||||
addBinding(
|
||||
name,
|
||||
value,
|
||||
icon,
|
||||
blockIdx,
|
||||
isLoopBlock,
|
||||
pathBlock,
|
||||
bindingName
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -645,23 +812,60 @@ const automationActions = (store: AutomationStore) => ({
|
|||
return bindings
|
||||
},
|
||||
|
||||
determineReadableBinding: (
|
||||
name: string,
|
||||
block: AutomationStep | AutomationTrigger
|
||||
) => {
|
||||
const rowTriggers = [
|
||||
TriggerStepID.ROW_UPDATED,
|
||||
TriggerStepID.ROW_SAVED,
|
||||
TriggerStepID.ROW_DELETED,
|
||||
TriggerStepID.ROW_ACTION,
|
||||
]
|
||||
|
||||
const isTrigger = block.type === AutomationStepType.TRIGGER
|
||||
const isAppTrigger = block.stepId === AutomationTriggerStepId.APP
|
||||
const isRowTrigger = rowTriggers.includes(block.stepId)
|
||||
|
||||
let readableBinding = ""
|
||||
if (isTrigger) {
|
||||
if (isAppTrigger) {
|
||||
readableBinding = `trigger.fields.${name}`
|
||||
} else if (isRowTrigger) {
|
||||
let noRowKeywordBindings = ["id", "revision", "oldRow"]
|
||||
readableBinding = noRowKeywordBindings.includes(name)
|
||||
? `trigger.${name}`
|
||||
: `trigger.row.${name}`
|
||||
} else {
|
||||
readableBinding = `trigger.${name}`
|
||||
}
|
||||
}
|
||||
|
||||
return readableBinding
|
||||
},
|
||||
|
||||
determineRuntimeBinding: (
|
||||
name: string,
|
||||
idx: number,
|
||||
isLoopBlock: boolean,
|
||||
bindingName: string | undefined,
|
||||
automation: Automation,
|
||||
currentBlock: AutomationStep | AutomationTrigger | undefined,
|
||||
pathSteps: (AutomationStep | AutomationTrigger)[]
|
||||
) => {
|
||||
let runtimeName: string | null
|
||||
let runtimeName: string
|
||||
|
||||
// Legacy support for EXECUTE_SCRIPT steps
|
||||
const isJSScript =
|
||||
currentBlock?.stepId === AutomationActionStepId.EXECUTE_SCRIPT
|
||||
|
||||
/* Begin special cases for generating custom schemas based on triggers */
|
||||
if (
|
||||
idx === 0 &&
|
||||
automation.definition.trigger?.event === AutomationEventType.APP_TRIGGER
|
||||
) {
|
||||
return `trigger.fields.${name}`
|
||||
return isJSScript
|
||||
? `trigger.fields["${name}"]`
|
||||
: `trigger.fields.[${name}]`
|
||||
}
|
||||
|
||||
if (
|
||||
|
@ -671,26 +875,28 @@ const automationActions = (store: AutomationStore) => ({
|
|||
automation.definition.trigger?.event === AutomationEventType.ROW_SAVE)
|
||||
) {
|
||||
let noRowKeywordBindings = ["id", "revision", "oldRow"]
|
||||
if (!noRowKeywordBindings.includes(name)) return `trigger.row.${name}`
|
||||
if (!noRowKeywordBindings.includes(name)) {
|
||||
return isJSScript ? `trigger.row["${name}"]` : `trigger.row.[${name}]`
|
||||
}
|
||||
}
|
||||
/* End special cases for generating custom schemas based on triggers */
|
||||
|
||||
if (isLoopBlock) {
|
||||
runtimeName = `loop.${name}`
|
||||
} else if (idx === 0) {
|
||||
runtimeName = `trigger.${name}`
|
||||
} else if (currentBlock?.stepId === AutomationActionStepId.EXECUTE_SCRIPT) {
|
||||
runtimeName = `trigger.[${name}]`
|
||||
} else if (isJSScript) {
|
||||
const stepId = pathSteps[idx].id
|
||||
if (!stepId) {
|
||||
notifications.error("Error generating binding: Step ID not found.")
|
||||
return null
|
||||
return
|
||||
}
|
||||
runtimeName = `steps["${stepId}"].${name}`
|
||||
} else {
|
||||
const stepId = pathSteps[idx].id
|
||||
if (!stepId) {
|
||||
notifications.error("Error generating binding: Step ID not found.")
|
||||
return null
|
||||
return
|
||||
}
|
||||
runtimeName = `steps.${stepId}.${name}`
|
||||
}
|
||||
|
@ -711,40 +917,6 @@ const automationActions = (store: AutomationStore) => ({
|
|||
: `Step ${idx - loopBlockCount} outputs`
|
||||
},
|
||||
|
||||
createBindingObject: (
|
||||
name: string,
|
||||
value: any,
|
||||
icon: string,
|
||||
idx: number,
|
||||
loopBlockCount: number,
|
||||
isLoopBlock: boolean,
|
||||
runtimeBinding: string | null,
|
||||
categoryName: string,
|
||||
bindingName?: string
|
||||
) => {
|
||||
const field = Object.values(FIELDS).find(
|
||||
field =>
|
||||
field.type === value.type &&
|
||||
("subtype" in field ? field.subtype === value.subtype : true)
|
||||
)
|
||||
return {
|
||||
readableBinding:
|
||||
bindingName && !isLoopBlock && idx !== 0
|
||||
? `steps.${bindingName}.${name}`
|
||||
: runtimeBinding,
|
||||
runtimeBinding,
|
||||
type: value.type,
|
||||
description: value.description,
|
||||
icon,
|
||||
category: categoryName,
|
||||
display: {
|
||||
type: field?.name || value.type,
|
||||
name,
|
||||
rank: isLoopBlock ? idx + 1 : idx - loopBlockCount,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
processBlockInputs: async (
|
||||
block: AutomationStep,
|
||||
data: Record<string, any>
|
||||
|
@ -796,19 +968,14 @@ const automationActions = (store: AutomationStore) => ({
|
|||
},
|
||||
|
||||
test: async (automation: Automation, testData: any) => {
|
||||
let result: any
|
||||
let result: TestAutomationResponse
|
||||
try {
|
||||
result = await API.testAutomation(automation._id!, testData)
|
||||
} catch (err: any) {
|
||||
const message = err.message || err.status || JSON.stringify(err)
|
||||
throw `Automation test failed - ${message}`
|
||||
}
|
||||
if (!result?.trigger && !result?.steps?.length && !result?.message) {
|
||||
if (result?.err?.code === "usage_limit_exceeded") {
|
||||
throw "You have exceeded your automation quota"
|
||||
}
|
||||
throw "Something went wrong testing your automation"
|
||||
}
|
||||
|
||||
store.update(state => {
|
||||
state.testResults = result
|
||||
return state
|
||||
|
@ -1396,7 +1563,7 @@ const automationActions = (store: AutomationStore) => ({
|
|||
}
|
||||
store.update(state => {
|
||||
state.selectedAutomationId = id
|
||||
state.testResults = null
|
||||
delete state.testResults
|
||||
state.showTestPanel = false
|
||||
return state
|
||||
})
|
||||
|
@ -1436,29 +1603,14 @@ const automationActions = (store: AutomationStore) => ({
|
|||
},
|
||||
})
|
||||
|
||||
class AutomationStore extends BudiStore<AutomationState> {
|
||||
history: HistoryStore<Automation>
|
||||
actions: ReturnType<typeof automationActions>
|
||||
|
||||
constructor() {
|
||||
super(initialAutomationState)
|
||||
this.actions = automationActions(this)
|
||||
this.history = createHistoryStore({
|
||||
getDoc: this.actions.getDefinition.bind(this),
|
||||
selectDoc: this.actions.select.bind(this),
|
||||
})
|
||||
|
||||
// Then wrap save and delete with history
|
||||
const originalSave = this.actions.save.bind(this.actions)
|
||||
const originalDelete = this.actions.delete.bind(this.actions)
|
||||
this.actions.save = this.history.wrapSaveDoc(originalSave)
|
||||
this.actions.delete = this.history.wrapDeleteDoc(originalDelete)
|
||||
}
|
||||
export interface AutomationContext {
|
||||
user: AppSelfResponse | null
|
||||
trigger?: AutomationTriggerResultOutputs
|
||||
steps: Record<string, AutomationStep>
|
||||
env: Record<string, any>
|
||||
settings: Record<string, any>
|
||||
}
|
||||
|
||||
export const automationStore = new AutomationStore()
|
||||
export const automationHistoryStore = automationStore.history
|
||||
|
||||
export class SelectedAutomationStore extends DerivedBudiStore<
|
||||
AutomationState,
|
||||
DerivedAutomationState
|
||||
|
@ -1519,4 +1671,49 @@ export class SelectedAutomationStore extends DerivedBudiStore<
|
|||
super(initialAutomationState, makeDerivedStore)
|
||||
}
|
||||
}
|
||||
export const selectedAutomation = new SelectedAutomationStore(automationStore)
|
||||
|
||||
class AutomationStore extends BudiStore<AutomationState> {
|
||||
history: HistoryStore<Automation>
|
||||
actions: ReturnType<typeof automationActions>
|
||||
selected: SelectedAutomationStore
|
||||
context: Readable<AutomationContext> | undefined
|
||||
|
||||
constructor() {
|
||||
super(initialAutomationState)
|
||||
this.actions = automationActions(this)
|
||||
this.history = createHistoryStore({
|
||||
getDoc: this.actions.getDefinition.bind(this),
|
||||
selectDoc: this.actions.select.bind(this),
|
||||
})
|
||||
|
||||
// Then wrap save and delete with history
|
||||
const originalSave = this.actions.save.bind(this.actions)
|
||||
const originalDelete = this.actions.delete.bind(this.actions)
|
||||
this.actions.save = this.history.wrapSaveDoc(originalSave)
|
||||
this.actions.delete = this.history.wrapDeleteDoc(originalDelete)
|
||||
|
||||
this.selected = new SelectedAutomationStore(this)
|
||||
}
|
||||
}
|
||||
|
||||
export const automationStore = new AutomationStore()
|
||||
|
||||
export const automationHistoryStore = automationStore.history
|
||||
export const selectedAutomation = automationStore.selected
|
||||
|
||||
// Define an empty evaluation context at the start
|
||||
const emptyContext: AutomationContext = {
|
||||
user: {},
|
||||
steps: {},
|
||||
env: {},
|
||||
settings: {},
|
||||
}
|
||||
|
||||
// The page layout kicks off initialisation; subscription happens within the page
|
||||
export const evaluationContext: Readable<AutomationContext> = readable(
|
||||
emptyContext,
|
||||
set => {
|
||||
const unsubscribe = automationStore.context?.subscribe(set) ?? (() => {})
|
||||
return () => unsubscribe()
|
||||
}
|
||||
)
|
||||
|
|
|
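The evaluationContext export above bridges an optional inner store (automationStore.context, which only exists after initContext has run) behind a stable readable with a default value. A minimal standalone sketch of the same readable() bridging pattern, with illustrative names and a trimmed-down context type:

import { readable, type Readable } from "svelte/store"

// Trimmed-down stand-in for AutomationContext
interface Ctx {
  steps: Record<string, unknown>
}

const emptyCtx: Ctx = { steps: {} }

// The inner store may not exist yet; it is created lazily elsewhere (illustrative)
let innerContext: Readable<Ctx> | undefined

// Consumers always get a defined store: it emits the empty default until the
// inner store exists at first subscription, then forwards its values
export const bridgedContext: Readable<Ctx> = readable(emptyCtx, set => {
  const unsubscribe = innerContext?.subscribe(set) ?? (() => {})
  return () => unsubscribe()
})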
@ -11,6 +11,7 @@ import {
|
|||
automationStore,
|
||||
selectedAutomation,
|
||||
automationHistoryStore,
|
||||
evaluationContext,
|
||||
} from "./automations"
|
||||
import { userStore, userSelectedResourceMap, isOnlyUser } from "./users"
|
||||
import { deploymentStore } from "./deployments"
|
||||
|
@ -72,6 +73,7 @@ export {
|
|||
snippets,
|
||||
rowActions,
|
||||
appPublished,
|
||||
evaluationContext,
|
||||
screenComponentsList,
|
||||
screenComponentErrors,
|
||||
screenComponentErrorList,
|
||||
|
|
|
@ -8,6 +8,7 @@ import {
|
|||
SystemStatusResponse,
|
||||
} from "@budibase/types"
|
||||
import { BudiStore } from "../BudiStore"
|
||||
import Analytics from "../../analytics"
|
||||
|
||||
interface AdminState extends GetEnvironmentResponse {
|
||||
loaded: boolean
|
||||
|
@ -33,6 +34,8 @@ export class AdminStore extends BudiStore<AdminState> {
|
|||
await this.getEnvironment()
|
||||
// enable system status checks in the cloud
|
||||
if (get(this.store).cloud) {
|
||||
// Session recording is only enabled in the cloud
|
||||
Analytics.enableSessionRecording()
|
||||
await this.getSystemStatus()
|
||||
this.checkStatus()
|
||||
}
|
||||
|
|
|
@ -16,7 +16,8 @@
|
|||
},
|
||||
"scripts": {
|
||||
"build": "vite build",
|
||||
"dev": "vite build --watch --mode=dev"
|
||||
"dev": "vite build --watch --mode=dev",
|
||||
"check:types": "yarn svelte-check"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "*",
|
||||
|
|
|
@ -10,7 +10,9 @@ export const API = createAPIClient({
|
|||
// Attach client specific headers
|
||||
attachHeaders: headers => {
|
||||
// Attach app ID header
|
||||
headers["x-budibase-app-id"] = window["##BUDIBASE_APP_ID##"]
|
||||
if (window["##BUDIBASE_APP_ID##"]) {
|
||||
headers["x-budibase-app-id"] = window["##BUDIBASE_APP_ID##"]
|
||||
}
|
||||
|
||||
// Attach client header if not inside the builder preview
|
||||
if (!window["##BUDIBASE_IN_BUILDER##"]) {
|
||||
|
|
|
@ -141,6 +141,7 @@
|
|||
var(--spectrum-global-dimension-size-300)
|
||||
);
|
||||
display: -webkit-box;
|
||||
line-clamp: 2;
|
||||
-webkit-line-clamp: 2;
|
||||
-webkit-box-orient: vertical;
|
||||
overflow: hidden;
|
||||
|
|
|
@ -4,12 +4,12 @@
|
|||
import { Utils } from "@budibase/frontend-core"
|
||||
import FormBlockWrapper from "./FormBlockWrapper.svelte"
|
||||
import { get } from "svelte/store"
|
||||
import type { TableSchema, UIDatasource } from "@budibase/types"
|
||||
import type { TableSchema } from "@budibase/types"
|
||||
|
||||
type Field = { name: string; active: boolean }
|
||||
|
||||
export let actionType: string
|
||||
export let dataSource: UIDatasource
|
||||
export let dataSource: { resourceId: string }
|
||||
export let size: string
|
||||
export let disabled: boolean
|
||||
export let fields: (Field | string)[]
|
||||
|
@ -30,8 +30,8 @@
|
|||
// Legacy
|
||||
export let showDeleteButton: boolean
|
||||
export let showSaveButton: boolean
|
||||
export let saveButtonLabel: boolean
|
||||
export let deleteButtonLabel: boolean
|
||||
export let saveButtonLabel: string
|
||||
export let deleteButtonLabel: string
|
||||
|
||||
const { fetchDatasourceSchema, generateGoldenSample } = getContext("sdk")
|
||||
const component = getContext("component")
|
||||
|
@ -107,7 +107,7 @@
|
|||
return [...fields, ...defaultFields].filter(field => field.active)
|
||||
}
|
||||
|
||||
const fetchSchema = async (datasource: UIDatasource) => {
|
||||
const fetchSchema = async (datasource: { resourceId: string }) => {
|
||||
schema = (await fetchDatasourceSchema(datasource)) || {}
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -49,7 +49,11 @@
|
|||
data.append("file", fileList[i])
|
||||
}
|
||||
try {
|
||||
return await API.uploadAttachment(formContext?.dataSource?.tableId, data)
|
||||
let sourceId = formContext?.dataSource?.tableId
|
||||
if (formContext?.dataSource?.type === "viewV2") {
|
||||
sourceId = formContext.dataSource.id
|
||||
}
|
||||
return await API.uploadAttachment(sourceId, data)
|
||||
} catch (error) {
|
||||
return []
|
||||
}
|
||||
|
|
|
@ -5,11 +5,12 @@
|
|||
import { memo } from "@budibase/frontend-core"
|
||||
import Placeholder from "../Placeholder.svelte"
|
||||
import InnerForm from "./InnerForm.svelte"
|
||||
import type { FieldApi } from "."
|
||||
|
||||
export let label: string | undefined = undefined
|
||||
export let field: string | undefined = undefined
|
||||
export let fieldState: any
|
||||
export let fieldApi: any
|
||||
export let fieldApi: FieldApi
|
||||
export let fieldSchema: any
|
||||
export let defaultValue: string | undefined = undefined
|
||||
export let type: any
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<script lang="ts">
|
||||
import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
|
||||
import { FieldType, InternalTable } from "@budibase/types"
|
||||
import { BasicOperator, FieldType, InternalTable } from "@budibase/types"
|
||||
import { fetchData, Utils } from "@budibase/frontend-core"
|
||||
import { getContext } from "svelte"
|
||||
import Field from "./Field.svelte"
|
||||
|
@ -9,10 +9,11 @@
|
|||
RelationshipFieldMetadata,
|
||||
Row,
|
||||
} from "@budibase/types"
|
||||
import type { FieldApi, FieldState } from "."
|
||||
|
||||
export let field: string | undefined = undefined
|
||||
export let label: string | undefined = undefined
|
||||
export let placeholder: any = undefined
|
||||
export let placeholder: string | undefined = undefined
|
||||
export let disabled: boolean = false
|
||||
export let readonly: boolean = false
|
||||
export let validation: any
|
||||
|
@ -35,12 +36,13 @@
|
|||
const { API } = getContext("sdk")
|
||||
|
||||
// Field state
|
||||
let fieldState: any
|
||||
let fieldApi: any
|
||||
let fieldState: FieldState<string | string[]> | undefined
|
||||
|
||||
let fieldApi: FieldApi
|
||||
let fieldSchema: RelationshipFieldMetadata | undefined
|
||||
|
||||
// Local UI state
|
||||
let searchTerm: any
|
||||
let searchTerm: string
|
||||
let open: boolean = false
|
||||
|
||||
// Options state
|
||||
|
@ -106,17 +108,14 @@
|
|||
filter: SearchFilter[],
|
||||
linkedTableId?: string
|
||||
) => {
|
||||
if (!linkedTableId) {
|
||||
return undefined
|
||||
}
|
||||
const datasource =
|
||||
datasourceType === "table"
|
||||
dsType === "table"
|
||||
? {
|
||||
type: datasourceType,
|
||||
tableId: fieldSchema?.tableId!,
|
||||
type: dsType,
|
||||
tableId: linkedTableId!,
|
||||
}
|
||||
: {
|
||||
type: datasourceType,
|
||||
type: dsType,
|
||||
tableId: InternalTable.USER_METADATA,
|
||||
}
|
||||
return fetchData({
|
||||
|
@ -306,14 +305,14 @@
|
|||
}
|
||||
|
||||
// Ensure we match all filters, rather than any
|
||||
let newFilter: any = filter
|
||||
let newFilter = filter
|
||||
if (searchTerm) {
|
||||
// @ts-expect-error this doesn't fit types, but don't want to change it yet
|
||||
newFilter = (newFilter || []).filter(x => x.operator !== "allOr")
|
||||
newFilter.push({
|
||||
// Use a big numeric prefix to avoid clashing with an existing filter
|
||||
field: `999:${primaryDisplay}`,
|
||||
operator: "string",
|
||||
operator: BasicOperator.STRING,
|
||||
value: searchTerm,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -19,3 +19,15 @@ export { default as codescanner } from "./CodeScannerField.svelte"
|
|||
export { default as signaturesinglefield } from "./SignatureField.svelte"
|
||||
export { default as bbreferencefield } from "./BBReferenceField.svelte"
|
||||
export { default as bbreferencesinglefield } from "./BBReferenceSingleField.svelte"
|
||||
|
||||
export interface FieldApi {
|
||||
setValue(value: any): boolean
|
||||
deregister(): void
|
||||
}
|
||||
|
||||
export interface FieldState<T> {
|
||||
value: T
|
||||
fieldId: string
|
||||
disabled: boolean
|
||||
readonly: boolean
|
||||
}
|
|
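As a rough illustration of the two interfaces above (hypothetical values only, assuming FieldApi and FieldState are in scope), a minimal pair of objects satisfying them could look like this:

// Hypothetical field state: a text field holding a string value
const exampleState: FieldState<string> = {
  value: "hello",
  fieldId: "field-example", // illustrative id
  disabled: false,
  readonly: false,
}

// Hypothetical field API backing that state
const exampleApi: FieldApi = {
  // Returns whether the new value was accepted
  setValue(value: any): boolean {
    exampleState.value = String(value)
    return true
  },
  // Clean-up hook, e.g. removing the field from its parent form
  deregister(): void {},
}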
@ -7,6 +7,7 @@
|
|||
import { isGridEvent } from "@/utils/grid"
|
||||
import { DNDPlaceholderID } from "@/constants"
|
||||
import type { Component } from "@budibase/types"
|
||||
import { DropPosition } from "@budibase/types"
|
||||
|
||||
type ChildCoords = {
|
||||
placeholder: boolean
|
||||
|
@ -287,7 +288,7 @@
|
|||
}
|
||||
|
||||
// Convert parent + index into target + mode
|
||||
let legacyDropTarget, legacyDropMode
|
||||
let legacyDropTarget, legacyDropMode: DropPosition
|
||||
const parent: Component | null = findComponentById(
|
||||
get(screenStore).activeScreen?.props,
|
||||
drop.parent
|
||||
|
@ -309,16 +310,16 @@
|
|||
// Use inside if no existing children
|
||||
if (!children?.length) {
|
||||
legacyDropTarget = parent._id
|
||||
legacyDropMode = "inside"
|
||||
legacyDropMode = DropPosition.INSIDE
|
||||
} else if (drop.index === 0) {
|
||||
legacyDropTarget = children[0]?._id
|
||||
legacyDropMode = "above"
|
||||
legacyDropMode = DropPosition.ABOVE
|
||||
} else {
|
||||
legacyDropTarget = children[drop.index - 1]?._id
|
||||
legacyDropMode = "below"
|
||||
legacyDropMode = DropPosition.BELOW
|
||||
}
|
||||
|
||||
if (legacyDropTarget && legacyDropMode) {
|
||||
if (legacyDropTarget && legacyDropMode && source.id) {
|
||||
dropping = true
|
||||
await builderStore.actions.moveComponent(
|
||||
source.id,
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
Devices,
|
||||
GridDragMode,
|
||||
} from "@/utils/grid"
|
||||
import { DropPosition } from "@budibase/types"
|
||||
|
||||
type GridDragSide =
|
||||
| "top"
|
||||
|
@ -222,7 +223,7 @@
|
|||
|
||||
// If holding ctrl/cmd then leave behind a duplicate of this component
|
||||
if (mode === GridDragMode.Move && (e.ctrlKey || e.metaKey)) {
|
||||
builderStore.actions.duplicateComponent(id, "above", false)
|
||||
builderStore.actions.duplicateComponent(id, DropPosition.ABOVE, false)
|
||||
}
|
||||
|
||||
// Find grid parent and read from DOM
|
||||
|
|
|
@ -115,7 +115,7 @@ const createBuilderStore = () => {
|
|||
component: string,
|
||||
parent: string,
|
||||
index: number,
|
||||
props: Record<string, any>
|
||||
props?: Record<string, any>
|
||||
) => {
|
||||
eventStore.actions.dispatchEvent("drop-new-component", {
|
||||
component,
|
||||
|
|
|
@ -421,6 +421,28 @@ const showNotificationHandler = action => {
|
|||
|
||||
const promptUserHandler = () => {}
|
||||
|
||||
const copyToClipboardHandler = async action => {
|
||||
const { textToCopy, showNotification, notificationMessage } =
|
||||
action.parameters
|
||||
|
||||
if (!textToCopy) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await navigator.clipboard.writeText(textToCopy)
|
||||
if (showNotification) {
|
||||
const message = notificationMessage || "Copied to clipboard"
|
||||
notificationStore.actions.success(message, true, 3000)
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to copy text: ", err)
|
||||
notificationStore.actions.error("Failed to copy to clipboard")
|
||||
}
|
||||
|
||||
return { copied: textToCopy }
|
||||
}
|
||||
|
||||
const openSidePanelHandler = action => {
|
||||
const { id } = action.parameters
|
||||
if (id) {
|
||||
|
@ -514,6 +536,7 @@ const handlerMap = {
|
|||
["Close Modal"]: closeModalHandler,
|
||||
["Download File"]: downloadFileHandler,
|
||||
["Row Action"]: rowActionHandler,
|
||||
["Copy To Clipboard"]: copyToClipboardHandler,
|
||||
}
|
||||
|
||||
const confirmTextMap = {
|
||||
|
|
|
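The copyToClipboardHandler added above follows the usual async Clipboard API flow: guard on empty input, write, optionally notify, and surface failures as an error notification. A minimal standalone sketch of that flow in TypeScript, with notify standing in for the notification store used in the real handler:

// Minimal sketch of the copy-then-notify flow used by the new handler
async function copyText(
  textToCopy: string,
  notify: (message: string, success: boolean) => void,
  notificationMessage?: string
): Promise<{ copied: string } | undefined> {
  if (!textToCopy) {
    return
  }
  try {
    // The async Clipboard API is only available in secure (HTTPS) contexts
    await navigator.clipboard.writeText(textToCopy)
    notify(notificationMessage || "Copied to clipboard", true)
    return { copied: textToCopy }
  } catch (err) {
    console.error("Failed to copy text:", err)
    notify("Failed to copy to clipboard", false)
  }
}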
@ -7,6 +7,7 @@
|
|||
"target": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"skipLibCheck": true,
|
||||
"paths": {
|
||||
"@budibase/*": [
|
||||
"../*/src/index.ts",
|
||||
|
|
|
@ -16,8 +16,5 @@
|
|||
"lodash": "4.17.21",
|
||||
"shortid": "2.2.15",
|
||||
"socket.io-client": "^4.7.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-check": "^4.1.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
export let drawerTitle
|
||||
export let toReadable
|
||||
export let toRuntime
|
||||
export let evaluationContext = {}
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
|
@ -66,7 +67,6 @@
|
|||
>
|
||||
Confirm
|
||||
</Button>
|
||||
|
||||
<svelte:component
|
||||
this={panel}
|
||||
slot="body"
|
||||
|
@ -76,6 +76,7 @@
|
|||
allowHBS
|
||||
on:change={drawerOnChange}
|
||||
{bindings}
|
||||
context={evaluationContext}
|
||||
/>
|
||||
</Drawer>
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@
|
|||
export let panel
|
||||
export let toReadable
|
||||
export let toRuntime
|
||||
export let evaluationContext = {}
|
||||
|
||||
$: editableFilters = migrateFilters(filters)
|
||||
$: {
|
||||
|
@ -385,6 +386,7 @@
|
|||
{panel}
|
||||
{toReadable}
|
||||
{toRuntime}
|
||||
{evaluationContext}
|
||||
on:change={e => {
|
||||
const updated = {
|
||||
...filter,
|
||||
|
@ -423,6 +425,7 @@
|
|||
{panel}
|
||||
{toReadable}
|
||||
{toRuntime}
|
||||
{evaluationContext}
|
||||
on:change={e => {
|
||||
onFilterFieldUpdate(
|
||||
{ ...filter, ...e.detail },
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
export let drawerTitle
|
||||
export let toReadable
|
||||
export let toRuntime
|
||||
export let evaluationContext = {}
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const { OperatorOptions, FilterValueType } = Constants
|
||||
|
@ -156,6 +157,7 @@
|
|||
allowHBS
|
||||
on:change={drawerOnChange}
|
||||
{bindings}
|
||||
context={evaluationContext}
|
||||
/>
|
||||
</Drawer>
|
||||
|
||||
|
|
|
@ -8,7 +8,6 @@ import {
|
|||
Row,
|
||||
SearchFilters,
|
||||
SortOrder,
|
||||
SortType,
|
||||
TableSchema,
|
||||
} from "@budibase/types"
|
||||
import { APIClient } from "../api/types"
|
||||
|
@ -72,8 +71,6 @@ export default abstract class BaseDataFetch<
|
|||
options: DataFetchOptions<TQuery> & {
|
||||
datasource: TDatasource
|
||||
|
||||
sortType: SortType | null
|
||||
|
||||
// Client side feature customisation
|
||||
clientSideSearching: boolean
|
||||
clientSideSorting: boolean
|
||||
|
@ -106,7 +103,6 @@ export default abstract class BaseDataFetch<
|
|||
// Sorting config
|
||||
sortColumn: null,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
sortType: null,
|
||||
|
||||
// Pagination config
|
||||
paginate: true,
|
||||
|
@ -227,31 +223,12 @@ export default abstract class BaseDataFetch<
|
|||
this.options.sortColumn = this.getDefaultSortColumn(definition, schema)
|
||||
}
|
||||
|
||||
// If we don't have a sort column specified then just ensure we don't set
|
||||
// any sorting params
|
||||
if (!this.options.sortColumn) {
|
||||
// If no sort order, default to ascending
|
||||
if (!this.options.sortOrder) {
|
||||
this.options.sortOrder = SortOrder.ASCENDING
|
||||
this.options.sortType = null
|
||||
} else {
|
||||
// Otherwise determine what sort type to use base on sort column
|
||||
this.options.sortType = SortType.STRING
|
||||
const fieldSchema = schema?.[this.options.sortColumn]
|
||||
if (
|
||||
fieldSchema?.type === FieldType.NUMBER ||
|
||||
fieldSchema?.type === FieldType.BIGINT ||
|
||||
("calculationType" in fieldSchema && fieldSchema?.calculationType)
|
||||
) {
|
||||
this.options.sortType = SortType.NUMBER
|
||||
}
|
||||
|
||||
// If no sort order, default to ascending
|
||||
if (!this.options.sortOrder) {
|
||||
this.options.sortOrder = SortOrder.ASCENDING
|
||||
} else {
|
||||
// Ensure sortOrder matches the enum
|
||||
this.options.sortOrder =
|
||||
this.options.sortOrder.toLowerCase() as SortOrder
|
||||
}
|
||||
// Ensure sortOrder matches the enum
|
||||
this.options.sortOrder = this.options.sortOrder.toLowerCase() as SortOrder
|
||||
}
|
||||
|
||||
// Build the query
|
||||
|
@ -294,7 +271,6 @@ export default abstract class BaseDataFetch<
|
|||
const {
|
||||
sortColumn,
|
||||
sortOrder,
|
||||
sortType,
|
||||
limit,
|
||||
clientSideSearching,
|
||||
clientSideSorting,
|
||||
|
@ -311,8 +287,8 @@ export default abstract class BaseDataFetch<
|
|||
}
|
||||
|
||||
// If we don't support sorting, do a client-side sort
|
||||
if (!this.features.supportsSort && clientSideSorting && sortType) {
|
||||
rows = sort(rows, sortColumn as any, sortOrder, sortType)
|
||||
if (!this.features.supportsSort && clientSideSorting && sortColumn) {
|
||||
rows = sort(rows, sortColumn, sortOrder)
|
||||
}
|
||||
|
||||
// If we don't support pagination, do a client-side limit
|
||||
|
|
|
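With sortType gone, the client-side fallback above sorts purely by column and order. A rough sketch of a type-agnostic comparator in that spirit (illustrative only; the real sort helper lives in the shared utilities):

type ClientSortOrder = "ascending" | "descending"

// Numbers compare numerically, everything else falls back to a
// locale-aware string comparison
function sortRows<T extends Record<string, any>>(
  rows: T[],
  column: string,
  order: ClientSortOrder
): T[] {
  const direction = order === "descending" ? -1 : 1
  return [...rows].sort((a, b) => {
    const left = a[column]
    const right = b[column]
    if (typeof left === "number" && typeof right === "number") {
      return (left - right) * direction
    }
    return String(left ?? "").localeCompare(String(right ?? "")) * direction
  })
}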
@ -29,8 +29,7 @@ export default class TableFetch extends BaseDataFetch<TableDatasource, Table> {
|
|||
}
|
||||
|
||||
async getData() {
|
||||
const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
|
||||
this.options
|
||||
const { datasource, limit, sortColumn, sortOrder, paginate } = this.options
|
||||
const { tableId } = datasource
|
||||
const { cursor, query } = get(this.store)
|
||||
|
||||
|
@ -41,7 +40,6 @@ export default class TableFetch extends BaseDataFetch<TableDatasource, Table> {
|
|||
limit,
|
||||
sort: sortColumn,
|
||||
sortOrder: sortOrder ?? SortOrder.ASCENDING,
|
||||
sortType,
|
||||
paginate,
|
||||
bookmark: cursor,
|
||||
})
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import {
|
||||
SearchViewRowRequest,
|
||||
SortOrder,
|
||||
ViewDatasource,
|
||||
ViewV2Enriched,
|
||||
|
@ -40,8 +41,7 @@ export default class ViewV2Fetch extends BaseDataFetch<
|
|||
}
|
||||
|
||||
async getData() {
|
||||
const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
|
||||
this.options
|
||||
const { datasource, limit, sortColumn, sortOrder, paginate } = this.options
|
||||
const { cursor, query, definition } = get(this.store)
|
||||
|
||||
// If this is a calculation view and we have no calculations, return nothing
|
||||
|
@ -68,14 +68,13 @@ export default class ViewV2Fetch extends BaseDataFetch<
|
|||
}
|
||||
|
||||
try {
|
||||
const request = {
|
||||
const request: SearchViewRowRequest = {
|
||||
query,
|
||||
paginate,
|
||||
limit,
|
||||
bookmark: cursor,
|
||||
sort: sortColumn,
|
||||
sortOrder: sortOrder,
|
||||
sortType,
|
||||
}
|
||||
if (paginate) {
|
||||
const res = await this.API.viewV2.fetch(datasource.id, {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
export { createAPIClient } from "./api"
|
||||
export type { APIClient } from "./api"
|
||||
export { fetchData, DataFetchMap } from "./fetch"
|
||||
export { fetchData, DataFetchMap, type DataFetchType } from "./fetch"
|
||||
export * as Constants from "./constants"
|
||||
export * from "./stores"
|
||||
export * from "./utils"
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
import { vitePreprocess } from "@sveltejs/vite-plugin-svelte"
|
||||
|
||||
const config = {
|
||||
preprocess: vitePreprocess(),
|
||||
}
|
||||
|
||||
export default config
|
|
@ -1 +1 @@
|
|||
Subproject commit b28dbd549284cf450be7f25ad85aadf614d08f0b
|
||||
Subproject commit 2dd06c2fcb3cf10d5f16f5d8fe6cd344c8e905a5
|
|
@ -3,4 +3,5 @@ MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588eb
|
|||
POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
|
||||
MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
|
||||
MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8
|
||||
ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0
|
||||
ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0
|
||||
DYNAMODB_SHA=sha256:cf8cebd061f988628c02daff10fdb950a54478feff9c52f6ddf84710fe3c3906
|
|
@ -1,9 +1,15 @@
import * as triggers from "../../automations/triggers"
import { sdk as coreSdk } from "@budibase/shared-core"
import { DocumentType } from "../../db/utils"
import { updateTestHistory, removeDeprecated } from "../../automations/utils"
import { updateTestHistory } from "../../automations/utils"
import { withTestFlag } from "../../utilities/redis"
import { context, cache, events, db as dbCore } from "@budibase/backend-core"
import {
context,
cache,
events,
db as dbCore,
HTTPError,
} from "@budibase/backend-core"
import { automations, features } from "@budibase/pro"
import {
App,
@ -28,20 +34,13 @@ import {
TriggerAutomationResponse,
TestAutomationRequest,
TestAutomationResponse,
Table,
} from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import env from "../../environment"

async function getActionDefinitions() {
return removeDeprecated(await actionDefs())
}

function getTriggerDefinitions() {
return removeDeprecated(triggers.TRIGGER_DEFINITIONS)
}

/*************************
* *
* BUILDER FUNCTIONS *
@ -141,21 +140,21 @@ export async function clearLogError(
export async function getActionList(
ctx: UserCtx<void, GetAutomationActionDefinitionsResponse>
) {
ctx.body = await getActionDefinitions()
ctx.body = await actionDefs()
}

export async function getTriggerList(
ctx: UserCtx<void, GetAutomationTriggerDefinitionsResponse>
) {
ctx.body = getTriggerDefinitions()
ctx.body = triggers.TRIGGER_DEFINITIONS
}

export async function getDefinitionList(
ctx: UserCtx<void, GetAutomationStepDefinitionsResponse>
) {
ctx.body = {
trigger: getTriggerDefinitions(),
action: await getActionDefinitions(),
trigger: triggers.TRIGGER_DEFINITIONS,
action: await actionDefs(),
}
}

@ -239,14 +238,22 @@ export async function test(
const { request, appId } = ctx
const { body } = request

let table: Table | undefined
if (coreSdk.automations.isRowAction(automation) && body.row?.tableId) {
table = await sdk.tables.getTable(body.row?.tableId)
if (!table) {
throw new HTTPError("Table not found", 404)
}
}

ctx.body = await withTestFlag(automation._id!, async () => {
const occurredAt = new Date().getTime()
await updateTestHistory(appId, automation, { ...body, occurredAt })

const input = prepareTestInput(body)
const user = sdk.users.getUserContextBindings(ctx.user)
return await triggers.externalTrigger(
automation,
{ ...prepareTestInput(body), appId, user },
{ ...{ ...input, ...(table ? { table } : {}) }, appId, user },
{ getResponses: true }
)
})
@ -263,7 +263,6 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
limit: searchRequest.limit,
sort: searchRequest.sort ?? undefined,
sortOrder: searchRequest.sortOrder,
sortType: searchRequest.sortType ?? undefined,
countRows: searchRequest.countRows,
version: searchRequest.version,
disableEscaping: searchRequest.disableEscaping,

@ -63,14 +63,12 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
return {
sort: request.sort,
sortOrder: request.sortOrder,
sortType: request.sortType ?? undefined,
}
}
if (view.sort) {
return {
sort: view.sort.field,
sortOrder: view.sort.order,
sortType: view.sort.type,
}
}
@ -11,6 +11,7 @@ import {
import { configs, context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import {
AutomationResults,
ConfigType,
FieldType,
FilterCondition,
@ -19,7 +20,6 @@ import {
Table,
} from "@budibase/types"
import { mocks } from "@budibase/backend-core/tests"
import { removeDeprecated } from "../../../automations/utils"
import { createAutomationBuilder } from "../../../automations/tests/utilities/AutomationTestBuilder"
import { basicTable } from "../../../tests/utilities/structures"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@ -64,15 +64,11 @@ describe("/automations", () => {
it("returns all of the definitions in one", async () => {
const { action, trigger } = await config.api.automation.getDefinitions()

let definitionsLength = Object.keys(
removeDeprecated(BUILTIN_ACTION_DEFINITIONS)
).length

expect(Object.keys(action).length).toBeGreaterThanOrEqual(
definitionsLength
Object.keys(BUILTIN_ACTION_DEFINITIONS).length
)
expect(Object.keys(trigger).length).toEqual(
Object.keys(removeDeprecated(TRIGGER_DEFINITIONS)).length
Object.keys(TRIGGER_DEFINITIONS).length
)
})
})
@ -290,8 +286,7 @@ describe("/automations", () => {
await setup.delay(500)
let elements = await getAllTableRows(config)
// don't test it unless there are values to test
if (elements.length > 1) {
expect(elements.length).toBeGreaterThanOrEqual(MAX_RETRIES)
if (elements.length >= 1) {
expect(elements[0].name).toEqual("Test")
expect(elements[0].description).toEqual("TEST")
return
@ -627,7 +622,7 @@ describe("/automations", () => {
})
)

const res = await config.api.automation.test(automation._id!, {
const response = await config.api.automation.test(automation._id!, {
fields: {},
oldRow: {
City: oldCity,
@ -637,12 +632,14 @@ describe("/automations", () => {
},
})

if (isDidNotTriggerResponse(res)) {
if (isDidNotTriggerResponse(response)) {
throw new Error("Automation did not trigger")
}

const results: AutomationResults = response as AutomationResults

const expectedResult = oldCity === newCity
expect(res.steps[1].outputs.result).toEqual(expectedResult)
expect(results.steps[1].outputs.result).toEqual(expectedResult)
}
)
})
@ -729,7 +726,8 @@ describe("/automations", () => {
if (isDidNotTriggerResponse(res)) {
expect(expectToRun).toEqual(false)
} else {
expect(res.steps[1].outputs.success).toEqual(expectToRun)
const results: AutomationResults = res as AutomationResults
expect(results.steps[1].outputs.success).toEqual(expectToRun)
}
}
)
@ -1,11 +1,13 @@
const setup = require("./utilities")
const tableUtils = require("../../controllers/table/utils")
import { handleDataImport } from "../../controllers/table/utils"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { AutoFieldSubType, FieldType, JsonFieldSubType } from "@budibase/types"

describe("run misc tests", () => {
let request = setup.getRequest()
let config = setup.getConfig()
const config = new TestConfiguration()

afterAll(setup.afterAll)
afterAll(() => {
config.end()
})

beforeAll(async () => {
await config.init()
@ -13,69 +15,67 @@ describe("run misc tests", () => {

describe("/bbtel", () => {
it("check if analytics enabled", async () => {
const res = await request
.get(`/api/bbtel`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(typeof res.body.enabled).toEqual("boolean")
const { enabled } = await config.api.misc.bbtel()
expect(enabled).toEqual(true)
})
})

describe("/health", () => {
it("should confirm healthy", async () => {
await request.get("/health").expect(200)
await config.api.misc.health()
})
})

describe("/version", () => {
it("should confirm version", async () => {
const res = await request.get("/version").expect(200)
const text = res.text
if (text.includes("alpha")) {
expect(text.split(".").length).toEqual(4)
const version = await config.api.misc.version()
if (version.includes("alpha")) {
expect(version.split(".").length).toEqual(4)
} else {
expect(text.split(".").length).toEqual(3)
expect(version.split(".").length).toEqual(3)
}
})
})

describe("test table utilities", () => {
it("should be able to import data", async () => {
return config.doInContext(null, async () => {
return config.doInContext("", async () => {
const table = await config.createTable({
name: "table",
type: "table",
key: "name",
schema: {
a: {
type: "string",
type: FieldType.STRING,
name: "a",
constraints: {
type: "string",
},
},
b: {
type: "string",
name: "b",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
c: {
type: "string",
name: "c",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
d: {
type: "string",
name: "d",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
e: {
name: "Auto ID",
type: "number",
subtype: "autoID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
@ -88,9 +88,9 @@ describe("run misc tests", () => {
},
},
f: {
type: "array",
type: FieldType.ARRAY,
constraints: {
type: "array",
type: JsonFieldSubType.ARRAY,
presence: {
allowEmpty: true,
},
@ -100,7 +100,7 @@ describe("run misc tests", () => {
sortable: false,
},
g: {
type: "options",
type: FieldType.OPTIONS,
constraints: {
type: "string",
presence: false,
@ -118,16 +118,18 @@ describe("run misc tests", () => {
{ a: "13", b: "14", c: "15", d: "16", g: "Omega" },
]
// Shift specific row tests to the row spec
await tableUtils.handleDataImport(table, {
importRows,
user: { userId: "test" },
})
await handleDataImport(table, { importRows, userId: "test" })

// 4 rows imported, the auto ID starts at 1
// We expect the handleDataImport function to update the lastID

// @ts-expect-error - fields have type FieldSchema, not specific
// subtypes.
expect(table.schema.e.lastID).toEqual(4)

// Array/Multi - should have added a new value to the inclusion.
// @ts-expect-error - fields have type FieldSchema, not specific
// subtypes.
expect(table.schema.f.constraints.inclusion).toEqual([
"Four",
"One",
@ -136,6 +138,8 @@ describe("run misc tests", () => {
])

// Options - should have a new value in the inclusion
// @ts-expect-error - fields have type FieldSchema, not specific
// subtypes.
expect(table.schema.g.constraints.inclusion).toEqual([
"Alpha",
"Beta",
@ -143,25 +147,25 @@ describe("run misc tests", () => {
"Omega",
])

const rows = await config.getRows()
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)

const rowOne = rows.find(row => row.e === 1)
const rowOne = rows.find(row => row.e === 1)!
expect(rowOne.a).toEqual("1")
expect(rowOne.f).toEqual(["One"])
expect(rowOne.g).toEqual("Alpha")

const rowTwo = rows.find(row => row.e === 2)
const rowTwo = rows.find(row => row.e === 2)!
expect(rowTwo.a).toEqual("5")
expect(rowTwo.f).toEqual([])
expect(rowTwo.g).toEqual(undefined)

const rowThree = rows.find(row => row.e === 3)
const rowThree = rows.find(row => row.e === 3)!
expect(rowThree.a).toEqual("9")
expect(rowThree.f).toEqual(["Two", "Four"])
expect(rowThree.g).toEqual(undefined)

const rowFour = rows.find(row => row.e === 4)
const rowFour = rows.find(row => row.e === 4)!
expect(rowFour.a).toEqual("13")
expect(rowFour.f).toEqual(undefined)
expect(rowFour.g).toEqual("Omega")
@ -166,18 +166,6 @@ if (descriptions.length) {
)
}

const resetRowUsage = async () => {
await config.doInContext(
undefined,
async () =>
await quotas.setUsage(
0,
StaticQuotaName.ROWS,
QuotaUsageType.STATIC
)
)
}

const getRowUsage = async () => {
const { total } = await config.doInContext(undefined, () =>
quotas.getCurrentUsageValues(
@ -188,19 +176,27 @@ if (descriptions.length) {
return total
}

const assertRowUsage = async (expected: number) => {
const usage = await getRowUsage()
async function expectRowUsage(expected: number, f: () => Promise<void>) {
const before = await getRowUsage()
await f()
const after = await getRowUsage()
const usage = after - before

// Because our quota tracking is not perfect, we allow a 10% margin of
// error. This is to account for the fact that parallel writes can result
// in some quota updates getting lost. We don't have any need to solve this
// right now, so we just allow for some error.
// error. This is to account for the fact that parallel writes can
// result in some quota updates getting lost. We don't have any need
// to solve this right now, so we just allow for some error.
if (expected === 0) {
expect(usage).toEqual(0)
return
}
expect(usage).toBeGreaterThan(expected * 0.9)
expect(usage).toBeLessThan(expected * 1.1)
if (usage < 0) {
expect(usage).toBeGreaterThan(expected * 1.1)
expect(usage).toBeLessThan(expected * 0.9)
} else {
expect(usage).toBeGreaterThan(expected * 0.9)
expect(usage).toBeLessThan(expected * 1.1)
}
}

const defaultRowFields = isInternal
@ -215,91 +211,86 @@ if (descriptions.length) {
table = await config.api.table.save(defaultTable())
})

beforeEach(async () => {
await resetRowUsage()
})

describe("create", () => {
it("creates a new row successfully", async () => {
const rowUsage = await getRowUsage()
const row = await config.api.row.save(table._id!, {
name: "Test Contact",
await expectRowUsage(isInternal ? 1 : 0, async () => {
const row = await config.api.row.save(table._id!, {
name: "Test Contact",
})
expect(row.name).toEqual("Test Contact")
expect(row._rev).toBeDefined()
})
expect(row.name).toEqual("Test Contact")
expect(row._rev).toBeDefined()
await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
})

it("fails to create a row for a table that does not exist", async () => {
const rowUsage = await getRowUsage()
await config.api.row.save("1234567", {}, { status: 404 })
await assertRowUsage(rowUsage)
await expectRowUsage(0, async () => {
await config.api.row.save("1234567", {}, { status: 404 })
})
})

it("fails to create a row if required fields are missing", async () => {
const rowUsage = await getRowUsage()
const table = await config.api.table.save(
saveTableRequest({
schema: {
required: {
type: FieldType.STRING,
name: "required",
constraints: {
type: "string",
presence: true,
},
},
},
})
)
await config.api.row.save(
table._id!,
{},
{
status: 500,
body: {
validationErrors: {
required: ["can't be blank"],
},
},
}
)
await assertRowUsage(rowUsage)
})

isInternal &&
it("increment row autoId per create row request", async () => {
const rowUsage = await getRowUsage()

const newTable = await config.api.table.save(
await expectRowUsage(0, async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
required: {
type: FieldType.STRING,
name: "required",
constraints: {
type: "number",
type: "string",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
})
)
await config.api.row.save(
table._id!,
{},
{
status: 500,
body: {
validationErrors: {
required: ["can't be blank"],
},
},
}
)
})
})

let previousId = 0
for (let i = 0; i < 10; i++) {
const row = await config.api.row.save(newTable._id!, {})
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
isInternal &&
it("increment row autoId per create row request", async () => {
await expectRowUsage(isInternal ? 10 : 0, async () => {
const newTable = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
})
)

let previousId = 0
for (let i = 0; i < 10; i++) {
const row = await config.api.row.save(newTable._id!, {})
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
})
})

isInternal &&
@ -985,16 +976,16 @@ if (descriptions.length) {
describe("update", () => {
it("updates an existing row successfully", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

const res = await config.api.row.save(table._id!, {
_id: existing._id,
_rev: existing._rev,
name: "Updated Name",
await expectRowUsage(0, async () => {
const res = await config.api.row.save(table._id!, {
_id: existing._id,
_rev: existing._rev,
name: "Updated Name",
})

expect(res.name).toEqual("Updated Name")
})

expect(res.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})

!isInternal &&
@ -1177,23 +1168,22 @@ if (descriptions.length) {
it("should update only the fields that are supplied", async () => {
const existing = await config.api.row.save(table._id!, {})

const rowUsage = await getRowUsage()
await expectRowUsage(0, async () => {
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: "Updated Name",
})

const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: "Updated Name",
expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description)

const savedRow = await config.api.row.get(table._id!, row._id!)

expect(savedRow.description).toEqual(existing.description)
expect(savedRow.name).toEqual("Updated Name")
})

expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description)

const savedRow = await config.api.row.get(table._id!, row._id!)

expect(savedRow.description).toEqual(existing.description)
expect(savedRow.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})

it("should update only the fields that are supplied and emit the correct oldRow", async () => {
@ -1224,20 +1214,19 @@ if (descriptions.length) {

it("should throw an error when given improper types", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

await config.api.row.patch(
table._id!,
{
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: 1,
},
{ status: 400 }
)

await assertRowUsage(rowUsage)
await expectRowUsage(0, async () => {
await config.api.row.patch(
table._id!,
{
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: 1,
},
{ status: 400 }
)
})
})

it("should not overwrite links if those links are not set", async () => {
@ -1452,25 +1441,25 @@ if (descriptions.length) {

it("should be able to delete a row", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow],
await expectRowUsage(isInternal ? -1 : 0, async () => {
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow],
})
expect(res[0]._id).toEqual(createdRow._id)
})
expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})

it("should be able to delete a row with ID only", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow._id!],
await expectRowUsage(isInternal ? -1 : 0, async () => {
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow._id!],
})
expect(res[0]._id).toEqual(createdRow._id)
expect(res[0].tableId).toEqual(table._id!)
})
expect(res[0]._id).toEqual(createdRow._id)
expect(res[0].tableId).toEqual(table._id!)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})

it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
@ -1560,31 +1549,29 @@ if (descriptions.length) {
})

it("should return no errors on valid row", async () => {
const rowUsage = await getRowUsage()
await expectRowUsage(0, async () => {
const res = await config.api.row.validate(table._id!, {
name: "ivan",
})

const res = await config.api.row.validate(table._id!, {
name: "ivan",
expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
})

expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
await assertRowUsage(rowUsage)
})

it("should errors on invalid row", async () => {
const rowUsage = await getRowUsage()
await expectRowUsage(0, async () => {
const res = await config.api.row.validate(table._id!, { name: 1 })

const res = await config.api.row.validate(table._id!, { name: 1 })

if (isInternal) {
expect(res.valid).toBe(false)
expect(Object.keys(res.errors)).toEqual(["name"])
} else {
// Validation for external is not implemented, so it will always return valid
expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
}
await assertRowUsage(rowUsage)
if (isInternal) {
expect(res.valid).toBe(false)
expect(Object.keys(res.errors)).toEqual(["name"])
} else {
// Validation for external is not implemented, so it will always return valid
expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
}
})
})
})

@ -1596,15 +1583,15 @@ if (descriptions.length) {
it("should be able to delete a bulk set of rows", async () => {
const row1 = await config.api.row.save(table._id!, {})
const row2 = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
await expectRowUsage(isInternal ? -2 : 0, async () => {
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
})

expect(res.length).toEqual(2)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
})

expect(res.length).toEqual(2)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
})

it("should be able to delete a variety of row set types", async () => {
@ -1613,41 +1600,42 @@ if (descriptions.length) {
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
])
const rowUsage = await getRowUsage()

const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2._id!, { _id: row3._id }],
await expectRowUsage(isInternal ? -3 : 0, async () => {
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2._id!, { _id: row3._id }],
})

expect(res.length).toEqual(3)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
})

expect(res.length).toEqual(3)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
})

it("should accept a valid row object and delete the row", async () => {
const row1 = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
await expectRowUsage(isInternal ? -1 : 0, async () => {
const res = await config.api.row.delete(
table._id!,
row1 as DeleteRow
)

expect(res.id).toEqual(row1._id)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
expect(res.id).toEqual(row1._id)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
})
})

it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
"should ignore malformed/invalid delete request: %s",
async (request: any) => {
const rowUsage = await getRowUsage()

await config.api.row.delete(table._id!, request, {
status: 400,
body: {
message: "Invalid delete rows request",
},
await expectRowUsage(0, async () => {
await config.api.row.delete(table._id!, request, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
})

await assertRowUsage(rowUsage)
}
)
})
@ -1733,31 +1721,29 @@ if (descriptions.length) {
})
)

const rowUsage = await getRowUsage()
await expectRowUsage(isInternal ? 2 : 0, async () => {
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{
name: "Row 2",
description: "Row 2 description",
},
],
})

await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{
name: "Row 2",
description: "Row 2 description",
},
],
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
})

const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")

await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
})

isInternal &&
@ -1782,35 +1768,33 @@ if (descriptions.length) {
description: "Existing description",
})

const rowUsage = await getRowUsage()
await expectRowUsage(2, async () => {
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
identifierFields: ["_id"],
})

await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
identifierFields: ["_id"],
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
expect(rows[2].name).toEqual("Updated existing row")
expect(rows[2].description).toEqual("Existing description")
})

const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
expect(rows[2].name).toEqual("Updated existing row")
expect(rows[2].description).toEqual("Existing description")

await assertRowUsage(rowUsage + 2)
})

isInternal &&
@ -1835,36 +1819,34 @@ if (descriptions.length) {
description: "Existing description",
})

const rowUsage = await getRowUsage()
await expectRowUsage(3, async () => {
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
})

await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Existing row")
expect(rows[0].description).toEqual("Existing description")
expect(rows[1].name).toEqual("Row 1")
expect(rows[1].description).toEqual("Row 1 description")
expect(rows[2].name).toEqual("Row 2")
expect(rows[2].description).toEqual("Row 2 description")
expect(rows[3].name).toEqual("Updated existing row")
expect(rows[3].description).toEqual("Existing description")
})

const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)

rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Existing row")
expect(rows[0].description).toEqual("Existing description")
expect(rows[1].name).toEqual("Row 1")
expect(rows[1].description).toEqual("Row 1 description")
expect(rows[2].name).toEqual("Row 2")
expect(rows[2].description).toEqual("Row 2 description")
expect(rows[3].name).toEqual("Updated existing row")
expect(rows[3].description).toEqual("Existing description")

await assertRowUsage(rowUsage + 3)
})

// Upserting isn't yet supported in MSSQL / Oracle, see:
@ -2187,29 +2169,29 @@ if (descriptions.length) {
return { linkedTable, firstRow, secondRow }
}
)
const rowUsage = await getRowUsage()

// test basic enrichment
const resBasic = await config.api.row.get(
linkedTable._id!,
secondRow._id!
)
expect(resBasic.link.length).toBe(1)
expect(resBasic.link[0]).toEqual({
_id: firstRow._id,
primaryDisplay: firstRow.name,
await expectRowUsage(0, async () => {
// test basic enrichment
const resBasic = await config.api.row.get(
linkedTable._id!,
secondRow._id!
)
expect(resBasic.link.length).toBe(1)
expect(resBasic.link[0]).toEqual({
_id: firstRow._id,
primaryDisplay: firstRow.name,
})

// test full enrichment
const resEnriched = await config.api.row.getEnriched(
linkedTable._id!,
secondRow._id!
)
expect(resEnriched.link.length).toBe(1)
expect(resEnriched.link[0]._id).toBe(firstRow._id)
expect(resEnriched.link[0].name).toBe("Test Contact")
expect(resEnriched.link[0].description).toBe("original description")
})

// test full enrichment
const resEnriched = await config.api.row.getEnriched(
linkedTable._id!,
secondRow._id!
)
expect(resEnriched.link.length).toBe(1)
expect(resEnriched.link[0]._id).toBe(firstRow._id)
expect(resEnriched.link[0].name).toBe("Test Contact")
expect(resEnriched.link[0].description).toBe("original description")
await assertRowUsage(rowUsage)
})
})
@ -38,7 +38,7 @@ import {
import _ from "lodash"
import tk from "timekeeper"
import { encodeJSBinding } from "@budibase/string-templates"
import { dataFilters } from "@budibase/shared-core"
import { dataFilters, InMemorySearchQuery } from "@budibase/shared-core"
import { Knex } from "knex"
import { generator, structures, mocks } from "@budibase/backend-core/tests"
import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"
@ -200,31 +200,26 @@ if (descriptions.length) {
const isView = sourceType === "view"

class SearchAssertion {
constructor(private readonly query: SearchRowRequest) {}
constructor(
private readonly query: SearchRowRequest & {
sortType?: SortType
}
) {}

private async performSearch(): Promise<SearchResponse<Row>> {
if (isInMemory) {
const inMemoryQuery: RequiredKeys<
Omit<RowSearchParams, "tableId">
> = {
const inMemoryQuery: RequiredKeys<InMemorySearchQuery> = {
sort: this.query.sort ?? undefined,
query: { ...this.query.query },
paginate: this.query.paginate,
bookmark: this.query.bookmark ?? undefined,
limit: this.query.limit,
sortOrder: this.query.sortOrder,
sortType: this.query.sortType ?? undefined,
version: this.query.version,
disableEscaping: this.query.disableEscaping,
countRows: this.query.countRows,
viewId: undefined,
fields: undefined,
indexer: undefined,
rows: undefined,
}
return dataFilters.search(_.cloneDeep(rows), inMemoryQuery)
} else {
return config.api.row.search(tableOrViewId, this.query)
const { sortType, ...query } = this.query
return config.api.row.search(tableOrViewId, query)
}
}

@ -400,7 +395,9 @@ if (descriptions.length) {
}
}

function expectSearch(query: SearchRowRequest) {
function expectSearch(
query: SearchRowRequest & { sortType?: SortType }
) {
return new SearchAssertion(query)
}

@ -1119,25 +1116,26 @@ if (descriptions.length) {
}).toMatchExactly([{ name: "foo" }, { name: "bar" }])
})

describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ name: "bar" }, { name: "foo" }])
})
isInMemory &&
describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ name: "bar" }, { name: "foo" }])
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ name: "foo" }, { name: "bar" }])
it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ name: "foo" }, { name: "bar" }])
})
})
})

!isInternal &&
!isInMemory &&
@ -1319,25 +1317,26 @@ if (descriptions.length) {
})
})

describe("sortType NUMBER", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ age: 1 }, { age: 10 }])
})
isInMemory &&
describe("sortType NUMBER", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ age: 1 }, { age: 10 }])
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ age: 10 }, { age: 1 }])
it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ age: 10 }, { age: 1 }])
})
})
})
})

describe("dates", () => {
@ -1473,25 +1472,26 @@ if (descriptions.length) {
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])
})

describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])
})
isInMemory &&
describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])
it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])
})
})
})
})
})
@ -1639,220 +1639,196 @@ if (descriptions.length) {
])
})

describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ timeid: NULL_TIME__ID },
{ time: "00:00:00" },
{ time: "10:00:00" },
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
])
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ time: "15:30:00" },
{ time: "12:00:00" },
{ time: "10:45:00" },
{ time: "10:00:00" },
{ time: "00:00:00" },
{ timeid: NULL_TIME__ID },
])
})
})
})
})

describe("datetime - date only", () => {
describe.each([true, false])(
"saved with timestamp: %s",
saveWithTimestamp => {
describe.each([true, false])(
"search with timestamp: %s",
searchWithTimestamp => {
const SAVE_SUFFIX = saveWithTimestamp
? "T00:00:00.000Z"
: ""
const SEARCH_SUFFIX = searchWithTimestamp
? "T00:00:00.000Z"
: ""

const JAN_1ST = `2020-01-01`
const JAN_10TH = `2020-01-10`
const JAN_30TH = `2020-01-30`
const UNEXISTING_DATE = `2020-01-03`
const NULL_DATE__ID = `null_date__id`

beforeAll(async () => {
tableOrViewId = await createTableOrView({
dateid: {
name: "dateid",
type: FieldType.STRING,
},
date: {
name: "date",
type: FieldType.DATETIME,
dateOnly: true,
},
})

await createRows([
{ dateid: NULL_DATE__ID, date: null },
{ date: `${JAN_1ST}${SAVE_SUFFIX}` },
{ date: `${JAN_10TH}${SAVE_SUFFIX}` },
isInMemory &&
describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ timeid: NULL_TIME__ID },
{ time: "00:00:00" },
{ time: "10:00:00" },
{ time: "10:45:00" },
{ time: "12:00:00" },
{ time: "15:30:00" },
])
})

describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_1ST }])
})

it("successfully finds an ISO8601 row", async () => {
await expectQuery({
equal: { date: `${JAN_10TH}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_10TH }])
})

it("finds a row with ISO8601 timestamp", async () => {
await expectQuery({
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_1ST }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({
equal: {
date: `${UNEXISTING_DATE}${SEARCH_SUFFIX}`,
},
}).toFindNothing()
})
it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "time",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ time: "15:30:00" },
{ time: "12:00:00" },
{ time: "10:45:00" },
{ time: "10:00:00" },
{ time: "00:00:00" },
{ timeid: NULL_TIME__ID },
])
})
})
})
})

describe("notEqual", () => {
it("successfully finds a row", async () => {
await expectQuery({
notEqual: {
date: `${JAN_1ST}${SEARCH_SUFFIX}`,
!isInMemory &&
describe("datetime - date only", () => {
describe.each([true, false])(
"saved with timestamp: %s",
saveWithTimestamp => {
describe.each([true, false])(
"search with timestamp: %s",
searchWithTimestamp => {
const SAVE_SUFFIX = saveWithTimestamp
? "T00:00:00.000Z"
: ""
const SEARCH_SUFFIX = searchWithTimestamp
? "T00:00:00.000Z"
: ""

const JAN_1ST = `2020-01-01`
const JAN_10TH = `2020-01-10`
const JAN_30TH = `2020-01-30`
const UNEXISTING_DATE = `2020-01-03`
const NULL_DATE__ID = `null_date__id`

beforeAll(async () => {
tableOrViewId = await createTableOrView({
dateid: {
name: "dateid",
type: FieldType.STRING,
},
}).toContainExactly([
{ date: JAN_10TH },
{ dateid: NULL_DATE__ID },
date: {
name: "date",
type: FieldType.DATETIME,
dateOnly: true,
},
})

await createRows([
{ dateid: NULL_DATE__ID, date: null },
{ date: `${JAN_1ST}${SAVE_SUFFIX}` },
{ date: `${JAN_10TH}${SAVE_SUFFIX}` },
])
})

it("fails to find nonexistent row", async () => {
await expectQuery({
notEqual: {
date: `${JAN_30TH}${SEARCH_SUFFIX}`,
},
}).toContainExactly([
{ date: JAN_1ST },
{ date: JAN_10TH },
{ dateid: NULL_DATE__ID },
])
})
})
describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_1ST }])
})

describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({
oneOf: { date: [`${JAN_1ST}${SEARCH_SUFFIX}`] },
}).toContainExactly([{ date: JAN_1ST }])
})
it("successfully finds an ISO8601 row", async () => {
await expectQuery({
equal: { date: `${JAN_10TH}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_10TH }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({
oneOf: {
date: [`${UNEXISTING_DATE}${SEARCH_SUFFIX}`],
},
}).toFindNothing()
})
})
it("finds a row with ISO8601 timestamp", async () => {
await expectQuery({
equal: { date: `${JAN_1ST}${SEARCH_SUFFIX}` },
}).toContainExactly([{ date: JAN_1ST }])
})

describe("range", () => {
it("successfully finds a row", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
high: `${JAN_1ST}${SEARCH_SUFFIX}`,
it("fails to find nonexistent row", async () => {
await expectQuery({
equal: {
date: `${UNEXISTING_DATE}${SEARCH_SUFFIX}`,
},
},
}).toContainExactly([{ date: JAN_1ST }])
}).toFindNothing()
})
})

it("successfully finds multiple rows", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
high: `${JAN_10TH}${SEARCH_SUFFIX}`,
describe("notEqual", () => {
it("successfully finds a row", async () => {
await expectQuery({
notEqual: {
date: `${JAN_1ST}${SEARCH_SUFFIX}`,
},
},
}).toContainExactly([
{ date: JAN_1ST },
{ date: JAN_10TH },
])
})
}).toContainExactly([
{ date: JAN_10TH },
{ dateid: NULL_DATE__ID },
])
})

it("successfully finds no rows", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_30TH}${SEARCH_SUFFIX}`,
high: `${JAN_30TH}${SEARCH_SUFFIX}`,
it("fails to find nonexistent row", async () => {
await expectQuery({
notEqual: {
date: `${JAN_30TH}${SEARCH_SUFFIX}`,
},
},
}).toFindNothing()
})
})

describe("sort", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "date",
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ dateid: NULL_DATE__ID },
{ date: JAN_1ST },
{ date: JAN_10TH },
])
}).toContainExactly([
{ date: JAN_1ST },
{ date: JAN_10TH },
{ dateid: NULL_DATE__ID },
])
})
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "date",
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ date: JAN_10TH },
{ date: JAN_1ST },
{ dateid: NULL_DATE__ID },
])
describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({
oneOf: { date: [`${JAN_1ST}${SEARCH_SUFFIX}`] },
}).toContainExactly([{ date: JAN_1ST }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({
oneOf: {
date: [`${UNEXISTING_DATE}${SEARCH_SUFFIX}`],
},
}).toFindNothing()
})
})

describe("sortType STRING", () => {
describe("range", () => {
it("successfully finds a row", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
high: `${JAN_1ST}${SEARCH_SUFFIX}`,
},
},
}).toContainExactly([{ date: JAN_1ST }])
})

it("successfully finds multiple rows", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_1ST}${SEARCH_SUFFIX}`,
high: `${JAN_10TH}${SEARCH_SUFFIX}`,
},
},
}).toContainExactly([
{ date: JAN_1ST },
{ date: JAN_10TH },
])
})

it("successfully finds no rows", async () => {
await expectQuery({
range: {
date: {
low: `${JAN_30TH}${SEARCH_SUFFIX}`,
high: `${JAN_30TH}${SEARCH_SUFFIX}`,
},
},
}).toFindNothing()
})
})

describe("sort", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "date",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ dateid: NULL_DATE__ID },
@ -1865,7 +1841,6 @@ if (descriptions.length) {
await expectSearch({
query: {},
sort: "date",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ date: JAN_10TH },
@ -1873,13 +1848,41 @@ if (descriptions.length) {
{ dateid: NULL_DATE__ID },
])
})

isInMemory &&
describe("sortType STRING", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "date",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([
{ dateid: NULL_DATE__ID },
{ date: JAN_1ST },
{ date: JAN_10TH },
])
})

it("sorts descending", async () => {
await expectSearch({
query: {},
sort: "date",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([
{ date: JAN_10TH },
{ date: JAN_1ST },
{ dateid: NULL_DATE__ID },
])
})
})
})
})
}
)
}
)
})
}
)
}
)
})

isInternal &&
!isInMemory &&
@ -24,7 +24,6 @@ import {
SearchResponse,
SearchViewRowRequest,
SortOrder,
SortType,
StaticQuotaName,
Table,
TableSchema,
@ -154,7 +153,6 @@ if (descriptions.length) {
sort: {
field: "fieldToSort",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
schema: {
id: { visible: true },
@ -217,7 +215,6 @@ if (descriptions.length) {
sort: {
field: "fieldToSort",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
schema: {
id: { visible: true },
@ -1147,7 +1144,6 @@ if (descriptions.length) {
sort: {
field: generator.word(),
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
schema: {
id: { visible: true },
@ -2826,34 +2822,44 @@ if (descriptions.length) {
return total
}

const assertRowUsage = async (expected: number) => {
const usage = await getRowUsage()
async function expectRowUsage<T>(
expected: number,
f: () => Promise<T>
): Promise<T> {
const before = await getRowUsage()
const result = await f()
const after = await getRowUsage()
const usage = after - before
expect(usage).toBe(expected)
return result
}

it("should be able to delete a row", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
const createdRow = await expectRowUsage(isInternal ? 1 : 0, () =>
config.api.row.save(table._id!, {})
)
await expectRowUsage(isInternal ? -1 : 0, () =>
config.api.row.bulkDelete(view.id, { rows: [createdRow] })
)
await config.api.row.get(table._id!, createdRow._id!, {
status: 404,
})
})

it("should be able to delete multiple rows", async () => {
const rows = await Promise.all([
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
])
const rowUsage = await getRowUsage()

await config.api.row.bulkDelete(view.id, {
rows: [rows[0], rows[2]],
const rows = await expectRowUsage(isInternal ? 3 : 0, async () => {
return [
await config.api.row.save(table._id!, {}),
await config.api.row.save(table._id!, {}),
await config.api.row.save(table._id!, {}),
]
})

await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
await expectRowUsage(isInternal ? -2 : 0, async () => {
await config.api.row.bulkDelete(view.id, {
rows: [rows[0], rows[2]],
})
})

await config.api.row.get(table._id!, rows[0]._id!, {
status: 404,
@ -3143,7 +3149,6 @@ if (descriptions.length) {
{
field: string
order?: SortOrder
type?: SortType
},
string[]
][] = [
@ -3151,7 +3156,6 @@ if (descriptions.length) {
{
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
["Alice", "Bob", "Charly", "Danny"],
],
@ -3168,22 +3172,6 @@ if (descriptions.length) {
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
type: SortType.NUMBER,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
@ -3194,15 +3182,13 @@ if (descriptions.length) {
[
{
field: "age",
order: SortOrder.DESCENDING,
},
["Bob", "Charly", "Alice", "Danny"],
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
type: SortType.NUMBER,
},
["Bob", "Charly", "Alice", "Danny"],
],
@ -3289,7 +3275,6 @@ if (descriptions.length) {
sort: {
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
schema: viewSchema,
})
@ -3297,7 +3282,6 @@ if (descriptions.length) {
const response = await config.api.viewV2.search(view.id, {
sort: sortParams.field,
sortOrder: sortParams.order,
sortType: sortParams.type,
query: {},
})
@@ -4,6 +4,7 @@ import * as createRow from "./steps/createRow"
import * as updateRow from "./steps/updateRow"
import * as deleteRow from "./steps/deleteRow"
import * as executeScript from "./steps/executeScript"
import * as executeScriptV2 from "./steps/executeScriptV2"
import * as executeQuery from "./steps/executeQuery"
import * as outgoingWebhook from "./steps/outgoingWebhook"
import * as serverLog from "./steps/serverLog"

@@ -44,6 +45,7 @@ const ACTION_IMPLS: ActionImplType = {
  DELETE_ROW: deleteRow.run,
  OUTGOING_WEBHOOK: outgoingWebhook.run,
  EXECUTE_SCRIPT: executeScript.run,
  EXECUTE_SCRIPT_V2: executeScriptV2.run,
  EXECUTE_QUERY: executeQuery.run,
  SERVER_LOG: serverLog.run,
  DELAY: delay.run,

@@ -70,6 +72,7 @@ export const BUILTIN_ACTION_DEFINITIONS: Record<
  DELETE_ROW: automations.steps.deleteRow.definition,
  OUTGOING_WEBHOOK: automations.steps.outgoingWebhook.definition,
  EXECUTE_SCRIPT: automations.steps.executeScript.definition,
  EXECUTE_SCRIPT_V2: automations.steps.executeScriptV2.definition,
  EXECUTE_QUERY: automations.steps.executeQuery.definition,
  SERVER_LOG: automations.steps.serverLog.definition,
  DELAY: automations.steps.delay.definition,
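The new step is registered in both the implementation map and the definition map above. A purely hypothetical sketch of how a registered implementation could be looked up and invoked; the orchestrator's dispatch code is not part of this diff, so the call shape is an assumption based on the step's run signature:

// Hypothetical dispatch: resolve the step id to its implementation and run it.
async function runExecuteScriptV2Step(context: Record<string, any>) {
  const impl = ACTION_IMPLS.EXECUTE_SCRIPT_V2
  return impl({
    inputs: { code: `{{ js "cmV0dXJuIDIgKyAy" }}` }, // base64 of `return 2 + 2`
    context,
  })
}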
@@ -0,0 +1,48 @@
import * as automationUtils from "../automationUtils"
import {
  ExecuteScriptStepInputs,
  ExecuteScriptStepOutputs,
} from "@budibase/types"
import { processStringSync } from "@budibase/string-templates"

export async function run({
  inputs,
  context,
}: {
  inputs: ExecuteScriptStepInputs
  context: Record<string, any>
}): Promise<ExecuteScriptStepOutputs> {
  let { code } = inputs

  if (code == null) {
    return {
      success: false,
      response: {
        message: "Invalid inputs",
      },
    }
  }

  code = code.trim()

  if (!code.startsWith("{{ js ")) {
    return {
      success: false,
      response: {
        message: "Expected code to be a {{ js }} template block",
      },
    }
  }

  try {
    return {
      success: true,
      value: processStringSync(inputs.code, context, { noThrow: false }),
    }
  } catch (err) {
    return {
      success: false,
      response: automationUtils.getError(err),
    }
  }
}
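The step only accepts code that has already been wrapped as a single {{ js }} handlebars block, with the raw JavaScript base64-encoded inside it; that is what the encodeJS helper in the tests below produces. A small sketch of that encoding, under the assumption that processStringSync resolves the block to the script's return value as the try branch above relies on:

// Sketch: turn raw JS into the `{{ js "<base64>" }}` template the step expects.
function encodeJS(js: string): string {
  return `{{ js "${Buffer.from(js, "utf-8").toString("base64")}" }}`
}

// encodeJS("return 2 + 2") => `{{ js "cmV0dXJuIDIgKyAy" }}`, which the step
// evaluates via processStringSync and returns as { success: true, value: 4 }.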
@@ -0,0 +1,158 @@
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
import * as setup from "./utilities"
import { Table } from "@budibase/types"

function encodeJS(js: string): string {
  return `{{ js "${Buffer.from(js, "utf-8").toString("base64")}" }}`
}

describe("Execute Script Automations", () => {
  let config = setup.getConfig(),
    table: Table

  beforeEach(async () => {
    await automation.init()
    await config.init()
    table = await config.createTable()
    await config.createRow()
  })

  afterAll(setup.afterAll)

  it("should execute a basic script and return the result", async () => {
    config.name = "Basic Script Execution"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2({ code: encodeJS("return 2 + 2") })
      .test({ fields: {} })

    expect(results.steps[0].outputs.value).toEqual(4)
  })

  it("should access bindings from previous steps", async () => {
    config.name = "Access Bindings"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2(
        {
          code: encodeJS(`return $("trigger.fields.data").map(x => x * 2)`),
        },
        { stepId: "binding-script-step" }
      )
      .test({ fields: { data: [1, 2, 3] } })

    expect(results.steps[0].outputs.value).toEqual([2, 4, 6])
  })

  it("should handle script execution errors gracefully", async () => {
    config.name = "Handle Script Errors"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2({
        code: encodeJS("return nonexistentVariable.map(x => x)"),
      })
      .test({ fields: {} })

    expect(results.steps[0].outputs.response).toContain(
      "ReferenceError: nonexistentVariable is not defined"
    )
    expect(results.steps[0].outputs.success).toEqual(false)
  })

  it("should handle conditional logic in scripts", async () => {
    config.name = "Conditional Script Logic"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2({
        code: encodeJS(`
          if ($("trigger.fields.value") > 5) {
            return "Value is greater than 5";
          } else {
            return "Value is 5 or less";
          }
        `),
      })
      .test({ fields: { value: 10 } })

    expect(results.steps[0].outputs.value).toEqual("Value is greater than 5")
  })

  it("should use multiple steps and validate script execution", async () => {
    config.name = "Multi-Step Script Execution"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .serverLog(
        { text: "Starting multi-step automation" },
        { stepId: "start-log-step" }
      )
      .createRow(
        { row: { name: "Test Row", value: 42, tableId: table._id } },
        { stepId: "abc123" }
      )
      .executeScriptV2(
        {
          code: encodeJS(`
            const createdRow = $("steps")['abc123'];
            return createdRow.row.value * 2;
          `),
        },
        { stepId: "ScriptingStep1" }
      )
      .serverLog({
        text: `Final result is {{ steps.ScriptingStep1.value }}`,
      })
      .test({ fields: {} })

    expect(results.steps[0].outputs.message).toContain(
      "Starting multi-step automation"
    )
    expect(results.steps[1].outputs.row.value).toEqual(42)
    expect(results.steps[2].outputs.value).toEqual(84)
    expect(results.steps[3].outputs.message).toContain("Final result is 84")
  })

  it("should fail if the code has not been encoded as a handlebars template", async () => {
    config.name = "Invalid Code Encoding"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2({
        code: "return 2 + 2",
      })
      .test({ fields: {} })

    expect(results.steps[0].outputs.response.message).toEqual(
      "Expected code to be a {{ js }} template block"
    )
    expect(results.steps[0].outputs.success).toEqual(false)
  })

  it("does not process embedded handlebars templates", async () => {
    config.name = "Embedded Handlebars"
    const builder = createAutomationBuilder(config)

    const results = await builder
      .onAppAction()
      .executeScriptV2({
        code: encodeJS(`return "{{ triggers.row.whatever }}"`),
      })
      .test({ fields: {} })

    expect(results.steps[0].outputs.value).toEqual(
      "{{ triggers.row.whatever }}"
    )
    expect(results.steps[0].outputs.success).toEqual(true)
  })
})
@@ -195,7 +195,34 @@ describe("Attempt to run a basic loop automation", () => {
      .serverLog({ text: "{{steps.1.iterations}}" })
      .test({ fields: {} })

    expect(results.steps[0].outputs.status).toBe(
      AutomationStepStatus.MAX_ITERATIONS
    )
    expect(results.steps[0].outputs.iterations).toBe(2)
    expect(results.steps[0].outputs.items).toHaveLength(2)
    expect(results.steps[0].outputs.items[0].message).toEndWith("test")
    expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
  })

  it("should stop when a failure condition is hit", async () => {
    const results = await createAutomationBuilder(config)
      .onAppAction()
      .loop({
        option: LoopStepType.ARRAY,
        binding: ["test", "test2", "test3"],
        failure: "test3",
      })
      .serverLog({ text: "{{loop.currentItem}}" })
      .serverLog({ text: "{{steps.1.iterations}}" })
      .test({ fields: {} })

    expect(results.steps[0].outputs.status).toBe(
      AutomationStepStatus.FAILURE_CONDITION
    )
    expect(results.steps[0].outputs.iterations).toBe(2)
    expect(results.steps[0].outputs.items).toHaveLength(2)
    expect(results.steps[0].outputs.items[0].message).toEndWith("test")
    expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
  })

  it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
@@ -12,7 +12,7 @@ describe("Webhook trigger test", () => {

  async function createWebhookAutomation() {
    const { automation } = await createAutomationBuilder(config)
      .onWebhook({ fields: { parameter: "string" } })
      .onWebhook({ body: { parameter: "string" } })
      .createRow({
        row: { tableId: table._id!, name: "{{ trigger.parameter }}" },
      })
@@ -4,6 +4,7 @@ import { TRIGGER_DEFINITIONS } from "../../triggers"
import {
  Automation,
  AutomationActionStepId,
  AutomationResults,
  AutomationStep,
  AutomationStepInputs,
  AutomationTrigger,

@@ -100,6 +101,7 @@ class BranchStepBuilder<TStep extends AutomationTriggerStepId> {
  loop = this.step(AutomationActionStepId.LOOP)
  serverLog = this.step(AutomationActionStepId.SERVER_LOG)
  executeScript = this.step(AutomationActionStepId.EXECUTE_SCRIPT)
  executeScriptV2 = this.step(AutomationActionStepId.EXECUTE_SCRIPT_V2)
  filter = this.step(AutomationActionStepId.FILTER)
  bash = this.step(AutomationActionStepId.EXECUTE_BASH)
  openai = this.step(AutomationActionStepId.OPENAI)

@@ -212,10 +214,11 @@ class AutomationRunner<TStep extends AutomationTriggerStepId> {
      throw new Error(response.message)
    }

    const results: AutomationResults = response as AutomationResults
    // Remove the trigger step from the response.
    response.steps.shift()
    results.steps.shift()

    return response
    return results
  }

  async trigger(
@@ -22,6 +22,7 @@ import {
  UserBindings,
  AutomationResults,
  DidNotTriggerResponse,
  Table,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"
import { dataFilters, sdk } from "@budibase/shared-core"

@@ -154,6 +155,7 @@ interface AutomationTriggerParams {
  timeout?: number
  appId?: string
  user?: UserBindings
  table?: Table
}

export async function externalTrigger(
@@ -4,17 +4,8 @@ import { automationQueue } from "./bullboard"
import { updateEntityMetadata } from "../utilities"
import { context, db as dbCore, utils } from "@budibase/backend-core"
import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import {
  Automation,
  AutomationActionStepId,
  AutomationJob,
  AutomationStepDefinition,
  AutomationTriggerDefinition,
  AutomationTriggerStepId,
  MetadataType,
} from "@budibase/types"
import { Automation, AutomationJob, MetadataType } from "@budibase/types"
import { automationsEnabled } from "../features"
import { helpers, REBOOT_CRON } from "@budibase/shared-core"
import tracer from "dd-trace"

@@ -113,23 +104,6 @@ export async function updateTestHistory(
  )
}

export function removeDeprecated<
  T extends
    | Record<keyof typeof AutomationTriggerStepId, AutomationTriggerDefinition>
    | Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
>(definitions: T): T {
  const base: Record<
    string,
    AutomationTriggerDefinition | AutomationStepDefinition
  > = cloneDeep(definitions)
  for (let key of Object.keys(base)) {
    if (base[key].deprecated) {
      delete base[key]
    }
  }
  return base as T
}

// end the repetition and the job itself
export async function disableAllCrons(appId: any) {
  const promises = []
@@ -20,9 +20,12 @@ export interface TriggerOutput {

export interface AutomationContext {
  trigger: AutomationTriggerResultOutputs
  steps: [AutomationTriggerResultOutputs, ...AutomationStepResultOutputs[]]
  stepsById: Record<string, AutomationStepResultOutputs>
  steps: Record<
    string,
    AutomationStepResultOutputs | AutomationTriggerResultOutputs
  >
  stepsByName: Record<string, AutomationStepResultOutputs>
  stepsById: Record<string, AutomationStepResultOutputs>
  env?: Record<string, string>
  user?: UserBindings
  settings?: {

@@ -31,4 +34,6 @@ export interface AutomationContext {
    company?: string
  }
  loop?: { currentItem: any }
  _stepIndex: number
  _error: boolean
}
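With the reshaped context, step outputs are addressable through more than one map. A sketch of what reading this context looks like, based only on what the tests in this commit exercise; the exact keys written into the flat steps map depend on the orchestrator and are an assumption here:

// Sketch: reading step outputs from an AutomationContext instance.
declare const ctx: AutomationContext

// stepsById is keyed by stepId, e.g. the "abc123" / "ScriptingStep1" ids used
// in the executeScriptV2 tests earlier in this diff.
const createdRow = ctx.stepsById["abc123"]
// stepsByName is keyed by the step's name ("Server Log" here is illustrative).
const logged = ctx.stepsByName["Server Log"]
// steps is the flat map and, per its type, may also hold the trigger output.
const anyOutput = ctx.steps["abc123"]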
@@ -32,6 +32,8 @@ class AutomationEmitter implements ContextEmitter {

    if (chainAutomations === true) {
      return MAX_AUTOMATIONS_ALLOWED
    } else if (env.isTest()) {
      return 0
    } else if (chainAutomations === undefined && env.SELF_HOSTED) {
      return MAX_AUTOMATIONS_ALLOWED
    } else {
@@ -14,15 +14,14 @@ import {
  UpdateCommandInput,
  DeleteCommandInput,
} from "@aws-sdk/lib-dynamodb"
import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { DynamoDB, DynamoDBClientConfig } from "@aws-sdk/client-dynamodb"
import { AWS_REGION } from "../constants"

interface DynamoDBConfig {
export interface DynamoDBConfig {
  region: string
  accessKeyId: string
  secretAccessKey: string
  endpoint?: string
  currentClockSkew?: boolean
}

const SCHEMA: Integration = {

@@ -138,22 +137,16 @@ const SCHEMA: Integration = {
  },
}

class DynamoDBIntegration implements IntegrationBase {
  private config: DynamoDBConfig
  private client
export class DynamoDBIntegration implements IntegrationBase {
  private config: DynamoDBClientConfig
  private client: DynamoDBDocument

  constructor(config: DynamoDBConfig) {
    this.config = config

    // User is using a local dynamoDB endpoint, don't auth with remote
    if (this.config?.endpoint?.includes("localhost")) {
      // @ts-ignore
      this.config = {}
    }

    this.config = {
      ...this.config,
      currentClockSkew: true,
      credentials: {
        accessKeyId: config.accessKeyId,
        secretAccessKey: config.secretAccessKey,
      },
      region: config.region || AWS_REGION,
      endpoint: config.endpoint || undefined,
    }
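The constructor hunk is cut off here, so the client construction itself is not shown. A sketch, under the assumption that the integration wraps the low-level v3 client in the document client, as the DynamoDBDocument.from mock in the old test below suggested:

// Sketch only: build the document client from the assembled DynamoDBClientConfig.
import { DynamoDB, DynamoDBClientConfig } from "@aws-sdk/client-dynamodb"
import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"

function makeDocumentClient(config: DynamoDBClientConfig): DynamoDBDocument {
  return DynamoDBDocument.from(new DynamoDB(config))
}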
@@ -168,6 +168,7 @@ class S3Integration implements IntegrationBase {
        secretAccessKey: config.secretAccessKey,
      },
      region: config.region,
      endpoint: config.endpoint,
    }
    if (config.endpoint) {
      this.config.forcePathStyle = true
@@ -1,167 +1,108 @@
jest.mock("@aws-sdk/lib-dynamodb", () => ({
  DynamoDBDocument: {
    from: jest.fn(() => ({
      update: jest.fn(),
      put: jest.fn(),
      query: jest.fn(() => ({
        Items: [],
      })),
      scan: jest.fn(() => ({
        Items: [],
      })),
      delete: jest.fn(),
      get: jest.fn(),
    })),
  },
}))
jest.mock("@aws-sdk/client-dynamodb")
import { default as DynamoDBIntegration } from "../dynamodb"
import { Datasource } from "@budibase/types"
import { DynamoDBConfig, DynamoDBIntegration } from "../dynamodb"
import { DatabaseName, datasourceDescribe } from "./utils"
import {
  CreateTableCommandInput,
  DynamoDB,
  DynamoDBClientConfig,
} from "@aws-sdk/client-dynamodb"

class TestConfiguration {
  integration: any
const describes = datasourceDescribe({ only: [DatabaseName.DYNAMODB] })

  constructor(config: any = {}) {
    this.integration = new DynamoDBIntegration.integration(config)
async function createTable(client: DynamoDB, req: CreateTableCommandInput) {
  try {
    await client.deleteTable({ TableName: req.TableName })
  } catch (e: any) {
    if (e.name !== "ResourceNotFoundException") {
      throw e
    }
  }

  return await client.createTable(req)
}

describe("DynamoDB Integration", () => {
  let config: any
  let tableName = "Users"
if (describes.length > 0) {
  describe.each(describes)("DynamoDB Integration", ({ dsProvider }) => {
    let table = "Users"
    let rawDatasource: Datasource
    let dynamodb: DynamoDBIntegration

  beforeEach(() => {
    config = new TestConfiguration()
  })

  it("calls the create method with the correct params", async () => {
    await config.integration.create({
      table: tableName,
      json: {
        Name: "John",
      },
    })
    expect(config.integration.client.put).toHaveBeenCalledWith({
      TableName: tableName,
      Name: "John",
    })
  })

  it("calls the read method with the correct params", async () => {
    const indexName = "Test"

    const response = await config.integration.read({
      table: tableName,
      index: indexName,
      json: {},
    })
    expect(config.integration.client.query).toHaveBeenCalledWith({
      TableName: tableName,
      IndexName: indexName,
    })
    expect(response).toEqual([])
  })

  it("calls the scan method with the correct params", async () => {
    const indexName = "Test"

    const response = await config.integration.scan({
      table: tableName,
      index: indexName,
      json: {},
    })
    expect(config.integration.client.scan).toHaveBeenCalledWith({
      TableName: tableName,
      IndexName: indexName,
    })
    expect(response).toEqual([])
  })

  it("calls the get method with the correct params", async () => {
    await config.integration.get({
      table: tableName,
      json: {
        Id: 123,
      },
    })

    expect(config.integration.client.get).toHaveBeenCalledWith({
      TableName: tableName,
      Id: 123,
    })
  })

  it("calls the update method with the correct params", async () => {
    await config.integration.update({
      table: tableName,
      json: {
        Name: "John",
      },
    })
    expect(config.integration.client.update).toHaveBeenCalledWith({
      TableName: tableName,
      Name: "John",
    })
  })

  it("calls the delete method with the correct params", async () => {
    await config.integration.delete({
      table: tableName,
      json: {
        Name: "John",
      },
    })
    expect(config.integration.client.delete).toHaveBeenCalledWith({
      TableName: tableName,
      Name: "John",
    })
  })

  it("configures the dynamoDB constructor based on an empty endpoint parameter", async () => {
    const config = {
      region: "us-east-1",
      accessKeyId: "test",
      secretAccessKey: "test",
    function item(json: Record<string, any>) {
      return { table, json: { Item: json } }
    }

    const integration: any = new DynamoDBIntegration.integration(config)

    expect(integration.config).toEqual({
      currentClockSkew: true,
      ...config,
    })
  })

  it("configures the dynamoDB constructor based on a localhost endpoint parameter", async () => {
    const config = {
      region: "us-east-1",
      accessKeyId: "test",
      secretAccessKey: "test",
      endpoint: "localhost:8080",
    function key(json: Record<string, any>) {
      return { table, json: { Key: json } }
    }

    const integration: any = new DynamoDBIntegration.integration(config)
    beforeEach(async () => {
      const ds = await dsProvider()
      rawDatasource = ds.rawDatasource!
      dynamodb = new DynamoDBIntegration(
        rawDatasource.config! as DynamoDBConfig
      )

    expect(integration.config).toEqual({
      region: "us-east-1",
      currentClockSkew: true,
      endpoint: "localhost:8080",
      const config: DynamoDBClientConfig = {
        credentials: {
          accessKeyId: "test",
          secretAccessKey: "test",
        },
        region: "us-east-1",
        endpoint: rawDatasource.config!.endpoint,
      }

      const client = new DynamoDB(config)
      await createTable(client, {
        TableName: table,
        KeySchema: [{ AttributeName: "Id", KeyType: "HASH" }],
        AttributeDefinitions: [{ AttributeName: "Id", AttributeType: "N" }],
        ProvisionedThroughput: { ReadCapacityUnits: 1, WriteCapacityUnits: 1 },
      })
    })

    it("can create and read a record", async () => {
      await dynamodb.create(item({ Id: 1, Name: "John" }))

      const resp = await dynamodb.get(key({ Id: 1 }))
      expect(resp.Item).toEqual({ Id: 1, Name: "John" })
    })

    it("can scan", async () => {
      await dynamodb.create(item({ Id: 1, Name: "John" }))
      await dynamodb.create(item({ Id: 2, Name: "Jane" }))
      await dynamodb.create(item({ Id: 3, Name: "Jack" }))

      const resp = await dynamodb.scan({ table, json: {}, index: null })
      expect(resp).toEqual(
        expect.arrayContaining([
          { Id: 1, Name: "John" },
          { Id: 2, Name: "Jane" },
          { Id: 3, Name: "Jack" },
        ])
      )
    })

    it("can update", async () => {
      await dynamodb.create(item({ Id: 1, Foo: "John" }))
      await dynamodb.update({
        table,
        json: {
          Key: { Id: 1 },
          UpdateExpression: "SET Foo = :foo",
          ExpressionAttributeValues: { ":foo": "Jane" },
        },
      })

      const updatedRecord = await dynamodb.get(key({ Id: 1 }))
      expect(updatedRecord.Item).toEqual({ Id: 1, Foo: "Jane" })
    })

    it("can delete", async () => {
      await dynamodb.create(item({ Id: 1, Name: "John" }))
      await dynamodb.delete(key({ Id: 1 }))

      const deletedRecord = await dynamodb.get(key({ Id: 1 }))
      expect(deletedRecord.Item).toBeUndefined()
    })
  })

  it("configures the dynamoDB constructor based on a remote endpoint parameter", async () => {
    const config = {
      region: "us-east-1",
      accessKeyId: "test",
      secretAccessKey: "test",
      endpoint: "dynamodb.aws.foo.net",
    }

    const integration = new DynamoDBIntegration.integration(config)

    // @ts-ignore
    expect(integration.config).toEqual({
      currentClockSkew: true,
      ...config,
    })
  })
})
}
@@ -0,0 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import { DYNAMODB_IMAGE } from "./images"
import { DynamoDBConfig } from "../../dynamodb"

let ports: Promise<testContainerUtils.Port[]>

export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer(DYNAMODB_IMAGE)
        .withExposedPorts(8000)
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            // https://stackoverflow.com/a/77373799
            `if [ "$(curl -s -o /dev/null -I -w ''%{http_code}'' http://localhost:8000)" == "400" ]; then exit 0; else exit 1; fi`
          ).withStartupTimeout(60000)
        )
    )
  }

  const port = (await ports).find(x => x.container === 8000)?.host
  if (!port) {
    throw new Error("DynamoDB port not found")
  }

  const config: DynamoDBConfig = {
    accessKeyId: "test",
    secretAccessKey: "test",
    region: "us-east-1",
    endpoint: `http://127.0.0.1:${port}`,
  }

  return {
    type: "datasource",
    source: SourceName.DYNAMODB,
    config,
  }
}
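A sketch of how a test could sanity-check the datasource this helper returns, using only the AWS SDK client against the container's local endpoint; the function name is illustrative, not part of the diff:

// Sketch: connect to the local DynamoDB the container exposes and list tables.
import { DynamoDB } from "@aws-sdk/client-dynamodb"

async function checkLocalDynamo() {
  const ds = await getDatasource()
  const { accessKeyId, secretAccessKey, region, endpoint } =
    ds.config as DynamoDBConfig
  const client = new DynamoDB({
    credentials: { accessKeyId, secretAccessKey },
    region,
    endpoint,
  })
  const { TableNames } = await client.listTables({})
  return TableNames // [] on a fresh container
}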
@@ -420,15 +420,16 @@ export class GoogleSheetsMock {
    }

    const newRows = body.values.map(v => this.valuesToRowData(v))
    const toDelete =
      params.insertDataOption === "INSERT_ROWS" ? newRows.length : 0
    sheet.data[0].rowData.splice(endRowIndex + 1, toDelete, ...newRows)
    sheet.data[0].rowMetadata.splice(endRowIndex + 1, toDelete, {
    const newMetadata = newRows.map(() => ({
      hiddenByUser: false,
      hiddenByFilter: false,
      pixelSize: 100,
      developerMetadata: [],
    })
    }))
    const toDelete =
      params.insertDataOption === "INSERT_ROWS" ? newRows.length : 0
    sheet.data[0].rowData.splice(endRowIndex + 1, toDelete, ...newRows)
    sheet.data[0].rowMetadata.splice(endRowIndex + 1, toDelete, ...newMetadata)

    // It's important to give back a correct updated range because the API
    // library we use makes use of it to assign the correct row IDs to rows.
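The old code spliced a single metadata object into rowMetadata no matter how many rows were appended; building newMetadata with map keeps rowMetadata and rowData the same length. A tiny sketch of the splice semantics this relies on, with illustrative values:

// splice inserts exactly the elements that are spread in, so one metadata
// entry has to be generated per appended row to stay aligned with rowData.
const rowMetadata = [{ pixelSize: 100 }, { pixelSize: 100 }]
const appendedRows = ["a", "b", "c"]
const newMetadata = appendedRows.map(() => ({ pixelSize: 100 }))
rowMetadata.splice(1, 0, ...newMetadata)
// rowMetadata.length === 5: one entry per appended row plus the two originals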
@@ -13,3 +13,4 @@ export const POSTGRES_LEGACY_IMAGE = `postgres:9.5.25`
export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`
export const ELASTICSEARCH_IMAGE = `elasticsearch@${process.env.ELASTICSEARCH_SHA}`
export const DYNAMODB_IMAGE = `amazon/dynamodb-local@${process.env.DYNAMODB_SHA}`
Some files were not shown because too many files have changed in this diff.