Merge branch 'master' of github.com:budibase/budibase into budi-8483-consolidate-feature-flags-into-a-single-endpoint

This commit is contained in:
Sam Rose 2024-08-05 17:11:23 +01:00
commit da74441c1c
No known key found for this signature in database
219 changed files with 6558 additions and 3142 deletions

View File

@ -108,7 +108,7 @@ jobs:
- name: Pull testcontainers images
run: |
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.2.1-sqs &
docker pull budibase/couchdb:v3.3.3 &
docker pull redis &
wait $(jobs -p)
@ -162,17 +162,24 @@ jobs:
node-version: 20.x
cache: yarn
- name: Load dotenv
id: dotenv
uses: falti/dotenv-action@v1.1.3
with:
path: ./packages/server/datasource-sha.env
- name: Pull testcontainers images
run: |
docker pull mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04 &
docker pull mysql:8.3 &
docker pull postgres:16.1-bullseye &
docker pull mongo:7.0-jammy &
docker pull mariadb:lts &
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.2.1-sqs &
docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
docker pull budibase/oracle-database:23.2-slim-faststart &
docker pull minio/minio &
docker pull redis &
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.3.3 &
wait $(jobs -p)

View File

@ -46,7 +46,7 @@ export default async function setup() {
await killContainers(containers)
try {
const couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
.withExposedPorts(5984, 4984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.24",
"version": "2.29.29",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@ -10,7 +10,7 @@
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0",
"esbuild-node-externals": "^1.14.0",
"eslint": "^8.52.0",
"eslint-plugin-import": "^2.29.0",
"eslint-plugin-jest": "^27.9.0",

@ -1 +1 @@
Subproject commit b03e584e465f620b49a1b688ff4afc973e6c0758
Subproject commit 32b8fa4643b4f0f74ee89760deffe431ab347ad9

View File

@ -56,24 +56,24 @@ class CouchDBError extends Error implements DBError {
constructor(
message: string,
info: {
status: number | undefined
statusCode: number | undefined
status?: number
statusCode?: number
name: string
errid: string
description: string
reason: string
error: string
errid?: string
description?: string
reason?: string
error?: string
}
) {
super(message)
const statusCode = info.status || info.statusCode || 500
this.status = statusCode
this.statusCode = statusCode
this.reason = info.reason
this.reason = info.reason || "Unknown"
this.name = info.name
this.errid = info.errid
this.description = info.description
this.error = info.error
this.errid = info.errid || "Unknown"
this.description = info.description || "Unknown"
this.error = info.error || "Not found"
}
}
@ -246,6 +246,35 @@ export class DatabaseImpl implements Database {
})
}
async bulkRemove(documents: Document[], opts?: { silenceErrors?: boolean }) {
const response: Nano.DocumentBulkResponse[] = await this.performCall(db => {
return () =>
db.bulk({
docs: documents.map(doc => ({
...doc,
_deleted: true,
})),
})
})
if (opts?.silenceErrors) {
return
}
let errorFound = false
let errorMessage: string = "Unable to bulk remove documents: "
for (let res of response) {
if (res.error) {
errorFound = true
errorMessage += res.error
}
}
if (errorFound) {
throw new CouchDBError(errorMessage, {
name: this.name,
status: 400,
})
}
}
async post(document: AnyDocument, opts?: DatabasePutOpts) {
if (!document._id) {
document._id = newid()
@ -279,8 +308,12 @@ export class DatabaseImpl implements Database {
}
async bulkDocs(documents: AnyDocument[]) {
const now = new Date().toISOString()
return this.performCall(db => {
return () => db.bulk({ docs: documents })
return () =>
db.bulk({
docs: documents.map(d => ({ createdAt: now, ...d, updatedAt: now })),
})
})
}

View File

@ -0,0 +1,118 @@
import tk from "timekeeper"
import { DatabaseImpl } from ".."
import { generator, structures } from "../../../../tests"
const initialTime = new Date()
tk.freeze(initialTime)
describe("DatabaseImpl", () => {
const db = new DatabaseImpl(structures.db.id())
beforeEach(() => {
tk.freeze(initialTime)
})
describe("put", () => {
it("persists createdAt and updatedAt fields", async () => {
const id = generator.guid()
await db.put({ _id: id })
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
createdAt: initialTime.toISOString(),
updatedAt: initialTime.toISOString(),
})
})
it("updates updated at fields", async () => {
const id = generator.guid()
await db.put({ _id: id })
tk.travel(100)
await db.put({ ...(await db.get(id)), newValue: 123 })
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
})
})
describe("bulkDocs", () => {
it("persists createdAt and updatedAt fields", async () => {
const ids = generator.unique(() => generator.guid(), 5)
await db.bulkDocs(ids.map(id => ({ _id: id })))
for (const id of ids) {
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
createdAt: initialTime.toISOString(),
updatedAt: initialTime.toISOString(),
})
}
})
it("updates updated at fields", async () => {
const ids = generator.unique(() => generator.guid(), 5)
await db.bulkDocs(ids.map(id => ({ _id: id })))
tk.travel(100)
const docsToUpdate = await Promise.all(
ids.map(async id => ({ ...(await db.get(id)), newValue: 123 }))
)
await db.bulkDocs(docsToUpdate)
for (const id of ids) {
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
}
})
it("keeps existing createdAt", async () => {
const ids = generator.unique(() => generator.guid(), 2)
await db.bulkDocs(ids.map(id => ({ _id: id })))
tk.travel(100)
const newDocs = generator
.unique(() => generator.guid(), 3)
.map(id => ({ _id: id }))
const docsToUpdate = await Promise.all(
ids.map(async id => ({ ...(await db.get(id)), newValue: 123 }))
)
await db.bulkDocs([...newDocs, ...docsToUpdate])
for (const { _id } of docsToUpdate) {
expect(await db.get(_id)).toEqual({
_id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
}
for (const { _id } of newDocs) {
expect(await db.get(_id)).toEqual({
_id,
_rev: expect.any(String),
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
})
}
})
})
})

View File

@ -71,6 +71,16 @@ export class DDInstrumentedDatabase implements Database {
})
}
bulkRemove(
documents: Document[],
opts?: { silenceErrors?: boolean }
): Promise<void> {
return tracer.trace("db.bulkRemove", span => {
span?.addTags({ db_name: this.name, num_docs: documents.length })
return this.db.bulkRemove(documents, opts)
})
}
put(
document: AnyDocument,
opts?: DatabasePutOpts | undefined

View File

@ -199,9 +199,8 @@ export const createPlatformUserView = async () => {
export const queryPlatformView = async <T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
opts?: QueryViewOptions
): Promise<T[] | T> => {
params: DatabaseQueryOpts
): Promise<T[]> => {
const CreateFuncByName: any = {
[ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
@ -209,7 +208,9 @@ export const queryPlatformView = async <T extends Document>(
return doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db, createFn, opts)
return queryView(viewName, params, db, createFn, {
arrayResponse: true,
}) as Promise<T[]>
})
}

View File

@ -25,6 +25,11 @@ export async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
return db.get(emailOrId)
}
export async function updateUserDoc(platformUser: PlatformUserById) {
const db = getPlatformDB()
await db.put(platformUser)
}
// CREATE
function newUserIdDoc(id: string, tenantId: string): PlatformUserById {
@ -113,15 +118,12 @@ export async function addUser(
export async function removeUser(user: User) {
const db = getPlatformDB()
const keys = [user._id!, user.email]
const userDocs = await db.allDocs({
const userDocs = await db.allDocs<User>({
keys,
include_docs: true,
})
const toDelete = userDocs.rows.map((row: any) => {
return {
...row.doc,
_deleted: true,
}
})
await db.bulkDocs(toDelete)
await db.bulkRemove(
userDocs.rows.map(row => row.doc!),
{ silenceErrors: true }
)
}

File diff suppressed because it is too large Load Diff

View File

@ -1,87 +0,0 @@
import { FieldType, Table, FieldSchema, SqlClient } from "@budibase/types"
import { Knex } from "knex"
export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
columnPrefix: string | undefined
constructor(
client: string,
table: Table,
{ allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
this.columnPrefix = columnPrefix
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
let found = this.table.schema[fieldName]
if (!found && this.columnPrefix) {
const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
found = this.table.schema[prefixRemovedFieldName]
}
return found
}
between(
query: Knex.QueryBuilder,
key: string,
low: number | string,
high: number | string
) {
// Use a between operator if we have 2 valid range values
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[low, high]
)
} else {
const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [low, high])
}
return query
}
lte(query: Knex.QueryBuilder, key: string, low: number | string) {
// Use just a single greater than operator if we only have a low
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
low,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", low)
}
return query
}
gte(query: Knex.QueryBuilder, key: string, high: number | string) {
const field = this.getField(key)
// Use just a single less than operator if we only have a high
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
high,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", high)
}
return query
}
}

View File

@ -22,6 +22,7 @@ export function getNativeSql(
query: Knex.SchemaBuilder | Knex.QueryBuilder
): SqlQuery | SqlQuery[] {
let sql = query.toSQL()
if (Array.isArray(sql)) {
return sql as SqlQuery[]
}

View File

@ -18,6 +18,9 @@ import {
User,
UserStatus,
UserGroup,
PlatformUserBySsoId,
PlatformUserById,
AnyDocument,
} from "@budibase/types"
import {
getAccountHolderFromUserIds,
@ -25,7 +28,11 @@ import {
isCreator,
validateUniqueUser,
} from "./utils"
import { searchExistingEmails } from "./lookup"
import {
getFirstPlatformUser,
getPlatformUsers,
searchExistingEmails,
} from "./lookup"
import { hash } from "../utils"
import { validatePassword } from "../security"
@ -446,9 +453,32 @@ export class UserDB {
creator => !!creator
).length
const ssoUsersToDelete: AnyDocument[] = []
for (let user of usersToDelete) {
const platformUser = (await getFirstPlatformUser(
user._id!
)) as PlatformUserById
const ssoId = platformUser.ssoId
if (ssoId) {
// Need to get the _rev of the SSO user doc to delete it. The view also returns docs that have the ssoId property, so we need to ignore those.
const ssoUsers = (await getPlatformUsers(
ssoId
)) as PlatformUserBySsoId[]
ssoUsers
.filter(user => user.ssoId == null)
.forEach(user => {
ssoUsersToDelete.push({
...user,
_deleted: true,
})
})
}
await bulkDeleteProcessing(user)
}
// Delete any associated SSO user docs
await platform.getPlatformDB().bulkDocs(ssoUsersToDelete)
await UserDB.quotas.removeUsers(toDelete.length, creatorsToDeleteCount)
// Build Response

View File

@ -34,15 +34,22 @@ export async function searchExistingEmails(emails: string[]) {
}
// lookup, could be email or userId, either will return a doc
export async function getPlatformUser(
export async function getPlatformUsers(
identifier: string
): Promise<PlatformUser | null> {
): Promise<PlatformUser[]> {
// use the view here and allow to find anyone regardless of casing
// Use lowercase to ensure email login is case insensitive
return (await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
return await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
keys: [identifier.toLowerCase()],
include_docs: true,
})) as PlatformUser
})
}
export async function getFirstPlatformUser(
identifier: string
): Promise<PlatformUser | null> {
const platformUserDocs = await getPlatformUsers(identifier)
return platformUserDocs[0] ?? null
}
export async function getExistingTenantUsers(
@ -74,15 +81,10 @@ export async function getExistingPlatformUsers(
keys: lcEmails,
include_docs: true,
}
const opts = {
arrayResponse: true,
}
return (await dbUtils.queryPlatformView(
return await dbUtils.queryPlatformView(
ViewName.PLATFORM_USERS_LOWERCASE,
params,
opts
)) as PlatformUserByEmail[]
params
)
}
export async function getExistingAccounts(
@ -93,14 +95,5 @@ export async function getExistingAccounts(
keys: lcEmails,
include_docs: true,
}
const opts = {
arrayResponse: true,
}
return (await dbUtils.queryPlatformView(
ViewName.ACCOUNT_BY_EMAIL,
params,
opts
)) as AccountMetadata[]
return await dbUtils.queryPlatformView(ViewName.ACCOUNT_BY_EMAIL, params)
}

View File

@ -1,7 +1,7 @@
import { CloudAccount, ContextUser, User, UserGroup } from "@budibase/types"
import * as accountSdk from "../accounts"
import env from "../environment"
import { getPlatformUser } from "./lookup"
import { getFirstPlatformUser } from "./lookup"
import { EmailUnavailableError } from "../errors"
import { getTenantId } from "../context"
import { sdk } from "@budibase/shared-core"
@ -51,7 +51,7 @@ async function isCreatorByGroupMembership(user?: User | ContextUser) {
export async function validateUniqueUser(email: string, tenantId: string) {
// check budibase users in other tenants
if (env.MULTI_TENANCY) {
const tenantUser = await getPlatformUser(email)
const tenantUser = await getFirstPlatformUser(email)
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
throw new EmailUnavailableError(email)
}

View File

@ -1,6 +1,6 @@
import {
CONSTANT_EXTERNAL_ROW_COLS,
CONSTANT_INTERNAL_ROW_COLS,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
export function expectFunctionWasCalledTimesWith(
@ -14,7 +14,7 @@ export function expectFunctionWasCalledTimesWith(
}
export const expectAnyInternalColsAttributes: {
[K in (typeof CONSTANT_INTERNAL_ROW_COLS)[number]]: any
[K in (typeof PROTECTED_INTERNAL_COLUMNS)[number]]: any
} = {
tableId: expect.anything(),
type: expect.anything(),
@ -25,7 +25,7 @@ export const expectAnyInternalColsAttributes: {
}
export const expectAnyExternalColsAttributes: {
[K in (typeof CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
[K in (typeof PROTECTED_EXTERNAL_COLUMNS)[number]]: any
} = {
tableId: expect.anything(),
_id: expect.anything(),

View File

@ -36,9 +36,11 @@
<use xlink:href="#spectrum-icon-18-{icon}" />
</svg>
<div class="spectrum-InLineAlert-header">{header}</div>
{#each split as splitMsg}
<div class="spectrum-InLineAlert-content">{splitMsg}</div>
{/each}
<slot>
{#each split as splitMsg}
<div class="spectrum-InLineAlert-content">{splitMsg}</div>
{/each}
</slot>
{#if onConfirm}
<div class="spectrum-InLineAlert-footer button">
<Button {cta} secondary={cta ? false : true} on:click={onConfirm}

View File

@ -30,7 +30,7 @@
class:custom={!!color}
class:square
class:hoverable
style={`--color: ${color};`}
style={`--color: ${color ?? "var(--spectrum-global-color-gray-400)"};`}
class:spectrum-StatusLight--celery={celery}
class:spectrum-StatusLight--yellow={yellow}
class:spectrum-StatusLight--fuchsia={fuchsia}
@ -61,13 +61,17 @@
min-height: 0;
padding-top: 0;
padding-bottom: 0;
transition: color ease-out 130ms;
}
.spectrum-StatusLight.withText::before {
margin-right: 10px;
}
.spectrum-StatusLight::before {
transition: background-color ease-out 160ms;
}
.custom::before {
background: var(--color) !important;
background-color: var(--color) !important;
}
.square::before {
width: 14px;
@ -79,4 +83,14 @@
cursor: pointer;
color: var(--spectrum-global-color-gray-900);
}
.spectrum-StatusLight--sizeXS::before {
width: 10px;
height: 10px;
border-radius: 2px;
}
.spectrum-StatusLight--disabled::before {
background-color: var(--spectrum-global-color-gray-400) !important;
}
</style>

View File

@ -54,6 +54,7 @@
</div>
<div class="controls">
<div
class:disabled={!$selectedAutomation?.definition?.trigger}
on:click={() => {
testDataModal.show()
}}
@ -80,6 +81,7 @@
automation._id,
automation.disabled
)}
disabled={!$selectedAutomation?.definition?.trigger}
value={!automation.disabled}
/>
</div>

View File

@ -3,6 +3,7 @@
automationStore,
selectedAutomation,
permissions,
selectedAutomationDisplayData,
} from "stores/builder"
import {
Icon,
@ -14,6 +15,7 @@
notifications,
Label,
AbsTooltip,
InlineAlert,
} from "@budibase/bbui"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@ -49,6 +51,8 @@
$: isAppAction && setPermissions(role)
$: isAppAction && getPermissions(automationId)
$: triggerInfo = $selectedAutomationDisplayData?.triggerInfo
async function setPermissions(role) {
if (!role || !automationId) {
return
@ -183,6 +187,12 @@
{block}
{webhookModal}
/>
{#if isTrigger && triggerInfo}
<InlineAlert
header={triggerInfo.type}
message={`This trigger is tied to the row action ${triggerInfo.rowAction.name} on your ${triggerInfo.table.name} table`}
/>
{/if}
{#if lastStep}
<Button on:click={() => testDataModal.show()} cta>
Finish and test automation

View File

@ -81,7 +81,7 @@
// Check the schema to see if required fields have been entered
$: isError =
!isTriggerValid(trigger) ||
!trigger.schema.outputs.required?.every(
!(trigger.schema.outputs.required || []).every(
required => $memoTestData?.[required] || required !== "row"
)

View File

@ -6,6 +6,7 @@
contextMenuStore,
} from "stores/builder"
import { notifications, Icon } from "@budibase/bbui"
import { sdk } from "@budibase/shared-core"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import UpdateAutomationModal from "components/automation/AutomationPanel/UpdateAutomationModal.svelte"
import NavItem from "components/common/NavItem.svelte"
@ -35,45 +36,55 @@
}
const getContextMenuItems = () => {
return [
{
icon: "Delete",
name: "Delete",
keyBind: null,
visible: true,
disabled: false,
callback: confirmDeleteDialog.show,
const isRowAction = sdk.automations.isRowAction(automation)
const result = []
if (!isRowAction) {
result.push(
...[
{
icon: "Delete",
name: "Delete",
keyBind: null,
visible: true,
disabled: false,
callback: confirmDeleteDialog.show,
},
{
icon: "Edit",
name: "Edit",
keyBind: null,
visible: true,
disabled: !automation.definition.trigger,
callback: updateAutomationDialog.show,
},
{
icon: "Duplicate",
name: "Duplicate",
keyBind: null,
visible: true,
disabled:
!automation.definition.trigger ||
automation.definition.trigger?.name === "Webhook",
callback: duplicateAutomation,
},
]
)
}
result.push({
icon: automation.disabled ? "CheckmarkCircle" : "Cancel",
name: automation.disabled ? "Activate" : "Pause",
keyBind: null,
visible: true,
disabled: !automation.definition.trigger,
callback: () => {
automationStore.actions.toggleDisabled(
automation._id,
automation.disabled
)
},
{
icon: "Edit",
name: "Edit",
keyBind: null,
visible: true,
disabled: false,
callback: updateAutomationDialog.show,
},
{
icon: "Duplicate",
name: "Duplicate",
keyBind: null,
visible: true,
disabled: automation.definition.trigger.name === "Webhook",
callback: duplicateAutomation,
},
{
icon: automation.disabled ? "CheckmarkCircle" : "Cancel",
name: automation.disabled ? "Activate" : "Pause",
keyBind: null,
visible: true,
disabled: false,
callback: () => {
automationStore.actions.toggleDisabled(
automation._id,
automation.disabled
)
},
},
]
})
return result
}
const openContextMenu = e => {
@ -89,7 +100,7 @@
on:contextmenu={openContextMenu}
{icon}
iconColor={"var(--spectrum-global-color-gray-900)"}
text={automation.name}
text={automation.displayName}
selected={automation._id === $selectedAutomation?._id}
hovering={automation._id === $contextMenuStore.id}
on:click={() => automationStore.actions.select(automation._id)}

View File

@ -17,19 +17,26 @@
automation.name.toLowerCase().includes(searchString.toLowerCase())
)
})
.map(automation => ({
...automation,
displayName:
$automationStore.automationDisplayData[automation._id]?.displayName ||
automation.name,
}))
.sort((a, b) => {
const lowerA = a.name.toLowerCase()
const lowerB = b.name.toLowerCase()
const lowerA = a.displayName.toLowerCase()
const lowerB = b.displayName.toLowerCase()
return lowerA > lowerB ? 1 : -1
})
$: groupedAutomations = filteredAutomations.reduce((acc, auto) => {
acc[auto.definition.trigger.event] ??= {
icon: auto.definition.trigger.icon,
name: (auto.definition.trigger?.name || "").toUpperCase(),
const catName = auto.definition?.trigger?.event || "No Trigger"
acc[catName] ??= {
icon: auto.definition?.trigger?.icon || "AlertCircle",
name: (auto.definition?.trigger?.name || "No Trigger").toUpperCase(),
entries: [],
}
acc[auto.definition.trigger.event].entries.push(auto)
acc[catName].entries.push(auto)
return acc
}, {})

View File

@ -21,7 +21,9 @@
$: nameError =
nameTouched && !name ? "Please specify a name for the automation." : null
$: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
$: triggers = Object.entries(
$automationStore.blockDefinitions.CREATABLE_TRIGGER
)
async function createAutomation() {
try {

View File

@ -58,6 +58,7 @@
AutomationEventType,
AutomationStepType,
AutomationActionStepId,
AutomationCustomIOType,
} from "@budibase/types"
import { FIELDS } from "constants/backend"
import PropField from "./PropField.svelte"
@ -394,7 +395,9 @@
*/
const onRowTriggerUpdate = async update => {
if (
["tableId", "filters", "meta"].some(key => Object.hasOwn(update, key))
["tableId", AutomationCustomIOType.FILTERS, "meta"].some(key =>
Object.hasOwn(update, key)
)
) {
try {
let updatedAutomation
@ -744,7 +747,11 @@
for (let [key, field] of properties) {
// need to look for the builder definition (keyed separately, see saveFilters)
const defKey = `${key}-def`
if (field.customType === "filters" && inputs?.[defKey]) {
if (
(field.customType === AutomationCustomIOType.FILTERS ||
field.customType === AutomationCustomIOType.TRIGGER_FILTER) &&
inputs?.[defKey]
) {
filters = inputs[defKey]
break
}
@ -846,7 +853,7 @@
<Label>
{label}
</Label>
{#if value.customType === "trigger_filter"}
{#if value.customType === AutomationCustomIOType.TRIGGER_FILTER}
<Icon
hoverable
on:click={() =>
@ -869,6 +876,7 @@
options={value.enum}
getOptionLabel={(x, idx) =>
value.pretty ? value.pretty[idx] : x}
disabled={value.readonly}
/>
{:else if value.type === "json"}
<Editor
@ -877,6 +885,7 @@
mode="json"
value={inputData[key]?.value}
on:change={e => onChange({ [key]: e.detail })}
readOnly={value.readonly}
/>
{:else if value.type === "boolean"}
<div style="margin-top: 10px">
@ -884,6 +893,7 @@
text={value.title}
value={inputData[key]}
on:change={e => onChange({ [key]: e.detail })}
disabled={value.readonly}
/>
</div>
{:else if value.type === "date"}
@ -897,6 +907,7 @@
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
disabled={value.readonly}
>
<DatePicker
value={inputData[key]}
@ -908,6 +919,7 @@
on:change={e => onChange({ [key]: e.detail })}
value={inputData[key]}
options={Object.keys(table?.schema || {})}
disabled={value.readonly}
/>
{:else if value.type === "attachment" || value.type === "signature_single"}
<div class="attachment-field-wrapper">
@ -977,7 +989,7 @@
{/if}
</div>
</div>
{:else if value.customType === "filters" || value.customType === "trigger_filter"}
{:else if value.customType === AutomationCustomIOType.FILTERS || value.customType === AutomationCustomIOType.TRIGGER_FILTER}
<ActionButton fullWidth on:click={drawer.show}
>{filters.length > 0
? "Update Filter"
@ -1021,6 +1033,7 @@
{isTrigger}
value={inputData[key]}
on:change={e => onChange({ [key]: e.detail })}
disabled={value.readonly}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay value={inputData[key]} />

View File

@ -13,7 +13,7 @@
const { datasource } = getContext("grid")
$: triggers = $automationStore.blockDefinitions.TRIGGER
$: triggers = $automationStore.blockDefinitions.CREATABLE_TRIGGER
$: table = $tables.list.find(table => table._id === $datasource.tableId)

View File

@ -17,8 +17,8 @@
SWITCHABLE_TYPES,
ValidColumnNameRegex,
helpers,
CONSTANT_INTERNAL_ROW_COLS,
CONSTANT_EXTERNAL_ROW_COLS,
PROTECTED_INTERNAL_COLUMNS,
PROTECTED_EXTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
@ -489,8 +489,8 @@
}
const newError = {}
const prohibited = externalTable
? CONSTANT_EXTERNAL_ROW_COLS
: CONSTANT_INTERNAL_ROW_COLS
? PROTECTED_EXTERNAL_COLUMNS
: PROTECTED_INTERNAL_COLUMNS
if (!externalTable && fieldInfo.name?.startsWith("_")) {
newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {

View File

@ -33,6 +33,5 @@
title="Confirm Deletion"
>
Are you sure you wish to delete the datasource
<i>{datasource.name}?</i>
This action cannot be undone.
<i>{datasource.name}</i>? This action cannot be undone.
</ConfirmDialog>

View File

@ -100,51 +100,43 @@
async function handleFile(e) {
loading = true
error = null
const previousValidation = validation
validation = {}
try {
const response = await parseFile(e)
rows = response.rows
fileName = response.fileName
const newValidateHash = JSON.stringify(rows)
if (newValidateHash === validateHash) {
validation = previousValidation
} else {
await validate(rows)
validateHash = newValidateHash
}
} catch (e) {
error = e.message || e
} finally {
loading = false
error = e
}
}
async function validate(rows) {
loading = true
error = null
validation = {}
allValid = false
try {
if (rows.length > 0) {
const response = await API.validateExistingTableImport({
rows,
tableId,
})
if (rows.length > 0) {
const response = await API.validateExistingTableImport({
rows,
tableId,
})
validation = response.schemaValidation
invalidColumns = response.invalidColumns
allValid = response.allValid
}
} catch (e) {
error = e.message
validation = response.schemaValidation
invalidColumns = response.invalidColumns
allValid = response.allValid
}
loading = false
}
$: {
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows)
if (newValidateHash !== validateHash) {
validate(rows)
}
validateHash = newValidateHash
}
</script>

View File

@ -1,9 +1,9 @@
<script>
import { Select, Icon } from "@budibase/bbui"
import { FIELDS } from "constants/backend"
import { canBeDisplayColumn, utils } from "@budibase/shared-core"
import { API } from "api"
import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = []
export let schema = {}
@ -97,6 +97,8 @@
let errors = {}
let selectedColumnTypes = {}
let rawRows = []
$: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column] && canBeDisplayColumn(schema[column].type)
})
@ -106,6 +108,8 @@
}
$: {
rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
if (newValidateHash !== validateHash) {
@ -122,7 +126,7 @@
try {
const response = await parseFile(e)
rows = response.rows
rawRows = response.rows
schema = response.schema
fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
@ -188,7 +192,7 @@
type="file"
on:change={handleFile}
/>
<label for="file-upload" class:uploaded={rows.length > 0}>
<label for="file-upload" class:uploaded={rawRows.length > 0}>
{#if error}
Error: {error}
{:else if fileName}
@ -198,7 +202,7 @@
{/if}
</label>
</div>
{#if rows.length > 0 && !error}
{#if rawRows.length > 0 && !error}
<div class="schema-fields">
{#each Object.entries(schema) as [name, column]}
<div class="field">

View File

@ -1,7 +1,7 @@
<script>
import { goto, params } from "@roxi/routify"
import { tables, datasources, screenStore } from "stores/builder"
import { Input, notifications } from "@budibase/bbui"
import { appStore, tables, datasources, screenStore } from "stores/builder"
import { InlineAlert, Link, Input, notifications } from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend"
@ -9,28 +9,41 @@
let confirmDeleteDialog
export const show = () => {
templateScreens = $screenStore.screens.filter(
screen => screen.autoTableId === table._id
)
willBeDeleted = ["All table data"].concat(
templateScreens.map(screen => `Screen ${screen.routing?.route || ""}`)
)
confirmDeleteDialog.show()
let screensPossiblyAffected = []
let viewsMessage = ""
let deleteTableName
const getViewsMessage = () => {
const views = Object.values(table?.views ?? [])
if (views.length < 1) {
return ""
}
if (views.length === 1) {
return ", including 1 view"
}
return `, including ${views.length} views`
}
let templateScreens
let willBeDeleted
let deleteTableName
export const show = () => {
viewsMessage = getViewsMessage()
screensPossiblyAffected = $screenStore.screens
.filter(
screen => screen.autoTableId === table._id && screen.routing?.route
)
.map(screen => ({
text: screen.routing.route,
url: `/builder/app/${$appStore.appId}/design/${screen._id}`,
}))
confirmDeleteDialog.show()
}
async function deleteTable() {
const isSelected = $params.tableId === table._id
try {
await tables.delete(table)
// Screens need deleted one at a time because of undo/redo
for (let screen of templateScreens) {
await screenStore.delete(screen)
}
if (table.sourceType === DB_TYPE_EXTERNAL) {
await datasources.fetch()
}
@ -46,6 +59,10 @@
function hideDeleteDialog() {
deleteTableName = ""
}
const autofillTableName = () => {
deleteTableName = table.name
}
</script>
<ConfirmDialog
@ -56,34 +73,103 @@
title="Confirm Deletion"
disabled={deleteTableName !== table.name}
>
<p>
Are you sure you wish to delete the table
<b>{table.name}?</b>
The following will also be deleted:
</p>
<b>
<div class="delete-items">
{#each willBeDeleted as item}
<div>{item}</div>
{/each}
</div>
</b>
<p>
This action cannot be undone - to continue please enter the table name below
to confirm.
</p>
<Input bind:value={deleteTableName} placeholder={table.name} />
<div class="content">
<p class="firstWarning">
Are you sure you wish to delete the table
<span class="tableNameLine">
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<b on:click={autofillTableName} class="tableName">{table.name}</b>
<span>?</span>
</span>
</p>
<p class="secondWarning">All table data will be deleted{viewsMessage}.</p>
<p class="thirdWarning">This action <b>cannot be undone</b>.</p>
{#if screensPossiblyAffected.length > 0}
<div class="affectedScreens">
<InlineAlert
header="The following screens were originally generated from this table and may no longer function as expected"
>
<ul class="affectedScreensList">
{#each screensPossiblyAffected as item}
<li>
<Link quiet overBackground target="_blank" href={item.url}
>{item.text}</Link
>
</li>
{/each}
</ul>
</InlineAlert>
</div>
{/if}
<p class="fourthWarning">Please enter the app name below to confirm.</p>
<Input bind:value={deleteTableName} placeholder={table.name} />
</div>
</ConfirmDialog>
<style>
div.delete-items {
margin-top: 10px;
margin-bottom: 10px;
margin-left: 10px;
.content {
margin-top: 0;
max-width: 320px;
}
div.delete-items div {
.firstWarning {
margin: 0 0 12px;
max-width: 100%;
}
.tableNameLine {
display: inline-flex;
max-width: 100%;
vertical-align: bottom;
}
.tableName {
flex-grow: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
cursor: pointer;
}
.secondWarning {
margin: 0;
max-width: 100%;
}
.thirdWarning {
margin: 0 0 12px;
max-width: 100%;
}
.affectedScreens {
margin: 18px 0;
max-width: 100%;
margin-bottom: 24px;
}
.affectedScreens :global(.spectrum-InLineAlert) {
max-width: 100%;
}
.affectedScreensList {
padding: 0;
margin-bottom: 0;
}
.affectedScreensList li {
display: block;
max-width: 100%;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
margin-top: 4px;
font-weight: 600;
}
.fourthWarning {
margin: 12px 0 6px;
max-width: 100%;
}
</style>

View File

@ -78,7 +78,7 @@
await datasources.fetch()
await afterSave(table)
} catch (e) {
notifications.error(e)
notifications.error(e.message || e)
// reload in case the table was created
await tables.fetch()
}

View File

@ -0,0 +1,12 @@
<script>
import { RoleUtils } from "@budibase/frontend-core"
import { StatusLight } from "@budibase/bbui"
export let id
export let size = "M"
export let disabled = false
$: color = RoleUtils.getRoleColour(id)
</script>
<StatusLight square {disabled} {size} {color} />

View File

@ -1,20 +1,32 @@
<script>
import { Layout, Input } from "@budibase/bbui"
import { FancyForm, FancyInput } from "@budibase/bbui"
import { createValidationStore, requiredValidator } from "helpers/validation"
export let password
export let passwordForm
export let error
const validatePassword = value => {
if (!value || value.length < 12) {
return "Please enter at least 12 characters. We recommend using machine generated or random passwords."
}
return null
}
const [firstPassword, passwordError, firstTouched] = createValidationStore(
"",
requiredValidator
)
const [repeatPassword, _, repeatTouched] = createValidationStore(
"",
requiredValidator
requiredValidator,
validatePassword
)
$: password = $firstPassword
$: firstPasswordError =
($firstTouched && $passwordError) ||
($repeatTouched && validatePassword(password))
$: error =
!$firstPassword ||
!$firstTouched ||
@ -22,19 +34,19 @@
$firstPassword !== $repeatPassword
</script>
<Layout gap="XS" noPadding>
<Input
<FancyForm bind:this={passwordForm}>
<FancyInput
label="Password"
type="password"
error={$firstTouched && $passwordError}
error={firstPasswordError}
bind:value={$firstPassword}
/>
<Input
label="Repeat Password"
<FancyInput
label="Repeat password"
type="password"
error={$repeatTouched &&
$firstPassword !== $repeatPassword &&
"Passwords must match"}
bind:value={$repeatPassword}
/>
</Layout>
</FancyForm>

View File

@ -115,6 +115,7 @@
})
$: fields = bindings
.filter(x => arrayTypes.includes(x.fieldSchema?.type))
.filter(x => x.fieldSchema?.tableId != null)
.map(binding => {
const { providerId, readableBinding, runtimeBinding } = binding
const { name, type, tableId } = binding.fieldSchema

View File

@ -9,7 +9,10 @@ import { Constants } from "@budibase/frontend-core"
const { TypeIconMap } = Constants
export { RelationshipType } from "@budibase/types"
export {
RelationshipType,
RowExportFormat as ROW_EXPORT_FORMATS,
} from "@budibase/types"
export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType
@ -307,9 +310,3 @@ export const DatasourceTypes = {
GRAPH: "Graph",
API: "API",
}
export const ROW_EXPORT_FORMATS = {
CSV: "csv",
JSON: "json",
JSON_WITH_SCHEMA: "jsonWithSchema",
}

View File

@ -1,108 +1,88 @@
<script>
import ScreenDetailsModal from "components/design/ScreenDetailsModal.svelte"
import DatasourceModal from "./DatasourceModal.svelte"
import ScreenRoleModal from "./ScreenRoleModal.svelte"
import sanitizeUrl from "helpers/sanitizeUrl"
import FormTypeModal from "./FormTypeModal.svelte"
import { Modal, notifications } from "@budibase/bbui"
import {
screenStore,
navigationStore,
tables,
permissions as permissionsStore,
builderStore,
} from "stores/builder"
import { auth } from "stores/portal"
import { get } from "svelte/store"
import getTemplates from "templates"
import { Roles } from "constants/backend"
import { capitalise } from "helpers"
import { goto } from "@roxi/routify"
import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
import blankScreen from "templates/blankScreen"
import formScreen from "templates/formScreen"
import gridListScreen from "templates/gridListScreen"
import gridScreen from "templates/gridScreen"
import gridDetailsScreen from "templates/gridDetailsScreen"
import { Roles } from "constants/backend"
let mode
let pendingScreen
// Modal refs
let screenDetailsModal
let datasourceModal
let screenAccessRoleModal
let formTypeModal
// Cache variables for workflow
let screenAccessRole = Roles.BASIC
let selectedTablesAndViews = []
let permissions = {}
let templates = null
let screens = null
export const show = newMode => {
mode = newMode
selectedTablesAndViews = []
permissions = {}
let selectedDatasources = null
let blankScreenUrl = null
let screenMode = null
let formType = null
// Creates an array of screens, checking and sanitising their URLs
const createScreens = async ({ screens, screenAccessRole }) => {
if (!screens?.length) {
return
if (mode === "grid" || mode === "gridDetails" || mode === "form") {
datasourceModal.show()
} else if (mode === "blank") {
screenDetailsModal.show()
} else {
throw new Error("Invalid mode provided")
}
}
const createScreen = async screen => {
try {
let createdScreens = []
for (let screen of screens) {
// Check we aren't clashing with an existing URL
if (hasExistingUrl(screen.routing.route)) {
let suffix = 2
let candidateUrl = makeCandidateUrl(screen, suffix)
while (hasExistingUrl(candidateUrl)) {
candidateUrl = makeCandidateUrl(screen, ++suffix)
}
screen.routing.route = candidateUrl
// Check we aren't clashing with an existing URL
if (hasExistingUrl(screen.routing.route, screen.routing.roleId)) {
let suffix = 2
let candidateUrl = makeCandidateUrl(screen, suffix)
while (hasExistingUrl(candidateUrl, screen.routing.roleId)) {
candidateUrl = makeCandidateUrl(screen, ++suffix)
}
// Sanitise URL
screen.routing.route = sanitizeUrl(screen.routing.route)
// Use the currently selected role
if (!screenAccessRole) {
return
}
screen.routing.roleId = screenAccessRole
// Create the screen
const response = await screenStore.save(screen)
createdScreens.push(response)
// Add link in layout. We only ever actually create 1 screen now, even
// for autoscreens, so it's always safe to do this.
await navigationStore.saveLink(
screen.routing.route,
capitalise(screen.routing.route.split("/")[1]),
screenAccessRole
)
screen.routing.route = candidateUrl
}
return createdScreens
screen.routing.route = sanitizeUrl(screen.routing.route)
return await screenStore.save(screen)
} catch (error) {
console.error(error)
notifications.error("Error creating screens")
}
}
const addNavigationLink = async screen =>
await navigationStore.saveLink(
screen.routing.route,
capitalise(screen.routing.route.split("/")[1]),
screen.routing.roleId
)
// Checks if any screens exist in the store with the given route and
// currently selected role
const hasExistingUrl = url => {
const roleId = screenAccessRole
const hasExistingUrl = (url, screenAccessRole) => {
const screens = get(screenStore).screens.filter(
s => s.routing.roleId === roleId
s => s.routing.roleId === screenAccessRole
)
return !!screens.find(s => s.routing?.route === url)
}
// Constructs a candidate URL for a new screen, suffixing the base of the
// screen's URL with a given suffix.
// Constructs a candidate URL for a new screen, appending a given suffix to the
// screen's URL
// e.g. "/sales/:id" => "/sales-1/:id"
const makeCandidateUrl = (screen, suffix) => {
let url = screen.routing?.route || ""
@ -117,105 +97,79 @@
}
}
// Handler for NewScreenModal
export const show = newMode => {
mode = newMode
templates = null
screens = null
selectedDatasources = null
blankScreenUrl = null
screenMode = mode
pendingScreen = null
screenAccessRole = Roles.BASIC
formType = null
if (mode === "grid" || mode === "gridDetails" || mode === "form") {
datasourceModal.show()
} else if (mode === "blank") {
let templates = getTemplates($tables.list)
const blankScreenTemplate = templates.find(
t => t.id === "createFromScratch"
)
pendingScreen = blankScreenTemplate.create()
screenDetailsModal.show()
} else {
throw new Error("Invalid mode provided")
}
}
// Handler for DatasourceModal confirmation, move to screen access select
const confirmScreenDatasources = async ({ datasources }) => {
selectedDatasources = datasources
if (screenMode === "form") {
const onSelectDatasources = async () => {
if (mode === "form") {
formTypeModal.show()
} else {
screenAccessRoleModal.show()
} else if (mode === "grid") {
await createGridScreen()
} else if (mode === "gridDetails") {
await createGridDetailsScreen()
}
}
// Handler for Datasource Screen Creation
const completeDatasourceScreenCreation = async () => {
templates =
mode === "grid"
? gridListScreen(selectedDatasources)
: gridDetailsScreen(selectedDatasources)
const createBlankScreen = async ({ screenUrl }) => {
const screenTemplate = blankScreen(screenUrl)
const screen = await createScreen(screenTemplate)
await addNavigationLink(screenTemplate)
const screens = templates.map(template => {
let screenTemplate = template.create()
screenTemplate.autoTableId = template.resourceId
return screenTemplate
})
const createdScreens = await createScreens({ screens, screenAccessRole })
loadNewScreen(createdScreens)
loadNewScreen(screen)
}
const confirmScreenBlank = async ({ screenUrl }) => {
blankScreenUrl = screenUrl
screenAccessRoleModal.show()
}
const createGridScreen = async () => {
let firstScreen = null
// Submit request for a blank screen
const confirmBlankScreenCreation = async ({
screenUrl,
screenAccessRole,
}) => {
if (!pendingScreen) {
return
}
pendingScreen.routing.route = screenUrl
const createdScreens = await createScreens({
screens: [pendingScreen],
screenAccessRole,
})
loadNewScreen(createdScreens)
}
for (let tableOrView of selectedTablesAndViews) {
const screenTemplate = gridScreen(
tableOrView,
permissions[tableOrView.id]
)
const onConfirmFormType = () => {
screenAccessRoleModal.show()
}
const screen = await createScreen(screenTemplate)
await addNavigationLink(screen)
const loadNewScreen = createdScreens => {
const lastScreen = createdScreens.slice(-1)[0]
// Go to new screen
if (lastScreen?.props?._children.length) {
// Focus on the main component for the streen type
const mainComponent = lastScreen?.props?._children?.[0]._id
$goto(`./${lastScreen._id}/${mainComponent}`)
} else {
$goto(`./${lastScreen._id}`)
firstScreen ??= screen
}
screenStore.select(lastScreen._id)
loadNewScreen(firstScreen)
}
const confirmFormScreenCreation = async () => {
templates = formScreen(selectedDatasources, { actionType: formType })
screens = templates.map(template => {
let screenTemplate = template.create()
return screenTemplate
})
const createdScreens = await createScreens({ screens, screenAccessRole })
const createGridDetailsScreen = async () => {
let firstScreen = null
for (let tableOrView of selectedTablesAndViews) {
const screenTemplate = gridDetailsScreen(
tableOrView,
permissions[tableOrView.id]
)
const screen = await createScreen(screenTemplate)
await addNavigationLink(screen)
firstScreen ??= screen
}
loadNewScreen(firstScreen)
}
const createFormScreen = async formType => {
let firstScreen = null
for (let tableOrView of selectedTablesAndViews) {
const screenTemplate = formScreen(
tableOrView,
formType,
permissions[tableOrView.id]
)
const screen = await createScreen(screenTemplate)
// Only add a navigation link for `Create`, as both `Update` and `View`
// require an `id` in their URL in order to function.
if (formType === "Create") {
await addNavigationLink(screen)
}
firstScreen ??= screen
}
if (formType === "Update" || formType === "Create") {
const associatedTour =
@ -229,66 +183,89 @@
}
}
// Go to new screen
loadNewScreen(createdScreens)
loadNewScreen(firstScreen)
}
// Submit screen config for creation.
const confirmScreenCreation = async () => {
if (screenMode === "blank") {
confirmBlankScreenCreation({
screenUrl: blankScreenUrl,
screenAccessRole,
})
} else if (screenMode === "form") {
confirmFormScreenCreation()
const loadNewScreen = screen => {
if (screen?.props?._children.length) {
// Focus on the main component for the screen type
const mainComponent = screen?.props?._children?.[0]._id
$goto(`./${screen._id}/${mainComponent}`)
} else {
completeDatasourceScreenCreation()
$goto(`./${screen._id}`)
}
screenStore.select(screen._id)
}
const roleSelectBack = () => {
if (screenMode === "blank") {
screenDetailsModal.show()
const fetchPermission = resourceId => {
permissions[resourceId] = { loading: true, read: null, write: null }
permissionsStore
.forResource(resourceId)
.then(permission => {
if (permissions[resourceId]?.loading) {
permissions[resourceId] = {
loading: false,
read: permission?.read?.role,
write: permission?.write?.role,
}
}
})
.catch(e => {
console.error("Error fetching permission data: ", e)
if (permissions[resourceId]?.loading) {
permissions[resourceId] = {
loading: false,
read: Roles.PUBLIC,
write: Roles.PUBLIC,
}
}
})
}
const deletePermission = resourceId => {
delete permissions[resourceId]
permissions = permissions
}
const handleTableOrViewToggle = ({ detail: tableOrView }) => {
const alreadySelected = selectedTablesAndViews.some(
selected => selected.id === tableOrView.id
)
if (!alreadySelected) {
fetchPermission(tableOrView.id)
selectedTablesAndViews = [...selectedTablesAndViews, tableOrView]
} else {
datasourceModal.show()
deletePermission(tableOrView.id)
selectedTablesAndViews = selectedTablesAndViews.filter(
selected => selected.id !== tableOrView.id
)
}
}
</script>
<Modal bind:this={datasourceModal} autoFocus={false}>
<DatasourceModal {mode} onConfirm={confirmScreenDatasources} />
</Modal>
<Modal bind:this={screenAccessRoleModal}>
<ScreenRoleModal
onConfirm={() => {
confirmScreenCreation()
}}
bind:screenAccessRole
onCancel={roleSelectBack}
screenUrl={blankScreenUrl}
confirmText={screenMode === "form" ? "Confirm" : "Done"}
<DatasourceModal
{selectedTablesAndViews}
{permissions}
onConfirm={onSelectDatasources}
on:toggle={handleTableOrViewToggle}
/>
</Modal>
<Modal bind:this={screenDetailsModal}>
<ScreenDetailsModal
onConfirm={confirmScreenBlank}
initialUrl={blankScreenUrl}
/>
<ScreenDetailsModal onConfirm={createBlankScreen} />
</Modal>
<Modal bind:this={formTypeModal}>
<FormTypeModal
onConfirm={onConfirmFormType}
onConfirm={createFormScreen}
onCancel={() => {
formTypeModal.hide()
datasourceModal.show()
}}
on:select={e => {
formType = e.detail
}}
type={formType}
/>
</Modal>

View File

@ -1,42 +1,95 @@
<script>
import { ModalContent, Layout, notifications, Body } from "@budibase/bbui"
import { datasources } from "stores/builder"
import { datasources as datasourcesStore } from "stores/builder"
import ICONS from "components/backend/DatasourceNavigator/icons"
import { IntegrationNames } from "constants"
import { onMount } from "svelte"
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte"
import { createEventDispatcher, onMount } from "svelte"
import TableOrViewOption from "./TableOrViewOption.svelte"
export let onCancel
export let onConfirm
export let selectedTablesAndViews
export let permissions
let selectedSources = []
const dispatch = createEventDispatcher()
$: filteredSources = $datasources.list?.filter(datasource => {
return datasource.source !== IntegrationNames.REST && datasource["entities"]
})
const toggleSelection = datasource => {
const exists = selectedSources.find(
d => d.resourceId === datasource.resourceId
const getViews = table => {
const views = Object.values(table.views || {}).filter(
view => view.version === 2
)
if (exists) {
selectedSources = selectedSources.filter(
d => d.resourceId === datasource.resourceId
)
} else {
selectedSources = [...selectedSources, datasource]
}
return views.map(view => ({
icon: "Remove",
name: view.name,
id: view.id,
clientData: {
...view,
type: "viewV2",
label: view.name,
},
}))
}
const confirmDatasourceSelection = async () => {
await onConfirm({
datasources: selectedSources,
})
const getTablesAndViews = datasource => {
let tablesAndViews = []
const rawTables = Array.isArray(datasource.entities)
? datasource.entities
: Object.values(datasource.entities ?? {})
for (const rawTable of rawTables) {
if (rawTable._id === "ta_users") {
continue
}
const table = {
icon: "Table",
name: rawTable.name,
id: rawTable._id,
clientData: {
...rawTable,
label: rawTable.name,
tableId: rawTable._id,
type: "table",
},
}
tablesAndViews = tablesAndViews.concat([table, ...getViews(rawTable)])
}
return tablesAndViews
}
const getDatasources = rawDatasources => {
const datasources = []
for (const rawDatasource of rawDatasources) {
if (
rawDatasource.source === IntegrationNames.REST ||
!rawDatasource["entities"]
) {
continue
}
const datasource = {
name: rawDatasource.name,
iconComponent: ICONS[rawDatasource.source],
tablesAndViews: getTablesAndViews(rawDatasource),
}
datasources.push(datasource)
}
return datasources
}
$: datasources = getDatasources($datasourcesStore.list)
const toggleSelection = tableOrView => {
dispatch("toggle", tableOrView)
}
onMount(async () => {
try {
await datasources.fetch()
await datasourcesStore.fetch()
} catch (error) {
notifications.error("Error fetching datasources")
}
@ -48,66 +101,35 @@
title="Autogenerated screens"
confirmText="Confirm"
cancelText="Back"
onConfirm={confirmDatasourceSelection}
{onCancel}
disabled={!selectedSources.length}
{onConfirm}
disabled={!selectedTablesAndViews.length}
size="L"
>
<Body size="S">
Select which datasources you would like to use to create your screens
</Body>
<Layout noPadding gap="S">
{#each filteredSources as datasource}
{@const entities = Array.isArray(datasource.entities)
? datasource.entities
: Object.values(datasource.entities || {})}
{#each datasources as datasource}
<div class="data-source-wrap">
<div class="data-source-header">
<svelte:component
this={ICONS[datasource.source]}
this={datasource.iconComponent}
height="24"
width="24"
/>
<div class="data-source-name">{datasource.name}</div>
</div>
<!-- List all tables -->
{#each entities.filter(table => table._id !== "ta_users") as table}
{@const views = Object.values(table.views || {}).filter(
view => view.version === 2
{#each datasource.tablesAndViews as tableOrView}
{@const selected = selectedTablesAndViews.some(
selected => selected.id === tableOrView.id
)}
{@const tableDS = {
tableId: table._id,
label: table.name,
resourceId: table._id,
type: "table",
}}
{@const selected = selectedSources.find(
datasource => datasource.resourceId === tableDS.resourceId
)}
<DatasourceTemplateRow
on:click={() => toggleSelection(tableDS)}
<TableOrViewOption
roles={permissions[tableOrView.id]}
on:click={() => toggleSelection(tableOrView)}
{selected}
datasource={tableDS}
{tableOrView}
/>
<!-- List all views inside this table -->
{#each views as view}
{@const viewDS = {
label: view.name,
id: view.id,
resourceId: view.id,
tableId: view.tableId,
type: "viewV2",
}}
{@const selected = selectedSources.find(
x => x.resourceId === viewDS.resourceId
)}
<DatasourceTemplateRow
on:click={() => toggleSelection(viewDS)}
{selected}
datasource={viewDS}
/>
{/each}
{/each}
</div>
{/each}
@ -118,8 +140,11 @@
<style>
.data-source-wrap {
padding-bottom: var(--spectrum-alias-item-padding-s);
display: grid;
display: flex;
flex-direction: column;
grid-gap: var(--spacing-s);
max-width: 100%;
min-width: 0;
}
.data-source-header {
display: flex;

View File

@ -1,45 +0,0 @@
<script>
import { Icon } from "@budibase/bbui"
export let datasource
export let selected = false
$: icon = datasource.type === "viewV2" ? "Remove" : "Table"
</script>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="data-source-entry" class:selected on:click>
<Icon name={icon} color="var(--spectrum-global-color-gray-600)" />
{datasource.label}
{#if selected}
<span class="data-source-check">
<Icon size="S" name="CheckmarkCircle" />
</span>
{/if}
</div>
<style>
.data-source-entry {
cursor: pointer;
grid-gap: var(--spacing-m);
padding: var(--spectrum-alias-item-padding-s);
background: var(--spectrum-alias-background-color-secondary);
transition: 0.3s all;
border: 1px solid var(--spectrum-global-color-gray-300);
border-radius: 4px;
display: flex;
align-items: center;
}
.data-source-entry:hover,
.selected {
background: var(--spectrum-alias-background-color-tertiary);
}
.data-source-check {
margin-left: auto;
}
.data-source-check :global(.spectrum-Icon) {
color: var(--spectrum-global-color-green-600);
}
</style>

View File

@ -1,12 +1,10 @@
<script>
import { ModalContent, Layout, Body, Icon } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
let type = null
export let onCancel = () => {}
export let onConfirm = () => {}
export let type
const dispatch = createEventDispatcher()
</script>
<span>
@ -14,7 +12,7 @@
title="Select form type"
confirmText="Done"
cancelText="Back"
{onConfirm}
onConfirm={() => onConfirm(type)}
{onCancel}
disabled={!type}
size="L"
@ -25,9 +23,7 @@
<div
class="form-type"
class:selected={type === "Create"}
on:click={() => {
dispatch("select", "Create")
}}
on:click={() => (type = "Create")}
>
<div class="form-type-wrap">
<div class="form-type-content">
@ -46,9 +42,7 @@
<div
class="form-type"
class:selected={type === "Update"}
on:click={() => {
dispatch("select", "Update")
}}
on:click={() => (type = "Update")}
>
<div class="form-type-wrap">
<div class="form-type-content">
@ -65,9 +59,7 @@
<div
class="form-type"
class:selected={type === "View"}
on:click={() => {
dispatch("select", "View")
}}
on:click={() => (type = "View")}
>
<div class="form-type-wrap">
<div class="form-type-content">

View File

@ -1,62 +0,0 @@
<script>
import { Select, ModalContent } from "@budibase/bbui"
import { RoleUtils } from "@budibase/frontend-core"
import { roles, screenStore } from "stores/builder"
import { get } from "svelte/store"
import { onMount } from "svelte"
export let onConfirm
export let onCancel
export let screenUrl
export let screenAccessRole
export let confirmText = "Done"
let error
const onChangeRole = e => {
const roleId = e.detail
if (routeExists(screenUrl, roleId)) {
error = "This URL is already taken for this access role"
} else {
error = null
}
}
const routeExists = (url, role) => {
if (!url || !role) {
return false
}
return get(screenStore).screens.some(
screen =>
screen.routing.route.toLowerCase() === url.toLowerCase() &&
screen.routing.roleId === role
)
}
onMount(() => {
// Validate the initial role
onChangeRole({ detail: screenAccessRole })
})
</script>
<ModalContent
title="Access"
{confirmText}
cancelText="Back"
{onConfirm}
{onCancel}
disabled={!!error}
>
Select the level of access required to see these screens
<Select
bind:value={screenAccessRole}
on:change={onChangeRole}
label="Access"
{error}
getOptionLabel={role => role.name}
getOptionValue={role => role._id}
getOptionColour={role => RoleUtils.getRoleColour(role._id)}
options={$roles}
placeholder={null}
/>
</ModalContent>

View File

@ -0,0 +1,112 @@
<script>
import { Icon, AbsTooltip } from "@budibase/bbui"
import RoleIcon from "components/common/RoleIcon.svelte"
export let tableOrView
export let roles
export let selected = false
$: hideRoles = roles == undefined || roles?.loading
</script>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div role="button" tabindex="0" class="datasource" class:selected on:click>
<div class="content">
<Icon name={tableOrView.icon} />
<span>{tableOrView.name}</span>
</div>
<div class:hideRoles class="roles">
<AbsTooltip
type="info"
text={`Screens that only read data will be generated with access "${roles?.read?.toLowerCase()}"`}
>
<div class="role">
<span>read</span>
<RoleIcon
size="XS"
id={roles?.read}
disabled={roles?.loading !== false}
/>
</div>
</AbsTooltip>
<AbsTooltip
type="info"
text={`Screens that write data will be generated with access "${roles?.write?.toLowerCase()}"`}
>
<div class="role">
<span>write</span>
<RoleIcon
size="XS"
id={roles?.write}
disabled={roles?.loading !== false}
/>
</div>
</AbsTooltip>
</div>
</div>
<style>
.datasource {
cursor: pointer;
border: 1px solid var(--spectrum-global-color-gray-300);
transition: 160ms all;
border-radius: 4px;
display: flex;
align-items: center;
user-select: none;
background-color: var(--background);
}
.datasource :global(svg) {
transition: 160ms all;
color: var(--spectrum-global-color-gray-600);
}
.content {
padding: var(--spectrum-alias-item-padding-s);
display: flex;
align-items: center;
grid-gap: var(--spacing-m);
min-width: 0;
}
.content span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.datasource:hover {
border: 1px solid var(--grey-5);
}
.selected {
border: 1px solid var(--blue) !important;
}
.roles {
margin-left: auto;
display: flex;
flex-direction: column;
align-items: end;
padding-right: var(--spectrum-alias-item-padding-s);
opacity: 0.5;
transition: opacity 160ms;
}
.hideRoles {
opacity: 0;
pointer-events: none;
}
.role {
display: flex;
align-items: center;
}
.role span {
font-size: 11px;
margin-right: 5px;
}
</style>

View File

@ -187,7 +187,9 @@
<Divider />
<Layout gap="XS" noPadding>
<Heading size="XS">History</Heading>
<Body size="S">Free plan stores up to 1 day of automation history</Body>
{#if licensePlan?.type === Constants.PlanType.FREE}
<Body size="S">Free plan stores up to 1 day of automation history</Body>
{/if}
</Layout>
<div class="controls">
<div class="search">

View File

@ -4,47 +4,45 @@
Button,
Heading,
Layout,
ProgressCircle,
notifications,
FancyForm,
FancyInput,
} from "@budibase/bbui"
import { goto, params } from "@roxi/routify"
import { auth, organisation } from "stores/portal"
import Logo from "assets/bb-emblem.svg"
import { TestimonialPage } from "@budibase/frontend-core/src/components"
import { onMount } from "svelte"
import { handleError, passwordsMatch } from "./_components/utils"
import PasswordRepeatInput from "../../../components/common/users/PasswordRepeatInput.svelte"
const resetCode = $params["?code"]
let form
let formData = {}
let errors = {}
let loaded = false
let loading = false
let password
let passwordError
$: submitted = false
$: forceResetPassword = $auth?.user?.forceResetPassword
async function reset() {
form.validate()
if (Object.keys(errors).length > 0) {
if (!form.validate() || passwordError) {
return
}
submitted = true
try {
loading = true
if (forceResetPassword) {
await auth.updateSelf({
password: formData.password,
password,
forceResetPassword: false,
})
$goto("../portal/")
} else {
await auth.resetPassword(formData.password, resetCode)
await auth.resetPassword(password, resetCode)
notifications.success("Password reset successfully")
// send them to login if reset successful
$goto("./login")
}
} catch (err) {
submitted = false
loading = false
notifications.error(err.message || "Unable to reset password")
}
}
@ -58,86 +56,37 @@
}
loaded = true
})
const handleKeydown = evt => {
if (evt.key === "Enter") {
reset()
}
}
</script>
<svelte:window on:keydown={handleKeydown} />
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
<Layout gap="S" noPadding>
{#if loaded}
<img alt="logo" src={$organisation.logoUrl || Logo} />
{/if}
<Layout gap="XS" noPadding>
<Heading size="M">Reset your password</Heading>
<Body size="M">Please enter the new password you'd like to use.</Body>
</Layout>
<Layout gap="S" noPadding>
<FancyForm bind:this={form}>
<FancyInput
label="Password"
value={formData.password}
type="password"
on:change={e => {
formData = {
...formData,
password: e.detail,
}
}}
validate={() => {
let fieldError = {}
fieldError["password"] = !formData.password
? "Please enter a password"
: undefined
fieldError["confirmationPassword"] =
!passwordsMatch(
formData.password,
formData.confirmationPassword
) && formData.confirmationPassword
? "Passwords must match"
: undefined
errors = handleError({ ...errors, ...fieldError })
}}
error={errors.password}
disabled={submitted}
/>
<FancyInput
label="Repeat Password"
value={formData.confirmationPassword}
type="password"
on:change={e => {
formData = {
...formData,
confirmationPassword: e.detail,
}
}}
validate={() => {
const isValid =
!passwordsMatch(
formData.password,
formData.confirmationPassword
) && formData.password
let fieldError = {
confirmationPassword: isValid ? "Passwords must match" : null,
}
errors = handleError({ ...errors, ...fieldError })
}}
error={errors.confirmationPassword}
disabled={submitted}
/>
</FancyForm>
<Heading size="M">Reset your password</Heading>
<Body size="M">Must contain at least 12 characters</Body>
<PasswordRepeatInput
bind:passwordForm={form}
bind:password
bind:error={passwordError}
/>
<Button secondary cta on:click={reset}>
{#if loading}
<ProgressCircle overBackground={true} size="S" />
{:else}
Reset
{/if}
</Button>
</Layout>
<div>
<Button
disabled={Object.keys(errors).length > 0 ||
(forceResetPassword ? false : !resetCode)}
cta
on:click={reset}>Reset your password</Button
>
</div>
<div />
</Layout>
</TestimonialPage>

View File

@ -5,16 +5,19 @@ import { generate } from "shortid"
import { createHistoryStore } from "stores/builder/history"
import { notifications } from "@budibase/bbui"
import { updateReferencesInObject } from "dataBinding"
import { AutomationTriggerStepId } from "@budibase/types"
const initialAutomationState = {
automations: [],
testResults: null,
showTestPanel: false,
blockDefinitions: {
TRIGGER: [],
ACTION: [],
TRIGGER: {},
CREATABLE_TRIGGER: {},
ACTION: {},
},
selectedAutomationId: null,
automationDisplayData: {},
}
// If this functions, remove the actions elements
@ -45,32 +48,48 @@ const updateStepReferences = (steps, modifiedIndex, action) => {
})
}
const getFinalDefinitions = (triggers, actions) => {
const creatable = {}
Object.entries(triggers).forEach(entry => {
if (entry[0] === AutomationTriggerStepId.ROW_ACTION) {
return
}
creatable[entry[0]] = entry[1]
})
return {
TRIGGER: triggers,
CREATABLE_TRIGGER: creatable,
ACTION: actions,
}
}
const automationActions = store => ({
definitions: async () => {
const response = await API.getAutomationDefinitions()
store.update(state => {
state.blockDefinitions = {
TRIGGER: response.trigger,
ACTION: response.action,
}
state.blockDefinitions = getFinalDefinitions(
response.trigger,
response.action
)
return state
})
return response
},
fetch: async () => {
const responses = await Promise.all([
API.getAutomations(),
const [automationResponse, definitions] = await Promise.all([
API.getAutomations({ enrich: true }),
API.getAutomationDefinitions(),
])
store.update(state => {
state.automations = responses[0]
state.automations = automationResponse.automations
state.automations.sort((a, b) => {
return a.name < b.name ? -1 : 1
})
state.blockDefinitions = {
TRIGGER: responses[1].trigger,
ACTION: responses[1].action,
}
state.automationDisplayData = automationResponse.builderData
state.blockDefinitions = getFinalDefinitions(
definitions.trigger,
definitions.action
)
return state
})
},
@ -85,8 +104,6 @@ const automationActions = store => ({
disabled: false,
}
const response = await store.actions.save(automation)
await store.actions.fetch()
store.actions.select(response._id)
return response
},
duplicate: async automation => {
@ -96,25 +113,13 @@ const automationActions = store => ({
_id: undefined,
_ref: undefined,
})
await store.actions.fetch()
store.actions.select(response._id)
return response
},
save: async automation => {
const response = await API.updateAutomation(automation)
store.update(state => {
const updatedAutomation = response.automation
const existingIdx = state.automations.findIndex(
existing => existing._id === automation._id
)
if (existingIdx !== -1) {
state.automations.splice(existingIdx, 1, updatedAutomation)
return state
} else {
state.automations = [...state.automations, updatedAutomation]
}
return state
})
await store.actions.fetch()
store.actions.select(response._id)
return response.automation
},
delete: async automation => {
@ -122,18 +127,22 @@ const automationActions = store => ({
automationId: automation?._id,
automationRev: automation?._rev,
})
store.update(state => {
// Remove the automation
state.automations = state.automations.filter(
x => x._id !== automation._id
)
// Select a new automation if required
if (automation._id === state.selectedAutomationId) {
store.actions.select(state.automations[0]?._id)
state.selectedAutomationId = state.automations[0]?._id || null
}
// Clear out automationDisplayData for the automation
delete state.automationDisplayData[automation._id]
return state
})
await store.actions.fetch()
},
toggleDisabled: async automationId => {
let automation
@ -308,7 +317,9 @@ const automationActions = store => ({
if (!automation) {
return
}
delete newAutomation.definition.stepNames[blockId]
if (newAutomation.definition.stepNames) {
delete newAutomation.definition.stepNames[blockId]
}
await store.actions.save(newAutomation)
},
@ -384,3 +395,13 @@ export const selectedAutomation = derived(automationStore, $automationStore => {
x => x._id === $automationStore.selectedAutomationId
)
})
export const selectedAutomationDisplayData = derived(
[automationStore, selectedAutomation],
([$automationStore, $selectedAutomation]) => {
if (!$selectedAutomation?._id) {
return null
}
return $automationStore.automationDisplayData[$selectedAutomation._id]
}
)

View File

@ -11,6 +11,7 @@ import {
automationStore,
selectedAutomation,
automationHistoryStore,
selectedAutomationDisplayData,
} from "./automations.js"
import { userStore, userSelectedResourceMap, isOnlyUser } from "./users.js"
import { deploymentStore } from "./deployments.js"
@ -44,6 +45,7 @@ export {
previewStore,
automationStore,
selectedAutomation,
selectedAutomationDisplayData,
automationHistoryStore,
sortedScreens,
userStore,

View File

@ -63,6 +63,11 @@ export class Screen extends BaseStructure {
return this
}
autoTableId(autoTableId) {
this._json.autoTableId = autoTableId
return this
}
instanceName(name) {
this._json.props._instanceName = name
return this

View File

@ -0,0 +1,7 @@
import { Screen } from "./Screen"
const blankScreen = route => {
return new Screen().instanceName("New Screen").route(route).json()
}
export default blankScreen

View File

@ -1,12 +0,0 @@
import { Screen } from "./Screen"
export default {
name: `Create from scratch`,
id: `createFromScratch`,
create: () => createScreen(),
table: `Create from scratch`,
}
const createScreen = () => {
return new Screen().instanceName("New Screen").json()
}

View File

@ -3,41 +3,47 @@ import { Component } from "./Component"
import sanitizeUrl from "helpers/sanitizeUrl"
export const FORM_TEMPLATE = "FORM_TEMPLATE"
export const formUrl = datasource => sanitizeUrl(`/${datasource.label}-form`)
// Mode not really necessary
export default function (datasources, config) {
if (!Array.isArray(datasources)) {
return []
export const formUrl = (tableOrView, actionType) => {
if (actionType === "Create") {
return sanitizeUrl(`/${tableOrView.name}/new`)
} else if (actionType === "Update") {
return sanitizeUrl(`/${tableOrView.name}/edit/:id`)
} else if (actionType === "View") {
return sanitizeUrl(`/${tableOrView.name}/view/:id`)
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - Form`,
create: () => createScreen(datasource, config),
id: FORM_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
const generateMultistepFormBlock = (dataSource, { actionType } = {}) => {
export const getRole = (permissions, actionType) => {
if (actionType === "View") {
return permissions.read
}
return permissions.write
}
const generateMultistepFormBlock = (tableOrView, actionType) => {
const multistepFormBlock = new Component(
"@budibase/standard-components/multistepformblock"
)
multistepFormBlock
.customProps({
actionType,
dataSource,
dataSource: tableOrView.clientData,
steps: [{}],
rowId: actionType === "new" ? undefined : `{{ url.id }}`,
})
.instanceName(`${dataSource.label} - Multistep Form block`)
.instanceName(`${tableOrView.name} - Multistep Form block`)
return multistepFormBlock
}
const createScreen = (datasource, config) => {
const createScreen = (tableOrView, actionType, permissions) => {
return new Screen()
.route(formUrl(datasource))
.instanceName(`${datasource.label} - Form`)
.addChild(generateMultistepFormBlock(datasource, config))
.route(formUrl(tableOrView, actionType))
.instanceName(`${tableOrView.name} - Form`)
.role(getRole(permissions, actionType))
.autoTableId(tableOrView.id)
.addChild(generateMultistepFormBlock(tableOrView, actionType))
.json()
}
export default createScreen

View File

@ -5,24 +5,9 @@ import { generate } from "shortid"
import { makePropSafe as safe } from "@budibase/string-templates"
import { Utils } from "@budibase/frontend-core"
export default function (datasources) {
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - List with panel`,
create: () => createScreen(datasource),
id: GRID_DETAILS_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
const gridDetailsUrl = tableOrView => sanitizeUrl(`/${tableOrView.name}`)
export const GRID_DETAILS_TEMPLATE = "GRID_DETAILS_TEMPLATE"
export const gridDetailsUrl = datasource => sanitizeUrl(`/${datasource.label}`)
const createScreen = datasource => {
const createScreen = (tableOrView, permissions) => {
/*
Create Row
*/
@ -47,7 +32,7 @@ const createScreen = datasource => {
type: "cta",
})
buttonGroup.instanceName(`${datasource.label} - Create`).customProps({
buttonGroup.instanceName(`${tableOrView.name} - Create`).customProps({
hAlign: "right",
buttons: [createButton.json()],
})
@ -62,7 +47,7 @@ const createScreen = datasource => {
const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading")
.customProps({
text: datasource?.label,
text: tableOrView.name,
})
gridHeader.addChild(heading)
@ -72,7 +57,7 @@ const createScreen = datasource => {
"@budibase/standard-components/formblock"
)
createFormBlock.instanceName("Create row form block").customProps({
dataSource: datasource,
dataSource: tableOrView.clientData,
labelPosition: "left",
buttonPosition: "top",
actionType: "Create",
@ -83,7 +68,7 @@ const createScreen = datasource => {
showSaveButton: true,
saveButtonLabel: "Save",
actionType: "Create",
dataSource: datasource,
dataSource: tableOrView.clientData,
}),
})
@ -99,7 +84,7 @@ const createScreen = datasource => {
const editFormBlock = new Component("@budibase/standard-components/formblock")
editFormBlock.instanceName("Edit row form block").customProps({
dataSource: datasource,
dataSource: tableOrView.clientData,
labelPosition: "left",
buttonPosition: "top",
actionType: "Update",
@ -112,7 +97,7 @@ const createScreen = datasource => {
saveButtonLabel: "Save",
deleteButtonLabel: "Delete",
actionType: "Update",
dataSource: datasource,
dataSource: tableOrView.clientData,
}),
})
@ -121,7 +106,7 @@ const createScreen = datasource => {
const gridBlock = new Component("@budibase/standard-components/gridblock")
gridBlock
.customProps({
table: datasource,
table: tableOrView.clientData,
allowAddRows: false,
allowEditRows: false,
allowDeleteRows: false,
@ -145,14 +130,18 @@ const createScreen = datasource => {
},
],
})
.instanceName(`${datasource.label} - Table`)
.instanceName(`${tableOrView.name} - Table`)
return new Screen()
.route(gridDetailsUrl(datasource))
.instanceName(`${datasource.label} - List and details`)
.route(gridDetailsUrl(tableOrView))
.instanceName(`${tableOrView.name} - List and details`)
.role(permissions.write)
.autoTableId(tableOrView.resourceId)
.addChild(gridHeader)
.addChild(gridBlock)
.addChild(createRowSidePanel)
.addChild(detailsSidePanel)
.json()
}
export default createScreen

View File

@ -1,41 +0,0 @@
import sanitizeUrl from "helpers/sanitizeUrl"
import { Screen } from "./Screen"
import { Component } from "./Component"
export default function (datasources) {
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - List`,
create: () => createScreen(datasource),
id: GRID_LIST_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
export const GRID_LIST_TEMPLATE = "GRID_LIST_TEMPLATE"
export const gridListUrl = datasource => sanitizeUrl(`/${datasource.label}`)
const createScreen = datasource => {
const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading")
.customProps({
text: datasource?.label,
})
const gridBlock = new Component("@budibase/standard-components/gridblock")
.instanceName(`${datasource.label} - Table`)
.customProps({
table: datasource,
})
return new Screen()
.route(gridListUrl(datasource))
.instanceName(`${datasource.label} - List`)
.addChild(heading)
.addChild(gridBlock)
.json()
}

View File

@ -0,0 +1,30 @@
import sanitizeUrl from "helpers/sanitizeUrl"
import { Screen } from "./Screen"
import { Component } from "./Component"
const gridUrl = tableOrView => sanitizeUrl(`/${tableOrView.name}`)
const createScreen = (tableOrView, permissions) => {
const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading")
.customProps({
text: tableOrView.name,
})
const gridBlock = new Component("@budibase/standard-components/gridblock")
.instanceName(`${tableOrView.name} - Table`)
.customProps({
table: tableOrView.clientData,
})
return new Screen()
.route(gridUrl(tableOrView))
.instanceName(`${tableOrView.name} - List`)
.role(permissions.write)
.autoTableId(tableOrView.id)
.addChild(heading)
.addChild(gridBlock)
.json()
}
export default createScreen

View File

@ -1,35 +0,0 @@
import gridListScreen from "./gridListScreen"
import gridDetailsScreen from "./gridDetailsScreen"
import createFromScratchScreen from "./createFromScratchScreen"
import formScreen from "./formScreen"
const allTemplates = datasources => [
...gridListScreen(datasources),
...gridDetailsScreen(datasources),
...formScreen(datasources),
]
// Allows us to apply common behaviour to all create() functions
const createTemplateOverride = template => () => {
const screen = template.create()
screen.name = screen.props._id
screen.routing.route = screen.routing.route.toLowerCase()
screen.template = template.id
return screen
}
export default datasources => {
const enrichTemplate = template => ({
...template,
create: createTemplateOverride(template),
})
const fromScratch = enrichTemplate(createFromScratchScreen)
const tableTemplates = allTemplates(datasources).map(enrichTemplate)
return [
fromScratch,
...tableTemplates.sort((templateA, templateB) => {
return templateA.name > templateB.name ? 1 : -1
}),
]
}

View File

@ -26,9 +26,14 @@ export const buildAutomationEndpoints = API => ({
/**
* Gets a list of all automations.
*/
getAutomations: async () => {
getAutomations: async ({ enrich }) => {
const params = new URLSearchParams()
if (enrich) {
params.set("enrich", true)
}
return await API.get({
url: "/api/automations",
url: `/api/automations?${params.toString()}`,
})
},

View File

@ -1,206 +0,0 @@
// @ts-ignore
import fs from "fs"
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FetchMock {
// @ts-ignore
const fetch = jest.requireActual("node-fetch")
let failCount = 0
let mockSearch = false
const func = async (url: any, opts: any) => {
const { host, pathname } = new URL(url)
function json(body: any, status = 200) {
return {
status,
headers: {
raw: () => {
return { "content-type": ["application/json"] }
},
get: (name: string) => {
if (name.toLowerCase() === "content-type") {
return ["application/json"]
}
},
},
json: async () => {
//x-www-form-encoded body is a URLSearchParams
//The call to stringify it leaves it blank
if (body?.opts?.body instanceof URLSearchParams) {
const paramArray = Array.from(body.opts.body.entries())
body.opts.body = paramArray.reduce((acc: any, pair: any) => {
acc[pair[0]] = pair[1]
return acc
}, {})
}
return body
},
}
}
if (pathname.includes("/api/global")) {
const user = {
email: "test@example.com",
_id: "us_test@example.com",
status: "active",
roles: {},
builder: {
global: false,
},
admin: {
global: false,
},
}
return pathname.endsWith("/users") && opts.method === "GET"
? json([user])
: json(user)
}
// mocked data based on url
else if (pathname.includes("api/apps")) {
return json({
app1: {
url: "/app1",
},
})
} else if (host.includes("example.com")) {
return json({
body: opts.body,
url,
method: opts.method,
})
} else if (host.includes("invalid.com")) {
return json(
{
invalid: true,
},
404
)
} else if (mockSearch && pathname.includes("_search")) {
const body = opts.body
const parts = body.split("tableId:")
let tableId
if (parts && parts[1]) {
tableId = parts[1].split('"')[0]
}
return json({
rows: [
{
doc: {
_id: "test",
tableId: tableId,
query: opts.body,
},
},
],
bookmark: "test",
})
} else if (host.includes("google.com")) {
return json({
url,
opts,
value:
'<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="en-GB"></html>',
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box"
) {
return Promise.resolve({
json: () => {
return {
name: "budibase-comment-box",
releases_url:
"https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
}
},
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box/latest"
) {
return Promise.resolve({
json: () => {
return {
assets: [
{
content_type: "application/gzip",
browser_download_url:
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
},
],
}
},
})
} else if (
url ===
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.npmjs.com/package/budibase-component") {
return Promise.resolve({
status: 200,
json: () => {
return {
name: "budibase-component",
"dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz",
},
},
},
}
},
})
} else if (
url ===
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/budibase-component-1.0.2.tgz"
),
ok: true,
})
} else if (
url === "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.googleapis.com/oauth2/v4/token") {
// any valid response
return json({})
} else if (host.includes("failonce.com")) {
failCount++
if (failCount === 1) {
return json({ message: "error" }, 500)
} else {
return json({
fails: failCount - 1,
url,
opts,
})
}
}
return fetch(url, opts)
}
func.Headers = fetch.Headers
func.mockSearch = () => {
mockSearch = true
}
module.exports = func
}

View File

@ -1,21 +0,0 @@
const executeMock = jest.fn(() => ({
rows: [
{
a: "string",
b: 1,
},
],
}))
const closeMock = jest.fn()
class Connection {
execute = executeMock
close = closeMock
}
module.exports = {
getConnection: jest.fn(() => new Connection()),
executeMock,
closeMock,
}

View File

@ -0,0 +1,5 @@
MSSQL_SHA=sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8

View File

@ -16,7 +16,8 @@
"build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets",
"build:isolated-vm-lib:buffer": "esbuild --minify --bundle src/jsRunner/bundles/buffer.ts --outfile=src/jsRunner/bundles/buffer.ivm.bundle.js --platform=node --format=iife --global-name=buffer",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets && yarn build:isolated-vm-lib:buffer",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
"jest": "NODE_OPTIONS=\"--no-node-snapshot $NODE_OPTIONS\" jest",
@ -68,6 +69,7 @@
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"buffer": "6.0.3",
"bull": "4.10.1",
"chokidar": "3.5.3",
"content-disposition": "^0.5.4",

View File

@ -1,4 +1,4 @@
FROM mcr.microsoft.com/mssql/server:2022-latest
FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
ENV ACCEPT_EULA=Y
ENV SA_PASSWORD=Passw0rd

View File

@ -6,9 +6,9 @@ services:
db:
restart: unless-stopped
platform: linux/x86_64
image: container-registry.oracle.com/database/express:18.4.0-xe
image: gvenzl/oracle-free:23.2-slim-faststart
environment:
ORACLE_PWD: oracle
ORACLE_PWD: Password1
ports:
- 1521:1521
- 5500:5500
@ -16,4 +16,4 @@ services:
- oracle_data:/opt/oracle/oradata
volumes:
oracle_data:
oracle_data:

View File

@ -1,4 +1,5 @@
import * as triggers from "../../automations/triggers"
import { sdk as coreSdk } from "@budibase/shared-core"
import { DocumentType } from "../../db/utils"
import { updateTestHistory, removeDeprecated } from "../../automations/utils"
import { setTestFlag, clearTestFlag } from "../../utilities/redis"
@ -11,6 +12,7 @@ import {
AutomationResults,
UserCtx,
DeleteAutomationResponse,
FetchAutomationResponse,
} from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk"
@ -73,8 +75,17 @@ export async function update(ctx: UserCtx) {
builderSocket?.emitAutomationUpdate(ctx, automation)
}
export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.automations.fetch()
export async function fetch(ctx: UserCtx<void, FetchAutomationResponse>) {
const query: { enrich?: string } = ctx.request.query || {}
const enrich = query.enrich === "true"
const automations = await sdk.automations.fetch()
ctx.body = { automations }
if (enrich) {
ctx.body.builderData = await sdk.automations.utils.getBuilderData(
automations
)
}
}
export async function find(ctx: UserCtx) {
@ -84,6 +95,11 @@ export async function find(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) {
const automationId = ctx.params.id
const automation = await sdk.automations.get(ctx.params.id)
if (coreSdk.automations.isRowAction(automation)) {
ctx.throw("Row actions automations cannot be deleted", 422)
}
ctx.body = await sdk.automations.remove(automationId, ctx.params.rev)
builderSocket?.emitAutomationDeletion(ctx, automationId)
}

View File

@ -1,6 +1,13 @@
import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core"
import { PluginType, FileType, PluginSource } from "@budibase/types"
import {
PluginType,
FileType,
PluginSource,
Ctx,
CreatePluginRequest,
CreatePluginResponse,
} from "@budibase/types"
import env from "../../../environment"
import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk"
@ -29,7 +36,9 @@ export async function upload(ctx: any) {
}
}
export async function create(ctx: any) {
export async function create(
ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
) {
const { source, url, headers, githubToken } = ctx.request.body
try {
@ -75,14 +84,9 @@ export async function create(ctx: any) {
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket?.emit("plugins-update", { name, hash: doc.hash })
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],
}
ctx.body = { plugin: doc }
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}

View File

@ -66,9 +66,14 @@ export interface RunConfig {
includeSqlRelationships?: IncludeRelationship
}
export type ExternalReadRequestReturnType = {
rows: Row[]
rawResponseSize: number
}
export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ
? Row[]
? ExternalReadRequestReturnType
: T extends Operation.COUNT
? number
: { row: Row; table: Table }
@ -741,9 +746,11 @@ export class ExternalRequest<T extends Operation> {
)
// if reading it'll just be an array of rows, return whole thing
if (operation === Operation.READ) {
return (
Array.isArray(output) ? output : [output]
) as ExternalRequestReturnType<T>
const rows = Array.isArray(output) ? output : [output]
return {
rows,
rawResponseSize: responseRows.length,
} as ExternalRequestReturnType<T>
} else {
return { row: output[0], table } as ExternalRequestReturnType<T>
}

View File

@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
const table: Table = tables[tableName]
const row = response[0]
const row = response.rows[0]
// this seems like a lot of work, but basically we need to dig deeper for the enrich
// for a single row, there is probably a better way to do this with some smart multi-layer joins
for (let [fieldName, field] of Object.entries(table.schema)) {
@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
row[fieldName] = await outputProcessing(linkedTable, relatedRows, {
squash: true,
preserveLinks: true,
})
row[fieldName] = await outputProcessing<Row[]>(
linkedTable,
relatedRows.rows,
{
squash: true,
preserveLinks: true,
}
)
}
return row
}

View File

@ -1,8 +1,7 @@
// need to handle table name + field or just field, depending on if relationships used
import { FieldType, Row, Table } from "@budibase/types"
import { helpers } from "@budibase/shared-core"
import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils"
import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
function extractFieldValue({
row,
@ -94,7 +93,7 @@ export function basicProcessing({
thisRow._rev = "rev"
} else {
const columns = Object.keys(table.schema)
for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) {
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({
row,
tableName: table._id!,

View File

@ -31,7 +31,12 @@ export async function find(ctx: Ctx<void, RowActionsResponse>) {
actions: Object.entries(actions).reduce<Record<string, RowActionResponse>>(
(acc, [key, action]) => ({
...acc,
[key]: { id: key, tableId: table._id!, ...action },
[key]: {
id: key,
tableId: table._id!,
name: action.name,
automationId: action.automationId,
},
}),
{}
),
@ -50,7 +55,9 @@ export async function create(
ctx.body = {
tableId: table._id!,
...createdAction,
id: createdAction.id,
name: createdAction.name,
automationId: createdAction.automationId,
}
ctx.status = 201
}
@ -61,13 +68,15 @@ export async function update(
const table = await getTable(ctx)
const { actionId } = ctx.params
const actions = await sdk.rowActions.update(table._id!, actionId, {
const action = await sdk.rowActions.update(table._id!, actionId, {
name: ctx.request.body.name,
})
ctx.body = {
tableId: table._id!,
...actions,
id: action.id,
name: action.name,
automationId: action.automationId,
}
}

View File

@ -1,3 +1,10 @@
export function run() {
throw new Error("Function not implemented.")
import { RowActionTriggerRequest, Ctx } from "@budibase/types"
import sdk from "../../../sdk"
export async function run(ctx: Ctx<RowActionTriggerRequest, void>) {
const { tableId, actionId } = ctx.params
const { rowId } = ctx.request.body
await sdk.rowActions.run(tableId, actionId, rowId)
ctx.status = 200
}

View File

@ -14,22 +14,31 @@ import { events, HTTPError } from "@budibase/backend-core"
import {
BulkImportRequest,
BulkImportResponse,
CsvToJsonRequest,
CsvToJsonResponse,
FetchTablesResponse,
FieldType,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest,
SaveTableResponse,
Table,
TableResponse,
TableSourceType,
UserCtx,
ValidateNewTableImportRequest,
ValidateTableImportRequest,
ValidateTableImportResponse,
} from "@budibase/types"
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
import { helpers } from "@budibase/shared-core"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && isExternalTable(table)) {
@ -144,7 +153,9 @@ export async function bulkImport(
ctx.body = { message: `Bulk rows created.` }
}
export async function csvToJson(ctx: UserCtx) {
export async function csvToJson(
ctx: UserCtx<CsvToJsonRequest, CsvToJsonResponse>
) {
const { csvString } = ctx.request.body
const result = await jsonFromCsvString(csvString)
@ -153,24 +164,40 @@ export async function csvToJson(ctx: UserCtx) {
ctx.body = result
}
export async function validateNewTableImport(ctx: UserCtx) {
const { rows, schema }: { rows: unknown; schema: unknown } = ctx.request.body
export async function validateNewTableImport(
ctx: UserCtx<ValidateNewTableImportRequest, ValidateTableImportResponse>
) {
const { rows, schema } = ctx.request.body
if (isRows(rows) && isSchema(schema)) {
ctx.status = 200
ctx.body = validateSchema(rows, schema)
ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
} else {
ctx.status = 422
}
}
export async function validateExistingTableImport(ctx: UserCtx) {
const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body
export async function validateExistingTableImport(
ctx: UserCtx<ValidateTableImportRequest, ValidateTableImportResponse>
) {
const { rows, tableId } = ctx.request.body
let schema = null
let protectedColumnNames
if (tableId) {
const table = await sdk.tables.getTable(tableId)
schema = table.schema
if (!isExternalTable(table)) {
schema._id = {
name: "_id",
type: FieldType.STRING,
}
protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
} else {
protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
}
} else {
ctx.status = 422
return
@ -178,7 +205,7 @@ export async function validateExistingTableImport(ctx: UserCtx) {
if (tableId && isRows(rows) && isSchema(schema)) {
ctx.status = 200
ctx.body = validateSchema(rows, schema)
ctx.body = validateSchema(rows, schema, protectedColumnNames)
} else {
ctx.status = 422
}

View File

@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
import {
BulkImportRequest,
BulkImportResponse,
FieldType,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@ -25,6 +26,8 @@ export async function save(
sourceType: rest.sourceType || TableSourceType.INTERNAL,
}
const isImport = !!rows
if (!tableToSave.views) {
tableToSave.views = {}
}
@ -35,6 +38,7 @@ export async function save(
rowsToImport: rows,
tableId: ctx.request.body._id,
renaming,
isImport,
})
return table
@ -66,10 +70,22 @@ export async function bulkImport(
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(table, {
importRows: rows,
identifierFields,
user: ctx.user,
})
await handleDataImport(
{
...table,
schema: {
_id: {
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
return table
}

View File

@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
export async function importToRows(
data: Row[],
table: Table,
user?: ContextUser
user?: ContextUser,
opts?: { keepCouchId: boolean }
) {
let originalTable = table
let finalData: any = []
const originalTable = table
const finalData: Row[] = []
const keepCouchId = !!opts?.keepCouchId
for (let i = 0; i < data.length; i++) {
let row = data[i]
row._id = generateRowID(table._id!)
row._id = (keepCouchId && row._id) || generateRowID(table._id!)
row.type = "row"
row.tableId = table._id
@ -180,7 +182,11 @@ export async function handleDataImport(
const db = context.getAppDB()
const data = parse(importRows, table)
let finalData: any = await importToRows(data, table, user)
const finalData = await importToRows(data, table, user, {
keepCouchId: identifierFields.includes("_id"),
})
let newRowCount = finalData.length
//Set IDs of finalData to match existing row if an update is expected
if (identifierFields.length > 0) {
@ -203,12 +209,14 @@ export async function handleDataImport(
if (match) {
finalItem._id = doc._id
finalItem._rev = doc._rev
newRowCount--
}
})
})
}
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
tableId: table._id,
})

View File

@ -1,4 +1,6 @@
import { Row, TableSchema } from "@budibase/types"
import { Row, RowExportFormat, TableSchema } from "@budibase/types"
export { RowExportFormat as Format } from "@budibase/types"
function getHeaders(
headers: string[],
@ -46,16 +48,6 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
return JSON.stringify({ schema: newSchema, rows }, undefined, 2)
}
export enum Format {
CSV = "csv",
JSON = "json",
JSON_WITH_SCHEMA = "jsonWithSchema",
}
export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value) as T
export function isFormat(format: any): format is RowExportFormat {
return Object.values(RowExportFormat).includes(format as RowExportFormat)
}

View File

@ -1,13 +1,12 @@
import Router from "@koa/router"
import Joi from "joi"
import { middleware, permissions } from "@budibase/backend-core"
import * as rowActionController from "../controllers/rowAction"
import { authorizedResource } from "../../middleware/authorized"
import { middleware, permissions } from "@budibase/backend-core"
import Joi from "joi"
const { PermissionLevel, PermissionType } = permissions
export function rowActionValidator() {
function rowActionValidator() {
return middleware.joiValidator.body(
Joi.object({
name: Joi.string().required(),
@ -16,6 +15,15 @@ export function rowActionValidator() {
)
}
function rowTriggerValidator() {
return middleware.joiValidator.body(
Joi.object({
rowId: Joi.string().required(),
}),
{ allowUnknown: false }
)
}
const router: Router = new Router()
// CRUD endpoints
@ -45,7 +53,8 @@ router
// Other endpoints
.post(
"/api/tables/:tableId/actions/:actionId/run",
"/api/tables/:tableId/actions/:actionId/trigger",
rowTriggerValidator(),
authorizedResource(PermissionType.TABLE, PermissionLevel.READ, "tableId"),
rowActionController.run
)

View File

@ -20,6 +20,7 @@ import { type App } from "@budibase/types"
import tk from "timekeeper"
import * as uuid from "uuid"
import { structures } from "@budibase/backend-core/tests"
import nock from "nock"
describe("/applications", () => {
let config = setup.getConfig()
@ -35,6 +36,7 @@ describe("/applications", () => {
throw new Error("Failed to publish app")
}
jest.clearAllMocks()
nock.cleanAll()
})
// These need to go first for the app totals to make sense
@ -324,18 +326,33 @@ describe("/applications", () => {
describe("delete", () => {
it("should delete published app and dev apps with dev app ID", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(app.appId)
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should delete published app and dev app with prod app ID", async () => {
await config.api.application.delete(app.appId.replace("_dev", ""))
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(prodAppId)
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
await config.api.application.delete(app.appId)
})

View File

@ -14,6 +14,7 @@ import sdk from "../../../sdk"
import { Automation, FieldType, Table } from "@budibase/types"
import { mocks } from "@budibase/backend-core/tests"
import { FilterConditions } from "../../../automations/steps/filter"
import { removeDeprecated } from "../../../automations/utils"
const MAX_RETRIES = 4
let {
@ -69,14 +70,15 @@ describe("/automations", () => {
.expect("Content-Type", /json/)
.expect(200)
let definitionsLength = Object.keys(BUILTIN_ACTION_DEFINITIONS).length
definitionsLength-- // OUTGOING_WEBHOOK is deprecated
let definitionsLength = Object.keys(
removeDeprecated(BUILTIN_ACTION_DEFINITIONS)
).length
expect(Object.keys(res.body.action).length).toBeGreaterThanOrEqual(
definitionsLength
)
expect(Object.keys(res.body.trigger).length).toEqual(
Object.keys(TRIGGER_DEFINITIONS).length
Object.keys(removeDeprecated(TRIGGER_DEFINITIONS)).length
)
})
})
@ -398,7 +400,9 @@ describe("/automations", () => {
.expect("Content-Type", /json/)
.expect(200)
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
expect(res.body.automations[0]).toEqual(
expect.objectContaining(autoConfig)
)
})
it("should apply authorization to endpoint", async () => {
@ -423,6 +427,22 @@ describe("/automations", () => {
expect(events.automation.deleted).toHaveBeenCalledTimes(1)
})
it("cannot delete a row action automation", async () => {
const automation = await config.createAutomation(
setup.structures.rowActionAutomation()
)
await request
.delete(`/api/automations/${automation._id}/${automation._rev}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(422, {
message: "Row actions automations cannot be deleted",
status: 422,
})
expect(events.automation.deleted).not.toHaveBeenCalled()
})
it("should apply authorization to endpoint", async () => {
const automation = await config.createAutomation()
await checkBuilderEndpoint({

View File

@ -15,9 +15,11 @@ import {
Table,
TableSchema,
SupportedSqlTypes,
JsonFieldSubType,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"
describe("/datasources", () => {
const config = setup.getConfig()
@ -36,6 +38,7 @@ describe("/datasources", () => {
config: {},
})
jest.clearAllMocks()
nock.cleanAll()
})
describe("create", () => {
@ -70,6 +73,12 @@ describe("/datasources", () => {
describe("dynamic variables", () => {
it("should invalidate changed or removed variables", async () => {
nock("http://www.example.com/")
.get("/")
.reply(200, [{ value: "test" }])
.get("/?test=test")
.reply(200, [{ value: 1 }])
let datasource = await config.api.datasource.create({
type: "datasource",
name: "Rest",
@ -80,7 +89,7 @@ describe("/datasources", () => {
const query = await config.api.query.save({
datasourceId: datasource._id!,
fields: {
path: "www.google.com",
path: "www.example.com",
},
parameters: [],
transformer: null,
@ -288,7 +297,10 @@ describe("/datasources", () => {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
presence: {
allowEmpty: false,
},
inclusion: [],
},
},
[FieldType.NUMBER]: {
@ -302,6 +314,10 @@ describe("/datasources", () => {
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",

View File

@ -15,6 +15,8 @@ jest.mock("@budibase/backend-core", () => {
import { events, objectStore } from "@budibase/backend-core"
import * as setup from "./utilities"
import nock from "nock"
import { PluginSource } from "@budibase/types"
const mockUploadDirectory = objectStore.uploadDirectory as jest.Mock
const mockDeleteFolder = objectStore.deleteFolder as jest.Mock
@ -28,6 +30,7 @@ describe("/plugins", () => {
beforeEach(async () => {
await config.init()
jest.clearAllMocks()
nock.cleanAll()
})
const createPlugin = async (status?: number) => {
@ -112,67 +115,108 @@ describe("/plugins", () => {
})
describe("github", () => {
const createGithubPlugin = async (status?: number, url?: string) => {
return await request
.post(`/api/plugin`)
.send({
source: "Github",
url,
githubToken: "token",
beforeEach(async () => {
nock("https://api.github.com")
.get("/repos/my-repo/budibase-comment-box")
.reply(200, {
name: "budibase-comment-box",
releases_url:
"https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(status ? status : 200)
}
it("should be able to create a plugin from github", async () => {
const res = await createGithubPlugin(
200,
"https://github.com/my-repo/budibase-comment-box.git"
)
expect(res.body).toBeDefined()
expect(res.body.plugin).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_comment-box")
.get("/repos/my-repo/budibase-comment-box/latest")
.reply(200, {
assets: [
{
content_type: "application/gzip",
browser_download_url:
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
},
],
})
nock("https://github.com")
.get(
"/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
)
.replyWithFile(
200,
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
)
})
it("should be able to create a plugin from github", async () => {
const { plugin } = await config.api.plugin.create({
source: PluginSource.GITHUB,
url: "https://github.com/my-repo/budibase-comment-box.git",
githubToken: "token",
})
expect(plugin._id).toEqual("plg_comment-box")
})
it("should fail if the url is not from github", async () => {
const res = await createGithubPlugin(
400,
"https://notgithub.com/my-repo/budibase-comment-box"
)
expect(res.body.message).toEqual(
"Failed to import plugin: The plugin origin must be from Github"
await config.api.plugin.create(
{
source: PluginSource.GITHUB,
url: "https://notgithub.com/my-repo/budibase-comment-box",
githubToken: "token",
},
{
status: 400,
body: {
message:
"Failed to import plugin: The plugin origin must be from Github",
},
}
)
})
})
describe("npm", () => {
it("should be able to create a plugin from npm", async () => {
const res = await request
.post(`/api/plugin`)
.send({
source: "NPM",
url: "https://www.npmjs.com/package/budibase-component",
nock("https://registry.npmjs.org")
.get("/budibase-component")
.reply(200, {
name: "budibase-component",
"dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.1.tgz",
},
},
},
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_budibase-component")
.get("/budibase-component/-/budibase-component-1.0.1.tgz")
.replyWithFile(
200,
"src/api/routes/tests/data/budibase-component-1.0.1.tgz"
)
const { plugin } = await config.api.plugin.create({
source: PluginSource.NPM,
url: "https://www.npmjs.com/package/budibase-component",
})
expect(plugin._id).toEqual("plg_budibase-component")
expect(events.plugin.imported).toHaveBeenCalled()
})
})
describe("url", () => {
it("should be able to create a plugin from a URL", async () => {
const res = await request
.post(`/api/plugin`)
.send({
source: "URL",
url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_comment-box")
nock("https://www.someurl.com")
.get("/comment-box/comment-box-1.0.2.tar.gz")
.replyWithFile(
200,
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
)
const { plugin } = await config.api.plugin.create({
source: PluginSource.URL,
url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
})
expect(plugin._id).toEqual("plg_comment-box")
expect(events.plugin.imported).toHaveBeenCalledTimes(1)
})
})

View File

@ -22,9 +22,13 @@ describe.each(
DatabaseName.MYSQL,
DatabaseName.SQL_SERVER,
DatabaseName.MARIADB,
DatabaseName.ORACLE,
].map(name => [name, getDatasource(name)])
)("queries (%s)", (dbName, dsProvider) => {
const config = setup.getConfig()
const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER
let rawDatasource: Datasource
let datasource: Datasource
let client: Knex
@ -97,7 +101,7 @@ describe.each(
const query = await createQuery({
name: "New Query",
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
})
@ -106,7 +110,7 @@ describe.each(
name: "New Query",
parameters: [],
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
schema: {},
queryVerb: "read",
@ -125,7 +129,7 @@ describe.each(
it("should be able to update a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
})
@ -135,7 +139,7 @@ describe.each(
...query,
name: "Updated Query",
fields: {
sql: "SELECT * FROM test_table WHERE id = 1",
sql: client("test_table").where({ id: 1 }).toString(),
},
})
@ -144,7 +148,7 @@ describe.each(
name: "Updated Query",
parameters: [],
fields: {
sql: "SELECT * FROM test_table WHERE id = 1",
sql: client("test_table").where({ id: 1 }).toString(),
},
schema: {},
queryVerb: "read",
@ -161,7 +165,7 @@ describe.each(
it("should be able to delete a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
})
@ -180,7 +184,7 @@ describe.each(
it("should be able to list queries", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
})
@ -191,7 +195,7 @@ describe.each(
it("should strip sensitive fields for prod apps", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table",
sql: client("test_table").select("*").toString(),
},
})
@ -212,7 +216,7 @@ describe.each(
datasourceId: datasource._id!,
queryVerb: "read",
fields: {
sql: `SELECT * FROM test_table WHERE id = 1`,
sql: client("test_table").where({ id: 1 }).toString(),
},
parameters: [],
transformer: "return data",
@ -270,7 +274,7 @@ describe.each(
name: "Test Query",
queryVerb: "read",
fields: {
sql: `SELECT * FROM ${tableName}`,
sql: client(tableName).select("*").toString(),
},
parameters: [],
transformer: "return data",
@ -284,11 +288,13 @@ describe.each(
})
)
await client(tableName).delete()
await client.schema.alterTable(tableName, table => {
table.string("data").alter()
})
await client(tableName).update({
await client(tableName).insert({
name: "test",
data: "string value",
})
@ -297,7 +303,7 @@ describe.each(
name: "Test Query",
queryVerb: "read",
fields: {
sql: `SELECT * FROM ${tableName}`,
sql: client(tableName).select("*").toString(),
},
parameters: [],
transformer: "return data",
@ -311,6 +317,7 @@ describe.each(
})
)
})
it("should work with static variables", async () => {
await config.api.datasource.update({
...datasource,
@ -326,7 +333,7 @@ describe.each(
datasourceId: datasource._id!,
queryVerb: "read",
fields: {
sql: `SELECT '{{ foo }}' as foo`,
sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
},
parameters: [],
transformer: "return data",
@ -337,16 +344,17 @@ describe.each(
const response = await config.api.query.preview(request)
let key = isOracle ? "FOO" : "foo"
expect(response.schema).toEqual({
foo: {
name: "foo",
[key]: {
name: key,
type: "string",
},
})
expect(response.rows).toEqual([
{
foo: "bar",
[key]: "bar",
},
])
})
@ -354,7 +362,7 @@ describe.each(
it("should work with dynamic variables", async () => {
const basedOnQuery = await createQuery({
fields: {
sql: "SELECT name FROM test_table WHERE id = 1",
sql: client("test_table").select("name").where({ id: 1 }).toString(),
},
})
@ -376,7 +384,7 @@ describe.each(
datasourceId: datasource._id!,
queryVerb: "read",
fields: {
sql: `SELECT '{{ foo }}' as foo`,
sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
},
parameters: [],
transformer: "return data",
@ -385,16 +393,17 @@ describe.each(
readable: true,
})
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({
foo: {
name: "foo",
[key]: {
name: key,
type: "string",
},
})
expect(preview.rows).toEqual([
{
foo: "one",
[key]: "one",
},
])
})
@ -402,7 +411,7 @@ describe.each(
it("should handle the dynamic base query being deleted", async () => {
const basedOnQuery = await createQuery({
fields: {
sql: "SELECT name FROM test_table WHERE id = 1",
sql: client("test_table").select("name").where({ id: 1 }).toString(),
},
})
@ -426,7 +435,7 @@ describe.each(
datasourceId: datasource._id!,
queryVerb: "read",
fields: {
sql: `SELECT '{{ foo }}' as foo`,
sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
},
parameters: [],
transformer: "return data",
@ -435,16 +444,17 @@ describe.each(
readable: true,
})
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({
foo: {
name: "foo",
[key]: {
name: key,
type: "string",
},
})
expect(preview.rows).toEqual([
{
foo: datasource.source === SourceName.SQL_SERVER ? "" : null,
[key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
},
])
})
@ -455,7 +465,7 @@ describe.each(
it("should be able to insert with bindings", async () => {
const query = await createQuery({
fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
},
parameters: [
{
@ -488,7 +498,7 @@ describe.each(
it("should not allow handlebars as parameters", async () => {
const query = await createQuery({
fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
},
parameters: [
{
@ -516,46 +526,55 @@ describe.each(
)
})
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
"should coerce %s into a date",
async datetimeStr => {
const date = new Date(datetimeStr)
const query = await createQuery({
fields: {
sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`,
},
parameters: [
{
name: "birthday",
default: "",
// Oracle doesn't automatically coerce strings into dates.
!isOracle &&
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
"should coerce %s into a date",
async datetimeStr => {
const date = new Date(datetimeStr)
const query = await createQuery({
fields: {
sql: client("test_table")
.insert({
name: "foo",
birthday: client.raw("{{ birthday }}"),
})
.toString(),
},
],
queryVerb: "create",
})
parameters: [
{
name: "birthday",
default: "",
},
],
queryVerb: "create",
})
const result = await config.api.query.execute(query._id!, {
parameters: { birthday: datetimeStr },
})
const result = await config.api.query.execute(query._id!, {
parameters: { birthday: datetimeStr },
})
expect(result.data).toEqual([{ created: true }])
expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table")
.where({ birthday: datetimeStr })
.select()
expect(rows).toHaveLength(1)
const rows = await client("test_table")
.where({ birthday: datetimeStr })
.select()
expect(rows).toHaveLength(1)
for (const row of rows) {
expect(new Date(row.birthday)).toEqual(date)
for (const row of rows) {
expect(new Date(row.birthday)).toEqual(date)
}
}
}
)
)
it.each(["2021,02,05", "202205-1500"])(
"should not coerce %s as a date",
async notDateStr => {
const query = await createQuery({
fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ name }})",
sql: client("test_table")
.insert({ name: client.raw("{{ name }}") })
.toString(),
},
parameters: [
{
@ -586,7 +605,7 @@ describe.each(
it("should execute a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table ORDER BY id",
sql: client("test_table").select("*").orderBy("id").toString(),
},
})
@ -629,7 +648,7 @@ describe.each(
it("should be able to transform a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table WHERE id = 1",
sql: client("test_table").where({ id: 1 }).select("*").toString(),
},
transformer: `
data[0].id = data[0].id + 1;
@ -652,7 +671,10 @@ describe.each(
it("should coerce numeric bindings", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table WHERE id = {{ id }}",
sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.select("*")
.toString(),
},
parameters: [
{
@ -683,7 +705,10 @@ describe.each(
it("should be able to update rows", async () => {
const query = await createQuery({
fields: {
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
sql: client("test_table")
.update({ name: client.raw("{{ name }}") })
.where({ id: client.raw("{{ id }}") })
.toString(),
},
parameters: [
{
@ -698,19 +723,13 @@ describe.each(
queryVerb: "update",
})
const result = await config.api.query.execute(query._id!, {
await config.api.query.execute(query._id!, {
parameters: {
id: "1",
name: "foo",
},
})
expect(result.data).toEqual([
{
updated: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null },
@ -720,35 +739,34 @@ describe.each(
it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({
fields: {
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
sql: client("test_table")
.update({ name: "updated" })
.where({ id: 100 })
.toString(),
},
queryVerb: "update",
})
const result = await config.api.query.execute(query._id!)
await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
updated: true,
},
])
const rows = await client("test_table").select()
for (const row of rows) {
expect(row.name).not.toEqual("updated")
}
})
it("should be able to execute a delete that deletes no rows", async () => {
const query = await createQuery({
fields: {
sql: "DELETE FROM test_table WHERE id = 100",
sql: client("test_table").where({ id: 100 }).delete().toString(),
},
queryVerb: "delete",
})
const result = await config.api.query.execute(query._id!)
await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
deleted: true,
},
])
const rows = await client("test_table").select()
expect(rows).toHaveLength(5)
})
})
@ -756,7 +774,10 @@ describe.each(
it("should be able to delete rows", async () => {
const query = await createQuery({
fields: {
sql: "DELETE FROM test_table WHERE id = {{ id }}",
sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.delete()
.toString(),
},
parameters: [
{
@ -767,18 +788,12 @@ describe.each(
queryVerb: "delete",
})
const result = await config.api.query.execute(query._id!, {
await config.api.query.execute(query._id!, {
parameters: {
id: "1",
},
})
expect(result.data).toEqual([
{
deleted: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toHaveLength(0)
})
@ -823,72 +838,63 @@ describe.each(
})
})
it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({
fields: {
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
},
queryVerb: "update",
// this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => {
let queryParams: Partial<Query>
beforeAll(async () => {
queryParams = {
fields: {
sql: client("test_table")
.insert({
name: client.raw("{{ bindingName }}"),
number: client.raw("{{ bindingNumber }}"),
})
.toString(),
},
parameters: [
{
name: "bindingName",
default: "",
},
{
name: "bindingNumber",
default: "",
},
],
queryVerb: "create",
}
})
const result = await config.api.query.execute(query._id!, {})
it("should error for old queries", async () => {
const query = await createQuery(queryParams)
await config.api.query.save({ ...query, nullDefaultSupport: false })
let error: string | undefined
try {
await config.api.query.execute(query._id!, {
parameters: {
bindingName: "testing",
},
})
} catch (err: any) {
error = err.message
}
if (isMsSQL || isOracle) {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
expect(error).toContain("integer")
}
})
expect(result.data).toEqual([
{
updated: true,
},
])
})
})
// this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => {
const queryParams = {
fields: {
sql: "INSERT INTO test_table (name, number) VALUES ({{ bindingName }}, {{ bindingNumber }})",
},
parameters: [
{
name: "bindingName",
default: "",
},
{
name: "bindingNumber",
default: "",
},
],
queryVerb: "create",
}
it("should error for old queries", async () => {
const query = await createQuery(queryParams)
await config.api.query.save({ ...query, nullDefaultSupport: false })
let error: string | undefined
try {
await config.api.query.execute(query._id!, {
it("should not error for new queries", async () => {
const query = await createQuery(queryParams)
const results = await config.api.query.execute(query._id!, {
parameters: {
bindingName: "testing",
},
})
} catch (err: any) {
error = err.message
}
if (dbName === "mssql") {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
expect(error).toContain("integer")
}
})
it("should not error for new queries", async () => {
const query = await createQuery(queryParams)
const results = await config.api.query.execute(query._id!, {
parameters: {
bindingName: "testing",
},
expect(results).toEqual({ data: [{ created: true }] })
})
expect(results).toEqual({ data: [{ created: true }] })
})
})
})

View File

@ -5,8 +5,6 @@ import { getCachedVariable } from "../../../../threads/utils"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"
jest.unmock("node-fetch")
describe("rest", () => {
let config: TestConfiguration
let datasource: Datasource

View File

@ -1,3 +1,5 @@
import * as setup from "./utilities"
import {
DatabaseName,
getDatasource,
@ -7,7 +9,6 @@ import {
import tk from "timekeeper"
import emitter from "../../../../src/events"
import { outputProcessing } from "../../../utilities/rowProcessor"
import * as setup from "./utilities"
import { context, InternalTable, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import {
@ -31,6 +32,8 @@ import {
TableSourceType,
UpdatedRowEventEmitter,
TableSchema,
JsonFieldSubType,
RowExportFormat,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
@ -69,9 +72,11 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined
const isMSSQL = providerType === DatabaseName.SQL_SERVER
const isOracle = providerType === DatabaseName.ORACLE
const config = setup.getConfig()
let table: Table
@ -101,7 +106,7 @@ describe.each([
): SaveTableRequest {
const defaultSchema: TableSchema = {
id: {
type: FieldType.AUTO,
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
@ -126,7 +131,8 @@ describe.each([
primary: ["id"],
schema: defaultSchema,
}
return merge(req, ...overrides)
const merged = merge(req, ...overrides)
return merged
}
function defaultTable(
@ -383,7 +389,7 @@ describe.each([
const arrayField: FieldSchema = {
type: FieldType.ARRAY,
constraints: {
type: "array",
type: JsonFieldSubType.ARRAY,
presence: false,
inclusion: ["One", "Two", "Three"],
},
@ -1296,9 +1302,117 @@ describe.each([
await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
})
// Upserting isn't yet supported in MSSQL, see:
isInternal &&
it("should be able to update existing rows on bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
identifierFields: ["_id"],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
expect(rows[2].name).toEqual("Updated existing row")
expect(rows[2].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 2)
})
isInternal &&
it("should create new rows if not identifierFields are provided", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Existing row")
expect(rows[0].description).toEqual("Existing description")
expect(rows[1].name).toEqual("Row 1")
expect(rows[1].description).toEqual("Row 1 description")
expect(rows[2].name).toEqual("Row 2")
expect(rows[2].description).toEqual("Row 2 description")
expect(rows[3].name).toEqual("Updated existing row")
expect(rows[3].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 3)
})
// Upserting isn't yet supported in MSSQL / Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
it("should be able to update existing rows with bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -1368,9 +1482,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description")
})
// Upserting isn't yet supported in MSSQL, see:
// Upserting isn't yet supported in MSSQL or Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
!isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4()
@ -1437,9 +1552,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description")
})
// Upserting isn't yet supported in MSSQL, see:
// Upserting isn't yet supported in MSSQL/Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
!isInternal &&
it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4()
@ -1638,23 +1754,38 @@ describe.each([
table = await config.api.table.save(defaultTable())
})
it("should allow exporting all columns", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
isInternal &&
it("should not export internal couchdb fields", async () => {
const existing = await config.api.row.save(table._id!, {
name: generator.guid(),
description: generator.paragraph(),
})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBeGreaterThanOrEqual(
Object.keys(existing).length
)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
expect(Object.keys(row)).toEqual(["_id", "name", "description"])
})
!isInternal &&
it("should allow exporting all columns", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBe(Object.keys(existing).length)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
})
})
})
it("should allow exporting only certain columns", async () => {
const existing = await config.api.row.save(table._id!, {})
@ -1687,6 +1818,7 @@ describe.each([
await config.api.row.exportRows(
"1234567",
{ rows: [existing._id!] },
RowExportFormat.JSON,
{ status: 404 }
)
})
@ -1725,6 +1857,202 @@ describe.each([
const results = JSON.parse(res)
expect(results.length).toEqual(3)
})
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const matchingObject = (key: string, value: any, isArray: boolean) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
})
let o2mTable: Table

View File

@ -1,10 +1,17 @@
import _ from "lodash"
import tk from "timekeeper"
import { CreateRowActionRequest, RowActionResponse } from "@budibase/types"
import {
CreateRowActionRequest,
DocumentType,
RowActionResponse,
} from "@budibase/types"
import * as setup from "./utilities"
import { generator } from "@budibase/backend-core/tests"
// Asymmetric matcher for automation document IDs, used to assert that a row
// action has been linked to a generated automation.
function expectAutomationId() {
  return expect.stringMatching(new RegExp(`^${DocumentType.AUTOMATION}_.+`))
}
describe("/rowsActions", () => {
const config = setup.getConfig()
@ -79,17 +86,19 @@ describe("/rowsActions", () => {
})
expect(res).toEqual({
name: rowAction.name,
id: expect.stringMatching(/^row_action_\w+/),
tableId: tableId,
...rowAction,
automationId: expectAutomationId(),
})
expect(await config.api.rowAction.find(tableId)).toEqual({
actions: {
[res.id]: {
...rowAction,
name: rowAction.name,
id: res.id,
tableId: tableId,
automationId: expectAutomationId(),
},
},
})
@ -97,19 +106,13 @@ describe("/rowsActions", () => {
it("trims row action names", async () => {
const name = " action name "
const res = await createRowAction(
tableId,
{ name },
{
status: 201,
}
)
const res = await createRowAction(tableId, { name }, { status: 201 })
expect(res).toEqual({
id: expect.stringMatching(/^row_action_\w+/),
tableId: tableId,
name: "action name",
})
expect(res).toEqual(
expect.objectContaining({
name: "action name",
})
)
expect(await config.api.rowAction.find(tableId)).toEqual({
actions: {
@ -129,9 +132,24 @@ describe("/rowsActions", () => {
expect(await config.api.rowAction.find(tableId)).toEqual({
actions: {
[responses[0].id]: { ...rowActions[0], id: responses[0].id, tableId },
[responses[1].id]: { ...rowActions[1], id: responses[1].id, tableId },
[responses[2].id]: { ...rowActions[2], id: responses[2].id, tableId },
[responses[0].id]: {
name: rowActions[0].name,
id: responses[0].id,
tableId,
automationId: expectAutomationId(),
},
[responses[1].id]: {
name: rowActions[1].name,
id: responses[1].id,
tableId,
automationId: expectAutomationId(),
},
[responses[2].id]: {
name: rowActions[2].name,
id: responses[2].id,
tableId,
automationId: expectAutomationId(),
},
},
})
})
@ -152,7 +170,7 @@ describe("/rowsActions", () => {
it("ignores not valid row action data", async () => {
const rowAction = createRowActionRequest()
const dirtyRowAction = {
...rowAction,
name: rowAction.name,
id: generator.guid(),
valueToIgnore: generator.string(),
}
@ -161,17 +179,19 @@ describe("/rowsActions", () => {
})
expect(res).toEqual({
name: rowAction.name,
id: expect.any(String),
tableId,
...rowAction,
automationId: expectAutomationId(),
})
expect(await config.api.rowAction.find(tableId)).toEqual({
actions: {
[res.id]: {
name: rowAction.name,
id: res.id,
tableId: tableId,
...rowAction,
automationId: expectAutomationId(),
},
},
})
@ -213,6 +233,17 @@ describe("/rowsActions", () => {
await createRowAction(otherTable._id!, { name: action.name })
})
it("an automation is created when creating a new row action", async () => {
const action1 = await createRowAction(tableId, createRowActionRequest())
const action2 = await createRowAction(tableId, createRowActionRequest())
for (const automationId of [action1.automationId, action2.automationId]) {
expect(
await config.api.automation.get(automationId, { status: 200 })
).toEqual(expect.objectContaining({ _id: automationId }))
}
})
})
describe("find", () => {
@ -264,7 +295,6 @@ describe("/rowsActions", () => {
const updatedName = generator.string()
const res = await config.api.rowAction.update(tableId, actionId, {
...actionData,
name: updatedName,
})
@ -272,14 +302,17 @@ describe("/rowsActions", () => {
id: actionId,
tableId,
name: updatedName,
automationId: actionData.automationId,
})
expect(await config.api.rowAction.find(tableId)).toEqual(
expect.objectContaining({
actions: expect.objectContaining({
[actionId]: {
...actionData,
name: updatedName,
id: actionData.id,
tableId: actionData.tableId,
automationId: actionData.automationId,
},
}),
})
@ -296,7 +329,6 @@ describe("/rowsActions", () => {
)
const res = await config.api.rowAction.update(tableId, rowAction.id, {
...rowAction,
name: " action name ",
})
@ -408,5 +440,26 @@ describe("/rowsActions", () => {
status: 400,
})
})
it("deletes the linked automation", async () => {
const actions: RowActionResponse[] = []
for (const rowAction of createRowActionRequests(3)) {
actions.push(await createRowAction(tableId, rowAction))
}
const actionToDelete = _.sample(actions)!
await config.api.rowAction.delete(tableId, actionToDelete.id, {
status: 204,
})
await config.api.automation.get(actionToDelete.automationId, {
status: 404,
})
for (const action of actions.filter(a => a.id !== actionToDelete.id)) {
await config.api.automation.get(action.automationId, {
status: 200,
})
}
})
})
})

View File

@ -5,12 +5,12 @@ import {
knexClient,
} from "../../../integrations/tests/utils"
import {
db as dbCore,
context,
db as dbCore,
MAX_VALID_DATE,
MIN_VALID_DATE,
utils,
SQLITE_DESIGN_DOC_ID,
utils,
} from "@budibase/backend-core"
import * as setup from "./utilities"
@ -20,6 +20,7 @@ import {
Datasource,
EmptyFilterOption,
FieldType,
JsonFieldSubType,
RelationshipType,
Row,
RowSearchParams,
@ -47,11 +48,13 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => {
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory
const isSql = !isInMemory && !isLucene
const config = setup.getConfig()
let envCleanup: (() => void) | undefined
@ -191,7 +194,8 @@ describe.each([
// different to the one passed in will cause the assertion to fail. Extra
// rows returned by the query will also cause the assertion to fail.
async toMatchExactly(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(expectedRows.length)
@ -201,13 +205,15 @@ describe.each([
expect.objectContaining(this.popRow(expectedRow, foundRows))
)
)
return response
}
// Asserts that the query returns rows matching exactly the set of rows
// passed in. The order of the rows is not important, but extra rows will
// cause the assertion to fail.
async toContainExactly(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(expectedRows.length)
@ -219,6 +225,7 @@ describe.each([
)
)
)
return response
}
// Asserts that the query returns some property values - this cannot be used
@ -235,6 +242,7 @@ describe.each([
expect(response[key]).toEqual(properties[key])
}
}
return response
}
// Asserts that the query doesn't return a property, e.g. pagination parameters.
@ -244,13 +252,15 @@ describe.each([
// eslint-disable-next-line jest/no-standalone-expect
expect(response[property]).toBeUndefined()
}
return response
}
// Asserts that the query returns rows matching the set of rows passed in.
// The order of the rows is not important. Extra rows will not cause the
// assertion to fail.
async toContain(expectedRows: any[]) {
const { rows: foundRows } = await this.performSearch()
const response = await this.performSearch()
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
expect([...foundRows]).toEqual(
@ -260,6 +270,7 @@ describe.each([
)
)
)
return response
}
async toFindNothing() {
@ -1494,7 +1505,10 @@ describe.each([
numbers: {
name: "numbers",
type: FieldType.ARRAY,
constraints: { inclusion: ["one", "two", "three"] },
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: ["one", "two", "three"],
},
},
})
await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
@ -1581,7 +1595,10 @@ describe.each([
const MEDIUM = "10000000"
// Our bigints are int64s in most datasources.
const BIG = "9223372036854775807"
let BIG = "9223372036854775807"
if (name === DatabaseName.ORACLE) {
// BIG = "9223372036854775808"
}
beforeAll(async () => {
table = await createTable({
@ -2560,4 +2577,123 @@ describe.each([
}).toContainExactly([{ name: "foo" }])
})
})
// Searching on the internal `_id` column (not part of the user-defined
// schema). Skipped for the in-memory backend.
!isInMemory &&
  describe("search by _id", () => {
    let row: Row

    beforeAll(async () => {
      // A second table provides relationship targets, so the _id filter is
      // exercised on a row that also carries a many-to-many link column.
      const toRelateTable = await createTable({
        name: {
          name: "name",
          type: FieldType.STRING,
        },
      })
      table = await createTable({
        name: {
          name: "name",
          type: FieldType.STRING,
        },
        rel: {
          name: "rel",
          type: FieldType.LINK,
          relationshipType: RelationshipType.MANY_TO_MANY,
          tableId: toRelateTable._id!,
          fieldName: "rel",
        },
      })

      const [row1, row2] = await Promise.all([
        config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
      ])
      row = await config.api.row.save(table._id!, {
        name: "product 1",
        rel: [row1._id, row2._id],
      })
    })

    it("can filter by the row ID with limit 1", async () => {
      // limit: 1 is the interesting part — related rows multiply the result
      // set in SQL joins, and must not defeat an exact-_id lookup.
      await expectSearch({
        query: {
          equal: { _id: row._id },
        },
        limit: 1,
      }).toContainExactly([row])
    })
  })
// SQL-only regression: relationship joins inflate the raw row count, so the
// SQL_MAX_ROWS hard limit can be hit before a logical page is filled. The
// paginator must still report hasNextPage / bookmark correctly.
isSql &&
  describe("pagination edge case with relationships", () => {
    let mainRows: Row[] = []

    beforeAll(async () => {
      const toRelateTable = await createTable({
        name: {
          name: "name",
          type: FieldType.STRING,
        },
      })
      table = await createTable({
        name: {
          name: "name",
          type: FieldType.STRING,
        },
        rel: {
          name: "rel",
          type: FieldType.LINK,
          relationshipType: RelationshipType.MANY_TO_ONE,
          tableId: toRelateTable._id!,
          fieldName: "rel",
        },
      })

      // Six related rows all linked to "product 1": with SQL_MAX_ROWS=6 the
      // join output for the first product alone exhausts the hard limit.
      const relatedRows = await Promise.all([
        config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
        config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
      ])

      mainRows = await Promise.all([
        config.api.row.save(table._id!, {
          name: "product 1",
          rel: relatedRows.map(row => row._id),
        }),
        config.api.row.save(table._id!, {
          name: "product 2",
          rel: [],
        }),
        config.api.row.save(table._id!, {
          name: "product 3",
          rel: [],
        }),
      ])
    })

    it("can still page when the hard limit is hit", async () => {
      await config.withCoreEnv(
        {
          SQL_MAX_ROWS: "6",
        },
        async () => {
          const params: Omit<RowSearchParams, "tableId"> = {
            query: {},
            paginate: true,
            limit: 3,
            sort: "name",
            sortType: SortType.STRING,
            sortOrder: SortOrder.ASCENDING,
          }
          // Page 1 is truncated by the hard limit but must still expose a
          // bookmark so the caller can continue.
          const page1 = await expectSearch(params).toContain([mainRows[0]])
          expect(page1.hasNextPage).toBe(true)
          expect(page1.bookmark).toBeDefined()

          // Page 2 picks up from the bookmark and drains the remainder.
          const page2 = await expectSearch({
            ...params,
            bookmark: page1.bookmark,
          }).toContain([mainRows[1], mainRows[2]])
          expect(page2.hasNextPage).toBe(false)
        }
      )
    })
  })
})

View File

@ -1,4 +1,8 @@
import { context, events } from "@budibase/backend-core"
import { context, docIds, events } from "@budibase/backend-core"
import {
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import {
AutoFieldSubType,
BBReferenceFieldSubType,
@ -10,10 +14,13 @@ import {
Row,
SaveTableRequest,
Table,
TableSchema,
TableSourceType,
User,
ValidateTableImportResponse,
ViewCalculation,
ViewV2Enriched,
RowExportFormat,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
@ -33,7 +40,8 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => {
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/tables (%s)", (name, dsProvider) => {
const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined
let config = setup.getConfig()
@ -52,15 +60,20 @@ describe.each([
jest.clearAllMocks()
})
it.each([
let names = [
"alphanum",
"with spaces",
"with-dashes",
"with_underscores",
'with "double quotes"',
"with 'single quotes'",
"with `backticks`",
])("creates a table with name: %s", async name => {
]
if (name !== DatabaseName.ORACLE) {
names.push(`with "double quotes"`)
names.push(`with 'single quotes'`)
}
it.each(names)("creates a table with name: %s", async name => {
const table = await config.api.table.save(
tableForDatasource(datasource, { name })
)
@ -118,6 +131,64 @@ describe.each([
body: basicTable(),
})
})
it("does not persist the row fields that are not on the table schema", async () => {
const table: SaveTableRequest = basicTable()
table.rows = [
{
name: "test-name",
description: "test-desc",
nonValid: "test-non-valid",
},
]
const res = await config.api.table.save(table)
const persistedRows = await config.api.row.search(res._id!)
expect(persistedRows.rows).toEqual([
expect.objectContaining({
name: "test-name",
description: "test-desc",
}),
])
expect(persistedRows.rows[0].nonValid).toBeUndefined()
})
// Importing a table must reject schemas that shadow protected (reserved)
// column names; the protected set differs between internal and external
// datasources.
it.each(
  isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)(
  "cannot use protected column names (%s) while importing a table",
  async columnName => {
    const table: SaveTableRequest = basicTable()
    table.rows = [
      {
        name: "test-name",
        description: "test-desc",
      },
    ]

    // Saving with a protected name added to the schema must 400 with the
    // duplicate-column message (the protected name collides with the
    // server-reserved column, case-insensitively).
    await config.api.table.save(
      {
        ...table,
        schema: {
          ...table.schema,
          [columnName]: {
            name: columnName,
            type: FieldType.STRING,
          },
        },
      },
      {
        status: 400,
        body: {
          message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
          status: 400,
        },
      }
    )
  }
)
})
describe("update", () => {
@ -398,6 +469,7 @@ describe.each([
name: "auto",
autocolumn: true,
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
},
},
},
@ -1021,4 +1093,371 @@ describe.each([
})
})
})
describe.each([
[RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
[RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
const basicSchema: TableSchema = {
id: {
type: FieldType.NUMBER,
name: "id",
},
name: {
type: FieldType.STRING,
name: "name",
},
}
const importCases: [
string,
(rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
][] = [
[
"validateNewTableImport",
async (rows: Row[], schema: TableSchema) => {
const result = await config.api.table.validateNewTableImport({
rows,
schema,
})
return result
},
],
[
"validateExistingTableImport",
async (rows: Row[], schema: TableSchema) => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows,
})
return result
},
],
]
describe.each(importCases)("%s", (_, testDelegate) => {
it("validates basic imports", async () => {
const result = await testDelegate(
[{ id: generator.natural(), name: generator.first() }],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in schema (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
[columnName]: generator.word(),
},
],
schema: {
...basicSchema,
},
})
expect(result).toEqual({
allValid: false,
errors: {
[columnName]: `${columnName} is a protected column name`,
},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
[columnName]: false,
},
})
})
it("does not allow imports without rows", async () => {
const result = await testDelegate([], basicSchema)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {},
})
})
it("validates imports with some empty rows", async () => {
const result = await testDelegate(
[{}, { id: generator.natural(), name: generator.first() }, {}],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
// Internal tables only: protected (reserved) column names appearing in an
// imported schema must fail validation. Because this whole case is gated on
// `isInternal &&`, the internal protected-column list is used directly —
// the previous `isInternal ? INTERNAL : EXTERNAL` ternary here could never
// take its external branch (dead code).
isInternal &&
  it.each(PROTECTED_INTERNAL_COLUMNS)(
    "don't allow protected names in the rows (%s)",
    async columnName => {
      const result = await config.api.table.validateNewTableImport({
        rows: [
          {
            id: generator.natural(),
            name: generator.first(),
          },
        ],
        schema: {
          ...basicSchema,
          [columnName]: {
            name: columnName,
            type: FieldType.STRING,
          },
        },
      })

      // The protected column fails its own validation and invalidates the
      // import as a whole; the legitimate columns still validate.
      expect(result).toEqual({
        allValid: false,
        errors: {
          [columnName]: `${columnName} is a protected column name`,
        },
        invalidColumns: [],
        schemaValidation: {
          id: true,
          name: true,
          [columnName]: false,
        },
      })
    }
  )
it("validates required fields and valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: generator.first() },
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
it("validates required fields and non-valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: "" },
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: false,
},
})
})
describe("bb references", () => {
// Fabricates the minimal user payload stored by a BB reference cell:
// a global user ID plus display fields.
function getUserValues() {
  return {
    _id: docIds.generateGlobalUserID(),
    primaryDisplay: generator.first(),
    email: generator.email({}),
  }
}
it("can validate user column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
it("can validate user column imports with invalid data", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
{
id: generator.natural(),
name: generator.first(),
user: "no valid user data",
},
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: false,
},
})
})
it("can validate users column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
externalType: "array",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser([
getUserValues(),
getUserValues(),
getUserValues(),
]),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
})
})
describe("validateExistingTableImport", () => {
isInternal &&
it("can reimport _id fields for internal tables", async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema: basicSchema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows: [
{
_id: docIds.generateRowID(table._id!),
id: generator.natural(),
name: generator.first(),
},
],
})
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
_id: true,
id: true,
name: true,
},
})
})
})
})
})

View File

@ -54,7 +54,7 @@ export const clearAllApps = async (
}
export const clearAllAutomations = async (config: TestConfiguration) => {
const automations = await config.getAllAutomations()
const { automations } = await config.getAllAutomations()
for (let auto of automations) {
await context.doInAppContext(config.getAppId(), async () => {
await config.deleteAutomation(auto)

View File

@ -33,6 +33,7 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig()
const isSqs = name === "sqs"
@ -56,7 +57,7 @@ describe.each([
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
@ -241,7 +242,7 @@ describe.each([
schema: {
id: {
name: "id",
type: FieldType.AUTO,
type: FieldType.NUMBER,
autocolumn: true,
visible: true,
},
@ -1555,7 +1556,7 @@ describe.each([
schema: {
id: {
name: "id",
type: FieldType.AUTO,
type: FieldType.NUMBER,
autocolumn: true,
},
name: {

View File

@ -20,17 +20,21 @@ import * as triggerAutomationRun from "./steps/triggerAutomationRun"
import env from "../environment"
import {
AutomationStepSchema,
AutomationStepInput,
PluginType,
AutomationStep,
AutomationActionStepId,
ActionImplementations,
Hosting,
ActionImplementation,
} from "@budibase/types"
import sdk from "../sdk"
import { getAutomationPlugin } from "../utilities/fileSystem"
const ACTION_IMPLS: Record<
string,
(opts: AutomationStepInput) => Promise<any>
> = {
type ActionImplType = ActionImplementations<
typeof env.SELF_HOSTED extends "true" ? Hosting.SELF : Hosting.CLOUD
>
const ACTION_IMPLS: ActionImplType = {
SEND_EMAIL_SMTP: sendSmtpEmail.run,
CREATE_ROW: createRow.run,
UPDATE_ROW: updateRow.run,
@ -51,6 +55,7 @@ const ACTION_IMPLS: Record<
integromat: make.run,
n8n: n8n.run,
}
export const BUILTIN_ACTION_DEFINITIONS: Record<string, AutomationStepSchema> =
{
SEND_EMAIL_SMTP: sendSmtpEmail.definition,
@ -86,7 +91,7 @@ if (env.SELF_HOSTED) {
ACTION_IMPLS["EXECUTE_BASH"] = bash.run
// @ts-ignore
BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
// @ts-ignore
ACTION_IMPLS.OPENAI = openai.run
BUILTIN_ACTION_DEFINITIONS.OPENAI = openai.definition
}
@ -107,10 +112,13 @@ export async function getActionDefinitions() {
}
/* istanbul ignore next */
export async function getAction(stepId: string) {
if (ACTION_IMPLS[stepId] != null) {
return ACTION_IMPLS[stepId]
export async function getAction(
stepId: AutomationActionStepId
): Promise<ActionImplementation<any, any> | undefined> {
if (ACTION_IMPLS[stepId as keyof ActionImplType] != null) {
return ACTION_IMPLS[stepId as keyof ActionImplType]
}
// must be a plugin
if (env.SELF_HOSTED) {
const plugins = await sdk.plugins.fetch(PluginType.AUTOMATION)

View File

@ -4,8 +4,13 @@ import {
encodeJSBinding,
} from "@budibase/string-templates"
import sdk from "../sdk"
import { AutomationAttachment, FieldType, Row } from "@budibase/types"
import { LoopInput, LoopStepType } from "../definitions/automations"
import {
AutomationAttachment,
FieldType,
Row,
LoopStepType,
} from "@budibase/types"
import { LoopInput } from "../definitions/automations"
import { objectStore, context } from "@budibase/backend-core"
import * as uuid from "uuid"
import path from "path"

View File

@ -7,9 +7,10 @@ import {
AutomationCustomIOType,
AutomationFeature,
AutomationIOType,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
BashStepInputs,
BashStepOutputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -51,7 +52,13 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs, context }: AutomationStepInput) {
export async function run({
inputs,
context,
}: {
inputs: BashStepInputs
context: object
}): Promise<BashStepOutputs> {
if (inputs.code == null) {
return {
stdout: "Budibase bash automation failed: Invalid inputs",

View File

@ -1,9 +1,10 @@
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
CollectStepInputs,
CollectStepOutputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -43,7 +44,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: CollectStepInputs
}): Promise<CollectStepOutputs> {
if (!inputs.collection) {
return {
success: false,

View File

@ -10,10 +10,12 @@ import {
AutomationCustomIOType,
AutomationFeature,
AutomationIOType,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
CreateRowStepInputs,
CreateRowStepOutputs,
} from "@budibase/types"
import { EventEmitter } from "events"
export const definition: AutomationStepSchema = {
name: "Create Row",
@ -74,7 +76,15 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
inputs,
appId,
emitter,
}: {
inputs: CreateRowStepInputs
appId: string
emitter: EventEmitter
}): Promise<CreateRowStepOutputs> {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@ -93,7 +103,7 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
try {
inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
inputs.row = await sendAutomationAttachmentsToStorage(
inputs.row.tableId,
inputs.row.tableId!,
inputs.row
)
await save(ctx)

View File

@ -2,9 +2,10 @@ import { wait } from "../../utilities"
import {
AutomationActionStepId,
AutomationIOType,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
DelayStepInputs,
DelayStepOutputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -39,7 +40,11 @@ export const definition: AutomationStepSchema = {
type: AutomationStepType.LOGIC,
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: DelayStepInputs
}): Promise<DelayStepOutputs> {
await wait(inputs.time)
return {
success: true,

View File

@ -1,14 +1,16 @@
import { EventEmitter } from "events"
import { destroy } from "../../api/controllers/row"
import { buildCtx } from "./utils"
import { getError } from "../automationUtils"
import {
AutomationActionStepId,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
AutomationIOType,
AutomationCustomIOType,
AutomationFeature,
DeleteRowStepInputs,
DeleteRowStepOutputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -59,7 +61,15 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
inputs,
appId,
emitter,
}: {
inputs: DeleteRowStepInputs
appId: string
emitter: EventEmitter
}): Promise<DeleteRowStepOutputs> {
if (inputs.id == null) {
return {
success: false,

View File

@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
AutomationFeature,
ExternalAppStepOutputs,
DiscordStepInputs,
} from "@budibase/types"
const DEFAULT_USERNAME = "Budibase Automate"
@ -65,7 +66,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: DiscordStepInputs
}): Promise<ExternalAppStepOutputs> {
let { url, username, avatar_url, content } = inputs
if (!username) {
username = DEFAULT_USERNAME

View File

@ -1,3 +1,4 @@
import { EventEmitter } from "events"
import * as queryController from "../../api/controllers/query"
import { buildCtx } from "./utils"
import * as automationUtils from "../automationUtils"
@ -6,9 +7,10 @@ import {
AutomationCustomIOType,
AutomationFeature,
AutomationIOType,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
ExecuteQueryStepInputs,
ExecuteQueryStepOutputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -62,7 +64,15 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs, appId, emitter }: AutomationStepInput) {
export async function run({
inputs,
appId,
emitter,
}: {
inputs: ExecuteQueryStepInputs
appId: string
emitter: EventEmitter
}): Promise<ExecuteQueryStepOutputs> {
if (inputs.query == null) {
return {
success: false,

View File

@ -6,10 +6,12 @@ import {
AutomationCustomIOType,
AutomationFeature,
AutomationIOType,
AutomationStepInput,
AutomationStepSchema,
AutomationStepType,
ExecuteScriptStepInputs,
ExecuteScriptStepOutputs,
} from "@budibase/types"
import { EventEmitter } from "events"
export const definition: AutomationStepSchema = {
name: "JS Scripting",
@ -55,7 +57,12 @@ export async function run({
appId,
context,
emitter,
}: AutomationStepInput) {
}: {
inputs: ExecuteScriptStepInputs
appId: string
context: object
emitter: EventEmitter
}): Promise<ExecuteScriptStepOutputs> {
if (inputs.code == null) {
return {
success: false,

View File

@ -1,9 +1,10 @@
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
FilterStepInputs,
FilterStepOutputs,
} from "@budibase/types"
export const FilterConditions = {
@ -69,7 +70,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: FilterStepInputs
}): Promise<FilterStepOutputs> {
try {
let { field, condition, value } = inputs
// coerce types so that we can use them

View File

@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
AutomationFeature,
ExternalAppStepOutputs,
MakeIntegrationInputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -57,7 +58,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: MakeIntegrationInputs
}): Promise<ExternalAppStepOutputs> {
const { url, body } = inputs
let payload = {}

View File

@ -3,11 +3,12 @@ import { getFetchResponse } from "./utils"
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
AutomationFeature,
HttpMethod,
ExternalAppStepOutputs,
n8nStepInputs,
} from "@budibase/types"
export const definition: AutomationStepSchema = {
@ -67,7 +68,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: n8nStepInputs
}): Promise<ExternalAppStepOutputs> {
const { url, body, method, authorization } = inputs
let payload = {}

View File

@ -3,9 +3,10 @@ import { OpenAI } from "openai"
import {
AutomationActionStepId,
AutomationStepSchema,
AutomationStepInput,
AutomationStepType,
AutomationIOType,
OpenAIStepInputs,
OpenAIStepOutputs,
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"
@ -59,7 +60,11 @@ export const definition: AutomationStepSchema = {
},
}
export async function run({ inputs }: AutomationStepInput) {
export async function run({
inputs,
}: {
inputs: OpenAIStepInputs
}): Promise<OpenAIStepOutputs> {
if (!env.OPENAI_API_KEY) {
return {
success: false,

Some files were not shown because too many files have changed in this diff Show More