Merge branch 'master' of github.com:budibase/budibase into budi-8483-consolidate-feature-flags-into-a-single-endpoint

This commit is contained in:
Sam Rose 2024-08-05 17:11:23 +01:00
commit da74441c1c
No known key found for this signature in database
219 changed files with 6558 additions and 3142 deletions

View File

@@ -108,7 +108,7 @@ jobs:
       - name: Pull testcontainers images
         run: |
           docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sqs &
+          docker pull budibase/couchdb:v3.3.3 &
           docker pull redis &
           wait $(jobs -p)
@@ -162,17 +162,24 @@ jobs:
           node-version: 20.x
           cache: yarn
+      - name: Load dotenv
+        id: dotenv
+        uses: falti/dotenv-action@v1.1.3
+        with:
+          path: ./packages/server/datasource-sha.env
       - name: Pull testcontainers images
         run: |
-          docker pull mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04 &
-          docker pull mysql:8.3 &
-          docker pull postgres:16.1-bullseye &
-          docker pull mongo:7.0-jammy &
-          docker pull mariadb:lts &
-          docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sqs &
+          docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
+          docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
+          docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
+          docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
+          docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
+          docker pull budibase/oracle-database:23.2-slim-faststart &
           docker pull minio/minio &
          docker pull redis &
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb:v3.3.3 &
           wait $(jobs -p)
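The workflow now pins the datasource images by digest, taking the digests from packages/server/datasource-sha.env via falti/dotenv-action instead of hard-coding tags. A minimal sketch of how test code could resolve the same digests is below; the helper name, the use of the dotenv package, and the relative path resolution are illustrative assumptions, not code from this commit.

// Sketch only: resolve a pinned image reference from datasource-sha.env so tests
// start the same image the workflow pulls. Names here are assumptions.
import fs from "fs"
import path from "path"
import dotenv from "dotenv"

export function pinnedImage(repo: string, shaKey: string): string {
  const envPath = path.resolve(__dirname, "packages/server/datasource-sha.env")
  const shas = dotenv.parse(fs.readFileSync(envPath))
  const digest = shas[shaKey]
  if (!digest) {
    throw new Error(`No digest found for ${shaKey} in datasource-sha.env`)
  }
  // Mirrors the workflow's `docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }}`
  return `${repo}@${digest}`
}

// usage sketch: pinnedImage("postgres", "POSTGRES_SHA")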

View File

@@ -46,7 +46,7 @@ export default async function setup() {
   await killContainers(containers)
   try {
-    const couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
+    const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",
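For context, this is the testcontainers GenericContainer builder used in the Jest global setup. A minimal standalone sketch of starting the newly pinned image and exposing its mapped ports to the test run follows; the environment variable names (COUCH_DB_URL, COUCH_DB_SQS_URL, COUCHDB_USER) are assumptions for illustration, not taken from this diff.

// Sketch, assuming the standard `testcontainers` npm API.
import { GenericContainer } from "testcontainers"

export default async function setup() {
  const couchdb = await new GenericContainer("budibase/couchdb:v3.3.3")
    .withExposedPorts(5984, 4984)
    .withEnvironment({
      COUCHDB_PASSWORD: "budibase", // value taken from the diff above
      COUCHDB_USER: "budibase",     // assumed companion credential
    })
    .start()

  // Hand the mapped ports to the tests via environment variables.
  const host = couchdb.getHost()
  process.env.COUCH_DB_URL = `http://${host}:${couchdb.getMappedPort(5984)}`
  process.env.COUCH_DB_SQS_URL = `http://${host}:${couchdb.getMappedPort(4984)}`
}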

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.24", "version": "2.29.29",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

View File

@@ -10,7 +10,7 @@
     "@types/proper-lockfile": "^4.1.4",
     "@typescript-eslint/parser": "6.9.0",
     "esbuild": "^0.18.17",
-    "esbuild-node-externals": "^1.8.0",
+    "esbuild-node-externals": "^1.14.0",
     "eslint": "^8.52.0",
     "eslint-plugin-import": "^2.29.0",
     "eslint-plugin-jest": "^27.9.0",

@@ -1 +1 @@
-Subproject commit b03e584e465f620b49a1b688ff4afc973e6c0758
+Subproject commit 32b8fa4643b4f0f74ee89760deffe431ab347ad9

View File

@@ -56,24 +56,24 @@ class CouchDBError extends Error implements DBError {
   constructor(
     message: string,
     info: {
-      status: number | undefined
-      statusCode: number | undefined
+      status?: number
+      statusCode?: number
       name: string
-      errid: string
-      description: string
-      reason: string
-      error: string
+      errid?: string
+      description?: string
+      reason?: string
+      error?: string
     }
   ) {
     super(message)
     const statusCode = info.status || info.statusCode || 500
     this.status = statusCode
     this.statusCode = statusCode
-    this.reason = info.reason
+    this.reason = info.reason || "Unknown"
     this.name = info.name
-    this.errid = info.errid
-    this.description = info.description
-    this.error = info.error
+    this.errid = info.errid || "Unknown"
+    this.description = info.description || "Unknown"
+    this.error = info.error || "Not found"
   }
 }
@@ -246,6 +246,35 @@ export class DatabaseImpl implements Database {
     })
   }

+  async bulkRemove(documents: Document[], opts?: { silenceErrors?: boolean }) {
+    const response: Nano.DocumentBulkResponse[] = await this.performCall(db => {
+      return () =>
+        db.bulk({
+          docs: documents.map(doc => ({
+            ...doc,
+            _deleted: true,
+          })),
+        })
+    })
+    if (opts?.silenceErrors) {
+      return
+    }
+    let errorFound = false
+    let errorMessage: string = "Unable to bulk remove documents: "
+    for (let res of response) {
+      if (res.error) {
+        errorFound = true
+        errorMessage += res.error
+      }
+    }
+    if (errorFound) {
+      throw new CouchDBError(errorMessage, {
+        name: this.name,
+        status: 400,
+      })
+    }
+  }
+
   async post(document: AnyDocument, opts?: DatabasePutOpts) {
     if (!document._id) {
       document._id = newid()
@@ -279,8 +308,12 @@ export class DatabaseImpl implements Database {
   }

   async bulkDocs(documents: AnyDocument[]) {
+    const now = new Date().toISOString()
     return this.performCall(db => {
-      return () => db.bulk({ docs: documents })
+      return () =>
+        db.bulk({
+          docs: documents.map(d => ({ createdAt: now, ...d, updatedAt: now })),
+        })
     })
   }
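Two behaviours fall out of this hunk: bulkDocs now stamps createdAt on first write and updatedAt on every write (the document is spread over { createdAt: now } and under { updatedAt: now }), and bulkRemove deletes a batch in one _bulk_docs call by flagging each doc _deleted. A hedged usage sketch, assuming a caller already holds a Database instance; the document shape and function name are illustrative:

// Sketch only; types assumed to come from @budibase/types as elsewhere in the diff.
import { AnyDocument, Database } from "@budibase/types"

async function archiveAndPurge(db: Database, docs: AnyDocument[]) {
  // createdAt is preserved on existing docs (spread overrides the default),
  // updatedAt is refreshed on every bulk write.
  await db.bulkDocs(docs.map(d => ({ ...d, archived: true })))

  // One bulk call marks every doc _deleted; silenceErrors skips the
  // aggregated CouchDBError when individual rows fail.
  await db.bulkRemove(docs, { silenceErrors: true })
}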

View File

@ -0,0 +1,118 @@
import tk from "timekeeper"
import { DatabaseImpl } from ".."
import { generator, structures } from "../../../../tests"
const initialTime = new Date()
tk.freeze(initialTime)
describe("DatabaseImpl", () => {
const db = new DatabaseImpl(structures.db.id())
beforeEach(() => {
tk.freeze(initialTime)
})
describe("put", () => {
it("persists createdAt and updatedAt fields", async () => {
const id = generator.guid()
await db.put({ _id: id })
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
createdAt: initialTime.toISOString(),
updatedAt: initialTime.toISOString(),
})
})
it("updates updated at fields", async () => {
const id = generator.guid()
await db.put({ _id: id })
tk.travel(100)
await db.put({ ...(await db.get(id)), newValue: 123 })
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
})
})
describe("bulkDocs", () => {
it("persists createdAt and updatedAt fields", async () => {
const ids = generator.unique(() => generator.guid(), 5)
await db.bulkDocs(ids.map(id => ({ _id: id })))
for (const id of ids) {
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
createdAt: initialTime.toISOString(),
updatedAt: initialTime.toISOString(),
})
}
})
it("updates updated at fields", async () => {
const ids = generator.unique(() => generator.guid(), 5)
await db.bulkDocs(ids.map(id => ({ _id: id })))
tk.travel(100)
const docsToUpdate = await Promise.all(
ids.map(async id => ({ ...(await db.get(id)), newValue: 123 }))
)
await db.bulkDocs(docsToUpdate)
for (const id of ids) {
expect(await db.get(id)).toEqual({
_id: id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
}
})
it("keeps existing createdAt", async () => {
const ids = generator.unique(() => generator.guid(), 2)
await db.bulkDocs(ids.map(id => ({ _id: id })))
tk.travel(100)
const newDocs = generator
.unique(() => generator.guid(), 3)
.map(id => ({ _id: id }))
const docsToUpdate = await Promise.all(
ids.map(async id => ({ ...(await db.get(id)), newValue: 123 }))
)
await db.bulkDocs([...newDocs, ...docsToUpdate])
for (const { _id } of docsToUpdate) {
expect(await db.get(_id)).toEqual({
_id,
_rev: expect.any(String),
newValue: 123,
createdAt: initialTime.toISOString(),
updatedAt: new Date().toISOString(),
})
}
for (const { _id } of newDocs) {
expect(await db.get(_id)).toEqual({
_id,
_rev: expect.any(String),
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
})
}
})
})
})

View File

@@ -71,6 +71,16 @@ export class DDInstrumentedDatabase implements Database {
     })
   }

+  bulkRemove(
+    documents: Document[],
+    opts?: { silenceErrors?: boolean }
+  ): Promise<void> {
+    return tracer.trace("db.bulkRemove", span => {
+      span?.addTags({ db_name: this.name, num_docs: documents.length })
+      return this.db.bulkRemove(documents, opts)
+    })
+  }
+
   put(
     document: AnyDocument,
     opts?: DatabasePutOpts | undefined
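The instrumented wrapper applies the same pattern to every Database method: delegate to the wrapped instance inside tracer.trace and tag the span with the database name. A minimal standalone sketch of that decorator pattern, with an illustrative class and method name rather than anything from this diff:

// Sketch, assuming the dd-trace API (tracer.trace resolves the span when the
// returned promise settles).
import tracer from "dd-trace"

class InstrumentedStore<T> {
  constructor(
    private name: string,
    private store: { removeAll(docs: T[]): Promise<void> }
  ) {}

  removeAll(docs: T[]): Promise<void> {
    return tracer.trace("store.removeAll", span => {
      // Tag the span so traces can be filtered by target store and batch size.
      span?.addTags({ db_name: this.name, num_docs: docs.length })
      return this.store.removeAll(docs)
    })
  }
}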

View File

@@ -199,9 +199,8 @@ export const createPlatformUserView = async () => {
 export const queryPlatformView = async <T extends Document>(
   viewName: ViewName,
-  params: DatabaseQueryOpts,
-  opts?: QueryViewOptions
-): Promise<T[] | T> => {
+  params: DatabaseQueryOpts
+): Promise<T[]> => {
   const CreateFuncByName: any = {
     [ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,
     [ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
@@ -209,7 +208,9 @@ export const queryPlatformView = async <T extends Document>(
   return doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
     const createFn = CreateFuncByName[viewName]
-    return queryView(viewName, params, db, createFn, opts)
+    return queryView(viewName, params, db, createFn, {
+      arrayResponse: true,
+    }) as Promise<T[]>
   })
 }

View File

@@ -25,6 +25,11 @@ export async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
   return db.get(emailOrId)
 }

+export async function updateUserDoc(platformUser: PlatformUserById) {
+  const db = getPlatformDB()
+  await db.put(platformUser)
+}
+
 // CREATE

 function newUserIdDoc(id: string, tenantId: string): PlatformUserById {
@@ -113,15 +118,12 @@ export async function addUser(
 export async function removeUser(user: User) {
   const db = getPlatformDB()
   const keys = [user._id!, user.email]
-  const userDocs = await db.allDocs({
+  const userDocs = await db.allDocs<User>({
     keys,
     include_docs: true,
   })
-  const toDelete = userDocs.rows.map((row: any) => {
-    return {
-      ...row.doc,
-      _deleted: true,
-    }
-  })
-  await db.bulkDocs(toDelete)
+  await db.bulkRemove(
+    userDocs.rows.map(row => row.doc!),
+    { silenceErrors: true }
+  )
 }

File diff suppressed because it is too large

View File

@ -1,87 +0,0 @@
import { FieldType, Table, FieldSchema, SqlClient } from "@budibase/types"
import { Knex } from "knex"
export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
columnPrefix: string | undefined
constructor(
client: string,
table: Table,
{ allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
this.columnPrefix = columnPrefix
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
let found = this.table.schema[fieldName]
if (!found && this.columnPrefix) {
const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
found = this.table.schema[prefixRemovedFieldName]
}
return found
}
between(
query: Knex.QueryBuilder,
key: string,
low: number | string,
high: number | string
) {
// Use a between operator if we have 2 valid range values
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[low, high]
)
} else {
const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [low, high])
}
return query
}
lte(query: Knex.QueryBuilder, key: string, low: number | string) {
// Use just a single greater than operator if we only have a low
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
low,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", low)
}
return query
}
gte(query: Knex.QueryBuilder, key: string, high: number | string) {
const field = this.getField(key)
// Use just a single less than operator if we only have a high
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
high,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", high)
}
return query
}
}

View File

@@ -22,6 +22,7 @@ export function getNativeSql(
   query: Knex.SchemaBuilder | Knex.QueryBuilder
 ): SqlQuery | SqlQuery[] {
   let sql = query.toSQL()
+
   if (Array.isArray(sql)) {
     return sql as SqlQuery[]
   }

View File

@@ -18,6 +18,9 @@ import {
   User,
   UserStatus,
   UserGroup,
+  PlatformUserBySsoId,
+  PlatformUserById,
+  AnyDocument,
 } from "@budibase/types"
 import {
   getAccountHolderFromUserIds,
@@ -25,7 +28,11 @@ import {
   isCreator,
   validateUniqueUser,
 } from "./utils"
-import { searchExistingEmails } from "./lookup"
+import {
+  getFirstPlatformUser,
+  getPlatformUsers,
+  searchExistingEmails,
+} from "./lookup"
 import { hash } from "../utils"
 import { validatePassword } from "../security"
@@ -446,9 +453,32 @@ export class UserDB {
       creator => !!creator
     ).length

+    const ssoUsersToDelete: AnyDocument[] = []
     for (let user of usersToDelete) {
+      const platformUser = (await getFirstPlatformUser(
+        user._id!
+      )) as PlatformUserById
+      const ssoId = platformUser.ssoId
+      if (ssoId) {
+        // Need to get the _rev of the SSO user doc to delete it. The view also returns docs that have the ssoId property, so we need to ignore those.
+        const ssoUsers = (await getPlatformUsers(
+          ssoId
+        )) as PlatformUserBySsoId[]
+        ssoUsers
+          .filter(user => user.ssoId == null)
+          .forEach(user => {
+            ssoUsersToDelete.push({
+              ...user,
+              _deleted: true,
+            })
+          })
+      }
+
       await bulkDeleteProcessing(user)
     }

+    // Delete any associated SSO user docs
+    await platform.getPlatformDB().bulkDocs(ssoUsersToDelete)
+
     await UserDB.quotas.removeUsers(toDelete.length, creatorsToDeleteCount)

     // Build Response
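The SSO cleanup above hinges on how platform user docs are keyed. A rough sketch of the doc shapes involved may help; these types are illustrative assumptions for explanation only, and the real definitions live in @budibase/types:

// Illustrative shapes only. A platform user can be indexed three ways:
type PlatformUserByEmail = { _id: string /* email */; tenantId: string; ssoId?: string }
type PlatformUserById = { _id: string /* user id */; tenantId: string; ssoId?: string }
type PlatformUserBySsoId = { _id: string /* ssoId */; tenantId: string; email: string }

// getPlatformUsers(ssoId) can also return the email- and id-keyed docs, because
// they carry an ssoId field. Filtering for docs *without* an ssoId field keeps
// only the ssoId-keyed doc itself, which is the one flagged _deleted here.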

View File

@@ -34,15 +34,22 @@ export async function searchExistingEmails(emails: string[]) {
 }

 // lookup, could be email or userId, either will return a doc
-export async function getPlatformUser(
+export async function getPlatformUsers(
   identifier: string
-): Promise<PlatformUser | null> {
+): Promise<PlatformUser[]> {
   // use the view here and allow to find anyone regardless of casing
   // Use lowercase to ensure email login is case insensitive
-  return (await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
+  return await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
     keys: [identifier.toLowerCase()],
     include_docs: true,
-  })) as PlatformUser
+  })
+}
+
+export async function getFirstPlatformUser(
+  identifier: string
+): Promise<PlatformUser | null> {
+  const platformUserDocs = await getPlatformUsers(identifier)
+  return platformUserDocs[0] ?? null
 }

 export async function getExistingTenantUsers(
@@ -74,15 +81,10 @@ export async function getExistingPlatformUsers(
     keys: lcEmails,
     include_docs: true,
   }
-  const opts = {
-    arrayResponse: true,
-  }
-  return (await dbUtils.queryPlatformView(
+  return await dbUtils.queryPlatformView(
     ViewName.PLATFORM_USERS_LOWERCASE,
-    params,
-    opts
-  )) as PlatformUserByEmail[]
+    params
+  )
 }

 export async function getExistingAccounts(
@@ -93,14 +95,5 @@ export async function getExistingAccounts(
     keys: lcEmails,
     include_docs: true,
   }
-  const opts = {
-    arrayResponse: true,
-  }
-  return (await dbUtils.queryPlatformView(
-    ViewName.ACCOUNT_BY_EMAIL,
-    params,
-    opts
-  )) as AccountMetadata[]
+  return await dbUtils.queryPlatformView(ViewName.ACCOUNT_BY_EMAIL, params)
 }

View File

@@ -1,7 +1,7 @@
 import { CloudAccount, ContextUser, User, UserGroup } from "@budibase/types"
 import * as accountSdk from "../accounts"
 import env from "../environment"
-import { getPlatformUser } from "./lookup"
+import { getFirstPlatformUser } from "./lookup"
 import { EmailUnavailableError } from "../errors"
 import { getTenantId } from "../context"
 import { sdk } from "@budibase/shared-core"
@@ -51,7 +51,7 @@ async function isCreatorByGroupMembership(user?: User | ContextUser) {
 export async function validateUniqueUser(email: string, tenantId: string) {
   // check budibase users in other tenants
   if (env.MULTI_TENANCY) {
-    const tenantUser = await getPlatformUser(email)
+    const tenantUser = await getFirstPlatformUser(email)
     if (tenantUser != null && tenantUser.tenantId !== tenantId) {
       throw new EmailUnavailableError(email)
     }

View File

@@ -1,6 +1,6 @@
 import {
-  CONSTANT_EXTERNAL_ROW_COLS,
-  CONSTANT_INTERNAL_ROW_COLS,
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
 } from "@budibase/shared-core"

 export function expectFunctionWasCalledTimesWith(
@@ -14,7 +14,7 @@ export function expectFunctionWasCalledTimesWith(
 }

 export const expectAnyInternalColsAttributes: {
-  [K in (typeof CONSTANT_INTERNAL_ROW_COLS)[number]]: any
+  [K in (typeof PROTECTED_INTERNAL_COLUMNS)[number]]: any
 } = {
   tableId: expect.anything(),
   type: expect.anything(),
@@ -25,7 +25,7 @@ export const expectAnyInternalColsAttributes: {
 }

 export const expectAnyExternalColsAttributes: {
-  [K in (typeof CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
+  [K in (typeof PROTECTED_EXTERNAL_COLUMNS)[number]]: any
 } = {
   tableId: expect.anything(),
   _id: expect.anything(),

View File

@@ -36,9 +36,11 @@
     <use xlink:href="#spectrum-icon-18-{icon}" />
   </svg>
   <div class="spectrum-InLineAlert-header">{header}</div>
-  {#each split as splitMsg}
-    <div class="spectrum-InLineAlert-content">{splitMsg}</div>
-  {/each}
+  <slot>
+    {#each split as splitMsg}
+      <div class="spectrum-InLineAlert-content">{splitMsg}</div>
+    {/each}
+  </slot>
   {#if onConfirm}
     <div class="spectrum-InLineAlert-footer button">
       <Button {cta} secondary={cta ? false : true} on:click={onConfirm}

View File

@@ -30,7 +30,7 @@
   class:custom={!!color}
   class:square
   class:hoverable
-  style={`--color: ${color};`}
+  style={`--color: ${color ?? "var(--spectrum-global-color-gray-400)"};`}
   class:spectrum-StatusLight--celery={celery}
   class:spectrum-StatusLight--yellow={yellow}
   class:spectrum-StatusLight--fuchsia={fuchsia}
@@ -61,13 +61,17 @@
     min-height: 0;
     padding-top: 0;
     padding-bottom: 0;
+    transition: color ease-out 130ms;
   }
   .spectrum-StatusLight.withText::before {
     margin-right: 10px;
   }
+  .spectrum-StatusLight::before {
+    transition: background-color ease-out 160ms;
+  }
   .custom::before {
-    background: var(--color) !important;
+    background-color: var(--color) !important;
   }
   .square::before {
     width: 14px;
@@ -79,4 +83,14 @@
     cursor: pointer;
     color: var(--spectrum-global-color-gray-900);
   }
+  .spectrum-StatusLight--sizeXS::before {
+    width: 10px;
+    height: 10px;
+    border-radius: 2px;
+  }
+  .spectrum-StatusLight--disabled::before {
+    background-color: var(--spectrum-global-color-gray-400) !important;
+  }
 </style>

View File

@@ -54,6 +54,7 @@
   </div>
   <div class="controls">
     <div
+      class:disabled={!$selectedAutomation?.definition?.trigger}
       on:click={() => {
         testDataModal.show()
       }}
@@ -80,6 +81,7 @@
           automation._id,
           automation.disabled
         )}
+      disabled={!$selectedAutomation?.definition?.trigger}
       value={!automation.disabled}
     />
   </div>

View File

@@ -3,6 +3,7 @@
     automationStore,
     selectedAutomation,
     permissions,
+    selectedAutomationDisplayData,
   } from "stores/builder"
   import {
     Icon,
@@ -14,6 +15,7 @@
     notifications,
     Label,
     AbsTooltip,
+    InlineAlert,
   } from "@budibase/bbui"
   import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@@ -49,6 +51,8 @@
   $: isAppAction && setPermissions(role)
   $: isAppAction && getPermissions(automationId)

+  $: triggerInfo = $selectedAutomationDisplayData?.triggerInfo
+
   async function setPermissions(role) {
     if (!role || !automationId) {
       return
@@ -183,6 +187,12 @@
         {block}
         {webhookModal}
       />
+      {#if isTrigger && triggerInfo}
+        <InlineAlert
+          header={triggerInfo.type}
+          message={`This trigger is tied to the row action ${triggerInfo.rowAction.name} on your ${triggerInfo.table.name} table`}
+        />
+      {/if}
       {#if lastStep}
         <Button on:click={() => testDataModal.show()} cta>
           Finish and test automation

View File

@@ -81,7 +81,7 @@
   // Check the schema to see if required fields have been entered
   $: isError =
     !isTriggerValid(trigger) ||
-    !trigger.schema.outputs.required?.every(
+    !(trigger.schema.outputs.required || []).every(
       required => $memoTestData?.[required] || required !== "row"
     )

View File

@@ -6,6 +6,7 @@
     contextMenuStore,
   } from "stores/builder"
   import { notifications, Icon } from "@budibase/bbui"
+  import { sdk } from "@budibase/shared-core"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
   import UpdateAutomationModal from "components/automation/AutomationPanel/UpdateAutomationModal.svelte"
   import NavItem from "components/common/NavItem.svelte"
@@ -35,45 +36,55 @@
   }

   const getContextMenuItems = () => {
-    return [
-      {
-        icon: "Delete",
-        name: "Delete",
-        keyBind: null,
-        visible: true,
-        disabled: false,
-        callback: confirmDeleteDialog.show,
-      },
-      {
-        icon: "Edit",
-        name: "Edit",
-        keyBind: null,
-        visible: true,
-        disabled: false,
-        callback: updateAutomationDialog.show,
-      },
-      {
-        icon: "Duplicate",
-        name: "Duplicate",
-        keyBind: null,
-        visible: true,
-        disabled: automation.definition.trigger.name === "Webhook",
-        callback: duplicateAutomation,
-      },
-      {
-        icon: automation.disabled ? "CheckmarkCircle" : "Cancel",
-        name: automation.disabled ? "Activate" : "Pause",
-        keyBind: null,
-        visible: true,
-        disabled: false,
-        callback: () => {
-          automationStore.actions.toggleDisabled(
-            automation._id,
-            automation.disabled
-          )
-        },
-      },
-    ]
+    const isRowAction = sdk.automations.isRowAction(automation)
+    const result = []
+    if (!isRowAction) {
+      result.push(
+        ...[
+          {
+            icon: "Delete",
+            name: "Delete",
+            keyBind: null,
+            visible: true,
+            disabled: false,
+            callback: confirmDeleteDialog.show,
+          },
+          {
+            icon: "Edit",
+            name: "Edit",
+            keyBind: null,
+            visible: true,
+            disabled: !automation.definition.trigger,
+            callback: updateAutomationDialog.show,
+          },
+          {
+            icon: "Duplicate",
+            name: "Duplicate",
+            keyBind: null,
+            visible: true,
+            disabled:
+              !automation.definition.trigger ||
+              automation.definition.trigger?.name === "Webhook",
+            callback: duplicateAutomation,
+          },
+        ]
+      )
+    }
+    result.push({
+      icon: automation.disabled ? "CheckmarkCircle" : "Cancel",
+      name: automation.disabled ? "Activate" : "Pause",
+      keyBind: null,
+      visible: true,
+      disabled: !automation.definition.trigger,
+      callback: () => {
+        automationStore.actions.toggleDisabled(
+          automation._id,
+          automation.disabled
+        )
+      },
+    })
+    return result
   }

   const openContextMenu = e => {
@@ -89,7 +100,7 @@
   on:contextmenu={openContextMenu}
   {icon}
   iconColor={"var(--spectrum-global-color-gray-900)"}
-  text={automation.name}
+  text={automation.displayName}
   selected={automation._id === $selectedAutomation?._id}
   hovering={automation._id === $contextMenuStore.id}
   on:click={() => automationStore.actions.select(automation._id)}

View File

@@ -17,19 +17,26 @@
         automation.name.toLowerCase().includes(searchString.toLowerCase())
       )
     })
+    .map(automation => ({
+      ...automation,
+      displayName:
+        $automationStore.automationDisplayData[automation._id]?.displayName ||
+        automation.name,
+    }))
     .sort((a, b) => {
-      const lowerA = a.name.toLowerCase()
-      const lowerB = b.name.toLowerCase()
+      const lowerA = a.displayName.toLowerCase()
+      const lowerB = b.displayName.toLowerCase()
       return lowerA > lowerB ? 1 : -1
     })

   $: groupedAutomations = filteredAutomations.reduce((acc, auto) => {
-    acc[auto.definition.trigger.event] ??= {
-      icon: auto.definition.trigger.icon,
-      name: (auto.definition.trigger?.name || "").toUpperCase(),
+    const catName = auto.definition?.trigger?.event || "No Trigger"
+    acc[catName] ??= {
+      icon: auto.definition?.trigger?.icon || "AlertCircle",
+      name: (auto.definition?.trigger?.name || "No Trigger").toUpperCase(),
       entries: [],
     }
-    acc[auto.definition.trigger.event].entries.push(auto)
+    acc[catName].entries.push(auto)
     return acc
   }, {})

View File

@@ -21,7 +21,9 @@
   $: nameError =
     nameTouched && !name ? "Please specify a name for the automation." : null

-  $: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
+  $: triggers = Object.entries(
+    $automationStore.blockDefinitions.CREATABLE_TRIGGER
+  )

   async function createAutomation() {
     try {

View File

@@ -58,6 +58,7 @@
     AutomationEventType,
     AutomationStepType,
     AutomationActionStepId,
+    AutomationCustomIOType,
   } from "@budibase/types"
   import { FIELDS } from "constants/backend"
   import PropField from "./PropField.svelte"
@@ -394,7 +395,9 @@
    */
   const onRowTriggerUpdate = async update => {
     if (
-      ["tableId", "filters", "meta"].some(key => Object.hasOwn(update, key))
+      ["tableId", AutomationCustomIOType.FILTERS, "meta"].some(key =>
+        Object.hasOwn(update, key)
+      )
     ) {
       try {
         let updatedAutomation
@@ -744,7 +747,11 @@
     for (let [key, field] of properties) {
       // need to look for the builder definition (keyed separately, see saveFilters)
       const defKey = `${key}-def`
-      if (field.customType === "filters" && inputs?.[defKey]) {
+      if (
+        (field.customType === AutomationCustomIOType.FILTERS ||
+          field.customType === AutomationCustomIOType.TRIGGER_FILTER) &&
+        inputs?.[defKey]
+      ) {
         filters = inputs[defKey]
         break
       }
@@ -846,7 +853,7 @@
           <Label>
             {label}
           </Label>
-          {#if value.customType === "trigger_filter"}
+          {#if value.customType === AutomationCustomIOType.TRIGGER_FILTER}
             <Icon
               hoverable
               on:click={() =>
@@ -869,6 +876,7 @@
               options={value.enum}
               getOptionLabel={(x, idx) =>
                 value.pretty ? value.pretty[idx] : x}
+              disabled={value.readonly}
             />
           {:else if value.type === "json"}
             <Editor
@@ -877,6 +885,7 @@
               mode="json"
               value={inputData[key]?.value}
               on:change={e => onChange({ [key]: e.detail })}
+              readOnly={value.readonly}
             />
           {:else if value.type === "boolean"}
             <div style="margin-top: 10px">
@@ -884,6 +893,7 @@
                 text={value.title}
                 value={inputData[key]}
                 on:change={e => onChange({ [key]: e.detail })}
+                disabled={value.readonly}
               />
             </div>
           {:else if value.type === "date"}
@@ -897,6 +907,7 @@
               allowJS={true}
               updateOnChange={false}
               drawerLeft="260px"
+              disabled={value.readonly}
             >
               <DatePicker
                 value={inputData[key]}
@@ -908,6 +919,7 @@
               on:change={e => onChange({ [key]: e.detail })}
               value={inputData[key]}
               options={Object.keys(table?.schema || {})}
+              disabled={value.readonly}
             />
           {:else if value.type === "attachment" || value.type === "signature_single"}
             <div class="attachment-field-wrapper">
@@ -977,7 +989,7 @@
             {/if}
           </div>
         </div>
-      {:else if value.customType === "filters" || value.customType === "trigger_filter"}
+      {:else if value.customType === AutomationCustomIOType.FILTERS || value.customType === AutomationCustomIOType.TRIGGER_FILTER}
         <ActionButton fullWidth on:click={drawer.show}
           >{filters.length > 0
             ? "Update Filter"
@@ -1021,6 +1033,7 @@
           {isTrigger}
           value={inputData[key]}
           on:change={e => onChange({ [key]: e.detail })}
+          disabled={value.readonly}
         />
       {:else if value.customType === "webhookUrl"}
         <WebhookDisplay value={inputData[key]} />

View File

@@ -13,7 +13,7 @@
   const { datasource } = getContext("grid")

-  $: triggers = $automationStore.blockDefinitions.TRIGGER
+  $: triggers = $automationStore.blockDefinitions.CREATABLE_TRIGGER

   $: table = $tables.list.find(table => table._id === $datasource.tableId)

View File

@@ -17,8 +17,8 @@
     SWITCHABLE_TYPES,
     ValidColumnNameRegex,
     helpers,
-    CONSTANT_INTERNAL_ROW_COLS,
-    CONSTANT_EXTERNAL_ROW_COLS,
+    PROTECTED_INTERNAL_COLUMNS,
+    PROTECTED_EXTERNAL_COLUMNS,
   } from "@budibase/shared-core"
   import { createEventDispatcher, getContext, onMount } from "svelte"
   import { cloneDeep } from "lodash/fp"
@@ -489,8 +489,8 @@
     }
     const newError = {}
     const prohibited = externalTable
-      ? CONSTANT_EXTERNAL_ROW_COLS
-      : CONSTANT_INTERNAL_ROW_COLS
+      ? PROTECTED_EXTERNAL_COLUMNS
+      : PROTECTED_INTERNAL_COLUMNS
     if (!externalTable && fieldInfo.name?.startsWith("_")) {
       newError.name = `Column name cannot start with an underscore.`
     } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {

View File

@@ -33,6 +33,5 @@
   title="Confirm Deletion"
 >
   Are you sure you wish to delete the datasource
-  <i>{datasource.name}?</i>
-  This action cannot be undone.
+  <i>{datasource.name}</i>? This action cannot be undone.
 </ConfirmDialog>

View File

@@ -100,51 +100,43 @@
   async function handleFile(e) {
     loading = true
     error = null
-    const previousValidation = validation
     validation = {}

     try {
       const response = await parseFile(e)
       rows = response.rows
       fileName = response.fileName
-      const newValidateHash = JSON.stringify(rows)
-      if (newValidateHash === validateHash) {
-        validation = previousValidation
-      } else {
-        await validate(rows)
-        validateHash = newValidateHash
-      }
     } catch (e) {
-      error = e.message || e
-    } finally {
       loading = false
+      error = e
     }
   }

   async function validate(rows) {
-    loading = true
     error = null
     validation = {}
     allValid = false

-    try {
-      if (rows.length > 0) {
-        const response = await API.validateExistingTableImport({
-          rows,
-          tableId,
-        })
-        validation = response.schemaValidation
-        invalidColumns = response.invalidColumns
-        allValid = response.allValid
-      }
-    } catch (e) {
-      error = e.message
-    }
+    if (rows.length > 0) {
+      const response = await API.validateExistingTableImport({
+        rows,
+        tableId,
+      })
+      validation = response.schemaValidation
+      invalidColumns = response.invalidColumns
+      allValid = response.allValid
+    }
     loading = false
   }
+
+  $: {
+    // binding in consumer is causing double renders here
+    const newValidateHash = JSON.stringify(rows)
+    if (newValidateHash !== validateHash) {
+      validate(rows)
+    }
+    validateHash = newValidateHash
+  }
 </script>
View File

@@ -1,9 +1,9 @@
 <script>
   import { Select, Icon } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
+  import { canBeDisplayColumn, utils } from "@budibase/shared-core"
   import { API } from "api"
   import { parseFile } from "./utils"
-  import { canBeDisplayColumn } from "@budibase/shared-core"

   export let rows = []
   export let schema = {}
@@ -97,6 +97,8 @@
   let errors = {}
   let selectedColumnTypes = {}

+  let rawRows = []
+
   $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
     return validation[column] && canBeDisplayColumn(schema[column].type)
   })
@@ -106,6 +108,8 @@
   }

   $: {
+    rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
+
     // binding in consumer is causing double renders here
     const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
     if (newValidateHash !== validateHash) {
@@ -122,7 +126,7 @@
     try {
       const response = await parseFile(e)
-      rows = response.rows
+      rawRows = response.rows
       schema = response.schema
       fileName = response.fileName
       selectedColumnTypes = Object.entries(response.schema).reduce(
@@ -188,7 +192,7 @@
         type="file"
         on:change={handleFile}
       />
-      <label for="file-upload" class:uploaded={rows.length > 0}>
+      <label for="file-upload" class:uploaded={rawRows.length > 0}>
         {#if error}
           Error: {error}
         {:else if fileName}
@@ -198,7 +202,7 @@
         {/if}
       </label>
     </div>
-    {#if rows.length > 0 && !error}
+    {#if rawRows.length > 0 && !error}
       <div class="schema-fields">
         {#each Object.entries(schema) as [name, column]}
           <div class="field">
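Both import components now guard revalidation behind a serialized hash of their inputs, so the validation endpoint is only hit when the rows (or schema) actually change. A standalone sketch of that memoization pattern, with an illustrative function name and the real API call stubbed out by a passed-in validator:

// Sketch only: re-run the expensive validator only when the serialized input changes.
let validateHash = ""

async function maybeValidate(
  rows: Record<string, any>[],
  validator: (rows: Record<string, any>[]) => Promise<void>
) {
  const newValidateHash = JSON.stringify(rows)
  if (newValidateHash !== validateHash) {
    await validator(rows)
  }
  validateHash = newValidateHash
}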

View File

@ -1,7 +1,7 @@
<script> <script>
import { goto, params } from "@roxi/routify" import { goto, params } from "@roxi/routify"
import { tables, datasources, screenStore } from "stores/builder" import { appStore, tables, datasources, screenStore } from "stores/builder"
import { Input, notifications } from "@budibase/bbui" import { InlineAlert, Link, Input, notifications } from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte" import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend" import { DB_TYPE_EXTERNAL } from "constants/backend"
@ -9,28 +9,41 @@
let confirmDeleteDialog let confirmDeleteDialog
export const show = () => { let screensPossiblyAffected = []
templateScreens = $screenStore.screens.filter( let viewsMessage = ""
screen => screen.autoTableId === table._id let deleteTableName
)
willBeDeleted = ["All table data"].concat( const getViewsMessage = () => {
templateScreens.map(screen => `Screen ${screen.routing?.route || ""}`) const views = Object.values(table?.views ?? [])
) if (views.length < 1) {
confirmDeleteDialog.show() return ""
}
if (views.length === 1) {
return ", including 1 view"
}
return `, including ${views.length} views`
} }
let templateScreens export const show = () => {
let willBeDeleted viewsMessage = getViewsMessage()
let deleteTableName screensPossiblyAffected = $screenStore.screens
.filter(
screen => screen.autoTableId === table._id && screen.routing?.route
)
.map(screen => ({
text: screen.routing.route,
url: `/builder/app/${$appStore.appId}/design/${screen._id}`,
}))
confirmDeleteDialog.show()
}
async function deleteTable() { async function deleteTable() {
const isSelected = $params.tableId === table._id const isSelected = $params.tableId === table._id
try { try {
await tables.delete(table) await tables.delete(table)
// Screens need deleted one at a time because of undo/redo
for (let screen of templateScreens) {
await screenStore.delete(screen)
}
if (table.sourceType === DB_TYPE_EXTERNAL) { if (table.sourceType === DB_TYPE_EXTERNAL) {
await datasources.fetch() await datasources.fetch()
} }
@ -46,6 +59,10 @@
function hideDeleteDialog() { function hideDeleteDialog() {
deleteTableName = "" deleteTableName = ""
} }
const autofillTableName = () => {
deleteTableName = table.name
}
</script> </script>
<ConfirmDialog <ConfirmDialog
@ -56,34 +73,103 @@
title="Confirm Deletion" title="Confirm Deletion"
disabled={deleteTableName !== table.name} disabled={deleteTableName !== table.name}
> >
<p> <div class="content">
Are you sure you wish to delete the table <p class="firstWarning">
<b>{table.name}?</b> Are you sure you wish to delete the table
The following will also be deleted: <span class="tableNameLine">
</p> <!-- svelte-ignore a11y-click-events-have-key-events -->
<b> <!-- svelte-ignore a11y-no-static-element-interactions -->
<div class="delete-items"> <b on:click={autofillTableName} class="tableName">{table.name}</b>
{#each willBeDeleted as item} <span>?</span>
<div>{item}</div> </span>
{/each} </p>
</div>
</b> <p class="secondWarning">All table data will be deleted{viewsMessage}.</p>
<p> <p class="thirdWarning">This action <b>cannot be undone</b>.</p>
This action cannot be undone - to continue please enter the table name below
to confirm. {#if screensPossiblyAffected.length > 0}
</p> <div class="affectedScreens">
<Input bind:value={deleteTableName} placeholder={table.name} /> <InlineAlert
header="The following screens were originally generated from this table and may no longer function as expected"
>
<ul class="affectedScreensList">
{#each screensPossiblyAffected as item}
<li>
<Link quiet overBackground target="_blank" href={item.url}
>{item.text}</Link
>
</li>
{/each}
</ul>
</InlineAlert>
</div>
{/if}
<p class="fourthWarning">Please enter the app name below to confirm.</p>
<Input bind:value={deleteTableName} placeholder={table.name} />
</div>
</ConfirmDialog> </ConfirmDialog>
<style> <style>
div.delete-items { .content {
margin-top: 10px; margin-top: 0;
margin-bottom: 10px; max-width: 320px;
margin-left: 10px;
} }
div.delete-items div { .firstWarning {
margin: 0 0 12px;
max-width: 100%;
}
.tableNameLine {
display: inline-flex;
max-width: 100%;
vertical-align: bottom;
}
.tableName {
flex-grow: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
cursor: pointer;
}
.secondWarning {
margin: 0;
max-width: 100%;
}
.thirdWarning {
margin: 0 0 12px;
max-width: 100%;
}
.affectedScreens {
margin: 18px 0;
max-width: 100%;
margin-bottom: 24px;
}
.affectedScreens :global(.spectrum-InLineAlert) {
max-width: 100%;
}
.affectedScreensList {
padding: 0;
margin-bottom: 0;
}
.affectedScreensList li {
display: block;
max-width: 100%;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
margin-top: 4px; margin-top: 4px;
font-weight: 600; }
.fourthWarning {
margin: 12px 0 6px;
max-width: 100%;
} }
</style> </style>

View File

@@ -78,7 +78,7 @@
       await datasources.fetch()
       await afterSave(table)
     } catch (e) {
-      notifications.error(e)
+      notifications.error(e.message || e)
       // reload in case the table was created
       await tables.fetch()
     }

View File

@ -0,0 +1,12 @@
<script>
import { RoleUtils } from "@budibase/frontend-core"
import { StatusLight } from "@budibase/bbui"
export let id
export let size = "M"
export let disabled = false
$: color = RoleUtils.getRoleColour(id)
</script>
<StatusLight square {disabled} {size} {color} />

View File

@@ -1,20 +1,32 @@
 <script>
-  import { Layout, Input } from "@budibase/bbui"
+  import { FancyForm, FancyInput } from "@budibase/bbui"
   import { createValidationStore, requiredValidator } from "helpers/validation"

   export let password
+  export let passwordForm
   export let error

+  const validatePassword = value => {
+    if (!value || value.length < 12) {
+      return "Please enter at least 12 characters. We recommend using machine generated or random passwords."
+    }
+    return null
+  }
+
   const [firstPassword, passwordError, firstTouched] = createValidationStore(
     "",
     requiredValidator
   )
   const [repeatPassword, _, repeatTouched] = createValidationStore(
     "",
-    requiredValidator
+    requiredValidator,
+    validatePassword
   )

   $: password = $firstPassword
+  $: firstPasswordError =
+    ($firstTouched && $passwordError) ||
+    ($repeatTouched && validatePassword(password))
   $: error =
     !$firstPassword ||
     !$firstTouched ||
@@ -22,19 +34,19 @@
     $firstPassword !== $repeatPassword
 </script>

-<Layout gap="XS" noPadding>
-  <Input
+<FancyForm bind:this={passwordForm}>
+  <FancyInput
     label="Password"
     type="password"
-    error={$firstTouched && $passwordError}
+    error={firstPasswordError}
     bind:value={$firstPassword}
   />
-  <Input
-    label="Repeat Password"
+  <FancyInput
+    label="Repeat password"
     type="password"
     error={$repeatTouched &&
       $firstPassword !== $repeatPassword &&
       "Passwords must match"}
     bind:value={$repeatPassword}
   />
-</Layout>
+</FancyForm>

View File

@@ -115,6 +115,7 @@
   })
   $: fields = bindings
     .filter(x => arrayTypes.includes(x.fieldSchema?.type))
+    .filter(x => x.fieldSchema?.tableId != null)
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding } = binding
       const { name, type, tableId } = binding.fieldSchema

View File

@@ -9,7 +9,10 @@ import { Constants } from "@budibase/frontend-core"

 const { TypeIconMap } = Constants

-export { RelationshipType } from "@budibase/types"
+export {
+  RelationshipType,
+  RowExportFormat as ROW_EXPORT_FORMATS,
+} from "@budibase/types"

 export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType

@@ -307,9 +310,3 @@ export const DatasourceTypes = {
   GRAPH: "Graph",
   API: "API",
 }
-
-export const ROW_EXPORT_FORMATS = {
-  CSV: "csv",
-  JSON: "json",
-  JSON_WITH_SCHEMA: "jsonWithSchema",
-}

View File

@ -1,108 +1,88 @@
<script> <script>
import ScreenDetailsModal from "components/design/ScreenDetailsModal.svelte" import ScreenDetailsModal from "components/design/ScreenDetailsModal.svelte"
import DatasourceModal from "./DatasourceModal.svelte" import DatasourceModal from "./DatasourceModal.svelte"
import ScreenRoleModal from "./ScreenRoleModal.svelte"
import sanitizeUrl from "helpers/sanitizeUrl" import sanitizeUrl from "helpers/sanitizeUrl"
import FormTypeModal from "./FormTypeModal.svelte" import FormTypeModal from "./FormTypeModal.svelte"
import { Modal, notifications } from "@budibase/bbui" import { Modal, notifications } from "@budibase/bbui"
import { import {
screenStore, screenStore,
navigationStore, navigationStore,
tables, permissions as permissionsStore,
builderStore, builderStore,
} from "stores/builder" } from "stores/builder"
import { auth } from "stores/portal" import { auth } from "stores/portal"
import { get } from "svelte/store" import { get } from "svelte/store"
import getTemplates from "templates"
import { Roles } from "constants/backend"
import { capitalise } from "helpers" import { capitalise } from "helpers"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { TOUR_KEYS } from "components/portal/onboarding/tours.js" import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
import blankScreen from "templates/blankScreen"
import formScreen from "templates/formScreen" import formScreen from "templates/formScreen"
import gridListScreen from "templates/gridListScreen" import gridScreen from "templates/gridScreen"
import gridDetailsScreen from "templates/gridDetailsScreen" import gridDetailsScreen from "templates/gridDetailsScreen"
import { Roles } from "constants/backend"
let mode let mode
let pendingScreen
// Modal refs
let screenDetailsModal let screenDetailsModal
let datasourceModal let datasourceModal
let screenAccessRoleModal
let formTypeModal let formTypeModal
// Cache variables for workflow let selectedTablesAndViews = []
let screenAccessRole = Roles.BASIC let permissions = {}
let templates = null export const show = newMode => {
let screens = null mode = newMode
selectedTablesAndViews = []
permissions = {}
let selectedDatasources = null if (mode === "grid" || mode === "gridDetails" || mode === "form") {
let blankScreenUrl = null datasourceModal.show()
let screenMode = null } else if (mode === "blank") {
let formType = null screenDetailsModal.show()
} else {
// Creates an array of screens, checking and sanitising their URLs throw new Error("Invalid mode provided")
const createScreens = async ({ screens, screenAccessRole }) => {
if (!screens?.length) {
return
} }
}
const createScreen = async screen => {
try { try {
let createdScreens = [] // Check we aren't clashing with an existing URL
if (hasExistingUrl(screen.routing.route, screen.routing.roleId)) {
for (let screen of screens) { let suffix = 2
// Check we aren't clashing with an existing URL let candidateUrl = makeCandidateUrl(screen, suffix)
if (hasExistingUrl(screen.routing.route)) { while (hasExistingUrl(candidateUrl, screen.routing.roleId)) {
let suffix = 2 candidateUrl = makeCandidateUrl(screen, ++suffix)
let candidateUrl = makeCandidateUrl(screen, suffix)
while (hasExistingUrl(candidateUrl)) {
candidateUrl = makeCandidateUrl(screen, ++suffix)
}
screen.routing.route = candidateUrl
} }
screen.routing.route = candidateUrl
// Sanitise URL
screen.routing.route = sanitizeUrl(screen.routing.route)
// Use the currently selected role
if (!screenAccessRole) {
return
}
screen.routing.roleId = screenAccessRole
// Create the screen
const response = await screenStore.save(screen)
createdScreens.push(response)
// Add link in layout. We only ever actually create 1 screen now, even
// for autoscreens, so it's always safe to do this.
await navigationStore.saveLink(
screen.routing.route,
capitalise(screen.routing.route.split("/")[1]),
screenAccessRole
)
} }
return createdScreens screen.routing.route = sanitizeUrl(screen.routing.route)
return await screenStore.save(screen)
} catch (error) { } catch (error) {
console.error(error) console.error(error)
notifications.error("Error creating screens") notifications.error("Error creating screens")
} }
} }
const addNavigationLink = async screen =>
await navigationStore.saveLink(
screen.routing.route,
capitalise(screen.routing.route.split("/")[1]),
screen.routing.roleId
)
// Checks if any screens exist in the store with the given route and // Checks if any screens exist in the store with the given route and
// currently selected role // currently selected role
const hasExistingUrl = url => { const hasExistingUrl = (url, screenAccessRole) => {
const roleId = screenAccessRole
const screens = get(screenStore).screens.filter( const screens = get(screenStore).screens.filter(
s => s.routing.roleId === roleId s => s.routing.roleId === screenAccessRole
) )
return !!screens.find(s => s.routing?.route === url) return !!screens.find(s => s.routing?.route === url)
} }
// Constructs a candidate URL for a new screen, suffixing the base of the // Constructs a candidate URL for a new screen, appending a given suffix to the
// screen's URL with a given suffix. // screen's URL
// e.g. "/sales/:id" => "/sales-1/:id" // e.g. "/sales/:id" => "/sales-1/:id"
const makeCandidateUrl = (screen, suffix) => { const makeCandidateUrl = (screen, suffix) => {
let url = screen.routing?.route || "" let url = screen.routing?.route || ""
@ -117,105 +97,79 @@
} }
} }
// Handler for NewScreenModal const onSelectDatasources = async () => {
export const show = newMode => { if (mode === "form") {
mode = newMode
templates = null
screens = null
selectedDatasources = null
blankScreenUrl = null
screenMode = mode
pendingScreen = null
screenAccessRole = Roles.BASIC
formType = null
if (mode === "grid" || mode === "gridDetails" || mode === "form") {
datasourceModal.show()
} else if (mode === "blank") {
let templates = getTemplates($tables.list)
const blankScreenTemplate = templates.find(
t => t.id === "createFromScratch"
)
pendingScreen = blankScreenTemplate.create()
screenDetailsModal.show()
} else {
throw new Error("Invalid mode provided")
}
}
// Handler for DatasourceModal confirmation, move to screen access select
const confirmScreenDatasources = async ({ datasources }) => {
selectedDatasources = datasources
if (screenMode === "form") {
formTypeModal.show() formTypeModal.show()
} else { } else if (mode === "grid") {
screenAccessRoleModal.show() await createGridScreen()
} else if (mode === "gridDetails") {
await createGridDetailsScreen()
} }
} }
// Handler for Datasource Screen Creation const createBlankScreen = async ({ screenUrl }) => {
const completeDatasourceScreenCreation = async () => { const screenTemplate = blankScreen(screenUrl)
templates = const screen = await createScreen(screenTemplate)
mode === "grid" await addNavigationLink(screenTemplate)
? gridListScreen(selectedDatasources)
: gridDetailsScreen(selectedDatasources)
const screens = templates.map(template => { loadNewScreen(screen)
let screenTemplate = template.create()
screenTemplate.autoTableId = template.resourceId
return screenTemplate
})
const createdScreens = await createScreens({ screens, screenAccessRole })
loadNewScreen(createdScreens)
} }
const confirmScreenBlank = async ({ screenUrl }) => { const createGridScreen = async () => {
blankScreenUrl = screenUrl let firstScreen = null
screenAccessRoleModal.show()
}
// Submit request for a blank screen for (let tableOrView of selectedTablesAndViews) {
const confirmBlankScreenCreation = async ({ const screenTemplate = gridScreen(
screenUrl, tableOrView,
screenAccessRole, permissions[tableOrView.id]
}) => { )
if (!pendingScreen) {
return
}
pendingScreen.routing.route = screenUrl
const createdScreens = await createScreens({
screens: [pendingScreen],
screenAccessRole,
})
loadNewScreen(createdScreens)
}
const onConfirmFormType = () => { const screen = await createScreen(screenTemplate)
screenAccessRoleModal.show() await addNavigationLink(screen)
}
const loadNewScreen = createdScreens => { firstScreen ??= screen
const lastScreen = createdScreens.slice(-1)[0]
// Go to new screen
if (lastScreen?.props?._children.length) {
// Focus on the main component for the streen type
const mainComponent = lastScreen?.props?._children?.[0]._id
$goto(`./${lastScreen._id}/${mainComponent}`)
} else {
$goto(`./${lastScreen._id}`)
} }
screenStore.select(lastScreen._id) loadNewScreen(firstScreen)
} }
const confirmFormScreenCreation = async () => { const createGridDetailsScreen = async () => {
templates = formScreen(selectedDatasources, { actionType: formType }) let firstScreen = null
screens = templates.map(template => {
let screenTemplate = template.create() for (let tableOrView of selectedTablesAndViews) {
return screenTemplate const screenTemplate = gridDetailsScreen(
}) tableOrView,
const createdScreens = await createScreens({ screens, screenAccessRole }) permissions[tableOrView.id]
)
const screen = await createScreen(screenTemplate)
await addNavigationLink(screen)
firstScreen ??= screen
}
loadNewScreen(firstScreen)
}
const createFormScreen = async formType => {
let firstScreen = null
for (let tableOrView of selectedTablesAndViews) {
const screenTemplate = formScreen(
tableOrView,
formType,
permissions[tableOrView.id]
)
const screen = await createScreen(screenTemplate)
// Only add a navigation link for `Create`, as both `Update` and `View`
// require an `id` in their URL in order to function.
if (formType === "Create") {
await addNavigationLink(screen)
}
firstScreen ??= screen
}
if (formType === "Update" || formType === "Create") { if (formType === "Update" || formType === "Create") {
const associatedTour = const associatedTour =
@ -229,66 +183,89 @@
} }
} }
// Go to new screen loadNewScreen(firstScreen)
loadNewScreen(createdScreens)
} }
// Submit screen config for creation. const loadNewScreen = screen => {
const confirmScreenCreation = async () => { if (screen?.props?._children.length) {
if (screenMode === "blank") { // Focus on the main component for the screen type
confirmBlankScreenCreation({ const mainComponent = screen?.props?._children?.[0]._id
screenUrl: blankScreenUrl, $goto(`./${screen._id}/${mainComponent}`)
screenAccessRole,
})
} else if (screenMode === "form") {
confirmFormScreenCreation()
} else { } else {
completeDatasourceScreenCreation() $goto(`./${screen._id}`)
} }
screenStore.select(screen._id)
} }
const roleSelectBack = () => { const fetchPermission = resourceId => {
if (screenMode === "blank") { permissions[resourceId] = { loading: true, read: null, write: null }
screenDetailsModal.show()
permissionsStore
.forResource(resourceId)
.then(permission => {
if (permissions[resourceId]?.loading) {
permissions[resourceId] = {
loading: false,
read: permission?.read?.role,
write: permission?.write?.role,
}
}
})
.catch(e => {
console.error("Error fetching permission data: ", e)
if (permissions[resourceId]?.loading) {
permissions[resourceId] = {
loading: false,
read: Roles.PUBLIC,
write: Roles.PUBLIC,
}
}
})
}
const deletePermission = resourceId => {
delete permissions[resourceId]
permissions = permissions
}
const handleTableOrViewToggle = ({ detail: tableOrView }) => {
const alreadySelected = selectedTablesAndViews.some(
selected => selected.id === tableOrView.id
)
if (!alreadySelected) {
fetchPermission(tableOrView.id)
selectedTablesAndViews = [...selectedTablesAndViews, tableOrView]
} else { } else {
datasourceModal.show() deletePermission(tableOrView.id)
selectedTablesAndViews = selectedTablesAndViews.filter(
selected => selected.id !== tableOrView.id
)
} }
} }
</script> </script>
<Modal bind:this={datasourceModal} autoFocus={false}> <Modal bind:this={datasourceModal} autoFocus={false}>
<DatasourceModal {mode} onConfirm={confirmScreenDatasources} /> <DatasourceModal
</Modal> {selectedTablesAndViews}
{permissions}
<Modal bind:this={screenAccessRoleModal}> onConfirm={onSelectDatasources}
<ScreenRoleModal on:toggle={handleTableOrViewToggle}
onConfirm={() => {
confirmScreenCreation()
}}
bind:screenAccessRole
onCancel={roleSelectBack}
screenUrl={blankScreenUrl}
confirmText={screenMode === "form" ? "Confirm" : "Done"}
/> />
</Modal> </Modal>
<Modal bind:this={screenDetailsModal}> <Modal bind:this={screenDetailsModal}>
<ScreenDetailsModal <ScreenDetailsModal onConfirm={createBlankScreen} />
onConfirm={confirmScreenBlank}
initialUrl={blankScreenUrl}
/>
</Modal> </Modal>
<Modal bind:this={formTypeModal}> <Modal bind:this={formTypeModal}>
<FormTypeModal <FormTypeModal
onConfirm={onConfirmFormType} onConfirm={createFormScreen}
onCancel={() => { onCancel={() => {
formTypeModal.hide() formTypeModal.hide()
datasourceModal.show() datasourceModal.show()
}} }}
on:select={e => {
formType = e.detail
}}
type={formType}
/> />
</Modal> </Modal>
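
For orientation, a minimal sketch of the per-resource permission entries this component caches and how the generated screens pick a role from them. The types and names below are illustrative assumptions, not the actual Budibase definitions:

// Assumed shape of permissions[tableOrView.id] built by fetchPermission() above
interface ResourcePermission {
  loading: boolean
  read: string | null // role id applied to screens that only read data
  write: string | null // role id applied to screens that write data
}

// Mirrors getRole() in formScreen.js further down in this diff:
// "View" forms only read, so they take the read role; Create/Update take write.
function pickRole(
  p: ResourcePermission,
  actionType: "Create" | "Update" | "View"
): string | null {
  return actionType === "View" ? p.read : p.write
}

// e.g. pickRole(permissions[tableOrView.id], "Update") -> role for the edit form screen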

View File

@ -1,42 +1,95 @@
<script> <script>
import { ModalContent, Layout, notifications, Body } from "@budibase/bbui" import { ModalContent, Layout, notifications, Body } from "@budibase/bbui"
import { datasources } from "stores/builder" import { datasources as datasourcesStore } from "stores/builder"
import ICONS from "components/backend/DatasourceNavigator/icons" import ICONS from "components/backend/DatasourceNavigator/icons"
import { IntegrationNames } from "constants" import { IntegrationNames } from "constants"
import { onMount } from "svelte" import { createEventDispatcher, onMount } from "svelte"
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte" import TableOrViewOption from "./TableOrViewOption.svelte"
export let onCancel
export let onConfirm export let onConfirm
export let selectedTablesAndViews
export let permissions
let selectedSources = [] const dispatch = createEventDispatcher()
$: filteredSources = $datasources.list?.filter(datasource => { const getViews = table => {
return datasource.source !== IntegrationNames.REST && datasource["entities"] const views = Object.values(table.views || {}).filter(
}) view => view.version === 2
const toggleSelection = datasource => {
const exists = selectedSources.find(
d => d.resourceId === datasource.resourceId
) )
if (exists) {
selectedSources = selectedSources.filter( return views.map(view => ({
d => d.resourceId === datasource.resourceId icon: "Remove",
) name: view.name,
} else { id: view.id,
selectedSources = [...selectedSources, datasource] clientData: {
} ...view,
type: "viewV2",
label: view.name,
},
}))
} }
const confirmDatasourceSelection = async () => { const getTablesAndViews = datasource => {
await onConfirm({ let tablesAndViews = []
datasources: selectedSources, const rawTables = Array.isArray(datasource.entities)
}) ? datasource.entities
: Object.values(datasource.entities ?? {})
for (const rawTable of rawTables) {
if (rawTable._id === "ta_users") {
continue
}
const table = {
icon: "Table",
name: rawTable.name,
id: rawTable._id,
clientData: {
...rawTable,
label: rawTable.name,
tableId: rawTable._id,
type: "table",
},
}
tablesAndViews = tablesAndViews.concat([table, ...getViews(rawTable)])
}
return tablesAndViews
}
const getDatasources = rawDatasources => {
const datasources = []
for (const rawDatasource of rawDatasources) {
if (
rawDatasource.source === IntegrationNames.REST ||
!rawDatasource["entities"]
) {
continue
}
const datasource = {
name: rawDatasource.name,
iconComponent: ICONS[rawDatasource.source],
tablesAndViews: getTablesAndViews(rawDatasource),
}
datasources.push(datasource)
}
return datasources
}
$: datasources = getDatasources($datasourcesStore.list)
const toggleSelection = tableOrView => {
dispatch("toggle", tableOrView)
} }
onMount(async () => { onMount(async () => {
try { try {
await datasources.fetch() await datasourcesStore.fetch()
} catch (error) { } catch (error) {
notifications.error("Error fetching datasources") notifications.error("Error fetching datasources")
} }
@ -48,66 +101,35 @@
title="Autogenerated screens" title="Autogenerated screens"
confirmText="Confirm" confirmText="Confirm"
cancelText="Back" cancelText="Back"
onConfirm={confirmDatasourceSelection} {onConfirm}
{onCancel} disabled={!selectedTablesAndViews.length}
disabled={!selectedSources.length}
size="L" size="L"
> >
<Body size="S"> <Body size="S">
Select which datasources you would like to use to create your screens Select which datasources you would like to use to create your screens
</Body> </Body>
<Layout noPadding gap="S"> <Layout noPadding gap="S">
{#each filteredSources as datasource} {#each datasources as datasource}
{@const entities = Array.isArray(datasource.entities)
? datasource.entities
: Object.values(datasource.entities || {})}
<div class="data-source-wrap"> <div class="data-source-wrap">
<div class="data-source-header"> <div class="data-source-header">
<svelte:component <svelte:component
this={ICONS[datasource.source]} this={datasource.iconComponent}
height="24" height="24"
width="24" width="24"
/> />
<div class="data-source-name">{datasource.name}</div> <div class="data-source-name">{datasource.name}</div>
</div> </div>
<!-- List all tables --> <!-- List all tables -->
{#each entities.filter(table => table._id !== "ta_users") as table} {#each datasource.tablesAndViews as tableOrView}
{@const views = Object.values(table.views || {}).filter( {@const selected = selectedTablesAndViews.some(
view => view.version === 2 selected => selected.id === tableOrView.id
)} )}
{@const tableDS = { <TableOrViewOption
tableId: table._id, roles={permissions[tableOrView.id]}
label: table.name, on:click={() => toggleSelection(tableOrView)}
resourceId: table._id,
type: "table",
}}
{@const selected = selectedSources.find(
datasource => datasource.resourceId === tableDS.resourceId
)}
<DatasourceTemplateRow
on:click={() => toggleSelection(tableDS)}
{selected} {selected}
datasource={tableDS} {tableOrView}
/> />
<!-- List all views inside this table -->
{#each views as view}
{@const viewDS = {
label: view.name,
id: view.id,
resourceId: view.id,
tableId: view.tableId,
type: "viewV2",
}}
{@const selected = selectedSources.find(
x => x.resourceId === viewDS.resourceId
)}
<DatasourceTemplateRow
on:click={() => toggleSelection(viewDS)}
{selected}
datasource={viewDS}
/>
{/each}
{/each} {/each}
</div> </div>
{/each} {/each}
@ -118,8 +140,11 @@
<style> <style>
.data-source-wrap { .data-source-wrap {
padding-bottom: var(--spectrum-alias-item-padding-s); padding-bottom: var(--spectrum-alias-item-padding-s);
display: grid; display: flex;
flex-direction: column;
grid-gap: var(--spacing-s); grid-gap: var(--spacing-s);
max-width: 100%;
min-width: 0;
} }
.data-source-header { .data-source-header {
display: flex; display: flex;
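
As a rough guide, the options built by getDatasources() above can be thought of as the following shape. This is illustrative TypeScript only; the component itself is plain JavaScript and these interfaces are not real Budibase types:

// Each selectable entry rendered by TableOrViewOption
interface TableOrViewEntry {
  icon: "Table" | "Remove" // "Remove" is the icon used for v2 views above
  name: string
  id: string // rawTable._id for tables, view.id for views
  clientData: Record<string, unknown> // table/view payload handed to generated screens
}

// One group per datasource; REST sources and the internal ta_users table are skipped
interface DatasourceGroup {
  name: string
  iconComponent: unknown // Svelte icon component resolved from ICONS[source]
  tablesAndViews: TableOrViewEntry[]
}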

View File

@ -1,45 +0,0 @@
<script>
import { Icon } from "@budibase/bbui"
export let datasource
export let selected = false
$: icon = datasource.type === "viewV2" ? "Remove" : "Table"
</script>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="data-source-entry" class:selected on:click>
<Icon name={icon} color="var(--spectrum-global-color-gray-600)" />
{datasource.label}
{#if selected}
<span class="data-source-check">
<Icon size="S" name="CheckmarkCircle" />
</span>
{/if}
</div>
<style>
.data-source-entry {
cursor: pointer;
grid-gap: var(--spacing-m);
padding: var(--spectrum-alias-item-padding-s);
background: var(--spectrum-alias-background-color-secondary);
transition: 0.3s all;
border: 1px solid var(--spectrum-global-color-gray-300);
border-radius: 4px;
display: flex;
align-items: center;
}
.data-source-entry:hover,
.selected {
background: var(--spectrum-alias-background-color-tertiary);
}
.data-source-check {
margin-left: auto;
}
.data-source-check :global(.spectrum-Icon) {
color: var(--spectrum-global-color-green-600);
}
</style>

View File

@ -1,12 +1,10 @@
<script> <script>
import { ModalContent, Layout, Body, Icon } from "@budibase/bbui" import { ModalContent, Layout, Body, Icon } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
let type = null
export let onCancel = () => {} export let onCancel = () => {}
export let onConfirm = () => {} export let onConfirm = () => {}
export let type
const dispatch = createEventDispatcher()
</script> </script>
<span> <span>
@ -14,7 +12,7 @@
title="Select form type" title="Select form type"
confirmText="Done" confirmText="Done"
cancelText="Back" cancelText="Back"
{onConfirm} onConfirm={() => onConfirm(type)}
{onCancel} {onCancel}
disabled={!type} disabled={!type}
size="L" size="L"
@ -25,9 +23,7 @@
<div <div
class="form-type" class="form-type"
class:selected={type === "Create"} class:selected={type === "Create"}
on:click={() => { on:click={() => (type = "Create")}
dispatch("select", "Create")
}}
> >
<div class="form-type-wrap"> <div class="form-type-wrap">
<div class="form-type-content"> <div class="form-type-content">
@ -46,9 +42,7 @@
<div <div
class="form-type" class="form-type"
class:selected={type === "Update"} class:selected={type === "Update"}
on:click={() => { on:click={() => (type = "Update")}
dispatch("select", "Update")
}}
> >
<div class="form-type-wrap"> <div class="form-type-wrap">
<div class="form-type-content"> <div class="form-type-content">
@ -65,9 +59,7 @@
<div <div
class="form-type" class="form-type"
class:selected={type === "View"} class:selected={type === "View"}
on:click={() => { on:click={() => (type = "View")}
dispatch("select", "View")
}}
> >
<div class="form-type-wrap"> <div class="form-type-wrap">
<div class="form-type-content"> <div class="form-type-content">

View File

@ -1,62 +0,0 @@
<script>
import { Select, ModalContent } from "@budibase/bbui"
import { RoleUtils } from "@budibase/frontend-core"
import { roles, screenStore } from "stores/builder"
import { get } from "svelte/store"
import { onMount } from "svelte"
export let onConfirm
export let onCancel
export let screenUrl
export let screenAccessRole
export let confirmText = "Done"
let error
const onChangeRole = e => {
const roleId = e.detail
if (routeExists(screenUrl, roleId)) {
error = "This URL is already taken for this access role"
} else {
error = null
}
}
const routeExists = (url, role) => {
if (!url || !role) {
return false
}
return get(screenStore).screens.some(
screen =>
screen.routing.route.toLowerCase() === url.toLowerCase() &&
screen.routing.roleId === role
)
}
onMount(() => {
// Validate the initial role
onChangeRole({ detail: screenAccessRole })
})
</script>
<ModalContent
title="Access"
{confirmText}
cancelText="Back"
{onConfirm}
{onCancel}
disabled={!!error}
>
Select the level of access required to see these screens
<Select
bind:value={screenAccessRole}
on:change={onChangeRole}
label="Access"
{error}
getOptionLabel={role => role.name}
getOptionValue={role => role._id}
getOptionColour={role => RoleUtils.getRoleColour(role._id)}
options={$roles}
placeholder={null}
/>
</ModalContent>

View File

@ -0,0 +1,112 @@
<script>
import { Icon, AbsTooltip } from "@budibase/bbui"
import RoleIcon from "components/common/RoleIcon.svelte"
export let tableOrView
export let roles
export let selected = false
$: hideRoles = roles == undefined || roles?.loading
</script>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div role="button" tabindex="0" class="datasource" class:selected on:click>
<div class="content">
<Icon name={tableOrView.icon} />
<span>{tableOrView.name}</span>
</div>
<div class:hideRoles class="roles">
<AbsTooltip
type="info"
text={`Screens that only read data will be generated with access "${roles?.read?.toLowerCase()}"`}
>
<div class="role">
<span>read</span>
<RoleIcon
size="XS"
id={roles?.read}
disabled={roles?.loading !== false}
/>
</div>
</AbsTooltip>
<AbsTooltip
type="info"
text={`Screens that write data will be generated with access "${roles?.write?.toLowerCase()}"`}
>
<div class="role">
<span>write</span>
<RoleIcon
size="XS"
id={roles?.write}
disabled={roles?.loading !== false}
/>
</div>
</AbsTooltip>
</div>
</div>
<style>
.datasource {
cursor: pointer;
border: 1px solid var(--spectrum-global-color-gray-300);
transition: 160ms all;
border-radius: 4px;
display: flex;
align-items: center;
user-select: none;
background-color: var(--background);
}
.datasource :global(svg) {
transition: 160ms all;
color: var(--spectrum-global-color-gray-600);
}
.content {
padding: var(--spectrum-alias-item-padding-s);
display: flex;
align-items: center;
grid-gap: var(--spacing-m);
min-width: 0;
}
.content span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.datasource:hover {
border: 1px solid var(--grey-5);
}
.selected {
border: 1px solid var(--blue) !important;
}
.roles {
margin-left: auto;
display: flex;
flex-direction: column;
align-items: end;
padding-right: var(--spectrum-alias-item-padding-s);
opacity: 0.5;
transition: opacity 160ms;
}
.hideRoles {
opacity: 0;
pointer-events: none;
}
.role {
display: flex;
align-items: center;
}
.role span {
font-size: 11px;
margin-right: 5px;
}
</style>
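
A short illustrative sketch of how the roles prop above drives visibility of the role icons; the three states mirror fetchPermission in the screen-creation component earlier in this diff:

type Roles = { loading?: boolean; read?: string | null; write?: string | null }

// Same check as the reactive statement above: hide while nothing is cached
// or while the permission lookup is still pending
function hideRoles(roles?: Roles): boolean {
  return roles == undefined || Boolean(roles?.loading)
}

hideRoles(undefined) // true  - no permission lookup started
hideRoles({ loading: true, read: null, write: null }) // true  - lookup in flight
hideRoles({ loading: false, read: "ADMIN", write: "ADMIN" }) // false - icons shown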

View File

@ -187,7 +187,9 @@
<Divider /> <Divider />
<Layout gap="XS" noPadding> <Layout gap="XS" noPadding>
<Heading size="XS">History</Heading> <Heading size="XS">History</Heading>
<Body size="S">Free plan stores up to 1 day of automation history</Body> {#if licensePlan?.type === Constants.PlanType.FREE}
<Body size="S">Free plan stores up to 1 day of automation history</Body>
{/if}
</Layout> </Layout>
<div class="controls"> <div class="controls">
<div class="search"> <div class="search">

View File

@ -4,47 +4,45 @@
Button, Button,
Heading, Heading,
Layout, Layout,
ProgressCircle,
notifications, notifications,
FancyForm,
FancyInput,
} from "@budibase/bbui" } from "@budibase/bbui"
import { goto, params } from "@roxi/routify" import { goto, params } from "@roxi/routify"
import { auth, organisation } from "stores/portal" import { auth, organisation } from "stores/portal"
import Logo from "assets/bb-emblem.svg" import Logo from "assets/bb-emblem.svg"
import { TestimonialPage } from "@budibase/frontend-core/src/components" import { TestimonialPage } from "@budibase/frontend-core/src/components"
import { onMount } from "svelte" import { onMount } from "svelte"
import { handleError, passwordsMatch } from "./_components/utils" import PasswordRepeatInput from "../../../components/common/users/PasswordRepeatInput.svelte"
const resetCode = $params["?code"] const resetCode = $params["?code"]
let form let form
let formData = {}
let errors = {}
let loaded = false let loaded = false
let loading = false
let password
let passwordError
$: submitted = false
$: forceResetPassword = $auth?.user?.forceResetPassword $: forceResetPassword = $auth?.user?.forceResetPassword
async function reset() { async function reset() {
form.validate() if (!form.validate() || passwordError) {
if (Object.keys(errors).length > 0) {
return return
} }
submitted = true
try { try {
loading = true
if (forceResetPassword) { if (forceResetPassword) {
await auth.updateSelf({ await auth.updateSelf({
password: formData.password, password,
forceResetPassword: false, forceResetPassword: false,
}) })
$goto("../portal/") $goto("../portal/")
} else { } else {
await auth.resetPassword(formData.password, resetCode) await auth.resetPassword(password, resetCode)
notifications.success("Password reset successfully") notifications.success("Password reset successfully")
// send them to login if reset successful // send them to login if reset successful
$goto("./login") $goto("./login")
} }
} catch (err) { } catch (err) {
submitted = false loading = false
notifications.error(err.message || "Unable to reset password") notifications.error(err.message || "Unable to reset password")
} }
} }
@ -58,86 +56,37 @@
} }
loaded = true loaded = true
}) })
const handleKeydown = evt => {
if (evt.key === "Enter") {
reset()
}
}
</script> </script>
<svelte:window on:keydown={handleKeydown} />
<TestimonialPage enabled={$organisation.testimonialsEnabled}> <TestimonialPage enabled={$organisation.testimonialsEnabled}>
<Layout gap="S" noPadding> <Layout gap="S" noPadding>
{#if loaded} {#if loaded}
<img alt="logo" src={$organisation.logoUrl || Logo} /> <img alt="logo" src={$organisation.logoUrl || Logo} />
{/if} {/if}
<Layout gap="XS" noPadding>
<Heading size="M">Reset your password</Heading>
<Body size="M">Please enter the new password you'd like to use.</Body>
</Layout>
<Layout gap="S" noPadding> <Layout gap="S" noPadding>
<FancyForm bind:this={form}> <Heading size="M">Reset your password</Heading>
<FancyInput <Body size="M">Must contain at least 12 characters</Body>
label="Password" <PasswordRepeatInput
value={formData.password} bind:passwordForm={form}
type="password" bind:password
on:change={e => { bind:error={passwordError}
formData = { />
...formData, <Button secondary cta on:click={reset}>
password: e.detail, {#if loading}
} <ProgressCircle overBackground={true} size="S" />
}} {:else}
validate={() => { Reset
let fieldError = {} {/if}
</Button>
fieldError["password"] = !formData.password
? "Please enter a password"
: undefined
fieldError["confirmationPassword"] =
!passwordsMatch(
formData.password,
formData.confirmationPassword
) && formData.confirmationPassword
? "Passwords must match"
: undefined
errors = handleError({ ...errors, ...fieldError })
}}
error={errors.password}
disabled={submitted}
/>
<FancyInput
label="Repeat Password"
value={formData.confirmationPassword}
type="password"
on:change={e => {
formData = {
...formData,
confirmationPassword: e.detail,
}
}}
validate={() => {
const isValid =
!passwordsMatch(
formData.password,
formData.confirmationPassword
) && formData.password
let fieldError = {
confirmationPassword: isValid ? "Passwords must match" : null,
}
errors = handleError({ ...errors, ...fieldError })
}}
error={errors.confirmationPassword}
disabled={submitted}
/>
</FancyForm>
</Layout> </Layout>
<div> <div />
<Button
disabled={Object.keys(errors).length > 0 ||
(forceResetPassword ? false : !resetCode)}
cta
on:click={reset}>Reset your password</Button
>
</div>
</Layout> </Layout>
</TestimonialPage> </TestimonialPage>

View File

@ -5,16 +5,19 @@ import { generate } from "shortid"
import { createHistoryStore } from "stores/builder/history" import { createHistoryStore } from "stores/builder/history"
import { notifications } from "@budibase/bbui" import { notifications } from "@budibase/bbui"
import { updateReferencesInObject } from "dataBinding" import { updateReferencesInObject } from "dataBinding"
import { AutomationTriggerStepId } from "@budibase/types"
const initialAutomationState = { const initialAutomationState = {
automations: [], automations: [],
testResults: null, testResults: null,
showTestPanel: false, showTestPanel: false,
blockDefinitions: { blockDefinitions: {
TRIGGER: [], TRIGGER: {},
ACTION: [], CREATABLE_TRIGGER: {},
ACTION: {},
}, },
selectedAutomationId: null, selectedAutomationId: null,
automationDisplayData: {},
} }
// If this functions, remove the actions elements // If this functions, remove the actions elements
@ -45,32 +48,48 @@ const updateStepReferences = (steps, modifiedIndex, action) => {
}) })
} }
const getFinalDefinitions = (triggers, actions) => {
const creatable = {}
Object.entries(triggers).forEach(entry => {
if (entry[0] === AutomationTriggerStepId.ROW_ACTION) {
return
}
creatable[entry[0]] = entry[1]
})
return {
TRIGGER: triggers,
CREATABLE_TRIGGER: creatable,
ACTION: actions,
}
}
const automationActions = store => ({ const automationActions = store => ({
definitions: async () => { definitions: async () => {
const response = await API.getAutomationDefinitions() const response = await API.getAutomationDefinitions()
store.update(state => { store.update(state => {
state.blockDefinitions = { state.blockDefinitions = getFinalDefinitions(
TRIGGER: response.trigger, response.trigger,
ACTION: response.action, response.action
} )
return state return state
}) })
return response return response
}, },
fetch: async () => { fetch: async () => {
const responses = await Promise.all([ const [automationResponse, definitions] = await Promise.all([
API.getAutomations(), API.getAutomations({ enrich: true }),
API.getAutomationDefinitions(), API.getAutomationDefinitions(),
]) ])
store.update(state => { store.update(state => {
state.automations = responses[0] state.automations = automationResponse.automations
state.automations.sort((a, b) => { state.automations.sort((a, b) => {
return a.name < b.name ? -1 : 1 return a.name < b.name ? -1 : 1
}) })
state.blockDefinitions = { state.automationDisplayData = automationResponse.builderData
TRIGGER: responses[1].trigger, state.blockDefinitions = getFinalDefinitions(
ACTION: responses[1].action, definitions.trigger,
} definitions.action
)
return state return state
}) })
}, },
@ -85,8 +104,6 @@ const automationActions = store => ({
disabled: false, disabled: false,
} }
const response = await store.actions.save(automation) const response = await store.actions.save(automation)
await store.actions.fetch()
store.actions.select(response._id)
return response return response
}, },
duplicate: async automation => { duplicate: async automation => {
@ -96,25 +113,13 @@ const automationActions = store => ({
_id: undefined, _id: undefined,
_ref: undefined, _ref: undefined,
}) })
await store.actions.fetch()
store.actions.select(response._id)
return response return response
}, },
save: async automation => { save: async automation => {
const response = await API.updateAutomation(automation) const response = await API.updateAutomation(automation)
store.update(state => {
const updatedAutomation = response.automation await store.actions.fetch()
const existingIdx = state.automations.findIndex( store.actions.select(response._id)
existing => existing._id === automation._id
)
if (existingIdx !== -1) {
state.automations.splice(existingIdx, 1, updatedAutomation)
return state
} else {
state.automations = [...state.automations, updatedAutomation]
}
return state
})
return response.automation return response.automation
}, },
delete: async automation => { delete: async automation => {
@ -122,18 +127,22 @@ const automationActions = store => ({
automationId: automation?._id, automationId: automation?._id,
automationRev: automation?._rev, automationRev: automation?._rev,
}) })
store.update(state => { store.update(state => {
// Remove the automation // Remove the automation
state.automations = state.automations.filter( state.automations = state.automations.filter(
x => x._id !== automation._id x => x._id !== automation._id
) )
// Select a new automation if required // Select a new automation if required
if (automation._id === state.selectedAutomationId) { if (automation._id === state.selectedAutomationId) {
store.actions.select(state.automations[0]?._id) state.selectedAutomationId = state.automations[0]?._id || null
} }
// Clear out automationDisplayData for the automation
delete state.automationDisplayData[automation._id]
return state return state
}) })
await store.actions.fetch()
}, },
toggleDisabled: async automationId => { toggleDisabled: async automationId => {
let automation let automation
@ -308,7 +317,9 @@ const automationActions = store => ({
if (!automation) { if (!automation) {
return return
} }
delete newAutomation.definition.stepNames[blockId] if (newAutomation.definition.stepNames) {
delete newAutomation.definition.stepNames[blockId]
}
await store.actions.save(newAutomation) await store.actions.save(newAutomation)
}, },
@ -384,3 +395,13 @@ export const selectedAutomation = derived(automationStore, $automationStore => {
x => x._id === $automationStore.selectedAutomationId x => x._id === $automationStore.selectedAutomationId
) )
}) })
export const selectedAutomationDisplayData = derived(
[automationStore, selectedAutomation],
([$automationStore, $selectedAutomation]) => {
if (!$selectedAutomation?._id) {
return null
}
return $automationStore.automationDisplayData[$selectedAutomation._id]
}
)
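
One behavioural note from the store changes above, sketched in illustrative TypeScript: row-action triggers stay in TRIGGER (so existing automations still render) but are excluded from CREATABLE_TRIGGER, since they are created from a table's row actions rather than from the automation builder.

import { AutomationTriggerStepId } from "@budibase/types"

// Illustrative restatement of getFinalDefinitions() above
function creatableTriggers<T>(triggers: Record<string, T>): Record<string, T> {
  return Object.fromEntries(
    Object.entries(triggers).filter(
      ([stepId]) => stepId !== AutomationTriggerStepId.ROW_ACTION
    )
  )
}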

View File

@ -11,6 +11,7 @@ import {
automationStore, automationStore,
selectedAutomation, selectedAutomation,
automationHistoryStore, automationHistoryStore,
selectedAutomationDisplayData,
} from "./automations.js" } from "./automations.js"
import { userStore, userSelectedResourceMap, isOnlyUser } from "./users.js" import { userStore, userSelectedResourceMap, isOnlyUser } from "./users.js"
import { deploymentStore } from "./deployments.js" import { deploymentStore } from "./deployments.js"
@ -44,6 +45,7 @@ export {
previewStore, previewStore,
automationStore, automationStore,
selectedAutomation, selectedAutomation,
selectedAutomationDisplayData,
automationHistoryStore, automationHistoryStore,
sortedScreens, sortedScreens,
userStore, userStore,

View File

@ -63,6 +63,11 @@ export class Screen extends BaseStructure {
return this return this
} }
autoTableId(autoTableId) {
this._json.autoTableId = autoTableId
return this
}
instanceName(name) { instanceName(name) {
this._json.props._instanceName = name this._json.props._instanceName = name
return this return this

View File

@ -0,0 +1,7 @@
import { Screen } from "./Screen"
const blankScreen = route => {
return new Screen().instanceName("New Screen").route(route).json()
}
export default blankScreen

View File

@ -1,12 +0,0 @@
import { Screen } from "./Screen"
export default {
name: `Create from scratch`,
id: `createFromScratch`,
create: () => createScreen(),
table: `Create from scratch`,
}
const createScreen = () => {
return new Screen().instanceName("New Screen").json()
}

View File

@ -3,41 +3,47 @@ import { Component } from "./Component"
import sanitizeUrl from "helpers/sanitizeUrl" import sanitizeUrl from "helpers/sanitizeUrl"
export const FORM_TEMPLATE = "FORM_TEMPLATE" export const FORM_TEMPLATE = "FORM_TEMPLATE"
export const formUrl = datasource => sanitizeUrl(`/${datasource.label}-form`) export const formUrl = (tableOrView, actionType) => {
if (actionType === "Create") {
// Mode not really necessary return sanitizeUrl(`/${tableOrView.name}/new`)
export default function (datasources, config) { } else if (actionType === "Update") {
if (!Array.isArray(datasources)) { return sanitizeUrl(`/${tableOrView.name}/edit/:id`)
return [] } else if (actionType === "View") {
return sanitizeUrl(`/${tableOrView.name}/view/:id`)
} }
return datasources.map(datasource => {
return {
name: `${datasource.label} - Form`,
create: () => createScreen(datasource, config),
id: FORM_TEMPLATE,
resourceId: datasource.resourceId,
}
})
} }
const generateMultistepFormBlock = (dataSource, { actionType } = {}) => { export const getRole = (permissions, actionType) => {
if (actionType === "View") {
return permissions.read
}
return permissions.write
}
const generateMultistepFormBlock = (tableOrView, actionType) => {
const multistepFormBlock = new Component( const multistepFormBlock = new Component(
"@budibase/standard-components/multistepformblock" "@budibase/standard-components/multistepformblock"
) )
multistepFormBlock multistepFormBlock
.customProps({ .customProps({
actionType, actionType,
dataSource, dataSource: tableOrView.clientData,
steps: [{}], steps: [{}],
rowId: actionType === "new" ? undefined : `{{ url.id }}`,
}) })
.instanceName(`${dataSource.label} - Multistep Form block`) .instanceName(`${tableOrView.name} - Multistep Form block`)
return multistepFormBlock return multistepFormBlock
} }
const createScreen = (datasource, config) => { const createScreen = (tableOrView, actionType, permissions) => {
return new Screen() return new Screen()
.route(formUrl(datasource)) .route(formUrl(tableOrView, actionType))
.instanceName(`${datasource.label} - Form`) .instanceName(`${tableOrView.name} - Form`)
.addChild(generateMultistepFormBlock(datasource, config)) .role(getRole(permissions, actionType))
.autoTableId(tableOrView.id)
.addChild(generateMultistepFormBlock(tableOrView, actionType))
.json() .json()
} }
export default createScreen
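
A worked example of the route and role selection above, assuming formUrl and getRole are imported from this module, that sanitizeUrl leaves a simple lowercase name unchanged, and that "clients" is a hypothetical table; the role ids are examples only:

const clients = { name: "clients" } // hypothetical table, illustration only

formUrl(clients, "Create") // "/clients/new"
formUrl(clients, "Update") // "/clients/edit/:id"
formUrl(clients, "View")   // "/clients/view/:id"

getRole({ read: "BASIC", write: "POWER" }, "View")   // "BASIC" - read-only form
getRole({ read: "BASIC", write: "POWER" }, "Create") // "POWER" - form that writes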

View File

@ -5,24 +5,9 @@ import { generate } from "shortid"
import { makePropSafe as safe } from "@budibase/string-templates" import { makePropSafe as safe } from "@budibase/string-templates"
import { Utils } from "@budibase/frontend-core" import { Utils } from "@budibase/frontend-core"
export default function (datasources) { const gridDetailsUrl = tableOrView => sanitizeUrl(`/${tableOrView.name}`)
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - List with panel`,
create: () => createScreen(datasource),
id: GRID_DETAILS_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
export const GRID_DETAILS_TEMPLATE = "GRID_DETAILS_TEMPLATE" const createScreen = (tableOrView, permissions) => {
export const gridDetailsUrl = datasource => sanitizeUrl(`/${datasource.label}`)
const createScreen = datasource => {
/* /*
Create Row Create Row
*/ */
@ -47,7 +32,7 @@ const createScreen = datasource => {
type: "cta", type: "cta",
}) })
buttonGroup.instanceName(`${datasource.label} - Create`).customProps({ buttonGroup.instanceName(`${tableOrView.name} - Create`).customProps({
hAlign: "right", hAlign: "right",
buttons: [createButton.json()], buttons: [createButton.json()],
}) })
@ -62,7 +47,7 @@ const createScreen = datasource => {
const heading = new Component("@budibase/standard-components/heading") const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading") .instanceName("Table heading")
.customProps({ .customProps({
text: datasource?.label, text: tableOrView.name,
}) })
gridHeader.addChild(heading) gridHeader.addChild(heading)
@ -72,7 +57,7 @@ const createScreen = datasource => {
"@budibase/standard-components/formblock" "@budibase/standard-components/formblock"
) )
createFormBlock.instanceName("Create row form block").customProps({ createFormBlock.instanceName("Create row form block").customProps({
dataSource: datasource, dataSource: tableOrView.clientData,
labelPosition: "left", labelPosition: "left",
buttonPosition: "top", buttonPosition: "top",
actionType: "Create", actionType: "Create",
@ -83,7 +68,7 @@ const createScreen = datasource => {
showSaveButton: true, showSaveButton: true,
saveButtonLabel: "Save", saveButtonLabel: "Save",
actionType: "Create", actionType: "Create",
dataSource: datasource, dataSource: tableOrView.clientData,
}), }),
}) })
@ -99,7 +84,7 @@ const createScreen = datasource => {
const editFormBlock = new Component("@budibase/standard-components/formblock") const editFormBlock = new Component("@budibase/standard-components/formblock")
editFormBlock.instanceName("Edit row form block").customProps({ editFormBlock.instanceName("Edit row form block").customProps({
dataSource: datasource, dataSource: tableOrView.clientData,
labelPosition: "left", labelPosition: "left",
buttonPosition: "top", buttonPosition: "top",
actionType: "Update", actionType: "Update",
@ -112,7 +97,7 @@ const createScreen = datasource => {
saveButtonLabel: "Save", saveButtonLabel: "Save",
deleteButtonLabel: "Delete", deleteButtonLabel: "Delete",
actionType: "Update", actionType: "Update",
dataSource: datasource, dataSource: tableOrView.clientData,
}), }),
}) })
@ -121,7 +106,7 @@ const createScreen = datasource => {
const gridBlock = new Component("@budibase/standard-components/gridblock") const gridBlock = new Component("@budibase/standard-components/gridblock")
gridBlock gridBlock
.customProps({ .customProps({
table: datasource, table: tableOrView.clientData,
allowAddRows: false, allowAddRows: false,
allowEditRows: false, allowEditRows: false,
allowDeleteRows: false, allowDeleteRows: false,
@ -145,14 +130,18 @@ const createScreen = datasource => {
}, },
], ],
}) })
.instanceName(`${datasource.label} - Table`) .instanceName(`${tableOrView.name} - Table`)
return new Screen() return new Screen()
.route(gridDetailsUrl(datasource)) .route(gridDetailsUrl(tableOrView))
.instanceName(`${datasource.label} - List and details`) .instanceName(`${tableOrView.name} - List and details`)
.role(permissions.write)
.autoTableId(tableOrView.id) .autoTableId(tableOrView.id)
.addChild(gridHeader) .addChild(gridHeader)
.addChild(gridBlock) .addChild(gridBlock)
.addChild(createRowSidePanel) .addChild(createRowSidePanel)
.addChild(detailsSidePanel) .addChild(detailsSidePanel)
.json() .json()
} }
export default createScreen

View File

@ -1,41 +0,0 @@
import sanitizeUrl from "helpers/sanitizeUrl"
import { Screen } from "./Screen"
import { Component } from "./Component"
export default function (datasources) {
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - List`,
create: () => createScreen(datasource),
id: GRID_LIST_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
export const GRID_LIST_TEMPLATE = "GRID_LIST_TEMPLATE"
export const gridListUrl = datasource => sanitizeUrl(`/${datasource.label}`)
const createScreen = datasource => {
const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading")
.customProps({
text: datasource?.label,
})
const gridBlock = new Component("@budibase/standard-components/gridblock")
.instanceName(`${datasource.label} - Table`)
.customProps({
table: datasource,
})
return new Screen()
.route(gridListUrl(datasource))
.instanceName(`${datasource.label} - List`)
.addChild(heading)
.addChild(gridBlock)
.json()
}

View File

@ -0,0 +1,30 @@
import sanitizeUrl from "helpers/sanitizeUrl"
import { Screen } from "./Screen"
import { Component } from "./Component"
const gridUrl = tableOrView => sanitizeUrl(`/${tableOrView.name}`)
const createScreen = (tableOrView, permissions) => {
const heading = new Component("@budibase/standard-components/heading")
.instanceName("Table heading")
.customProps({
text: tableOrView.name,
})
const gridBlock = new Component("@budibase/standard-components/gridblock")
.instanceName(`${tableOrView.name} - Table`)
.customProps({
table: tableOrView.clientData,
})
return new Screen()
.route(gridUrl(tableOrView))
.instanceName(`${tableOrView.name} - List`)
.role(permissions.write)
.autoTableId(tableOrView.id)
.addChild(heading)
.addChild(gridBlock)
.json()
}
export default createScreen

View File

@ -1,35 +0,0 @@
import gridListScreen from "./gridListScreen"
import gridDetailsScreen from "./gridDetailsScreen"
import createFromScratchScreen from "./createFromScratchScreen"
import formScreen from "./formScreen"
const allTemplates = datasources => [
...gridListScreen(datasources),
...gridDetailsScreen(datasources),
...formScreen(datasources),
]
// Allows us to apply common behaviour to all create() functions
const createTemplateOverride = template => () => {
const screen = template.create()
screen.name = screen.props._id
screen.routing.route = screen.routing.route.toLowerCase()
screen.template = template.id
return screen
}
export default datasources => {
const enrichTemplate = template => ({
...template,
create: createTemplateOverride(template),
})
const fromScratch = enrichTemplate(createFromScratchScreen)
const tableTemplates = allTemplates(datasources).map(enrichTemplate)
return [
fromScratch,
...tableTemplates.sort((templateA, templateB) => {
return templateA.name > templateB.name ? 1 : -1
}),
]
}

View File

@ -26,9 +26,14 @@ export const buildAutomationEndpoints = API => ({
/** /**
* Gets a list of all automations. * Gets a list of all automations.
*/ */
getAutomations: async () => { getAutomations: async ({ enrich }) => {
const params = new URLSearchParams()
if (enrich) {
params.set("enrich", true)
}
return await API.get({ return await API.get({
url: "/api/automations", url: `/api/automations?${params.toString()}`,
}) })
}, },
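
A hedged usage sketch of the updated client call; the response fields follow the server-side fetch change later in this diff, and the exact typing here is assumed:

// `api` stands in for the object returned by buildAutomationEndpoints(API); illustrative only.
async function loadAutomations(api: {
  getAutomations: (opts: { enrich?: boolean }) => Promise<any>
}) {
  // Plain list - GET /api/automations?
  const { automations } = await api.getAutomations({ enrich: false })

  // Enriched list - GET /api/automations?enrich=true
  // The builder store keeps builderData as automationDisplayData keyed by automation _id.
  const { automations: enriched, builderData } = await api.getAutomations({
    enrich: true,
  })
  return { automations, enriched, builderData }
}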

View File

@ -1,206 +0,0 @@
// @ts-ignore
import fs from "fs"
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FetchMock {
// @ts-ignore
const fetch = jest.requireActual("node-fetch")
let failCount = 0
let mockSearch = false
const func = async (url: any, opts: any) => {
const { host, pathname } = new URL(url)
function json(body: any, status = 200) {
return {
status,
headers: {
raw: () => {
return { "content-type": ["application/json"] }
},
get: (name: string) => {
if (name.toLowerCase() === "content-type") {
return ["application/json"]
}
},
},
json: async () => {
//x-www-form-encoded body is a URLSearchParams
//The call to stringify it leaves it blank
if (body?.opts?.body instanceof URLSearchParams) {
const paramArray = Array.from(body.opts.body.entries())
body.opts.body = paramArray.reduce((acc: any, pair: any) => {
acc[pair[0]] = pair[1]
return acc
}, {})
}
return body
},
}
}
if (pathname.includes("/api/global")) {
const user = {
email: "test@example.com",
_id: "us_test@example.com",
status: "active",
roles: {},
builder: {
global: false,
},
admin: {
global: false,
},
}
return pathname.endsWith("/users") && opts.method === "GET"
? json([user])
: json(user)
}
// mocked data based on url
else if (pathname.includes("api/apps")) {
return json({
app1: {
url: "/app1",
},
})
} else if (host.includes("example.com")) {
return json({
body: opts.body,
url,
method: opts.method,
})
} else if (host.includes("invalid.com")) {
return json(
{
invalid: true,
},
404
)
} else if (mockSearch && pathname.includes("_search")) {
const body = opts.body
const parts = body.split("tableId:")
let tableId
if (parts && parts[1]) {
tableId = parts[1].split('"')[0]
}
return json({
rows: [
{
doc: {
_id: "test",
tableId: tableId,
query: opts.body,
},
},
],
bookmark: "test",
})
} else if (host.includes("google.com")) {
return json({
url,
opts,
value:
'<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="en-GB"></html>',
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box"
) {
return Promise.resolve({
json: () => {
return {
name: "budibase-comment-box",
releases_url:
"https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
}
},
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box/latest"
) {
return Promise.resolve({
json: () => {
return {
assets: [
{
content_type: "application/gzip",
browser_download_url:
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
},
],
}
},
})
} else if (
url ===
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.npmjs.com/package/budibase-component") {
return Promise.resolve({
status: 200,
json: () => {
return {
name: "budibase-component",
"dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz",
},
},
},
}
},
})
} else if (
url ===
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/budibase-component-1.0.2.tgz"
),
ok: true,
})
} else if (
url === "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.googleapis.com/oauth2/v4/token") {
// any valid response
return json({})
} else if (host.includes("failonce.com")) {
failCount++
if (failCount === 1) {
return json({ message: "error" }, 500)
} else {
return json({
fails: failCount - 1,
url,
opts,
})
}
}
return fetch(url, opts)
}
func.Headers = fetch.Headers
func.mockSearch = () => {
mockSearch = true
}
module.exports = func
}

View File

@ -1,21 +0,0 @@
const executeMock = jest.fn(() => ({
rows: [
{
a: "string",
b: 1,
},
],
}))
const closeMock = jest.fn()
class Connection {
execute = executeMock
close = closeMock
}
module.exports = {
getConnection: jest.fn(() => new Connection()),
executeMock,
closeMock,
}

View File

@ -0,0 +1,5 @@
MSSQL_SHA=sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8

View File

@ -16,7 +16,8 @@
"build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets", "build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers", "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson", "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets", "build:isolated-vm-lib:buffer": "esbuild --minify --bundle src/jsRunner/bundles/buffer.ts --outfile=src/jsRunner/bundles/buffer.ivm.bundle.js --platform=node --format=iife --global-name=buffer",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets && yarn build:isolated-vm-lib:buffer",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js", "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
"jest": "NODE_OPTIONS=\"--no-node-snapshot $NODE_OPTIONS\" jest", "jest": "NODE_OPTIONS=\"--no-node-snapshot $NODE_OPTIONS\" jest",
@ -68,6 +69,7 @@
"aws-sdk": "2.1030.0", "aws-sdk": "2.1030.0",
"bcrypt": "5.1.0", "bcrypt": "5.1.0",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"buffer": "6.0.3",
"bull": "4.10.1", "bull": "4.10.1",
"chokidar": "3.5.3", "chokidar": "3.5.3",
"content-disposition": "^0.5.4", "content-disposition": "^0.5.4",

View File

@ -1,4 +1,4 @@
FROM mcr.microsoft.com/mssql/server:2022-latest FROM mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
ENV ACCEPT_EULA=Y ENV ACCEPT_EULA=Y
ENV SA_PASSWORD=Passw0rd ENV SA_PASSWORD=Passw0rd

View File

@ -6,9 +6,9 @@ services:
db: db:
restart: unless-stopped restart: unless-stopped
platform: linux/x86_64 platform: linux/x86_64
image: container-registry.oracle.com/database/express:18.4.0-xe image: gvenzl/oracle-free:23.2-slim-faststart
environment: environment:
ORACLE_PWD: oracle ORACLE_PWD: Password1
ports: ports:
- 1521:1521 - 1521:1521
- 5500:5500 - 5500:5500
@ -16,4 +16,4 @@ services:
- oracle_data:/opt/oracle/oradata - oracle_data:/opt/oracle/oradata
volumes: volumes:
oracle_data: oracle_data:

View File

@ -1,4 +1,5 @@
import * as triggers from "../../automations/triggers" import * as triggers from "../../automations/triggers"
import { sdk as coreSdk } from "@budibase/shared-core"
import { DocumentType } from "../../db/utils" import { DocumentType } from "../../db/utils"
import { updateTestHistory, removeDeprecated } from "../../automations/utils" import { updateTestHistory, removeDeprecated } from "../../automations/utils"
import { setTestFlag, clearTestFlag } from "../../utilities/redis" import { setTestFlag, clearTestFlag } from "../../utilities/redis"
@ -11,6 +12,7 @@ import {
AutomationResults, AutomationResults,
UserCtx, UserCtx,
DeleteAutomationResponse, DeleteAutomationResponse,
FetchAutomationResponse,
} from "@budibase/types" } from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions" import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk" import sdk from "../../sdk"
@ -73,8 +75,17 @@ export async function update(ctx: UserCtx) {
builderSocket?.emitAutomationUpdate(ctx, automation) builderSocket?.emitAutomationUpdate(ctx, automation)
} }
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchAutomationResponse>) {
ctx.body = await sdk.automations.fetch() const query: { enrich?: string } = ctx.request.query || {}
const enrich = query.enrich === "true"
const automations = await sdk.automations.fetch()
ctx.body = { automations }
if (enrich) {
ctx.body.builderData = await sdk.automations.utils.getBuilderData(
automations
)
}
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx) {
@ -84,6 +95,11 @@ export async function find(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) { export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) {
const automationId = ctx.params.id const automationId = ctx.params.id
const automation = await sdk.automations.get(ctx.params.id)
if (coreSdk.automations.isRowAction(automation)) {
ctx.throw("Row actions automations cannot be deleted", 422)
}
ctx.body = await sdk.automations.remove(automationId, ctx.params.rev) ctx.body = await sdk.automations.remove(automationId, ctx.params.rev)
builderSocket?.emitAutomationDeletion(ctx, automationId) builderSocket?.emitAutomationDeletion(ctx, automationId)
} }

View File

@ -1,6 +1,13 @@
import { npmUpload, urlUpload, githubUpload } from "./uploaders" import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core" import { plugins as pluginCore } from "@budibase/backend-core"
import { PluginType, FileType, PluginSource } from "@budibase/types" import {
PluginType,
FileType,
PluginSource,
Ctx,
CreatePluginRequest,
CreatePluginResponse,
} from "@budibase/types"
import env from "../../../environment" import env from "../../../environment"
import { clientAppSocket } from "../../../websockets" import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk" import sdk from "../../../sdk"
@ -29,7 +36,9 @@ export async function upload(ctx: any) {
} }
} }
export async function create(ctx: any) { export async function create(
ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
) {
const { source, url, headers, githubToken } = ctx.request.body const { source, url, headers, githubToken } = ctx.request.body
try { try {
@ -75,14 +84,9 @@ export async function create(ctx: any) {
const doc = await pro.plugins.storePlugin(metadata, directory, source) const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket?.emit("plugins-update", { name, hash: doc.hash }) clientAppSocket?.emit("plugins-update", { name, hash: doc.hash })
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],
}
ctx.body = { plugin: doc } ctx.body = { plugin: doc }
} catch (err: any) { } catch (err: any) {
const errMsg = err?.message ? err?.message : err const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`) ctx.throw(400, `Failed to import plugin: ${errMsg}`)
} }
} }

View File

@ -66,9 +66,14 @@ export interface RunConfig {
includeSqlRelationships?: IncludeRelationship includeSqlRelationships?: IncludeRelationship
} }
export type ExternalReadRequestReturnType = {
rows: Row[]
rawResponseSize: number
}
export type ExternalRequestReturnType<T extends Operation> = export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ T extends Operation.READ
? Row[] ? ExternalReadRequestReturnType
: T extends Operation.COUNT : T extends Operation.COUNT
? number ? number
: { row: Row; table: Table } : { row: Row; table: Table }
@ -741,9 +746,11 @@ export class ExternalRequest<T extends Operation> {
) )
// if reading it'll just be an array of rows, return whole thing // if reading it'll just be an array of rows, return whole thing
if (operation === Operation.READ) { if (operation === Operation.READ) {
return ( const rows = Array.isArray(output) ? output : [output]
Array.isArray(output) ? output : [output] return {
) as ExternalRequestReturnType<T> rows,
rawResponseSize: responseRows.length,
} as ExternalRequestReturnType<T>
} else { } else {
return { row: output[0], table } as ExternalRequestReturnType<T> return { row: output[0], table } as ExternalRequestReturnType<T>
} }
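
A small illustrative sketch of consuming the new READ return shape; the helper name is hypothetical, and fetchEnrichedRow below does the equivalent with response.rows[0]:

import type { Row } from "@budibase/types"

// READ now resolves to a wrapper rather than a bare Row[]: rows is the processed
// output, rawResponseSize reflects the raw response length (responseRows.length above).
function firstRow(result: { rows: Row[]; rawResponseSize: number }): Row | undefined {
  return result.rows[0]
}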

View File

@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
const table: Table = tables[tableName] const table: Table = tables[tableName]
const row = response[0] const row = response.rows[0]
// this seems like a lot of work, but basically we need to dig deeper for the enrich // this seems like a lot of work, but basically we need to dig deeper for the enrich
// for a single row, there is probably a better way to do this with some smart multi-layer joins // for a single row, there is probably a better way to do this with some smart multi-layer joins
for (let [fieldName, field] of Object.entries(table.schema)) { for (let [fieldName, field] of Object.entries(table.schema)) {
@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
}, },
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
row[fieldName] = await outputProcessing(linkedTable, relatedRows, { row[fieldName] = await outputProcessing<Row[]>(
squash: true, linkedTable,
preserveLinks: true, relatedRows.rows,
}) {
squash: true,
preserveLinks: true,
}
)
} }
return row return row
} }

View File

@ -1,8 +1,7 @@
// need to handle table name + field or just field, depending on if relationships used // need to handle table name + field or just field, depending on if relationships used
import { FieldType, Row, Table } from "@budibase/types" import { FieldType, Row, Table } from "@budibase/types"
import { helpers } from "@budibase/shared-core" import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils" import { generateRowIdField } from "../../../../integrations/utils"
import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
function extractFieldValue({ function extractFieldValue({
row, row,
@ -94,7 +93,7 @@ export function basicProcessing({
thisRow._rev = "rev" thisRow._rev = "rev"
} else { } else {
const columns = Object.keys(table.schema) const columns = Object.keys(table.schema)
for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) { for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({ thisRow[internalColumn] = extractFieldValue({
row, row,
tableName: table._id!, tableName: table._id!,

View File

@ -31,7 +31,12 @@ export async function find(ctx: Ctx<void, RowActionsResponse>) {
actions: Object.entries(actions).reduce<Record<string, RowActionResponse>>( actions: Object.entries(actions).reduce<Record<string, RowActionResponse>>(
(acc, [key, action]) => ({ (acc, [key, action]) => ({
...acc, ...acc,
[key]: { id: key, tableId: table._id!, ...action }, [key]: {
id: key,
tableId: table._id!,
name: action.name,
automationId: action.automationId,
},
}), }),
{} {}
), ),
@ -50,7 +55,9 @@ export async function create(
ctx.body = { ctx.body = {
tableId: table._id!, tableId: table._id!,
...createdAction, id: createdAction.id,
name: createdAction.name,
automationId: createdAction.automationId,
} }
ctx.status = 201 ctx.status = 201
} }
@ -61,13 +68,15 @@ export async function update(
const table = await getTable(ctx) const table = await getTable(ctx)
const { actionId } = ctx.params const { actionId } = ctx.params
const actions = await sdk.rowActions.update(table._id!, actionId, { const action = await sdk.rowActions.update(table._id!, actionId, {
name: ctx.request.body.name, name: ctx.request.body.name,
}) })
ctx.body = { ctx.body = {
tableId: table._id!, tableId: table._id!,
...actions, id: action.id,
name: action.name,
automationId: action.automationId,
} }
} }

View File

@ -1,3 +1,10 @@
export function run() { import { RowActionTriggerRequest, Ctx } from "@budibase/types"
throw new Error("Function not implemented.") import sdk from "../../../sdk"
export async function run(ctx: Ctx<RowActionTriggerRequest, void>) {
const { tableId, actionId } = ctx.params
const { rowId } = ctx.request.body
await sdk.rowActions.run(tableId, actionId, rowId)
ctx.status = 200
} }

View File

@ -14,22 +14,31 @@ import { events, HTTPError } from "@budibase/backend-core"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
CsvToJsonRequest,
CsvToJsonResponse,
FetchTablesResponse, FetchTablesResponse,
FieldType,
MigrateRequest, MigrateRequest,
MigrateResponse, MigrateResponse,
Row,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableResponse, TableResponse,
TableSourceType, TableSourceType,
UserCtx, UserCtx,
ValidateNewTableImportRequest,
ValidateTableImportRequest,
ValidateTableImportResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash" import { cloneDeep, isEqual } from "lodash"
import { helpers } from "@budibase/shared-core" import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && isExternalTable(table)) { if (table && isExternalTable(table)) {
@ -144,7 +153,9 @@ export async function bulkImport(
ctx.body = { message: `Bulk rows created.` } ctx.body = { message: `Bulk rows created.` }
} }
export async function csvToJson(ctx: UserCtx) { export async function csvToJson(
ctx: UserCtx<CsvToJsonRequest, CsvToJsonResponse>
) {
const { csvString } = ctx.request.body const { csvString } = ctx.request.body
const result = await jsonFromCsvString(csvString) const result = await jsonFromCsvString(csvString)
@ -153,24 +164,40 @@ export async function csvToJson(ctx: UserCtx) {
ctx.body = result ctx.body = result
} }
export async function validateNewTableImport(ctx: UserCtx) { export async function validateNewTableImport(
const { rows, schema }: { rows: unknown; schema: unknown } = ctx.request.body ctx: UserCtx<ValidateNewTableImportRequest, ValidateTableImportResponse>
) {
const { rows, schema } = ctx.request.body
if (isRows(rows) && isSchema(schema)) { if (isRows(rows) && isSchema(schema)) {
ctx.status = 200 ctx.status = 200
ctx.body = validateSchema(rows, schema) ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
} else { } else {
ctx.status = 422 ctx.status = 422
} }
} }
export async function validateExistingTableImport(ctx: UserCtx) { export async function validateExistingTableImport(
const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body ctx: UserCtx<ValidateTableImportRequest, ValidateTableImportResponse>
) {
const { rows, tableId } = ctx.request.body
let schema = null let schema = null
let protectedColumnNames
if (tableId) { if (tableId) {
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
schema = table.schema schema = table.schema
if (!isExternalTable(table)) {
schema._id = {
name: "_id",
type: FieldType.STRING,
}
protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
} else {
protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
}
} else { } else {
ctx.status = 422 ctx.status = 422
return return
@ -178,7 +205,7 @@ export async function validateExistingTableImport(ctx: UserCtx) {
if (tableId && isRows(rows) && isSchema(schema)) { if (tableId && isRows(rows) && isSchema(schema)) {
ctx.status = 200 ctx.status = 200
ctx.body = validateSchema(rows, schema) ctx.body = validateSchema(rows, schema, protectedColumnNames)
} else { } else {
ctx.status = 422 ctx.status = 422
} }

View File

@ -3,6 +3,7 @@ import { handleDataImport } from "./utils"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
FieldType,
RenameColumn, RenameColumn,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
@ -25,6 +26,8 @@ export async function save(
sourceType: rest.sourceType || TableSourceType.INTERNAL, sourceType: rest.sourceType || TableSourceType.INTERNAL,
} }
const isImport = !!rows
if (!tableToSave.views) { if (!tableToSave.views) {
tableToSave.views = {} tableToSave.views = {}
} }
@ -35,6 +38,7 @@ export async function save(
rowsToImport: rows, rowsToImport: rows,
tableId: ctx.request.body._id, tableId: ctx.request.body._id,
renaming, renaming,
isImport,
}) })
return table return table
@ -66,10 +70,22 @@ export async function bulkImport(
) { ) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body const { rows, identifierFields } = ctx.request.body
await handleDataImport(table, { await handleDataImport(
importRows: rows, {
identifierFields, ...table,
user: ctx.user, schema: {
}) _id: {
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
return table return table
} }

View File

@ -122,13 +122,15 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
export async function importToRows( export async function importToRows(
data: Row[], data: Row[],
table: Table, table: Table,
user?: ContextUser user?: ContextUser,
opts?: { keepCouchId: boolean }
) { ) {
let originalTable = table const originalTable = table
let finalData: any = [] const finalData: Row[] = []
const keepCouchId = !!opts?.keepCouchId
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
let row = data[i] let row = data[i]
row._id = generateRowID(table._id!) row._id = (keepCouchId && row._id) || generateRowID(table._id!)
row.type = "row" row.type = "row"
row.tableId = table._id row.tableId = table._id
@ -180,7 +182,11 @@ export async function handleDataImport(
const db = context.getAppDB() const db = context.getAppDB()
const data = parse(importRows, table) const data = parse(importRows, table)
let finalData: any = await importToRows(data, table, user) const finalData = await importToRows(data, table, user, {
keepCouchId: identifierFields.includes("_id"),
})
let newRowCount = finalData.length
//Set IDs of finalData to match existing row if an update is expected //Set IDs of finalData to match existing row if an update is expected
if (identifierFields.length > 0) { if (identifierFields.length > 0) {
@ -203,12 +209,14 @@ export async function handleDataImport(
if (match) { if (match) {
finalItem._id = doc._id finalItem._id = doc._id
finalItem._rev = doc._rev finalItem._rev = doc._rev
newRowCount--
} }
}) })
}) })
} }
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), { await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), {
tableId: table._id, tableId: table._id,
}) })
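
The import path changes in two ways: row IDs are only regenerated when the import is not keyed on "_id", and the quota charge now uses newRowCount, i.e. only rows that matched no existing document. A self-contained restatement of the ID rule, with illustrative function and parameter names:

    // Sketch of the rule added above; generateRowID stands in for the real helper.
    function resolveImportedRowId(
      existingId: string | undefined,
      keepCouchId: boolean,
      generateRowID: () => string
    ): string {
      // Keep the incoming couch ID when upserting on "_id", otherwise mint a new one.
      return (keepCouchId && existingId) || generateRowID()
    }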

View File

@ -1,4 +1,6 @@
import { Row, TableSchema } from "@budibase/types" import { Row, RowExportFormat, TableSchema } from "@budibase/types"
export { RowExportFormat as Format } from "@budibase/types"
function getHeaders( function getHeaders(
headers: string[], headers: string[],
@ -46,16 +48,6 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
return JSON.stringify({ schema: newSchema, rows }, undefined, 2) return JSON.stringify({ schema: newSchema, rows }, undefined, 2)
} }
export enum Format { export function isFormat(format: any): format is RowExportFormat {
CSV = "csv", return Object.values(RowExportFormat).includes(format as RowExportFormat)
JSON = "json",
JSON_WITH_SCHEMA = "jsonWithSchema",
}
export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value) as T
} }
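
A hedged usage sketch for the relocated enum: callers now work against RowExportFormat from @budibase/types (re-exported here as Format) and can narrow untrusted input with isFormat. The relative import path and the CSV fallback below are illustrative choices, not part of this module.

    import { RowExportFormat } from "@budibase/types"
    import { isFormat } from "./csv" // assumed path to this utility

    function resolveExportFormat(raw: unknown): RowExportFormat {
      // Fall back to CSV when the requested format is missing or unrecognised.
      return isFormat(raw) ? raw : RowExportFormat.CSV
    }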

View File

@ -1,13 +1,12 @@
import Router from "@koa/router" import Router from "@koa/router"
import Joi from "joi"
import { middleware, permissions } from "@budibase/backend-core"
import * as rowActionController from "../controllers/rowAction" import * as rowActionController from "../controllers/rowAction"
import { authorizedResource } from "../../middleware/authorized" import { authorizedResource } from "../../middleware/authorized"
import { middleware, permissions } from "@budibase/backend-core"
import Joi from "joi"
const { PermissionLevel, PermissionType } = permissions const { PermissionLevel, PermissionType } = permissions
export function rowActionValidator() { function rowActionValidator() {
return middleware.joiValidator.body( return middleware.joiValidator.body(
Joi.object({ Joi.object({
name: Joi.string().required(), name: Joi.string().required(),
@ -16,6 +15,15 @@ export function rowActionValidator() {
) )
} }
function rowTriggerValidator() {
return middleware.joiValidator.body(
Joi.object({
rowId: Joi.string().required(),
}),
{ allowUnknown: false }
)
}
const router: Router = new Router() const router: Router = new Router()
// CRUD endpoints // CRUD endpoints
@ -45,7 +53,8 @@ router
// Other endpoints // Other endpoints
.post( .post(
"/api/tables/:tableId/actions/:actionId/run", "/api/tables/:tableId/actions/:actionId/trigger",
rowTriggerValidator(),
authorizedResource(PermissionType.TABLE, PermissionLevel.READ, "tableId"), authorizedResource(PermissionType.TABLE, PermissionLevel.READ, "tableId"),
rowActionController.run rowActionController.run
) )
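
The run route moves from .../run to .../trigger and now validates its body; a rough client-side call against the new endpoint is sketched below. The relative URL, headers and error handling are assumptions — only the path, the POST verb, the rowId body field and the 200 status come from this diff.

    // Illustrative only: rowTriggerValidator is configured with allowUnknown: false,
    // so the body may contain nothing but rowId.
    async function triggerRowAction(tableId: string, actionId: string, rowId: string) {
      const res = await fetch(`/api/tables/${tableId}/actions/${actionId}/trigger`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ rowId }),
      })
      if (res.status !== 200) {
        throw new Error(`row action trigger failed with status ${res.status}`)
      }
    }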

View File

@ -20,6 +20,7 @@ import { type App } from "@budibase/types"
import tk from "timekeeper" import tk from "timekeeper"
import * as uuid from "uuid" import * as uuid from "uuid"
import { structures } from "@budibase/backend-core/tests" import { structures } from "@budibase/backend-core/tests"
import nock from "nock"
describe("/applications", () => { describe("/applications", () => {
let config = setup.getConfig() let config = setup.getConfig()
@ -35,6 +36,7 @@ describe("/applications", () => {
throw new Error("Failed to publish app") throw new Error("Failed to publish app")
} }
jest.clearAllMocks() jest.clearAllMocks()
nock.cleanAll()
}) })
// These need to go first for the app totals to make sense // These need to go first for the app totals to make sense
@ -324,18 +326,33 @@ describe("/applications", () => {
describe("delete", () => { describe("delete", () => {
it("should delete published app and dev apps with dev app ID", async () => { it("should delete published app and dev apps with dev app ID", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(app.appId) await config.api.application.delete(app.appId)
expect(events.app.deleted).toHaveBeenCalledTimes(1) expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
it("should delete published app and dev app with prod app ID", async () => { it("should delete published app and dev app with prod app ID", async () => {
await config.api.application.delete(app.appId.replace("_dev", "")) const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(prodAppId)
expect(events.app.deleted).toHaveBeenCalledTimes(1) expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => { it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => { await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
await config.api.application.delete(app.appId) await config.api.application.delete(app.appId)
}) })
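
All three deletion tests stub the same worker call before deleting the app; a consolidated sketch of that setup follows (the base URL http://localhost:10000 comes from the diff, the helper itself is hypothetical):

    import nock from "nock"

    // Hypothetical helper equivalent to the inline nock setup repeated above.
    function mockProdRoleCleanup(prodAppId: string) {
      return nock("http://localhost:10000")
        .delete(`/api/global/roles/${prodAppId}`)
        .reply(200, {})
    }

The nock.cleanAll() added to beforeEach keeps these interceptors from leaking between tests.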

View File

@ -14,6 +14,7 @@ import sdk from "../../../sdk"
import { Automation, FieldType, Table } from "@budibase/types" import { Automation, FieldType, Table } from "@budibase/types"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
import { FilterConditions } from "../../../automations/steps/filter" import { FilterConditions } from "../../../automations/steps/filter"
import { removeDeprecated } from "../../../automations/utils"
const MAX_RETRIES = 4 const MAX_RETRIES = 4
let { let {
@ -69,14 +70,15 @@ describe("/automations", () => {
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
let definitionsLength = Object.keys(BUILTIN_ACTION_DEFINITIONS).length let definitionsLength = Object.keys(
definitionsLength-- // OUTGOING_WEBHOOK is deprecated removeDeprecated(BUILTIN_ACTION_DEFINITIONS)
).length
expect(Object.keys(res.body.action).length).toBeGreaterThanOrEqual( expect(Object.keys(res.body.action).length).toBeGreaterThanOrEqual(
definitionsLength definitionsLength
) )
expect(Object.keys(res.body.trigger).length).toEqual( expect(Object.keys(res.body.trigger).length).toEqual(
Object.keys(TRIGGER_DEFINITIONS).length Object.keys(removeDeprecated(TRIGGER_DEFINITIONS)).length
) )
}) })
}) })
@ -398,7 +400,9 @@ describe("/automations", () => {
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig)) expect(res.body.automations[0]).toEqual(
expect.objectContaining(autoConfig)
)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -423,6 +427,22 @@ describe("/automations", () => {
expect(events.automation.deleted).toHaveBeenCalledTimes(1) expect(events.automation.deleted).toHaveBeenCalledTimes(1)
}) })
it("cannot delete a row action automation", async () => {
const automation = await config.createAutomation(
setup.structures.rowActionAutomation()
)
await request
.delete(`/api/automations/${automation._id}/${automation._rev}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(422, {
message: "Row actions automations cannot be deleted",
status: 422,
})
expect(events.automation.deleted).not.toHaveBeenCalled()
})
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
const automation = await config.createAutomation() const automation = await config.createAutomation()
await checkBuilderEndpoint({ await checkBuilderEndpoint({

View File

@ -15,9 +15,11 @@ import {
Table, Table,
TableSchema, TableSchema,
SupportedSqlTypes, SupportedSqlTypes,
JsonFieldSubType,
} from "@budibase/types" } from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures" import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"
describe("/datasources", () => { describe("/datasources", () => {
const config = setup.getConfig() const config = setup.getConfig()
@ -36,6 +38,7 @@ describe("/datasources", () => {
config: {}, config: {},
}) })
jest.clearAllMocks() jest.clearAllMocks()
nock.cleanAll()
}) })
describe("create", () => { describe("create", () => {
@ -70,6 +73,12 @@ describe("/datasources", () => {
describe("dynamic variables", () => { describe("dynamic variables", () => {
it("should invalidate changed or removed variables", async () => { it("should invalidate changed or removed variables", async () => {
nock("http://www.example.com/")
.get("/")
.reply(200, [{ value: "test" }])
.get("/?test=test")
.reply(200, [{ value: 1 }])
let datasource = await config.api.datasource.create({ let datasource = await config.api.datasource.create({
type: "datasource", type: "datasource",
name: "Rest", name: "Rest",
@ -80,7 +89,7 @@ describe("/datasources", () => {
const query = await config.api.query.save({ const query = await config.api.query.save({
datasourceId: datasource._id!, datasourceId: datasource._id!,
fields: { fields: {
path: "www.google.com", path: "www.example.com",
}, },
parameters: [], parameters: [],
transformer: null, transformer: null,
@ -288,7 +297,10 @@ describe("/datasources", () => {
name: "options", name: "options",
type: FieldType.OPTIONS, type: FieldType.OPTIONS,
constraints: { constraints: {
presence: { allowEmpty: false }, presence: {
allowEmpty: false,
},
inclusion: [],
}, },
}, },
[FieldType.NUMBER]: { [FieldType.NUMBER]: {
@ -302,6 +314,10 @@ describe("/datasources", () => {
[FieldType.ARRAY]: { [FieldType.ARRAY]: {
name: "array", name: "array",
type: FieldType.ARRAY, type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
}, },
[FieldType.DATETIME]: { [FieldType.DATETIME]: {
name: "datetime", name: "datetime",

View File

@ -15,6 +15,8 @@ jest.mock("@budibase/backend-core", () => {
import { events, objectStore } from "@budibase/backend-core" import { events, objectStore } from "@budibase/backend-core"
import * as setup from "./utilities" import * as setup from "./utilities"
import nock from "nock"
import { PluginSource } from "@budibase/types"
const mockUploadDirectory = objectStore.uploadDirectory as jest.Mock const mockUploadDirectory = objectStore.uploadDirectory as jest.Mock
const mockDeleteFolder = objectStore.deleteFolder as jest.Mock const mockDeleteFolder = objectStore.deleteFolder as jest.Mock
@ -28,6 +30,7 @@ describe("/plugins", () => {
beforeEach(async () => { beforeEach(async () => {
await config.init() await config.init()
jest.clearAllMocks() jest.clearAllMocks()
nock.cleanAll()
}) })
const createPlugin = async (status?: number) => { const createPlugin = async (status?: number) => {
@ -112,67 +115,108 @@ describe("/plugins", () => {
}) })
describe("github", () => { describe("github", () => {
const createGithubPlugin = async (status?: number, url?: string) => { beforeEach(async () => {
return await request nock("https://api.github.com")
.post(`/api/plugin`) .get("/repos/my-repo/budibase-comment-box")
.send({ .reply(200, {
source: "Github", name: "budibase-comment-box",
url, releases_url:
githubToken: "token", "https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
}) })
.set(config.defaultHeaders()) .get("/repos/my-repo/budibase-comment-box/latest")
.expect("Content-Type", /json/) .reply(200, {
.expect(status ? status : 200) assets: [
} {
it("should be able to create a plugin from github", async () => { content_type: "application/gzip",
const res = await createGithubPlugin( browser_download_url:
200, "https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
"https://github.com/my-repo/budibase-comment-box.git" },
) ],
expect(res.body).toBeDefined() })
expect(res.body.plugin).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_comment-box") nock("https://github.com")
.get(
"/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
)
.replyWithFile(
200,
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
)
}) })
it("should be able to create a plugin from github", async () => {
const { plugin } = await config.api.plugin.create({
source: PluginSource.GITHUB,
url: "https://github.com/my-repo/budibase-comment-box.git",
githubToken: "token",
})
expect(plugin._id).toEqual("plg_comment-box")
})
it("should fail if the url is not from github", async () => { it("should fail if the url is not from github", async () => {
const res = await createGithubPlugin( await config.api.plugin.create(
400, {
"https://notgithub.com/my-repo/budibase-comment-box" source: PluginSource.GITHUB,
) url: "https://notgithub.com/my-repo/budibase-comment-box",
expect(res.body.message).toEqual( githubToken: "token",
"Failed to import plugin: The plugin origin must be from Github" },
{
status: 400,
body: {
message:
"Failed to import plugin: The plugin origin must be from Github",
},
}
) )
}) })
}) })
describe("npm", () => { describe("npm", () => {
it("should be able to create a plugin from npm", async () => { it("should be able to create a plugin from npm", async () => {
const res = await request nock("https://registry.npmjs.org")
.post(`/api/plugin`) .get("/budibase-component")
.send({ .reply(200, {
source: "NPM", name: "budibase-component",
url: "https://www.npmjs.com/package/budibase-component", "dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.1.tgz",
},
},
},
}) })
.set(config.defaultHeaders()) .get("/budibase-component/-/budibase-component-1.0.1.tgz")
.expect("Content-Type", /json/) .replyWithFile(
.expect(200) 200,
expect(res.body).toBeDefined() "src/api/routes/tests/data/budibase-component-1.0.1.tgz"
expect(res.body.plugin._id).toEqual("plg_budibase-component") )
const { plugin } = await config.api.plugin.create({
source: PluginSource.NPM,
url: "https://www.npmjs.com/package/budibase-component",
})
expect(plugin._id).toEqual("plg_budibase-component")
expect(events.plugin.imported).toHaveBeenCalled() expect(events.plugin.imported).toHaveBeenCalled()
}) })
}) })
describe("url", () => { describe("url", () => {
it("should be able to create a plugin from a URL", async () => { it("should be able to create a plugin from a URL", async () => {
const res = await request nock("https://www.someurl.com")
.post(`/api/plugin`) .get("/comment-box/comment-box-1.0.2.tar.gz")
.send({ .replyWithFile(
source: "URL", 200,
url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz", "src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
}) )
.set(config.defaultHeaders())
.expect("Content-Type", /json/) const { plugin } = await config.api.plugin.create({
.expect(200) source: PluginSource.URL,
expect(res.body).toBeDefined() url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
expect(res.body.plugin._id).toEqual("plg_comment-box") })
expect(plugin._id).toEqual("plg_comment-box")
expect(events.plugin.imported).toHaveBeenCalledTimes(1) expect(events.plugin.imported).toHaveBeenCalledTimes(1)
}) })
}) })

View File

@ -22,9 +22,13 @@ describe.each(
DatabaseName.MYSQL, DatabaseName.MYSQL,
DatabaseName.SQL_SERVER, DatabaseName.SQL_SERVER,
DatabaseName.MARIADB, DatabaseName.MARIADB,
DatabaseName.ORACLE,
].map(name => [name, getDatasource(name)]) ].map(name => [name, getDatasource(name)])
)("queries (%s)", (dbName, dsProvider) => { )("queries (%s)", (dbName, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER
let rawDatasource: Datasource let rawDatasource: Datasource
let datasource: Datasource let datasource: Datasource
let client: Knex let client: Knex
@ -97,7 +101,7 @@ describe.each(
const query = await createQuery({ const query = await createQuery({
name: "New Query", name: "New Query",
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -106,7 +110,7 @@ describe.each(
name: "New Query", name: "New Query",
parameters: [], parameters: [],
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -125,7 +129,7 @@ describe.each(
it("should be able to update a query", async () => { it("should be able to update a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -135,7 +139,7 @@ describe.each(
...query, ...query,
name: "Updated Query", name: "Updated Query",
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).toString(),
}, },
}) })
@ -144,7 +148,7 @@ describe.each(
name: "Updated Query", name: "Updated Query",
parameters: [], parameters: [],
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -161,7 +165,7 @@ describe.each(
it("should be able to delete a query", async () => { it("should be able to delete a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -180,7 +184,7 @@ describe.each(
it("should be able to list queries", async () => { it("should be able to list queries", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -191,7 +195,7 @@ describe.each(
it("should strip sensitive fields for prod apps", async () => { it("should strip sensitive fields for prod apps", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -212,7 +216,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM test_table WHERE id = 1`, sql: client("test_table").where({ id: 1 }).toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -270,7 +274,7 @@ describe.each(
name: "Test Query", name: "Test Query",
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM ${tableName}`, sql: client(tableName).select("*").toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -284,11 +288,13 @@ describe.each(
}) })
) )
await client(tableName).delete()
await client.schema.alterTable(tableName, table => { await client.schema.alterTable(tableName, table => {
table.string("data").alter() table.string("data").alter()
}) })
await client(tableName).update({ await client(tableName).insert({
name: "test",
data: "string value", data: "string value",
}) })
@ -297,7 +303,7 @@ describe.each(
name: "Test Query", name: "Test Query",
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM ${tableName}`, sql: client(tableName).select("*").toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -311,6 +317,7 @@ describe.each(
}) })
) )
}) })
it("should work with static variables", async () => { it("should work with static variables", async () => {
await config.api.datasource.update({ await config.api.datasource.update({
...datasource, ...datasource,
@ -326,7 +333,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -337,16 +344,17 @@ describe.each(
const response = await config.api.query.preview(request) const response = await config.api.query.preview(request)
let key = isOracle ? "FOO" : "foo"
expect(response.schema).toEqual({ expect(response.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(response.rows).toEqual([ expect(response.rows).toEqual([
{ {
foo: "bar", [key]: "bar",
}, },
]) ])
}) })
@ -354,7 +362,7 @@ describe.each(
it("should work with dynamic variables", async () => { it("should work with dynamic variables", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: "SELECT name FROM test_table WHERE id = 1", sql: client("test_table").select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -376,7 +384,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -385,16 +393,17 @@ describe.each(
readable: true, readable: true,
}) })
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({ expect(preview.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(preview.rows).toEqual([ expect(preview.rows).toEqual([
{ {
foo: "one", [key]: "one",
}, },
]) ])
}) })
@ -402,7 +411,7 @@ describe.each(
it("should handle the dynamic base query being deleted", async () => { it("should handle the dynamic base query being deleted", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: "SELECT name FROM test_table WHERE id = 1", sql: client("test_table").select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -426,7 +435,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -435,16 +444,17 @@ describe.each(
readable: true, readable: true,
}) })
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({ expect(preview.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(preview.rows).toEqual([ expect(preview.rows).toEqual([
{ {
foo: datasource.source === SourceName.SQL_SERVER ? "" : null, [key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
}, },
]) ])
}) })
@ -455,7 +465,7 @@ describe.each(
it("should be able to insert with bindings", async () => { it("should be able to insert with bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})", sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -488,7 +498,7 @@ describe.each(
it("should not allow handlebars as parameters", async () => { it("should not allow handlebars as parameters", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})", sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -516,46 +526,55 @@ describe.each(
) )
}) })
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( // Oracle doesn't automatically coerce strings into dates.
"should coerce %s into a date", !isOracle &&
async datetimeStr => { it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
const date = new Date(datetimeStr) "should coerce %s into a date",
const query = await createQuery({ async datetimeStr => {
fields: { const date = new Date(datetimeStr)
sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`, const query = await createQuery({
}, fields: {
parameters: [ sql: client("test_table")
{ .insert({
name: "birthday", name: "foo",
default: "", birthday: client.raw("{{ birthday }}"),
})
.toString(),
}, },
], parameters: [
queryVerb: "create", {
}) name: "birthday",
default: "",
},
],
queryVerb: "create",
})
const result = await config.api.query.execute(query._id!, { const result = await config.api.query.execute(query._id!, {
parameters: { birthday: datetimeStr }, parameters: { birthday: datetimeStr },
}) })
expect(result.data).toEqual([{ created: true }]) expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table") const rows = await client("test_table")
.where({ birthday: datetimeStr }) .where({ birthday: datetimeStr })
.select() .select()
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
for (const row of rows) { for (const row of rows) {
expect(new Date(row.birthday)).toEqual(date) expect(new Date(row.birthday)).toEqual(date)
}
} }
} )
)
it.each(["2021,02,05", "202205-1500"])( it.each(["2021,02,05", "202205-1500"])(
"should not coerce %s as a date", "should not coerce %s as a date",
async notDateStr => { async notDateStr => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ name }})", sql: client("test_table")
.insert({ name: client.raw("{{ name }}") })
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -586,7 +605,7 @@ describe.each(
it("should execute a query", async () => { it("should execute a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table ORDER BY id", sql: client("test_table").select("*").orderBy("id").toString(),
}, },
}) })
@ -629,7 +648,7 @@ describe.each(
it("should be able to transform a query", async () => { it("should be able to transform a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).select("*").toString(),
}, },
transformer: ` transformer: `
data[0].id = data[0].id + 1; data[0].id = data[0].id + 1;
@ -652,7 +671,10 @@ describe.each(
it("should coerce numeric bindings", async () => { it("should coerce numeric bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = {{ id }}", sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.select("*")
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -683,7 +705,10 @@ describe.each(
it("should be able to update rows", async () => { it("should be able to update rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}", sql: client("test_table")
.update({ name: client.raw("{{ name }}") })
.where({ id: client.raw("{{ id }}") })
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -698,19 +723,13 @@ describe.each(
queryVerb: "update", queryVerb: "update",
}) })
const result = await config.api.query.execute(query._id!, { await config.api.query.execute(query._id!, {
parameters: { parameters: {
id: "1", id: "1",
name: "foo", name: "foo",
}, },
}) })
expect(result.data).toEqual([
{
updated: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toEqual([ expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null }, { id: 1, name: "foo", birthday: null, number: null },
@ -720,35 +739,34 @@ describe.each(
it("should be able to execute an update that updates no rows", async () => { it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100", sql: client("test_table")
.update({ name: "updated" })
.where({ id: 100 })
.toString(),
}, },
queryVerb: "update", queryVerb: "update",
}) })
const result = await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
expect(result.data).toEqual([ const rows = await client("test_table").select()
{ for (const row of rows) {
updated: true, expect(row.name).not.toEqual("updated")
}, }
])
}) })
it("should be able to execute a delete that deletes no rows", async () => { it("should be able to execute a delete that deletes no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "DELETE FROM test_table WHERE id = 100", sql: client("test_table").where({ id: 100 }).delete().toString(),
}, },
queryVerb: "delete", queryVerb: "delete",
}) })
const result = await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
expect(result.data).toEqual([ const rows = await client("test_table").select()
{ expect(rows).toHaveLength(5)
deleted: true,
},
])
}) })
}) })
@ -756,7 +774,10 @@ describe.each(
it("should be able to delete rows", async () => { it("should be able to delete rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "DELETE FROM test_table WHERE id = {{ id }}", sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.delete()
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -767,18 +788,12 @@ describe.each(
queryVerb: "delete", queryVerb: "delete",
}) })
const result = await config.api.query.execute(query._id!, { await config.api.query.execute(query._id!, {
parameters: { parameters: {
id: "1", id: "1",
}, },
}) })
expect(result.data).toEqual([
{
deleted: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toHaveLength(0) expect(rows).toHaveLength(0)
}) })
@ -823,72 +838,63 @@ describe.each(
}) })
}) })
it("should be able to execute an update that updates no rows", async () => { // this parameter really only impacts SQL queries
const query = await createQuery({ describe("confirm nullDefaultSupport", () => {
fields: { let queryParams: Partial<Query>
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100", beforeAll(async () => {
}, queryParams = {
queryVerb: "update", fields: {
sql: client("test_table")
.insert({
name: client.raw("{{ bindingName }}"),
number: client.raw("{{ bindingNumber }}"),
})
.toString(),
},
parameters: [
{
name: "bindingName",
default: "",
},
{
name: "bindingNumber",
default: "",
},
],
queryVerb: "create",
}
}) })
const result = await config.api.query.execute(query._id!, {}) it("should error for old queries", async () => {
const query = await createQuery(queryParams)
await config.api.query.save({ ...query, nullDefaultSupport: false })
let error: string | undefined
try {
await config.api.query.execute(query._id!, {
parameters: {
bindingName: "testing",
},
})
} catch (err: any) {
error = err.message
}
if (isMsSQL || isOracle) {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
expect(error).toContain("integer")
}
})
expect(result.data).toEqual([ it("should not error for new queries", async () => {
{ const query = await createQuery(queryParams)
updated: true, const results = await config.api.query.execute(query._id!, {
},
])
})
})
// this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => {
const queryParams = {
fields: {
sql: "INSERT INTO test_table (name, number) VALUES ({{ bindingName }}, {{ bindingNumber }})",
},
parameters: [
{
name: "bindingName",
default: "",
},
{
name: "bindingNumber",
default: "",
},
],
queryVerb: "create",
}
it("should error for old queries", async () => {
const query = await createQuery(queryParams)
await config.api.query.save({ ...query, nullDefaultSupport: false })
let error: string | undefined
try {
await config.api.query.execute(query._id!, {
parameters: { parameters: {
bindingName: "testing", bindingName: "testing",
}, },
}) })
} catch (err: any) { expect(results).toEqual({ data: [{ created: true }] })
error = err.message
}
if (dbName === "mssql") {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
expect(error).toContain("integer")
}
})
it("should not error for new queries", async () => {
const query = await createQuery(queryParams)
const results = await config.api.query.execute(query._id!, {
parameters: {
bindingName: "testing",
},
}) })
expect(results).toEqual({ data: [{ created: true }] })
}) })
}) })
}) })
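
The recurring change in this file is replacing hand-written SQL strings with Knex builders so every dialect in the matrix, including the newly added Oracle entry, receives valid syntax; the isOracle/isMsSQL flags then cover the remaining differences (Oracle needs FROM dual for table-less selects and returns unquoted aliases uppercased, hence the FOO key). As a rough illustration of what the builders emit — outputs are approximate and depend on the configured client:

    import { knex } from "knex"

    // Build-only client; no connection is needed just to render SQL strings.
    const pg = knex({ client: "pg" })

    pg("test_table").select("*").toString()
    // roughly: select * from "test_table"

    // Handlebars bindings survive as raw fragments for Budibase to substitute later.
    pg("test_table").where({ id: pg.raw("{{ id }}") }).select("*").toString()
    // roughly: select * from "test_table" where "id" = {{ id }}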

View File

@ -5,8 +5,6 @@ import { getCachedVariable } from "../../../../threads/utils"
import nock from "nock" import nock from "nock"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
jest.unmock("node-fetch")
describe("rest", () => { describe("rest", () => {
let config: TestConfiguration let config: TestConfiguration
let datasource: Datasource let datasource: Datasource

View File

@ -1,3 +1,5 @@
import * as setup from "./utilities"
import { import {
DatabaseName, DatabaseName,
getDatasource, getDatasource,
@ -7,7 +9,6 @@ import {
import tk from "timekeeper" import tk from "timekeeper"
import emitter from "../../../../src/events" import emitter from "../../../../src/events"
import { outputProcessing } from "../../../utilities/rowProcessor" import { outputProcessing } from "../../../utilities/rowProcessor"
import * as setup from "./utilities"
import { context, InternalTable, tenancy } from "@budibase/backend-core" import { context, InternalTable, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { import {
@ -31,6 +32,8 @@ import {
TableSourceType, TableSourceType,
UpdatedRowEventEmitter, UpdatedRowEventEmitter,
TableSchema, TableSchema,
JsonFieldSubType,
RowExportFormat,
} from "@budibase/types" } from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests" import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash" import _, { merge } from "lodash"
@ -69,9 +72,11 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/rows (%s)", (providerType, dsProvider) => { ])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined const isInternal = dsProvider === undefined
const isMSSQL = providerType === DatabaseName.SQL_SERVER const isMSSQL = providerType === DatabaseName.SQL_SERVER
const isOracle = providerType === DatabaseName.ORACLE
const config = setup.getConfig() const config = setup.getConfig()
let table: Table let table: Table
@ -101,7 +106,7 @@ describe.each([
): SaveTableRequest { ): SaveTableRequest {
const defaultSchema: TableSchema = { const defaultSchema: TableSchema = {
id: { id: {
type: FieldType.AUTO, type: FieldType.NUMBER,
name: "id", name: "id",
autocolumn: true, autocolumn: true,
constraints: { constraints: {
@ -126,7 +131,8 @@ describe.each([
primary: ["id"], primary: ["id"],
schema: defaultSchema, schema: defaultSchema,
} }
return merge(req, ...overrides) const merged = merge(req, ...overrides)
return merged
} }
function defaultTable( function defaultTable(
@ -383,7 +389,7 @@ describe.each([
const arrayField: FieldSchema = { const arrayField: FieldSchema = {
type: FieldType.ARRAY, type: FieldType.ARRAY,
constraints: { constraints: {
type: "array", type: JsonFieldSubType.ARRAY,
presence: false, presence: false,
inclusion: ["One", "Two", "Three"], inclusion: ["One", "Two", "Three"],
}, },
@ -1296,9 +1302,117 @@ describe.each([
await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
}) })
// Upserting isn't yet supported in MSSQL, see: isInternal &&
it("should be able to update existing rows on bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
identifierFields: ["_id"],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
expect(rows[2].name).toEqual("Updated existing row")
expect(rows[2].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 2)
})
isInternal &&
it("should create new rows if not identifierFields are provided", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Existing row")
expect(rows[0].description).toEqual("Existing description")
expect(rows[1].name).toEqual("Row 1")
expect(rows[1].description).toEqual("Row 1 description")
expect(rows[2].name).toEqual("Row 2")
expect(rows[2].description).toEqual("Row 2 description")
expect(rows[3].name).toEqual("Updated existing row")
expect(rows[3].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 3)
})
// Upserting isn't yet supported in MSSQL / Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
it("should be able to update existing rows with bulkImport", async () => { it("should be able to update existing rows with bulkImport", async () => {
const table = await config.api.table.save( const table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
@ -1368,9 +1482,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description") expect(rows[2].description).toEqual("Row 3 description")
}) })
// Upserting isn't yet supported in MSSQL, see: // Upserting isn't yet supported in MSSQL or Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
!isInternal && !isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => { it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4() const tableName = uuid.v4()
@ -1437,9 +1552,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description") expect(rows[2].description).toEqual("Row 3 description")
}) })
// Upserting isn't yet supported in MSSQL, see: // Upserting isn't yet supported in MSSQL/Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
!isInternal && !isInternal &&
it("should be able to update existing rows an autoID primary key", async () => { it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4() const tableName = uuid.v4()
@ -1638,23 +1754,38 @@ describe.each([
table = await config.api.table.save(defaultTable()) table = await config.api.table.save(defaultTable())
}) })
it("should allow exporting all columns", async () => { isInternal &&
const existing = await config.api.row.save(table._id!, {}) it("should not export internal couchdb fields", async () => {
const res = await config.api.row.exportRows(table._id!, { const existing = await config.api.row.save(table._id!, {
rows: [existing._id!], name: generator.guid(),
}) description: generator.paragraph(),
const results = JSON.parse(res) })
expect(results.length).toEqual(1) const res = await config.api.row.exportRows(table._id!, {
const row = results[0] rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported expect(Object.keys(row)).toEqual(["_id", "name", "description"])
expect(Object.keys(row).length).toBeGreaterThanOrEqual( })
Object.keys(existing).length
) !isInternal &&
Object.keys(existing).forEach(key => { it("should allow exporting all columns", async () => {
expect(row[key]).toEqual(existing[key]) const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBe(Object.keys(existing).length)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
})
}) })
})
it("should allow exporting only certain columns", async () => { it("should allow exporting only certain columns", async () => {
const existing = await config.api.row.save(table._id!, {}) const existing = await config.api.row.save(table._id!, {})
@ -1687,6 +1818,7 @@ describe.each([
await config.api.row.exportRows( await config.api.row.exportRows(
"1234567", "1234567",
{ rows: [existing._id!] }, { rows: [existing._id!] },
RowExportFormat.JSON,
{ status: 404 } { status: 404 }
) )
}) })
@ -1725,6 +1857,202 @@ describe.each([
const results = JSON.parse(res) const results = JSON.parse(res)
expect(results.length).toEqual(3) expect(results.length).toEqual(3)
}) })
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const matchingObject = (key: string, value: any, isArray: boolean) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
}) })
let o2mTable: Table let o2mTable: Table

View File

@ -1,10 +1,17 @@
import _ from "lodash" import _ from "lodash"
import tk from "timekeeper" import tk from "timekeeper"
import { CreateRowActionRequest, RowActionResponse } from "@budibase/types" import {
CreateRowActionRequest,
DocumentType,
RowActionResponse,
} from "@budibase/types"
import * as setup from "./utilities" import * as setup from "./utilities"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
const expectAutomationId = () =>
expect.stringMatching(`^${DocumentType.AUTOMATION}_.+`)
describe("/rowsActions", () => { describe("/rowsActions", () => {
const config = setup.getConfig() const config = setup.getConfig()
@ -79,17 +86,19 @@ describe("/rowsActions", () => {
}) })
expect(res).toEqual({ expect(res).toEqual({
name: rowAction.name,
id: expect.stringMatching(/^row_action_\w+/), id: expect.stringMatching(/^row_action_\w+/),
tableId: tableId, tableId: tableId,
...rowAction, automationId: expectAutomationId(),
}) })
expect(await config.api.rowAction.find(tableId)).toEqual({ expect(await config.api.rowAction.find(tableId)).toEqual({
actions: { actions: {
[res.id]: { [res.id]: {
...rowAction, name: rowAction.name,
id: res.id, id: res.id,
tableId: tableId, tableId: tableId,
automationId: expectAutomationId(),
}, },
}, },
}) })
@ -97,19 +106,13 @@ describe("/rowsActions", () => {
it("trims row action names", async () => { it("trims row action names", async () => {
const name = " action name " const name = " action name "
const res = await createRowAction( const res = await createRowAction(tableId, { name }, { status: 201 })
tableId,
{ name },
{
status: 201,
}
)
expect(res).toEqual({ expect(res).toEqual(
id: expect.stringMatching(/^row_action_\w+/), expect.objectContaining({
tableId: tableId, name: "action name",
name: "action name", })
}) )
expect(await config.api.rowAction.find(tableId)).toEqual({ expect(await config.api.rowAction.find(tableId)).toEqual({
actions: { actions: {
@ -129,9 +132,24 @@ describe("/rowsActions", () => {
expect(await config.api.rowAction.find(tableId)).toEqual({ expect(await config.api.rowAction.find(tableId)).toEqual({
actions: { actions: {
[responses[0].id]: { ...rowActions[0], id: responses[0].id, tableId }, [responses[0].id]: {
[responses[1].id]: { ...rowActions[1], id: responses[1].id, tableId }, name: rowActions[0].name,
[responses[2].id]: { ...rowActions[2], id: responses[2].id, tableId }, id: responses[0].id,
tableId,
automationId: expectAutomationId(),
},
[responses[1].id]: {
name: rowActions[1].name,
id: responses[1].id,
tableId,
automationId: expectAutomationId(),
},
[responses[2].id]: {
name: rowActions[2].name,
id: responses[2].id,
tableId,
automationId: expectAutomationId(),
},
}, },
}) })
}) })
@ -152,7 +170,7 @@ describe("/rowsActions", () => {
it("ignores not valid row action data", async () => { it("ignores not valid row action data", async () => {
const rowAction = createRowActionRequest() const rowAction = createRowActionRequest()
const dirtyRowAction = { const dirtyRowAction = {
...rowAction, name: rowAction.name,
id: generator.guid(), id: generator.guid(),
valueToIgnore: generator.string(), valueToIgnore: generator.string(),
} }
@ -161,17 +179,19 @@ describe("/rowsActions", () => {
       })
       expect(res).toEqual({
+        name: rowAction.name,
         id: expect.any(String),
         tableId,
-        ...rowAction,
+        automationId: expectAutomationId(),
       })
       expect(await config.api.rowAction.find(tableId)).toEqual({
         actions: {
           [res.id]: {
+            name: rowAction.name,
             id: res.id,
             tableId: tableId,
-            ...rowAction,
+            automationId: expectAutomationId(),
           },
         },
       })
@ -213,6 +233,17 @@ describe("/rowsActions", () => {
await createRowAction(otherTable._id!, { name: action.name }) await createRowAction(otherTable._id!, { name: action.name })
}) })
it("an automation is created when creating a new row action", async () => {
const action1 = await createRowAction(tableId, createRowActionRequest())
const action2 = await createRowAction(tableId, createRowActionRequest())
for (const automationId of [action1.automationId, action2.automationId]) {
expect(
await config.api.automation.get(automationId, { status: 200 })
).toEqual(expect.objectContaining({ _id: automationId }))
}
})
}) })
describe("find", () => { describe("find", () => {
@ -264,7 +295,6 @@ describe("/rowsActions", () => {
const updatedName = generator.string() const updatedName = generator.string()
const res = await config.api.rowAction.update(tableId, actionId, { const res = await config.api.rowAction.update(tableId, actionId, {
...actionData,
name: updatedName, name: updatedName,
}) })
@ -272,14 +302,17 @@ describe("/rowsActions", () => {
id: actionId, id: actionId,
tableId, tableId,
name: updatedName, name: updatedName,
automationId: actionData.automationId,
}) })
expect(await config.api.rowAction.find(tableId)).toEqual( expect(await config.api.rowAction.find(tableId)).toEqual(
expect.objectContaining({ expect.objectContaining({
actions: expect.objectContaining({ actions: expect.objectContaining({
[actionId]: { [actionId]: {
...actionData,
name: updatedName, name: updatedName,
id: actionData.id,
tableId: actionData.tableId,
automationId: actionData.automationId,
}, },
}), }),
}) })
@ -296,7 +329,6 @@ describe("/rowsActions", () => {
) )
const res = await config.api.rowAction.update(tableId, rowAction.id, { const res = await config.api.rowAction.update(tableId, rowAction.id, {
...rowAction,
name: " action name ", name: " action name ",
}) })
@ -408,5 +440,26 @@ describe("/rowsActions", () => {
status: 400, status: 400,
}) })
}) })
it("deletes the linked automation", async () => {
const actions: RowActionResponse[] = []
for (const rowAction of createRowActionRequests(3)) {
actions.push(await createRowAction(tableId, rowAction))
}
const actionToDelete = _.sample(actions)!
await config.api.rowAction.delete(tableId, actionToDelete.id, {
status: 204,
})
await config.api.automation.get(actionToDelete.automationId, {
status: 404,
})
for (const action of actions.filter(a => a.id !== actionToDelete.id)) {
await config.api.automation.get(action.automationId, {
status: 200,
})
}
})
}) })
}) })


@ -5,12 +5,12 @@ import {
knexClient, knexClient,
} from "../../../integrations/tests/utils" } from "../../../integrations/tests/utils"
import { import {
db as dbCore,
context, context,
db as dbCore,
MAX_VALID_DATE, MAX_VALID_DATE,
MIN_VALID_DATE, MIN_VALID_DATE,
utils,
SQLITE_DESIGN_DOC_ID, SQLITE_DESIGN_DOC_ID,
utils,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import * as setup from "./utilities" import * as setup from "./utilities"
@ -20,6 +20,7 @@ import {
Datasource, Datasource,
EmptyFilterOption, EmptyFilterOption,
FieldType, FieldType,
JsonFieldSubType,
RelationshipType, RelationshipType,
Row, Row,
RowSearchParams, RowSearchParams,
@ -47,11 +48,13 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => { ])("search (%s)", (name, dsProvider) => {
const isSqs = name === "sqs" const isSqs = name === "sqs"
const isLucene = name === "lucene" const isLucene = name === "lucene"
const isInMemory = name === "in-memory" const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory const isInternal = isSqs || isLucene || isInMemory
const isSql = !isInMemory && !isLucene
const config = setup.getConfig() const config = setup.getConfig()
let envCleanup: (() => void) | undefined let envCleanup: (() => void) | undefined
@ -191,7 +194,8 @@ describe.each([
     // different to the one passed in will cause the assertion to fail. Extra
     // rows returned by the query will also cause the assertion to fail.
     async toMatchExactly(expectedRows: any[]) {
-      const { rows: foundRows } = await this.performSearch()
+      const response = await this.performSearch()
+      const foundRows = response.rows
       // eslint-disable-next-line jest/no-standalone-expect
       expect(foundRows).toHaveLength(expectedRows.length)
@ -201,13 +205,15 @@ describe.each([
           expect.objectContaining(this.popRow(expectedRow, foundRows))
         )
       )
+      return response
     }
     // Asserts that the query returns rows matching exactly the set of rows
     // passed in. The order of the rows is not important, but extra rows will
     // cause the assertion to fail.
     async toContainExactly(expectedRows: any[]) {
-      const { rows: foundRows } = await this.performSearch()
+      const response = await this.performSearch()
+      const foundRows = response.rows
       // eslint-disable-next-line jest/no-standalone-expect
       expect(foundRows).toHaveLength(expectedRows.length)
@ -219,6 +225,7 @@ describe.each([
) )
) )
) )
return response
} }
// Asserts that the query returns some property values - this cannot be used // Asserts that the query returns some property values - this cannot be used
@ -235,6 +242,7 @@ describe.each([
expect(response[key]).toEqual(properties[key]) expect(response[key]).toEqual(properties[key])
} }
} }
return response
} }
// Asserts that the query doesn't return a property, e.g. pagination parameters. // Asserts that the query doesn't return a property, e.g. pagination parameters.
@ -244,13 +252,15 @@ describe.each([
         // eslint-disable-next-line jest/no-standalone-expect
         expect(response[property]).toBeUndefined()
       }
+      return response
     }
     // Asserts that the query returns rows matching the set of rows passed in.
     // The order of the rows is not important. Extra rows will not cause the
     // assertion to fail.
     async toContain(expectedRows: any[]) {
-      const { rows: foundRows } = await this.performSearch()
+      const response = await this.performSearch()
+      const foundRows = response.rows
       // eslint-disable-next-line jest/no-standalone-expect
       expect([...foundRows]).toEqual(
@ -260,6 +270,7 @@ describe.each([
) )
) )
) )
return response
} }
async toFindNothing() { async toFindNothing() {
@ -1494,7 +1505,10 @@ describe.each([
         numbers: {
           name: "numbers",
           type: FieldType.ARRAY,
-          constraints: { inclusion: ["one", "two", "three"] },
+          constraints: {
+            type: JsonFieldSubType.ARRAY,
+            inclusion: ["one", "two", "three"],
+          },
         },
       })
       await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
@ -1581,7 +1595,10 @@ describe.each([
       const MEDIUM = "10000000"
       // Our bigints are int64s in most datasources.
-      const BIG = "9223372036854775807"
+      let BIG = "9223372036854775807"
+      if (name === DatabaseName.ORACLE) {
+        // BIG = "9223372036854775808"
+      }
       beforeAll(async () => {
         table = await createTable({
@ -2560,4 +2577,123 @@ describe.each([
}).toContainExactly([{ name: "foo" }]) }).toContainExactly([{ name: "foo" }])
}) })
}) })
!isInMemory &&
describe("search by _id", () => {
let row: Row
beforeAll(async () => {
const toRelateTable = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
})
table = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
rel: {
name: "rel",
type: FieldType.LINK,
relationshipType: RelationshipType.MANY_TO_MANY,
tableId: toRelateTable._id!,
fieldName: "rel",
},
})
const [row1, row2] = await Promise.all([
config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
])
row = await config.api.row.save(table._id!, {
name: "product 1",
rel: [row1._id, row2._id],
})
})
it("can filter by the row ID with limit 1", async () => {
await expectSearch({
query: {
equal: { _id: row._id },
},
limit: 1,
}).toContainExactly([row])
})
})
isSql &&
describe("pagination edge case with relationships", () => {
let mainRows: Row[] = []
beforeAll(async () => {
const toRelateTable = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
})
table = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
rel: {
name: "rel",
type: FieldType.LINK,
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: toRelateTable._id!,
fieldName: "rel",
},
})
const relatedRows = await Promise.all([
config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
])
mainRows = await Promise.all([
config.api.row.save(table._id!, {
name: "product 1",
rel: relatedRows.map(row => row._id),
}),
config.api.row.save(table._id!, {
name: "product 2",
rel: [],
}),
config.api.row.save(table._id!, {
name: "product 3",
rel: [],
}),
])
})
it("can still page when the hard limit is hit", async () => {
await config.withCoreEnv(
{
SQL_MAX_ROWS: "6",
},
async () => {
const params: Omit<RowSearchParams, "tableId"> = {
query: {},
paginate: true,
limit: 3,
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}
const page1 = await expectSearch(params).toContain([mainRows[0]])
expect(page1.hasNextPage).toBe(true)
expect(page1.bookmark).toBeDefined()
const page2 = await expectSearch({
...params,
bookmark: page1.bookmark,
}).toContain([mainRows[1], mainRows[2]])
expect(page2.hasNextPage).toBe(false)
}
)
})
})
}) })


@ -1,4 +1,8 @@
-import { context, events } from "@budibase/backend-core"
+import { context, docIds, events } from "@budibase/backend-core"
+import {
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"
 import {
   AutoFieldSubType,
   BBReferenceFieldSubType,
@ -10,10 +14,13 @@ import {
Row, Row,
SaveTableRequest, SaveTableRequest,
Table, Table,
TableSchema,
TableSourceType, TableSourceType,
User, User,
ValidateTableImportResponse,
ViewCalculation, ViewCalculation,
ViewV2Enriched, ViewV2Enriched,
RowExportFormat,
} from "@budibase/types" } from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities" import * as setup from "./utilities"
@ -33,7 +40,8 @@ describe.each([
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/tables (%s)", (_, dsProvider) => {
+  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
+])("/tables (%s)", (name, dsProvider) => {
   const isInternal: boolean = !dsProvider
   let datasource: Datasource | undefined
   let config = setup.getConfig()
@ -52,15 +60,20 @@ describe.each([
     jest.clearAllMocks()
   })
-  it.each([
+  let names = [
     "alphanum",
     "with spaces",
     "with-dashes",
     "with_underscores",
-    'with "double quotes"',
-    "with 'single quotes'",
     "with `backticks`",
-  ])("creates a table with name: %s", async name => {
+  ]
+  if (name !== DatabaseName.ORACLE) {
+    names.push(`with "double quotes"`)
+    names.push(`with 'single quotes'`)
+  }
+  it.each(names)("creates a table with name: %s", async name => {
     const table = await config.api.table.save(
       tableForDatasource(datasource, { name })
     )
@ -118,6 +131,64 @@ describe.each([
body: basicTable(), body: basicTable(),
}) })
}) })
it("does not persist the row fields that are not on the table schema", async () => {
const table: SaveTableRequest = basicTable()
table.rows = [
{
name: "test-name",
description: "test-desc",
nonValid: "test-non-valid",
},
]
const res = await config.api.table.save(table)
const persistedRows = await config.api.row.search(res._id!)
expect(persistedRows.rows).toEqual([
expect.objectContaining({
name: "test-name",
description: "test-desc",
}),
])
expect(persistedRows.rows[0].nonValid).toBeUndefined()
})
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)(
"cannot use protected column names (%s) while importing a table",
async columnName => {
const table: SaveTableRequest = basicTable()
table.rows = [
{
name: "test-name",
description: "test-desc",
},
]
await config.api.table.save(
{
...table,
schema: {
...table.schema,
[columnName]: {
name: columnName,
type: FieldType.STRING,
},
},
},
{
status: 400,
body: {
message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
status: 400,
},
}
)
}
)
}) })
describe("update", () => { describe("update", () => {
@ -398,6 +469,7 @@ describe.each([
name: "auto", name: "auto",
autocolumn: true, autocolumn: true,
type: FieldType.AUTO, type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
}, },
}, },
}, },
@ -1021,4 +1093,371 @@ describe.each([
}) })
}) })
}) })
describe.each([
[RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
[RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
const basicSchema: TableSchema = {
id: {
type: FieldType.NUMBER,
name: "id",
},
name: {
type: FieldType.STRING,
name: "name",
},
}
const importCases: [
string,
(rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
][] = [
[
"validateNewTableImport",
async (rows: Row[], schema: TableSchema) => {
const result = await config.api.table.validateNewTableImport({
rows,
schema,
})
return result
},
],
[
"validateExistingTableImport",
async (rows: Row[], schema: TableSchema) => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows,
})
return result
},
],
]
describe.each(importCases)("%s", (_, testDelegate) => {
it("validates basic imports", async () => {
const result = await testDelegate(
[{ id: generator.natural(), name: generator.first() }],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in schema (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
[columnName]: generator.word(),
},
],
schema: {
...basicSchema,
},
})
expect(result).toEqual({
allValid: false,
errors: {
[columnName]: `${columnName} is a protected column name`,
},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
[columnName]: false,
},
})
})
it("does not allow imports without rows", async () => {
const result = await testDelegate([], basicSchema)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {},
})
})
it("validates imports with some empty rows", async () => {
const result = await testDelegate(
[{}, { id: generator.natural(), name: generator.first() }, {}],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
isInternal &&
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in the rows (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
},
],
schema: {
...basicSchema,
[columnName]: {
name: columnName,
type: FieldType.STRING,
},
},
})
expect(result).toEqual({
allValid: false,
errors: {
[columnName]: `${columnName} is a protected column name`,
},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
[columnName]: false,
},
})
})
it("validates required fields and valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: generator.first() },
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
it("validates required fields and non-valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: "" },
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: false,
},
})
})
describe("bb references", () => {
const getUserValues = () => ({
_id: docIds.generateGlobalUserID(),
primaryDisplay: generator.first(),
email: generator.email({}),
})
it("can validate user column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
it("can validate user column imports with invalid data", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
{
id: generator.natural(),
name: generator.first(),
user: "no valid user data",
},
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: false,
},
})
})
it("can validate users column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
externalType: "array",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser([
getUserValues(),
getUserValues(),
getUserValues(),
]),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
})
})
describe("validateExistingTableImport", () => {
isInternal &&
it("can reimport _id fields for internal tables", async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema: basicSchema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows: [
{
_id: docIds.generateRowID(table._id!),
id: generator.natural(),
name: generator.first(),
},
],
})
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
_id: true,
id: true,
name: true,
},
})
})
})
})
}) })


@ -54,7 +54,7 @@ export const clearAllApps = async (
   }
 }
 export const clearAllAutomations = async (config: TestConfiguration) => {
-  const automations = await config.getAllAutomations()
+  const { automations } = await config.getAllAutomations()
   for (let auto of automations) {
     await context.doInAppContext(config.getAppId(), async () => {
       await config.deleteAutomation(auto)


@ -33,6 +33,7 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => { ])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isSqs = name === "sqs" const isSqs = name === "sqs"
@ -56,7 +57,7 @@ describe.each([
primary: ["id"], primary: ["id"],
schema: { schema: {
id: { id: {
type: FieldType.AUTO, type: FieldType.NUMBER,
name: "id", name: "id",
autocolumn: true, autocolumn: true,
constraints: { constraints: {
@ -241,7 +242,7 @@ describe.each([
       schema: {
         id: {
           name: "id",
-          type: FieldType.AUTO,
+          type: FieldType.NUMBER,
           autocolumn: true,
           visible: true,
         },
@ -1555,7 +1556,7 @@ describe.each([
       schema: {
         id: {
           name: "id",
-          type: FieldType.AUTO,
+          type: FieldType.NUMBER,
           autocolumn: true,
         },
         name: {


@ -20,17 +20,21 @@ import * as triggerAutomationRun from "./steps/triggerAutomationRun"
 import env from "../environment"
 import {
   AutomationStepSchema,
-  AutomationStepInput,
   PluginType,
   AutomationStep,
+  AutomationActionStepId,
+  ActionImplementations,
+  Hosting,
+  ActionImplementation,
 } from "@budibase/types"
 import sdk from "../sdk"
 import { getAutomationPlugin } from "../utilities/fileSystem"
-const ACTION_IMPLS: Record<
-  string,
-  (opts: AutomationStepInput) => Promise<any>
-> = {
+type ActionImplType = ActionImplementations<
+  typeof env.SELF_HOSTED extends "true" ? Hosting.SELF : Hosting.CLOUD
+>
+
+const ACTION_IMPLS: ActionImplType = {
   SEND_EMAIL_SMTP: sendSmtpEmail.run,
   CREATE_ROW: createRow.run,
   UPDATE_ROW: updateRow.run,
@ -51,6 +55,7 @@ const ACTION_IMPLS: Record<
integromat: make.run, integromat: make.run,
n8n: n8n.run, n8n: n8n.run,
} }
export const BUILTIN_ACTION_DEFINITIONS: Record<string, AutomationStepSchema> = export const BUILTIN_ACTION_DEFINITIONS: Record<string, AutomationStepSchema> =
{ {
SEND_EMAIL_SMTP: sendSmtpEmail.definition, SEND_EMAIL_SMTP: sendSmtpEmail.definition,
@ -86,7 +91,7 @@ if (env.SELF_HOSTED) {
ACTION_IMPLS["EXECUTE_BASH"] = bash.run ACTION_IMPLS["EXECUTE_BASH"] = bash.run
// @ts-ignore // @ts-ignore
BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
// @ts-ignore
ACTION_IMPLS.OPENAI = openai.run ACTION_IMPLS.OPENAI = openai.run
BUILTIN_ACTION_DEFINITIONS.OPENAI = openai.definition BUILTIN_ACTION_DEFINITIONS.OPENAI = openai.definition
} }
@ -107,10 +112,13 @@ export async function getActionDefinitions() {
 }
 /* istanbul ignore next */
-export async function getAction(stepId: string) {
-  if (ACTION_IMPLS[stepId] != null) {
-    return ACTION_IMPLS[stepId]
+export async function getAction(
+  stepId: AutomationActionStepId
+): Promise<ActionImplementation<any, any> | undefined> {
+  if (ACTION_IMPLS[stepId as keyof ActionImplType] != null) {
+    return ACTION_IMPLS[stepId as keyof ActionImplType]
   }
   // must be a plugin
   if (env.SELF_HOSTED) {
     const plugins = await sdk.plugins.fetch(PluginType.AUTOMATION)


@ -4,8 +4,13 @@ import {
   encodeJSBinding,
 } from "@budibase/string-templates"
 import sdk from "../sdk"
-import { AutomationAttachment, FieldType, Row } from "@budibase/types"
-import { LoopInput, LoopStepType } from "../definitions/automations"
+import {
+  AutomationAttachment,
+  FieldType,
+  Row,
+  LoopStepType,
+} from "@budibase/types"
+import { LoopInput } from "../definitions/automations"
 import { objectStore, context } from "@budibase/backend-core"
 import * as uuid from "uuid"
 import path from "path"


@ -7,9 +7,10 @@ import {
AutomationCustomIOType, AutomationCustomIOType,
AutomationFeature, AutomationFeature,
AutomationIOType, AutomationIOType,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
BashStepInputs,
BashStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -51,7 +52,13 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs, context }: AutomationStepInput) {
+export async function run({
+  inputs,
+  context,
+}: {
+  inputs: BashStepInputs
+  context: object
+}): Promise<BashStepOutputs> {
   if (inputs.code == null) {
     return {
       stdout: "Budibase bash automation failed: Invalid inputs",


@ -1,9 +1,10 @@
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
CollectStepInputs,
CollectStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -43,7 +44,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: CollectStepInputs
+}): Promise<CollectStepOutputs> {
   if (!inputs.collection) {
     return {
       success: false,


@ -10,10 +10,12 @@ import {
AutomationCustomIOType, AutomationCustomIOType,
AutomationFeature, AutomationFeature,
AutomationIOType, AutomationIOType,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
CreateRowStepInputs,
CreateRowStepOutputs,
} from "@budibase/types" } from "@budibase/types"
import { EventEmitter } from "events"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
name: "Create Row", name: "Create Row",
@ -74,7 +76,15 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs, appId, emitter }: AutomationStepInput) {
+export async function run({
+  inputs,
+  appId,
+  emitter,
+}: {
+  inputs: CreateRowStepInputs
+  appId: string
+  emitter: EventEmitter
+}): Promise<CreateRowStepOutputs> {
   if (inputs.row == null || inputs.row.tableId == null) {
     return {
       success: false,
@ -93,7 +103,7 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
   try {
     inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
     inputs.row = await sendAutomationAttachmentsToStorage(
-      inputs.row.tableId,
+      inputs.row.tableId!,
       inputs.row
     )
     await save(ctx)


@ -2,9 +2,10 @@ import { wait } from "../../utilities"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationIOType, AutomationIOType,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
DelayStepInputs,
DelayStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -39,7 +40,11 @@ export const definition: AutomationStepSchema = {
   type: AutomationStepType.LOGIC,
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: DelayStepInputs
+}): Promise<DelayStepOutputs> {
   await wait(inputs.time)
   return {
     success: true,


@ -1,14 +1,16 @@
import { EventEmitter } from "events"
import { destroy } from "../../api/controllers/row" import { destroy } from "../../api/controllers/row"
import { buildCtx } from "./utils" import { buildCtx } from "./utils"
import { getError } from "../automationUtils" import { getError } from "../automationUtils"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationCustomIOType, AutomationCustomIOType,
AutomationFeature, AutomationFeature,
DeleteRowStepInputs,
DeleteRowStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -59,7 +61,15 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs, appId, emitter }: AutomationStepInput) {
+export async function run({
+  inputs,
+  appId,
+  emitter,
+}: {
+  inputs: DeleteRowStepInputs
+  appId: string
+  emitter: EventEmitter
+}): Promise<DeleteRowStepOutputs> {
   if (inputs.id == null) {
     return {
       success: false,


@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationFeature, AutomationFeature,
ExternalAppStepOutputs,
DiscordStepInputs,
} from "@budibase/types" } from "@budibase/types"
const DEFAULT_USERNAME = "Budibase Automate" const DEFAULT_USERNAME = "Budibase Automate"
@ -65,7 +66,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: DiscordStepInputs
+}): Promise<ExternalAppStepOutputs> {
   let { url, username, avatar_url, content } = inputs
   if (!username) {
     username = DEFAULT_USERNAME


@ -1,3 +1,4 @@
import { EventEmitter } from "events"
import * as queryController from "../../api/controllers/query" import * as queryController from "../../api/controllers/query"
import { buildCtx } from "./utils" import { buildCtx } from "./utils"
import * as automationUtils from "../automationUtils" import * as automationUtils from "../automationUtils"
@ -6,9 +7,10 @@ import {
AutomationCustomIOType, AutomationCustomIOType,
AutomationFeature, AutomationFeature,
AutomationIOType, AutomationIOType,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
ExecuteQueryStepInputs,
ExecuteQueryStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -62,7 +64,15 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs, appId, emitter }: AutomationStepInput) {
+export async function run({
+  inputs,
+  appId,
+  emitter,
+}: {
+  inputs: ExecuteQueryStepInputs
+  appId: string
+  emitter: EventEmitter
+}): Promise<ExecuteQueryStepOutputs> {
   if (inputs.query == null) {
     return {
       success: false,


@ -6,10 +6,12 @@ import {
AutomationCustomIOType, AutomationCustomIOType,
AutomationFeature, AutomationFeature,
AutomationIOType, AutomationIOType,
AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
ExecuteScriptStepInputs,
ExecuteScriptStepOutputs,
} from "@budibase/types" } from "@budibase/types"
import { EventEmitter } from "events"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
name: "JS Scripting", name: "JS Scripting",
@ -55,7 +57,12 @@ export async function run({
   appId,
   context,
   emitter,
-}: AutomationStepInput) {
+}: {
+  inputs: ExecuteScriptStepInputs
+  appId: string
+  context: object
+  emitter: EventEmitter
+}): Promise<ExecuteScriptStepOutputs> {
   if (inputs.code == null) {
     return {
       success: false,


@ -1,9 +1,10 @@
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
FilterStepInputs,
FilterStepOutputs,
} from "@budibase/types" } from "@budibase/types"
export const FilterConditions = { export const FilterConditions = {
@ -69,7 +70,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: FilterStepInputs
+}): Promise<FilterStepOutputs> {
   try {
     let { field, condition, value } = inputs
     // coerce types so that we can use them


@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationFeature, AutomationFeature,
ExternalAppStepOutputs,
MakeIntegrationInputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -57,7 +58,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: MakeIntegrationInputs
+}): Promise<ExternalAppStepOutputs> {
   const { url, body } = inputs
   let payload = {}


@ -3,11 +3,12 @@ import { getFetchResponse } from "./utils"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationFeature, AutomationFeature,
HttpMethod, HttpMethod,
ExternalAppStepOutputs,
n8nStepInputs,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -67,7 +68,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: n8nStepInputs
+}): Promise<ExternalAppStepOutputs> {
   const { url, body, method, authorization } = inputs
   let payload = {}


@ -3,9 +3,10 @@ import { OpenAI } from "openai"
import { import {
AutomationActionStepId, AutomationActionStepId,
AutomationStepSchema, AutomationStepSchema,
AutomationStepInput,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
OpenAIStepInputs,
OpenAIStepOutputs,
} from "@budibase/types" } from "@budibase/types"
import { env } from "@budibase/backend-core" import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils" import * as automationUtils from "../automationUtils"
@ -59,7 +60,11 @@ export const definition: AutomationStepSchema = {
   },
 }
-export async function run({ inputs }: AutomationStepInput) {
+export async function run({
+  inputs,
+}: {
+  inputs: OpenAIStepInputs
+}): Promise<OpenAIStepOutputs> {
   if (!env.OPENAI_API_KEY) {
     return {
       success: false,
