Merge branch 'master' of github.com:Budibase/budibase into labday/sqs

mike12345567 2024-03-06 17:27:53 +00:00
commit 2eb16a70db
142 changed files with 3412 additions and 1597 deletions


@@ -1,5 +1,5 @@
 {
-  "version": "2.21.0",
+  "version": "2.21.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -1 +1 @@
-Subproject commit ab324e35d855012bd0f49caa53c6dd765223c6fa
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac


@@ -67,7 +67,7 @@
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.0",
-    "@types/redlock": "4.0.3",
+    "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
@@ -78,6 +78,7 @@
     "jest-serial-runner": "1.2.1",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
+    "testcontainers": "^10.7.2",
    "timekeeper": "2.2.0",
     "typescript": "5.2.2"
   },


@@ -6,7 +6,7 @@ import env from "../environment"
 import * as accounts from "../accounts"
 import { UserDB } from "../users"
 import { sdk } from "@budibase/shared-core"
-import { User } from "@budibase/types"
+import { User, UserMetadata } from "@budibase/types"

 const EXPIRY_SECONDS = 3600
@@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
  */
 async function populateFromDB(userId: string, tenantId: string) {
   const db = tenancy.getTenantDB(tenantId)
-  const user = await db.get<any>(userId)
+  const user = await db.get<UserMetadata>(userId)
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)


@@ -1,66 +1,57 @@
+import PouchDB from "pouchdb"
 import { getPouchDB, closePouchDB } from "./couch"
 import { DocumentType } from "../constants"

 class Replication {
-  source: any
-  target: any
-  replication: any
+  source: PouchDB.Database
+  target: PouchDB.Database

-  /**
-   *
-   * @param source - the DB you want to replicate or rollback to
-   * @param target - the DB you want to replicate to, or rollback from
-   */
-  constructor({ source, target }: any) {
+  constructor({ source, target }: { source: string; target: string }) {
     this.source = getPouchDB(source)
     this.target = getPouchDB(target)
   }

-  close() {
-    return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
+  async close() {
+    await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
   }

-  promisify(operation: any, opts = {}) {
-    return new Promise(resolve => {
-      operation(this.target, opts)
-        .on("denied", function (err: any) {
+  replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
+    return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
+      this.source.replicate
+        .to(this.target, opts)
+        .on("denied", function (err) {
           // a document failed to replicate (e.g. due to permissions)
           throw new Error(`Denied: Document failed to replicate ${err}`)
         })
-        .on("complete", function (info: any) {
+        .on("complete", function (info) {
           return resolve(info)
         })
-        .on("error", function (err: any) {
+        .on("error", function (err) {
           throw new Error(`Replication Error: ${err}`)
         })
     })
   }

-  /**
-   * Two way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  sync(opts = {}) {
-    this.replication = this.promisify(this.source.sync, opts)
-    return this.replication
-  }
-
-  /**
-   * One way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  replicate(opts = {}) {
-    this.replication = this.promisify(this.source.replicate.to, opts)
-    return this.replication
-  }
-
-  appReplicateOpts() {
+  appReplicateOpts(
+    opts: PouchDB.Replication.ReplicateOptions = {}
+  ): PouchDB.Replication.ReplicateOptions {
+    if (typeof opts.filter === "string") {
+      return opts
+    }
+
+    const filter = opts.filter
+    delete opts.filter
+
     return {
-      filter: (doc: any) => {
+      ...opts,
+      filter: (doc: any, params: any) => {
         if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
           return false
         }
-        return doc._id !== DocumentType.APP_METADATA
+        if (doc._id === DocumentType.APP_METADATA) {
+          return false
+        }
+        return filter ? filter(doc, params) : true
       },
     }
   }
@@ -75,10 +66,6 @@ class Replication {
     // take the opportunity to remove deleted tombstones
     await this.replicate()
   }
-
-  cancel() {
-    this.replication.cancel()
-  }
 }

 export default Replication

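For context, the refactored class is used roughly like this (a minimal sketch; the database names are made up and the instance is assumed to come from the default export above):

// Minimal usage sketch - database names are hypothetical
const replication = new Replication({
  source: "app_dev_workspace",
  target: "app_prod_workspace",
})
try {
  // appReplicateOpts() now merges caller options and chains any custom filter
  // after the built-in automation-log / app-metadata exclusions
  await replication.replicate(replication.appReplicateOpts())
} finally {
  await replication.close()
}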

@@ -1,5 +1,6 @@
 import { APIError } from "@budibase/types"
 import * as errors from "../errors"
+import environment from "../environment"

 export async function errorHandling(ctx: any, next: any) {
   try {
@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
       console.error(err)
     }

-    const error = errors.getPublicError(err)
-    const body: APIError = {
+    let error: APIError = {
       message: err.message,
       status: status,
       validationErrors: err.validation,
-      error,
+      error: errors.getPublicError(err),
     }

-    ctx.body = body
+    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
+      // @ts-ignore
+      error.stack = err.stack
+    }
+
+    ctx.body = error
   }
 }

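In practice the new branch only matters under test: when environment.isTest() is true and the request carries the x-budibase-include-stacktrace header, the error body gains a stack field. A rough sketch of what a caller sees (URL and port are placeholders):

// Placeholder URL/port; the header name and body fields come from the middleware above
const res = await fetch("http://localhost:4002/api/failing-route", {
  headers: { "x-budibase-include-stacktrace": "1" },
})
const body = await res.json()
// body = { message, status, validationErrors, error, stack } - stack is only
// present because the header was sent while running in a test environment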

@@ -1,5 +1,5 @@
 import env from "../environment"
-import Redis from "ioredis"
+import Redis, { Cluster } from "ioredis"
 // mock-redis doesn't have any typing
 let MockRedis: any | undefined
 if (env.MOCK_REDIS) {
@@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
 // for testing just generate the client once
 let CLOSED = false
-let CLIENTS: { [key: number]: any } = {}
+const CLIENTS: Record<number, Redis> = {}
 let CONNECTED = false

 // mock redis always connected
@@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
   CONNECTED = true
 }

-function pickClient(selectDb: number): any {
+function pickClient(selectDb: number) {
   return CLIENTS[selectDb]
 }

@@ -201,12 +201,15 @@ class RedisWrapper {
     key = `${db}${SEPARATOR}${key}`
     let stream
     if (CLUSTERED) {
-      let node = this.getClient().nodes("master")
+      let node = (this.getClient() as never as Cluster).nodes("master")
       stream = node[0].scanStream({ match: key + "*", count: 100 })
     } else {
-      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
+      stream = (this.getClient() as Redis).scanStream({
+        match: key + "*",
+        count: 100,
+      })
     }
-    return promisifyStream(stream, this.getClient())
+    return promisifyStream(stream, this.getClient() as any)
   }

   async keys(pattern: string) {
@@ -221,14 +224,16 @@ class RedisWrapper {
   async get(key: string) {
     const db = this._db
-    let response = await this.getClient().get(addDbPrefix(db, key))
+    const response = await this.getClient().get(addDbPrefix(db, key))
     // overwrite the prefixed key
+    // @ts-ignore
     if (response != null && response.key) {
+      // @ts-ignore
       response.key = key
     }
     // if its not an object just return the response
     try {
-      return JSON.parse(response)
+      return JSON.parse(response!)
     } catch (err) {
       return response
     }
@@ -274,13 +279,37 @@ class RedisWrapper {
     }
   }

+  async bulkStore(
+    data: Record<string, any>,
+    expirySeconds: number | null = null
+  ) {
+    const client = this.getClient()
+
+    const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
+      acc[addDbPrefix(this._db, key)] =
+        typeof value === "object" ? JSON.stringify(value) : value
+      return acc
+    }, {} as Record<string, any>)
+
+    const pipeline = client.pipeline()
+    pipeline.mset(dataToStore)
+
+    if (expirySeconds !== null) {
+      for (const key of Object.keys(dataToStore)) {
+        pipeline.expire(key, expirySeconds)
+      }
+    }
+
+    await pipeline.exec()
+  }
+
   async getTTL(key: string) {
     const db = this._db
     const prefixedKey = addDbPrefix(db, key)
     return this.getClient().ttl(prefixedKey)
   }

-  async setExpiry(key: string, expirySeconds: number | null) {
+  async setExpiry(key: string, expirySeconds: number) {
     const db = this._db
     const prefixedKey = addDbPrefix(db, key)
     await this.getClient().expire(prefixedKey, expirySeconds)
@@ -295,6 +324,26 @@ class RedisWrapper {
     let items = await this.scan()
     await Promise.all(items.map((obj: any) => this.delete(obj.key)))
   }
+
+  async increment(key: string) {
+    const result = await this.getClient().incr(addDbPrefix(this._db, key))
+    if (isNaN(result)) {
+      throw new Error(`Redis ${key} does not contain a number`)
+    }
+    return result
+  }
+
+  async deleteIfValue(key: string, value: any) {
+    const client = this.getClient()
+
+    const luaScript = `
+    if redis.call('GET', KEYS[1]) == ARGV[1] then
+      redis.call('DEL', KEYS[1])
+    end
+    `
+
+    await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
+  }
 }

 export default RedisWrapper

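The three additions line up with the new redis spec below; a condensed usage sketch (key names and the db prefix are arbitrary):

const redis = new RedisWrapper("mydb") // arbitrary db prefix
await redis.init()

// MSET several keys in one pipeline, with an optional TTL in seconds
await redis.bulkStore({ "session:1": { user: "a" }, "session:2": "b" }, 60)

// atomic counter via INCR - throws if the stored value is not numeric
const hits = await redis.increment("api:hits")

// compare-and-delete via the Lua script - only removes the key if the value matches
await redis.deleteIfValue("lock:doc-1", "owner-token")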

@@ -72,7 +72,7 @@ const OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {
 export async function newRedlock(opts: Redlock.Options = {}) {
   const options = { ...OPTIONS.DEFAULT, ...opts }
   const redisWrapper = await getLockClient()
-  const client = redisWrapper.getClient()
+  const client = redisWrapper.getClient() as any
   return new Redlock([client], options)
 }


@@ -0,0 +1,214 @@
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { generator, structures } from "../../../tests"
import RedisWrapper from "../redis"
import { env } from "../.."
jest.setTimeout(30000)
describe("redis", () => {
let redis: RedisWrapper
let container: StartedTestContainer
beforeAll(async () => {
container = await new GenericContainer("redis")
.withExposedPorts(6379)
.start()
env._set(
"REDIS_URL",
`${container.getHost()}:${container.getMappedPort(6379)}`
)
env._set("MOCK_REDIS", 0)
env._set("REDIS_PASSWORD", 0)
})
afterAll(() => container?.stop())
beforeEach(async () => {
redis = new RedisWrapper(structures.db.id())
await redis.init()
})
describe("store", () => {
it("a basic value can be persisted", async () => {
const key = structures.uuid()
const value = generator.word()
await redis.store(key, value)
expect(await redis.get(key)).toEqual(value)
})
it("objects can be persisted", async () => {
const key = structures.uuid()
const value = { [generator.word()]: generator.word() }
await redis.store(key, value)
expect(await redis.get(key)).toEqual(value)
})
})
describe("bulkStore", () => {
function createRandomObject(
keyLength: number,
valueGenerator: () => any = () => generator.word()
) {
return generator
.unique(() => generator.word(), keyLength)
.reduce((acc, key) => {
acc[key] = valueGenerator()
return acc
}, {} as Record<string, string>)
}
it("a basic object can be persisted", async () => {
const data = createRandomObject(10)
await redis.bulkStore(data)
for (const [key, value] of Object.entries(data)) {
expect(await redis.get(key)).toEqual(value)
}
expect(await redis.keys("*")).toHaveLength(10)
})
it("a complex object can be persisted", async () => {
const data = {
...createRandomObject(10, () => createRandomObject(5)),
...createRandomObject(5),
}
await redis.bulkStore(data)
for (const [key, value] of Object.entries(data)) {
expect(await redis.get(key)).toEqual(value)
}
expect(await redis.keys("*")).toHaveLength(15)
})
it("no TTL is set by default", async () => {
const data = createRandomObject(10)
await redis.bulkStore(data)
for (const [key, value] of Object.entries(data)) {
expect(await redis.get(key)).toEqual(value)
expect(await redis.getTTL(key)).toEqual(-1)
}
})
it("a bulk store can be persisted with TTL", async () => {
const ttl = 500
const data = createRandomObject(8)
await redis.bulkStore(data, ttl)
for (const [key, value] of Object.entries(data)) {
expect(await redis.get(key)).toEqual(value)
expect(await redis.getTTL(key)).toEqual(ttl)
}
expect(await redis.keys("*")).toHaveLength(8)
})
it("setting a TTL of -1 will not persist the key", async () => {
const ttl = -1
const data = createRandomObject(5)
await redis.bulkStore(data, ttl)
for (const [key, value] of Object.entries(data)) {
expect(await redis.get(key)).toBe(null)
}
expect(await redis.keys("*")).toHaveLength(0)
})
})
describe("increment", () => {
it("can increment on a new key", async () => {
const key = structures.uuid()
const result = await redis.increment(key)
expect(result).toBe(1)
})
it("can increment multiple times", async () => {
const key = structures.uuid()
const results = [
await redis.increment(key),
await redis.increment(key),
await redis.increment(key),
await redis.increment(key),
await redis.increment(key),
]
expect(results).toEqual([1, 2, 3, 4, 5])
})
it("can increment on a new key", async () => {
const key1 = structures.uuid()
const key2 = structures.uuid()
const result1 = await redis.increment(key1)
expect(result1).toBe(1)
const result2 = await redis.increment(key2)
expect(result2).toBe(1)
})
it("can increment multiple times in parallel", async () => {
const key = structures.uuid()
const results = await Promise.all(
Array.from({ length: 100 }).map(() => redis.increment(key))
)
expect(results).toHaveLength(100)
expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1))
})
it("can increment existing set keys", async () => {
const key = structures.uuid()
await redis.store(key, 70)
await redis.increment(key)
const result = await redis.increment(key)
expect(result).toBe(72)
})
it.each([
generator.word(),
generator.bool(),
{ [generator.word()]: generator.word() },
])("cannot increment if the store value is not a number", async value => {
const key = structures.uuid()
await redis.store(key, value)
await expect(redis.increment(key)).rejects.toThrowError(
"ERR value is not an integer or out of range"
)
})
})
describe("deleteIfValue", () => {
it("can delete if the value matches", async () => {
const key = structures.uuid()
const value = generator.word()
await redis.store(key, value)
await redis.deleteIfValue(key, value)
expect(await redis.get(key)).toBeNull()
})
it("will not delete if the value does not matches", async () => {
const key = structures.uuid()
const value = generator.word()
await redis.store(key, value)
await redis.deleteIfValue(key, generator.word())
expect(await redis.get(key)).toEqual(value)
})
})
})


@@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
   return cloneDeep(BUILTIN_ROLES)
 }

-export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
-  role => role._id
-)
+export function isBuiltin(role: string) {
+  return getBuiltinRole(role) !== undefined
+}

-export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
-  role => role.name
-)
-
-export function isBuiltin(role?: string) {
-  return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
+export function getBuiltinRole(roleId: string): Role | undefined {
+  const role = Object.values(BUILTIN_ROLES).find(role =>
+    roleId.includes(role._id)
+  )
+  if (!role) {
+    return undefined
+  }
+  return cloneDeep(role)
 }

 /**
  * Works through the inheritance ranks to see how far up the builtin stack this ID is.
  */
-export function builtinRoleToNumber(id?: string) {
-  if (!id) {
-    return 0
-  }
+export function builtinRoleToNumber(id: string) {
   const builtins = getBuiltinRoles()
   const MAX = Object.values(builtins).length + 1
   if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
@@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) {
 /**
  * Converts any role to a number, but has to be async to get the roles from db.
  */
-export async function roleToNumber(id?: string) {
+export async function roleToNumber(id: string) {
   if (isBuiltin(id)) {
     return builtinRoleToNumber(id)
   }
@@ -131,7 +130,7 @@ export async function roleToNumber(id?: string) {
     defaultPublic: true,
   })) as RoleDoc[]
   for (let role of hierarchy) {
-    if (isBuiltin(role?.inherits)) {
+    if (role?.inherits && isBuiltin(role.inherits)) {
       return builtinRoleToNumber(role.inherits) + 1
     }
   }
@@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
  * @returns The role object, which may contain an "inherits" property.
  */
 export async function getRole(
-  roleId?: string,
+  roleId: string,
   opts?: { defaultPublic?: boolean }
-): Promise<RoleDoc | undefined> {
-  if (!roleId) {
-    return undefined
-  }
-  let role: any = {}
+): Promise<RoleDoc> {
   // built in roles mostly come from the in-code implementation,
   // but can be extended by a doc stored about them (e.g. permissions)
-  if (isBuiltin(roleId)) {
-    role = cloneDeep(
-      Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
-    )
-  } else {
+  let role: RoleDoc | undefined = getBuiltinRole(roleId)
+  if (!role) {
     // make sure has the prefix (if it has it then it won't be added)
     roleId = prefixRoleID(roleId)
   }
   try {
     const db = getAppDB()
-    const dbRole = await db.get(getDBRoleID(roleId))
-    role = Object.assign(role, dbRole)
+    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
+    role = Object.assign(role || {}, dbRole)
     // finalise the ID
-    role._id = getExternalRoleID(role._id, role.version)
+    role._id = getExternalRoleID(role._id!, role.version)
   } catch (err) {
     if (!isBuiltin(roleId) && opts?.defaultPublic) {
       return cloneDeep(BUILTIN_ROLES.PUBLIC)
     }
     // only throw an error if there is no role at all
-    if (Object.keys(role).length === 0) {
+    if (!role || Object.keys(role).length === 0) {
       throw err
     }
   }
@@ -200,7 +192,7 @@ export async function getRole(
  * Simple function to get all the roles based on the top level user role ID.
  */
 async function getAllUserRoles(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 ): Promise<RoleDoc[]> {
   // admins have access to all roles
@@ -226,7 +218,7 @@ async function getAllUserRoles(
 }

 export async function getUserRoleIdHierarchy(
-  userRoleId?: string
+  userRoleId: string
 ): Promise<string[]> {
   const roles = await getUserRoleHierarchy(userRoleId)
   return roles.map(role => role._id!)
@@ -241,7 +233,7 @@ export async function getUserRoleIdHierarchy(
  * highest level of access and the last being the lowest level.
  */
 export async function getUserRoleHierarchy(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 ) {
   // special case, if they don't have a role then they are a public user
@@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
   return rolePerms
 }

-export async function getAllRoleIds(appId?: string) {
+export async function getAllRoleIds(appId: string): Promise<string[]> {
   const roles = await getAllRoles(appId)
-  return roles.map(role => role._id)
+  return roles.map(role => role._id!)
 }

 /**

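A short sketch of the tightened role helpers, based only on the signatures above (role IDs are examples, and an app DB context is assumed for getRole):

isBuiltin("BASIC") // true - now resolved through getBuiltinRole()
getBuiltinRole("not-a-builtin") // undefined

// getRole() now promises a RoleDoc (or throws) instead of RoleDoc | undefined,
// so callers no longer have to guard against a missing return for a valid ID
const role = await getRole("BASIC", { defaultPublic: true })
const rank = await roleToNumber(role._id!)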

@@ -35,7 +35,10 @@ export default function positionDropdown(element, opts) {
     }
     if (typeof customUpdate === "function") {
-      styles = customUpdate(anchorBounds, elementBounds, styles)
+      styles = customUpdate(anchorBounds, elementBounds, {
+        ...styles,
+        offset: opts.offset,
+      })
     } else {
       // Determine vertical styles
       if (align === "right-outside") {


@@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
 class AnalyticsHub {
   constructor() {
     this.clients = [posthog, intercom]
+    this.initialised = false
   }

   async activate() {
     // Check analytics are enabled
     const analyticsStatus = await API.getAnalyticsStatus()
-    if (analyticsStatus.enabled) {
-      this.clients.forEach(client => client.init())
+    if (analyticsStatus.enabled && !this.initialised) {
+      this.clients.forEach(client => {
+        client.init()
+      })
+      this.initialised = true
     }
   }


@@ -164,9 +164,10 @@
     </div>
   {/if}
   <TourWrap
-    tourStepKey={$builderStore.onboarding
-      ? TOUR_STEP_KEYS.BUILDER_USER_MANAGEMENT
-      : TOUR_STEP_KEYS.FEATURE_USER_MANAGEMENT}
+    stepKeys={[
+      TOUR_STEP_KEYS.BUILDER_USER_MANAGEMENT,
+      TOUR_STEP_KEYS.FEATURE_USER_MANAGEMENT,
+    ]}
   >
     <div class="app-action-button users">
       <div class="app-action" id="builder-app-users-button">
@@ -209,7 +210,7 @@
   <div bind:this={appActionPopoverAnchor}>
     <div class="app-action">
       <Icon name={isPublished ? "GlobeCheck" : "GlobeStrike"} />
-      <TourWrap tourStepKey={TOUR_STEP_KEYS.BUILDER_APP_PUBLISH}>
+      <TourWrap stepKeys={[TOUR_STEP_KEYS.BUILDER_APP_PUBLISH]}>
         <span class="publish-open" id="builder-app-publish-button">
           Publish
           <Icon


@@ -1,5 +1,5 @@
 <script>
-  import EditComponentPopover from "../EditComponentPopover.svelte"
+  import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
   import { Icon } from "@budibase/bbui"
   import { runtimeToReadableBinding } from "dataBinding"
   import { isJSBinding } from "@budibase/string-templates"


@ -3,6 +3,7 @@
import { componentStore } from "stores/builder" import { componentStore } from "stores/builder"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { createEventDispatcher, getContext } from "svelte" import { createEventDispatcher, getContext } from "svelte"
import { customPositionHandler } from "."
import ComponentSettingsSection from "pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte" import ComponentSettingsSection from "pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
export let anchor export let anchor
@ -54,25 +55,6 @@
dispatch("change", nestedComponentInstance) dispatch("change", nestedComponentInstance)
} }
const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
let { left, top } = cfg
let percentageOffset = 30
// left-outside
left = anchorBounds.left - eleBounds.width - 18
// shift up from the anchor, if space allows
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
let defaultTop = anchorBounds.top - offsetPos
if (window.innerHeight - defaultTop < eleBounds.height) {
top = window.innerHeight - eleBounds.height - 5
} else {
top = anchorBounds.top - offsetPos
}
return { ...cfg, left, top }
}
</script> </script>
<Icon <Icon
@ -104,6 +86,7 @@
showPopover={drawers.length === 0} showPopover={drawers.length === 0}
clickOutsideOverride={drawers.length > 0} clickOutsideOverride={drawers.length > 0}
maxHeight={600} maxHeight={600}
offset={18}
handlePostionUpdate={customPositionHandler} handlePostionUpdate={customPositionHandler}
> >
<span class="popover-wrap"> <span class="popover-wrap">


@@ -0,0 +1,18 @@
export const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
let { left, top, offset } = cfg
let percentageOffset = 30
// left-outside
left = anchorBounds.left - eleBounds.width - (offset || 5)
// shift up from the anchor, if space allows
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
let defaultTop = anchorBounds.top - offsetPos
if (window.innerHeight - defaultTop < eleBounds.height) {
top = window.innerHeight - eleBounds.height - 5
} else {
top = anchorBounds.top - offsetPos
}
return { ...cfg, left, top }
}

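positionDropdown (changed above) now passes opts.offset through to custom handlers, which is what lets this shared handler respect the popover's offset prop. A sketch of the call contract with made-up bounds:

// Made-up bounds purely to illustrate the contract
const styles = customPositionHandler(
  { left: 800, top: 300 },        // anchorBounds
  { width: 320, height: 400 },    // eleBounds
  { left: 0, top: 0, offset: 18 } // cfg - offset comes from the Popover prop
)
// styles.left sits the element `offset` px outside the anchor's left edge;
// styles.top nudges it up ~30% of its height, clamped to the viewport bottom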

@@ -1,5 +1,5 @@
 <script>
-  import EditComponentPopover from "../EditComponentPopover.svelte"
+  import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
   import { Toggle, Icon } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { cloneDeep } from "lodash/fp"


@@ -52,8 +52,8 @@
       _id: Helpers.uuid(),
       _component: componentType,
       _instanceName: `Step ${currentStep + 1}`,
-      title: stepSettings.title ?? defaults.title,
-      buttons: stepSettings.buttons || defaults.buttons,
+      title: stepSettings.title ?? defaults?.title,
+      buttons: stepSettings.buttons || defaults?.buttons,
       fields: stepSettings.fields,
       desc: stepSettings.desc,


@@ -1,5 +1,5 @@
 <script>
-  import EditComponentPopover from "../EditComponentPopover.svelte"
+  import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
   import { Toggle, Icon } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { cloneDeep } from "lodash/fp"


@@ -1,5 +1,5 @@
 <script>
-  import EditComponentPopover from "../EditComponentPopover.svelte"
+  import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
   import { Icon } from "@budibase/bbui"
   import { setContext } from "svelte"
   import { writable } from "svelte/store"


@ -139,10 +139,22 @@
{/each} {/each}
</div> </div>
<div class="search-input"> <div class="search-input">
<div class="input-wrapper"> <div class="input-wrapper" style={`width: ${value ? "425" : "510"}px`}>
<Input bind:value={searchTerm} thin placeholder="Search Icon" /> <Input
bind:value={searchTerm}
on:keyup={event => {
if (event.key === "Enter") {
searchForIcon()
}
}}
thin
placeholder="Search Icon"
/>
</div> </div>
<Button secondary on:click={searchForIcon}>Search</Button> <Button secondary on:click={searchForIcon}>Search</Button>
{#if value}
<Button primary on:click={() => (value = null)}>Clear</Button>
{/if}
</div> </div>
<div class="page-area"> <div class="page-area">
<div class="pager"> <div class="pager">
@ -239,6 +251,7 @@
flex-flow: row nowrap; flex-flow: row nowrap;
width: 100%; width: 100%;
padding-right: 15px; padding-right: 15px;
gap: 10px;
} }
.input-wrapper { .input-wrapper {
width: 510px; width: 510px;


@ -20,17 +20,23 @@
export let bindings = [] export let bindings = []
export let componentBindings = [] export let componentBindings = []
export let nested = false export let nested = false
export let highlighted = false
export let propertyFocus = false export let propertyFocus = false
export let info = null export let info = null
export let disableBindings = false export let disableBindings = false
export let wide export let wide
$: nullishValue = value == null || value === "" let highlightType
$: highlightedProp = $builderStore.highlightedSetting
$: allBindings = getAllBindings(bindings, componentBindings, nested) $: allBindings = getAllBindings(bindings, componentBindings, nested)
$: safeValue = getSafeValue(value, defaultValue, allBindings) $: safeValue = getSafeValue(value, defaultValue, allBindings)
$: replaceBindings = val => readableToRuntimeBinding(allBindings, val) $: replaceBindings = val => readableToRuntimeBinding(allBindings, val)
$: if (!Array.isArray(value)) {
highlightType =
highlightedProp?.key === key ? `highlighted-${highlightedProp?.type}` : ""
}
const getAllBindings = (bindings, componentBindings, nested) => { const getAllBindings = (bindings, componentBindings, nested) => {
if (!nested) { if (!nested) {
return bindings return bindings
@ -71,16 +77,17 @@
} }
onDestroy(() => { onDestroy(() => {
if (highlighted) { if (highlightedProp) {
builderStore.highlightSetting(null) builderStore.highlightSetting(null)
} }
}) })
</script> </script>
<div <div
class="property-control" id={`${key}-prop-control-wrap`}
class={`property-control ${highlightType}`}
class:wide={!label || labelHidden || wide === true} class:wide={!label || labelHidden || wide === true}
class:highlighted={highlighted && nullishValue} class:highlighted={highlightType}
class:property-focus={propertyFocus} class:property-focus={propertyFocus}
> >
{#if label && !labelHidden} {#if label && !labelHidden}
@ -115,6 +122,16 @@
</div> </div>
<style> <style>
.property-control.highlighted.highlighted-info {
border-color: var(--spectrum-semantic-informative-color-background);
}
.property-control.highlighted.highlighted-error {
border-color: var(--spectrum-global-color-static-red-600);
}
.property-control.highlighted.highlighted-warning {
border-color: var(--spectrum-global-color-static-orange-700);
}
.property-control { .property-control {
position: relative; position: relative;
display: grid; display: grid;
@ -132,6 +149,10 @@
.property-control.highlighted { .property-control.highlighted {
background: var(--spectrum-global-color-gray-300); background: var(--spectrum-global-color-gray-300);
border-color: var(--spectrum-global-color-static-red-600); border-color: var(--spectrum-global-color-static-red-600);
margin-top: -3.5px;
margin-bottom: -3.5px;
padding-bottom: 3.5px;
padding-top: 3.5px;
} }
.property-control.property-focus :global(input) { .property-control.property-focus :global(input) {

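The highlight now flows through the builder store rather than a highlighted prop; the store shape below is inferred from this component's usage:

// Inferred store shape - highlightSetting(key, type) drives the CSS class
builderStore.highlightSetting("steps", "info")
// $builderStore.highlightedSetting -> { key: "steps", type: "info" }
// a PropertyControl whose key === "steps" then renders with
// class "property-control highlighted highlighted-info"

builderStore.highlightSetting() // no arguments clears the highlight (see onDestroy above)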

@ -1,6 +1,6 @@
<script> <script>
import { Popover, Layout, Heading, Body, Button, Link } from "@budibase/bbui" import { Popover, Layout, Heading, Body, Button, Link } from "@budibase/bbui"
import { TOURS } from "./tours.js" import { TOURS, getCurrentStepIdx } from "./tours.js"
import { goto, layout, isActive } from "@roxi/routify" import { goto, layout, isActive } from "@roxi/routify"
import { builderStore } from "stores/builder" import { builderStore } from "stores/builder"
@ -20,6 +20,13 @@
const updateTourStep = (targetStepKey, tourKey) => { const updateTourStep = (targetStepKey, tourKey) => {
if (!tourKey) { if (!tourKey) {
tourSteps = null
tourStepIdx = null
lastStep = null
tourStep = null
popoverAnchor = null
popover = null
skipping = false
return return
} }
if (!tourSteps?.length) { if (!tourSteps?.length) {
@ -78,16 +85,6 @@
} }
} }
} }
const getCurrentStepIdx = (steps, tourStepKey) => {
if (!steps?.length) {
return
}
if (steps?.length && !tourStepKey) {
return 0
}
return steps.findIndex(step => step.id === tourStepKey)
}
</script> </script>
{#if tourKey} {#if tourKey}
@ -98,7 +95,9 @@
anchor={popoverAnchor} anchor={popoverAnchor}
maxWidth={300} maxWidth={300}
dismissible={false} dismissible={false}
offset={15} offset={12}
handlePostionUpdate={tourStep?.positionHandler}
customZindex={3}
> >
<div class="tour-content"> <div class="tour-content">
<Layout noPadding gap="M"> <Layout noPadding gap="M">
@ -119,7 +118,7 @@
</Body> </Body>
<div class="tour-footer"> <div class="tour-footer">
<div class="tour-navigation"> <div class="tour-navigation">
{#if typeof tourOnSkip === "function"} {#if typeof tourOnSkip === "function" && !lastStep}
<Link <Link
secondary secondary
quiet quiet


@ -1,39 +1,63 @@
<script> <script>
import { tourHandler } from "./tourHandler" import { tourHandler } from "./tourHandler"
import { TOURS } from "./tours" import { TOURSBYSTEP, TOURS, getCurrentStepIdx } from "./tours"
import { onMount, onDestroy } from "svelte" import { onMount, onDestroy } from "svelte"
import { builderStore } from "stores/builder" import { builderStore } from "stores/builder"
export let tourStepKey export let stepKeys = []
let currentTourStep
let ready = false let ready = false
let registered = false let registered = {}
let handler
const registerTourNode = (tourKey, stepKey) => { const registerTourNode = (tourKey, stepKey) => {
if (ready && !registered && tourKey) { const step = TOURSBYSTEP[stepKey]
currentTourStep = TOURS[tourKey].steps.find(step => step.id === stepKey) if (ready && step && !registered[stepKey] && step?.tour === tourKey) {
if (!currentTourStep) { const elem = document.querySelector(step.query)
return registered[stepKey] = tourHandler(elem, stepKey)
}
}
const scrollToStep = () => {
let tourStepIdx = getCurrentStepIdx(
TOURS[tourKeyWatch]?.steps,
tourStepKeyWatch
)
let currentStep = TOURS[tourKeyWatch]?.steps?.[tourStepIdx]
if (currentStep?.scrollIntoView) {
let currentNode = $builderStore.tourNodes?.[currentStep.id]
if (currentNode) {
currentNode.scrollIntoView({ behavior: "smooth", block: "center" })
} }
const elem = document.querySelector(currentTourStep.query)
handler = tourHandler(elem, stepKey)
registered = true
} }
} }
$: tourKeyWatch = $builderStore.tourKey $: tourKeyWatch = $builderStore.tourKey
$: registerTourNode(tourKeyWatch, tourStepKey, ready) $: tourStepKeyWatch = $builderStore.tourStepKey
$: if (tourKeyWatch || stepKeys || ready) {
stepKeys.forEach(tourStepKey => {
registerTourNode(tourKeyWatch, tourStepKey)
})
}
$: scrollToStep(tourKeyWatch, tourStepKeyWatch)
onMount(() => { onMount(() => {
ready = true ready = true
}) })
onDestroy(() => { onDestroy(() => {
if (handler) { Object.entries(registered).forEach(entry => {
const handler = entry[1]
const stepKey = entry[0]
// Run step destroy, de-register nodes in the builderStore and local cache
handler.destroy() handler.destroy()
} delete registered[stepKey]
// Check if the step is part of an active tour. End the tour if that is the case
const step = TOURSBYSTEP[stepKey]
if (step.tour === tourKeyWatch) {
builderStore.setTour()
}
})
}) })
</script> </script>

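The component above reduces to a lookup per step key: resolve the step from TOURSBYSTEP, check it belongs to the active tour, and register its DOM node. Sketch (step keys and the active tour key are examples):

const registered: Record<string, { destroy: () => void }> = {}
const activeTourKey = "builder-form-view-update" // example: $builderStore.tourKey

for (const stepKey of ["builder-form-row-id", "builder-form-view-update-steps"]) {
  const step = TOURSBYSTEP[stepKey]
  if (step && step.tour === activeTourKey && !registered[stepKey]) {
    const elem = document.querySelector(step.query) // e.g. "#rowId-prop-control-wrap"
    registered[stepKey] = tourHandler(elem, stepKey)
  }
}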

@@ -0,0 +1,9 @@
<div>
When faced with a sizable form, consider implementing a multi-step approach to
enhance user experience.
<p>
Breaking the form into multiple steps can significantly improve usability by
making the process more digestible for your users.
</p>
</div>


@@ -0,0 +1,17 @@
<div>
You can use bindings to set the Row ID on your form.
<p>
This will allow you to pull the correct information into your form and allow
you to update!
</p>
<a href="https://docs.budibase.com/docs/form-block" target="_blank">
How to pass a row ID using bindings
</a>
</div>
<style>
a {
color: inherit;
text-decoration: underline;
}
</style>


@@ -1,3 +1,5 @@
 export { default as OnboardingData } from "./OnboardingData.svelte"
 export { default as OnboardingDesign } from "./OnboardingDesign.svelte"
 export { default as OnboardingPublish } from "./OnboardingPublish.svelte"
+export { default as NewViewUpdateFormRowId } from "./NewViewUpdateFormRowId.svelte"
+export { default as NewFormSteps } from "./NewFormSteps.svelte"


@ -2,8 +2,15 @@ import { get } from "svelte/store"
import { builderStore } from "stores/builder" import { builderStore } from "stores/builder"
import { auth } from "stores/portal" import { auth } from "stores/portal"
import analytics from "analytics" import analytics from "analytics"
import { OnboardingData, OnboardingDesign, OnboardingPublish } from "./steps" import {
OnboardingData,
OnboardingDesign,
OnboardingPublish,
NewViewUpdateFormRowId,
NewFormSteps,
} from "./steps"
import { API } from "api" import { API } from "api"
import { customPositionHandler } from "components/design/settings/controls/EditComponentPopover"
const ONBOARDING_EVENT_PREFIX = "onboarding" const ONBOARDING_EVENT_PREFIX = "onboarding"
@ -14,11 +21,26 @@ export const TOUR_STEP_KEYS = {
BUILDER_USER_MANAGEMENT: "builder-user-management", BUILDER_USER_MANAGEMENT: "builder-user-management",
BUILDER_AUTOMATION_SECTION: "builder-automation-section", BUILDER_AUTOMATION_SECTION: "builder-automation-section",
FEATURE_USER_MANAGEMENT: "feature-user-management", FEATURE_USER_MANAGEMENT: "feature-user-management",
BUILDER_FORM_CREATE_STEPS: "builder-form-create-steps",
BUILDER_FORM_VIEW_UPDATE_STEPS: "builder-form-view-update-steps",
BUILDER_FORM_ROW_ID: "builder-form-row-id",
} }
export const TOUR_KEYS = { export const TOUR_KEYS = {
TOUR_BUILDER_ONBOARDING: "builder-onboarding", TOUR_BUILDER_ONBOARDING: "builder-onboarding",
FEATURE_ONBOARDING: "feature-onboarding", FEATURE_ONBOARDING: "feature-onboarding",
BUILDER_FORM_CREATE: "builder-form-create",
BUILDER_FORM_VIEW_UPDATE: "builder-form-view-update",
}
export const getCurrentStepIdx = (steps, tourStepKey) => {
if (!steps?.length) {
return
}
if (steps?.length && !tourStepKey) {
return 0
}
return steps.findIndex(step => step.id === tourStepKey)
} }
const endUserOnboarding = async ({ skipped = false } = {}) => { const endUserOnboarding = async ({ skipped = false } = {}) => {
@ -37,13 +59,8 @@ const endUserOnboarding = async ({ skipped = false } = {}) => {
// Update the cached user // Update the cached user
await auth.getSelf() await auth.getSelf()
builderStore.update(state => ({ builderStore.endBuilderOnboarding()
...state, builderStore.setTour()
tourNodes: null,
tourKey: null,
tourStepKey: null,
onboarding: false,
}))
} catch (e) { } catch (e) {
console.error("Onboarding failed", e) console.error("Onboarding failed", e)
return false return false
@ -52,9 +69,29 @@ const endUserOnboarding = async ({ skipped = false } = {}) => {
} }
} }
const tourEvent = eventKey => { const endTour = async ({ key, skipped = false } = {}) => {
const { tours = {} } = get(auth).user
tours[key] = new Date().toISOString()
await API.updateSelf({
tours,
})
if (skipped) {
tourEvent(key, skipped)
}
// Update the cached user
await auth.getSelf()
// Reset tour state
builderStore.setTour()
}
const tourEvent = (eventKey, skipped) => {
analytics.captureEvent(`${ONBOARDING_EVENT_PREFIX}:${eventKey}`, { analytics.captureEvent(`${ONBOARDING_EVENT_PREFIX}:${eventKey}`, {
eventSource: EventSource.PORTAL, eventSource: EventSource.PORTAL,
skipped,
}) })
} }
@ -135,7 +172,71 @@ const getTours = () => {
}, },
], ],
}, },
[TOUR_KEYS.BUILDER_FORM_CREATE]: {
steps: [
{
id: TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS,
title: "Add multiple steps",
layout: NewFormSteps,
query: "#steps-prop-control-wrap",
onComplete: () => {
builderStore.highlightSetting()
endTour({ key: TOUR_KEYS.BUILDER_FORM_CREATE })
},
onLoad: () => {
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS)
builderStore.highlightSetting("steps", "info")
},
positionHandler: customPositionHandler,
align: "left-outside",
},
],
},
[TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE]: {
steps: [
{
id: TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID,
title: "Add row ID to update a row",
layout: NewViewUpdateFormRowId,
query: "#rowId-prop-control-wrap",
onLoad: () => {
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID)
builderStore.highlightSetting("rowId", "info")
},
positionHandler: customPositionHandler,
align: "left-outside",
},
{
id: TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS,
title: "Add multiple steps",
layout: NewFormSteps,
query: "#steps-prop-control-wrap",
onComplete: () => {
builderStore.highlightSetting()
endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE })
},
onLoad: () => {
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS)
builderStore.highlightSetting("steps", "info")
},
positionHandler: customPositionHandler,
align: "left-outside",
scrollIntoView: true,
},
],
onSkip: async () => {
builderStore.highlightSetting()
endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE, skipped: true })
},
},
} }
} }
export const TOURS = getTours() export const TOURS = getTours()
export const TOURSBYSTEP = Object.keys(TOURS).reduce((acc, tour) => {
TOURS[tour].steps.forEach(element => {
acc[element.id] = element
acc[element.id]["tour"] = tour
})
return acc
}, {})

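Completing or skipping one of the new form tours persists a timestamp against the user, which is what CreateScreenModal later checks before starting the same tour again. Sketch of the flow (from within tours.js; the key is an example):

// e.g. the BUILDER_FORM_CREATE_STEPS onComplete handler above ends with:
await endTour({ key: TOUR_KEYS.BUILDER_FORM_CREATE })
// -> API.updateSelf({ tours: { ...existing, "builder-form-create": "<ISO date>" } })
//    then auth.getSelf() refreshes the cached user and builderStore.setTour()
//    clears the active tour state
// CreateScreenModal only starts a tour when $auth.user?.tours?.[tourKey] is unset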

@@ -1146,7 +1146,7 @@ export const getAllStateVariables = () => {
       "@budibase/standard-components/multistepformblockstep"
     )

-    steps.forEach(step => {
+    steps?.forEach(step => {
       parseComponentSettings(stepDefinition, step)
     })
   })


@@ -96,7 +96,7 @@
       const release_date = new Date("2023-03-01T00:00:00.000Z")
       const onboarded = new Date($auth.user?.onboardedAt)
       if (onboarded < release_date) {
-        builderStore.startTour(TOUR_KEYS.FEATURE_ONBOARDING)
+        builderStore.setTour(TOUR_KEYS.FEATURE_ONBOARDING)
       }
     }
   }
@@ -144,7 +144,7 @@
     </span>
     <Tabs {selected} size="M">
       {#each $layout.children as { path, title }}
-        <TourWrap tourStepKey={`builder-${title}-section`}>
+        <TourWrap stepKeys={[`builder-${title}-section`]}>
           <Tab
             quiet
             selected={$isActive(path)}


@ -16,6 +16,14 @@
} from "dataBinding" } from "dataBinding"
import { ActionButton, notifications } from "@budibase/bbui" import { ActionButton, notifications } from "@budibase/bbui"
import { capitalise } from "helpers" import { capitalise } from "helpers"
import TourWrap from "components/portal/onboarding/TourWrap.svelte"
import { TOUR_STEP_KEYS } from "components/portal/onboarding/tours.js"
const {
BUILDER_FORM_CREATE_STEPS,
BUILDER_FORM_VIEW_UPDATE_STEPS,
BUILDER_FORM_ROW_ID,
} = TOUR_STEP_KEYS
const onUpdateName = async value => { const onUpdateName = async value => {
try { try {
@ -46,7 +54,6 @@
$: id = $selectedComponent?._id $: id = $selectedComponent?._id
$: id, (section = tabs[0]) $: id, (section = tabs[0])
$: componentName = getComponentName(componentInstance) $: componentName = getComponentName(componentInstance)
</script> </script>
@ -92,13 +99,21 @@
</div> </div>
</span> </span>
{#if section == "settings"} {#if section == "settings"}
<ComponentSettingsSection <TourWrap
{componentInstance} stepKeys={[
{componentDefinition} BUILDER_FORM_CREATE_STEPS,
{bindings} BUILDER_FORM_VIEW_UPDATE_STEPS,
{componentBindings} BUILDER_FORM_ROW_ID,
{isScreen} ]}
/> >
<ComponentSettingsSection
{componentInstance}
{componentDefinition}
{bindings}
{componentBindings}
{isScreen}
/>
</TourWrap>
{/if} {/if}
{#if section == "styles"} {#if section == "styles"}
<DesignSection <DesignSection


@ -1,7 +1,7 @@
<script> <script>
import { helpers } from "@budibase/shared-core" import { helpers } from "@budibase/shared-core"
import { DetailSummary, notifications } from "@budibase/bbui" import { DetailSummary, notifications } from "@budibase/bbui"
import { componentStore } from "stores/builder" import { componentStore, builderStore } from "stores/builder"
import PropertyControl from "components/design/settings/controls/PropertyControl.svelte" import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
import ResetFieldsButton from "components/design/settings/controls/ResetFieldsButton.svelte" import ResetFieldsButton from "components/design/settings/controls/ResetFieldsButton.svelte"
import EjectBlockButton from "components/design/settings/controls/EjectBlockButton.svelte" import EjectBlockButton from "components/design/settings/controls/EjectBlockButton.svelte"
@ -177,9 +177,7 @@
defaultValue={setting.defaultValue} defaultValue={setting.defaultValue}
nested={setting.nested} nested={setting.nested}
onChange={val => updateSetting(setting, val)} onChange={val => updateSetting(setting, val)}
highlighted={$componentStore.highlightedSettingKey === propertyFocus={$builderStore.propertyFocus === setting.key}
setting.key}
propertyFocus={$componentStore.propertyFocus === setting.key}
info={setting.info} info={setting.info}
disableBindings={setting.disableBindings} disableBindings={setting.disableBindings}
props={{ props={{


@@ -174,7 +174,7 @@
     } else if (type === "request-add-component") {
       toggleAddComponent()
     } else if (type === "highlight-setting") {
-      builderStore.highlightSetting(data.setting)
+      builderStore.highlightSetting(data.setting, "error")
       // Also scroll setting into view
       const selector = `#${data.setting}-prop-control`


@ -3,13 +3,23 @@
import DatasourceModal from "./DatasourceModal.svelte" import DatasourceModal from "./DatasourceModal.svelte"
import ScreenRoleModal from "./ScreenRoleModal.svelte" import ScreenRoleModal from "./ScreenRoleModal.svelte"
import sanitizeUrl from "helpers/sanitizeUrl" import sanitizeUrl from "helpers/sanitizeUrl"
import FormTypeModal from "./FormTypeModal.svelte"
import { Modal, notifications } from "@budibase/bbui" import { Modal, notifications } from "@budibase/bbui"
import { screenStore, navigationStore, tables } from "stores/builder" import {
screenStore,
navigationStore,
tables,
builderStore,
} from "stores/builder"
import { auth } from "stores/portal"
import { get } from "svelte/store" import { get } from "svelte/store"
import getTemplates from "templates" import getTemplates from "templates"
import { Roles } from "constants/backend" import { Roles } from "constants/backend"
import { capitalise } from "helpers" import { capitalise } from "helpers"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
import formScreen from "templates/formScreen"
import rowListScreen from "templates/rowListScreen"
let mode let mode
let pendingScreen let pendingScreen
@ -18,12 +28,18 @@
let screenDetailsModal let screenDetailsModal
let datasourceModal let datasourceModal
let screenAccessRoleModal let screenAccessRoleModal
let formTypeModal
// Cache variables for workflow // Cache variables for workflow
let screenAccessRole = Roles.BASIC let screenAccessRole = Roles.BASIC
let selectedTemplates = null
let templates = null
let screens = null
let selectedDatasources = null
let blankScreenUrl = null let blankScreenUrl = null
let screenMode = null let screenMode = null
let formType = null
// Creates an array of screens, checking and sanitising their URLs // Creates an array of screens, checking and sanitising their URLs
const createScreens = async ({ screens, screenAccessRole }) => { const createScreens = async ({ screens, screenAccessRole }) => {
@ -32,7 +48,7 @@
} }
try { try {
let screenId let createdScreens = []
for (let screen of screens) { for (let screen of screens) {
// Check we aren't clashing with an existing URL // Check we aren't clashing with an existing URL
@ -56,7 +72,7 @@
// Create the screen // Create the screen
const response = await screenStore.save(screen) const response = await screenStore.save(screen)
screenId = response._id createdScreens.push(response)
// Add link in layout. We only ever actually create 1 screen now, even // Add link in layout. We only ever actually create 1 screen now, even
// for autoscreens, so it's always safe to do this. // for autoscreens, so it's always safe to do this.
@ -66,9 +82,7 @@
) )
} }
// Go to new screen return createdScreens
$goto(`./${screenId}`)
screenStore.select(screenId)
} catch (error) { } catch (error) {
console.error(error) console.error(error)
notifications.error("Error creating screens") notifications.error("Error creating screens")
@ -104,13 +118,16 @@
// Handler for NewScreenModal // Handler for NewScreenModal
export const show = newMode => { export const show = newMode => {
mode = newMode mode = newMode
selectedTemplates = null templates = null
screens = null
selectedDatasources = null
blankScreenUrl = null blankScreenUrl = null
screenMode = mode screenMode = mode
pendingScreen = null pendingScreen = null
screenAccessRole = Roles.BASIC screenAccessRole = Roles.BASIC
formType = null
if (mode === "table" || mode === "grid") { if (mode === "table" || mode === "grid" || mode === "form") {
datasourceModal.show() datasourceModal.show()
} else if (mode === "blank") { } else if (mode === "blank") {
let templates = getTemplates($tables.list) let templates = getTemplates($tables.list)
@ -125,19 +142,26 @@
} }
// Handler for DatasourceModal confirmation, move to screen access select // Handler for DatasourceModal confirmation, move to screen access select
const confirmScreenDatasources = async ({ templates }) => { const confirmScreenDatasources = async ({ datasources }) => {
selectedTemplates = templates selectedDatasources = datasources
screenAccessRoleModal.show() if (screenMode === "form") {
formTypeModal.show()
} else {
screenAccessRoleModal.show()
}
} }
// Handler for Datasource Screen Creation // Handler for Datasource Screen Creation
const completeDatasourceScreenCreation = async () => { const completeDatasourceScreenCreation = async () => {
const screens = selectedTemplates.map(template => { templates = rowListScreen(selectedDatasources, mode)
const screens = templates.map(template => {
let screenTemplate = template.create() let screenTemplate = template.create()
screenTemplate.autoTableId = template.resourceId screenTemplate.autoTableId = template.resourceId
return screenTemplate return screenTemplate
}) })
await createScreens({ screens, screenAccessRole }) const createdScreens = await createScreens({ screens, screenAccessRole })
loadNewScreen(createdScreens)
} }
const confirmScreenBlank = async ({ screenUrl }) => { const confirmScreenBlank = async ({ screenUrl }) => {
@ -154,7 +178,54 @@
return return
} }
pendingScreen.routing.route = screenUrl pendingScreen.routing.route = screenUrl
await createScreens({ screens: [pendingScreen], screenAccessRole }) const createdScreens = await createScreens({
screens: [pendingScreen],
screenAccessRole,
})
loadNewScreen(createdScreens)
}
const onConfirmFormType = () => {
screenAccessRoleModal.show()
}
const loadNewScreen = createdScreens => {
const lastScreen = createdScreens.slice(-1)[0]
// Go to new screen
if (lastScreen?.props?._children.length) {
// Focus on the main component for the screen type
const mainComponent = lastScreen?.props?._children?.[0]._id
$goto(`./${lastScreen._id}/${mainComponent}`)
} else {
$goto(`./${lastScreen._id}`)
}
screenStore.select(lastScreen._id)
}
const confirmFormScreenCreation = async () => {
templates = formScreen(selectedDatasources, { actionType: formType })
screens = templates.map(template => {
let screenTemplate = template.create()
return screenTemplate
})
const createdScreens = await createScreens({ screens, screenAccessRole })
if (formType === "Update" || formType === "Create") {
const associatedTour =
formType === "Update"
? TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE
: TOUR_KEYS.BUILDER_FORM_CREATE
const tourRequired = !$auth?.user?.tours?.[associatedTour]
if (tourRequired) {
builderStore.setTour(associatedTour)
}
}
// Go to new screen
loadNewScreen(createdScreens)
} }
// Submit screen config for creation. // Submit screen config for creation.
@ -164,6 +235,8 @@
screenUrl: blankScreenUrl, screenUrl: blankScreenUrl,
screenAccessRole, screenAccessRole,
}) })
} else if (screenMode === "form") {
confirmFormScreenCreation()
} else { } else {
completeDatasourceScreenCreation() completeDatasourceScreenCreation()
} }
@ -179,19 +252,18 @@
</script> </script>
<Modal bind:this={datasourceModal} autoFocus={false}> <Modal bind:this={datasourceModal} autoFocus={false}>
<DatasourceModal <DatasourceModal {mode} onConfirm={confirmScreenDatasources} />
{mode}
onConfirm={confirmScreenDatasources}
initialScreens={!selectedTemplates ? [] : [...selectedTemplates]}
/>
</Modal> </Modal>
<Modal bind:this={screenAccessRoleModal}> <Modal bind:this={screenAccessRoleModal}>
<ScreenRoleModal <ScreenRoleModal
onConfirm={confirmScreenCreation} onConfirm={() => {
onCancel={roleSelectBack} confirmScreenCreation()
}}
bind:screenAccessRole bind:screenAccessRole
onCancel={roleSelectBack}
screenUrl={blankScreenUrl} screenUrl={blankScreenUrl}
confirmText={screenMode === "form" ? "Confirm" : "Done"}
/> />
</Modal> </Modal>
@ -201,3 +273,17 @@
initialUrl={blankScreenUrl} initialUrl={blankScreenUrl}
/> />
</Modal> </Modal>
<Modal bind:this={formTypeModal}>
<FormTypeModal
onConfirm={onConfirmFormType}
onCancel={() => {
formTypeModal.hide()
datasourceModal.show()
}}
on:select={e => {
formType = e.detail
}}
type={formType}
/>
</Modal>

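The navigation after creation now keys off the last created screen; condensed from loadNewScreen above (IDs are placeholders):

const lastScreen = createdScreens.slice(-1)[0]
const mainComponent = lastScreen?.props?._children?.[0]?._id
if (mainComponent) {
  // form/table screens: focus the screen's main component straight away
  $goto(`./${lastScreen._id}/${mainComponent}`)
} else {
  $goto(`./${lastScreen._id}`)
}
screenStore.select(lastScreen._id)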

@ -4,37 +4,33 @@
import ICONS from "components/backend/DatasourceNavigator/icons" import ICONS from "components/backend/DatasourceNavigator/icons"
import { IntegrationNames } from "constants" import { IntegrationNames } from "constants"
import { onMount } from "svelte" import { onMount } from "svelte"
import rowListScreen from "templates/rowListScreen"
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte" import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte"
export let mode
export let onCancel export let onCancel
export let onConfirm export let onConfirm
export let initialScreens = []
let selectedScreens = [...initialScreens] let selectedSources = []
$: filteredSources = $datasources.list?.filter(datasource => { $: filteredSources = $datasources.list?.filter(datasource => {
return datasource.source !== IntegrationNames.REST && datasource["entities"] return datasource.source !== IntegrationNames.REST && datasource["entities"]
}) })
const toggleSelection = datasource => { const toggleSelection = datasource => {
const { resourceId } = datasource const exists = selectedSources.find(
if (selectedScreens.find(s => s.resourceId === resourceId)) { d => d.resourceId === datasource.resourceId
selectedScreens = selectedScreens.filter( )
screen => screen.resourceId !== resourceId if (exists) {
selectedSources = selectedSources.filter(
d => d.resourceId !== datasource.resourceId
) )
} else { } else {
selectedScreens = [ selectedSources = [...selectedSources, datasource]
...selectedScreens,
rowListScreen([datasource], mode)[0],
]
} }
} }
const confirmDatasourceSelection = async () => { const confirmDatasourceSelection = async () => {
await onConfirm({ await onConfirm({
templates: selectedScreens, datasources: selectedSources,
}) })
} }
@ -54,7 +50,7 @@
cancelText="Back" cancelText="Back"
onConfirm={confirmDatasourceSelection} onConfirm={confirmDatasourceSelection}
{onCancel} {onCancel}
disabled={!selectedScreens.length} disabled={!selectedSources.length}
size="L" size="L"
> >
<Body size="S"> <Body size="S">
@ -85,8 +81,8 @@
resourceId: table._id, resourceId: table._id,
type: "table", type: "table",
}} }}
{@const selected = selectedScreens.find( {@const selected = selectedSources.find(
screen => screen.resourceId === tableDS.resourceId datasource => datasource.resourceId === tableDS.resourceId
)} )}
<DatasourceTemplateRow <DatasourceTemplateRow
on:click={() => toggleSelection(tableDS)} on:click={() => toggleSelection(tableDS)}
@ -103,7 +99,7 @@
tableId: view.tableId, tableId: view.tableId,
type: "viewV2", type: "viewV2",
}} }}
{@const selected = selectedScreens.find( {@const selected = selectedSources.find(
x => x.resourceId === viewDS.resourceId x => x.resourceId === viewDS.resourceId
)} )}
<DatasourceTemplateRow <DatasourceTemplateRow

View File

@ -0,0 +1,123 @@
<script>
import { ModalContent, Layout, Body, Icon } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
export let onCancel = () => {}
export let onConfirm = () => {}
export let type
const dispatch = createEventDispatcher()
</script>
<span>
<ModalContent
title="Select form type"
confirmText="Done"
cancelText="Back"
{onConfirm}
{onCancel}
disabled={!type}
size="L"
>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<Layout noPadding gap="S">
<div
class="form-type"
class:selected={type === "Create"}
on:click={() => {
dispatch("select", "Create")
}}
>
<div class="form-type-wrap">
<div class="form-type-content">
<Body noPadding>Create a new row</Body>
<Body size="S">
For capturing and storing new data from your users
</Body>
</div>
{#if type === "Create"}
<span class="form-type-check">
<Icon size="S" name="CheckmarkCircle" />
</span>
{/if}
</div>
</div>
<div
class="form-type"
class:selected={type === "Update"}
on:click={() => {
dispatch("select", "Update")
}}
>
<div class="form-type-wrap">
<div class="form-type-content">
<Body noPadding>Update an existing row</Body>
<Body size="S">For viewing and updating existing data</Body>
</div>
{#if type === "Update"}
<span class="form-type-check">
<Icon size="S" name="CheckmarkCircle" />
</span>
{/if}
</div>
</div>
<div
class="form-type"
class:selected={type === "View"}
on:click={() => {
dispatch("select", "View")
}}
>
<div class="form-type-wrap">
<div class="form-type-content">
<Body noPadding>View an existing row</Body>
<Body size="S">For a read only view of your data</Body>
</div>
{#if type === "View"}
<span class="form-type-check">
<Icon size="S" name="CheckmarkCircle" />
</span>
{/if}
</div>
</div>
</Layout>
</ModalContent>
</span>
<style>
.form-type {
cursor: pointer;
gap: var(--spacing-s);
padding: var(--spacing-m) var(--spacing-xl);
background: var(--spectrum-alias-background-color-secondary);
transition: 0.3s all;
border: 1px solid var(--spectrum-global-color-gray-300);
border-radius: 4px;
display: flex;
flex-direction: column;
}
.selected,
.form-type:hover {
background: var(--spectrum-alias-background-color-tertiary);
}
.form-type-wrap {
display: flex;
align-items: center;
justify-content: space-between;
}
.form-type :global(p:nth-child(2)) {
color: var(--grey-6);
}
.form-type-check {
margin-left: auto;
}
.form-type-check :global(.spectrum-Icon) {
color: var(--spectrum-global-color-green-600);
}
.form-type-content {
gap: var(--spacing-s);
display: flex;
flex-direction: column;
}
</style>

View File

@ -9,6 +9,7 @@
export let onCancel export let onCancel
export let screenUrl export let screenUrl
export let screenAccessRole export let screenAccessRole
export let confirmText = "Done"
let error let error
@ -40,7 +41,7 @@
<ModalContent <ModalContent
title="Access" title="Access"
confirmText="Done" {confirmText}
cancelText="Back" cancelText="Back"
{onConfirm} {onConfirm}
{onCancel} {onCancel}

Binary image file added (22 KiB, not shown).

View File

@ -1,9 +1,10 @@
<script> <script>
import { Body } from "@budibase/bbui" import { Body } from "@budibase/bbui"
import CreationPage from "components/common/CreationPage.svelte" import CreationPage from "components/common/CreationPage.svelte"
import blankImage from "./blank.png" import blankImage from "./images/blank.png"
import tableImage from "./table.png" import tableImage from "./images/table.png"
import gridImage from "./grid.png" import gridImage from "./images/grid.png"
import formImage from "./images/form.png"
import CreateScreenModal from "./CreateScreenModal.svelte" import CreateScreenModal from "./CreateScreenModal.svelte"
import { screenStore } from "stores/builder" import { screenStore } from "stores/builder"
@ -56,6 +57,16 @@
<Body size="XS">View and manipulate rows on a grid</Body> <Body size="XS">View and manipulate rows on a grid</Body>
</div> </div>
</div> </div>
<div class="card" on:click={() => createScreenModal.show("form")}>
<div class="image">
<img alt="" src={formImage} />
</div>
<div class="text">
<Body size="S">Form</Body>
<Body size="XS">Capture data from your users</Body>
</div>
</div>
</div> </div>
</CreationPage> </CreationPage>
</div> </div>

View File

@ -29,6 +29,7 @@ export const INITIAL_APP_META_STATE = {
initialised: false, initialised: false,
hasAppPackage: false, hasAppPackage: false,
usedPlugins: null, usedPlugins: null,
automations: {},
routes: {}, routes: {},
} }
@ -63,6 +64,7 @@ export class AppMetaStore extends BudiStore {
...app.features, ...app.features,
}, },
initialised: true, initialised: true,
automations: app.automations || {},
hasAppPackage: true, hasAppPackage: true,
})) }))
} }

View File

@ -2,12 +2,11 @@ import { get } from "svelte/store"
import { createBuilderWebsocket } from "./websocket.js" import { createBuilderWebsocket } from "./websocket.js"
import { BuilderSocketEvent } from "@budibase/shared-core" import { BuilderSocketEvent } from "@budibase/shared-core"
import BudiStore from "./BudiStore" import BudiStore from "./BudiStore"
import { previewStore } from "./preview.js"
import { TOUR_KEYS } from "components/portal/onboarding/tours.js" import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
export const INITIAL_BUILDER_STATE = { export const INITIAL_BUILDER_STATE = {
previousTopNavPath: {}, previousTopNavPath: {},
highlightedSettingKey: null, highlightedSetting: null,
propertyFocus: null, propertyFocus: null,
builderSidePanel: false, builderSidePanel: false,
onboarding: false, onboarding: false,
@ -26,7 +25,6 @@ export class BuilderStore extends BudiStore {
this.reset = this.reset.bind(this) this.reset = this.reset.bind(this)
this.highlightSetting = this.highlightSetting.bind(this) this.highlightSetting = this.highlightSetting.bind(this)
this.propertyFocus = this.propertyFocus.bind(this) this.propertyFocus = this.propertyFocus.bind(this)
this.hover = this.hover.bind(this)
this.hideBuilderSidePanel = this.hideBuilderSidePanel.bind(this) this.hideBuilderSidePanel = this.hideBuilderSidePanel.bind(this)
this.showBuilderSidePanel = this.showBuilderSidePanel.bind(this) this.showBuilderSidePanel = this.showBuilderSidePanel.bind(this)
this.setPreviousTopNavPath = this.setPreviousTopNavPath.bind(this) this.setPreviousTopNavPath = this.setPreviousTopNavPath.bind(this)
@ -58,10 +56,10 @@ export class BuilderStore extends BudiStore {
this.websocket = null this.websocket = null
} }
highlightSetting(key) { highlightSetting(key, type) {
this.update(state => ({ this.update(state => ({
...state, ...state,
highlightedSettingKey: key, highlightedSetting: key ? { key, type: type || "info" } : null,
})) }))
} }
@ -135,25 +133,20 @@ export class BuilderStore extends BudiStore {
})) }))
} }
startTour(tourKey) { endBuilderOnboarding() {
this.update(state => ({ this.update(state => ({
...state, ...state,
tourKey: tourKey, onboarding: false,
})) }))
} }
hover(componentId, notifyClient = true) { setTour(tourKey) {
const store = get(this.store) this.update(state => ({
if (componentId === store.hoveredComponentId) { ...state,
return tourStepKey: null,
} tourNodes: null,
this.update(state => { tourKey: tourKey,
state.hoveredComponentId = componentId }))
return state
})
if (notifyClient) {
previewStore.sendEvent("hover-component", componentId)
}
} }
} }
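
For orientation, a minimal sketch of how the reshaped store is driven elsewhere in the builder (the setting key is illustrative; setTour is the call CreateScreenModal makes above):

// highlight a setting, optionally flagging it as an error; calling with no
// arguments clears the highlight again
builderStore.highlightSetting("url", "error") // highlightedSetting -> { key: "url", type: "error" }
builderStore.highlightSetting("url")          // highlightedSetting -> { key: "url", type: "info" }
builderStore.highlightSetting()               // highlightedSetting -> null

// switch the active tour, wiping any step/node state from a previous tour
builderStore.setTour(TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE)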

View File

@ -147,6 +147,12 @@ export function createTablesStore() {
if (indexes) { if (indexes) {
draft.indexes = indexes draft.indexes = indexes
} }
// Add object to indicate if column is being added
if (draft.schema[field.name] === undefined) {
draft._add = {
name: field.name,
}
}
draft.schema = { draft.schema = {
...draft.schema, ...draft.schema,
[field.name]: cloneDeep(field), [field.name]: cloneDeep(field),

View File

@ -88,14 +88,42 @@ describe("Builder store", () => {
) )
}) })
it("Sync a highlighted setting key to state", ctx => { it("Sync a highlighted setting key to state. Default to info type", ctx => {
expect(ctx.test.store.highlightedSettingKey).toBeNull() expect(ctx.test.store.highlightedSetting).toBeNull()
ctx.test.builderStore.highlightSetting("testing") ctx.test.builderStore.highlightSetting("testing")
expect(ctx.test.store).toStrictEqual({ expect(ctx.test.store).toStrictEqual({
...INITIAL_BUILDER_STATE, ...INITIAL_BUILDER_STATE,
highlightedSettingKey: "testing", highlightedSetting: {
key: "testing",
type: "info",
},
})
})
it("Sync a highlighted setting key to state. Use provided type", ctx => {
expect(ctx.test.store.highlightedSetting).toBeNull()
ctx.test.builderStore.highlightSetting("testing", "error")
expect(ctx.test.store).toStrictEqual({
...INITIAL_BUILDER_STATE,
highlightedSetting: {
key: "testing",
type: "error",
},
})
})
it("Sync a highlighted setting key to state. Unset when no value is passed", ctx => {
expect(ctx.test.store.highlightedSetting).toBeNull()
ctx.test.builderStore.highlightSetting("testing", "error")
ctx.test.builderStore.highlightSetting()
expect(ctx.test.store).toStrictEqual({
...INITIAL_BUILDER_STATE,
}) })
}) })

View File

@ -0,0 +1,43 @@
import { Screen } from "./Screen"
import { Component } from "./Component"
import sanitizeUrl from "helpers/sanitizeUrl"
export const FORM_TEMPLATE = "FORM_TEMPLATE"
export const formUrl = datasource => sanitizeUrl(`/${datasource.label}-form`)
// Mode not really necessary
export default function (datasources, config) {
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - Form`,
create: () => createScreen(datasource, config),
id: FORM_TEMPLATE,
resourceId: datasource.resourceId,
}
})
}
const generateMultistepFormBlock = (dataSource, { actionType } = {}) => {
const multistepFormBlock = new Component(
"@budibase/standard-components/multistepformblock"
)
multistepFormBlock
.customProps({
actionType,
dataSource,
steps: [{}],
})
.instanceName(`${dataSource.label} - Multistep Form block`)
return multistepFormBlock
}
const createScreen = (datasource, config) => {
return new Screen()
.route(formUrl(datasource))
.instanceName(`${datasource.label} - Form`)
.addChild(generateMultistepFormBlock(datasource, config))
.json()
}

View File

@ -1,7 +1,11 @@
import rowListScreen from "./rowListScreen" import rowListScreen from "./rowListScreen"
import createFromScratchScreen from "./createFromScratchScreen" import createFromScratchScreen from "./createFromScratchScreen"
import formScreen from "./formScreen"
const allTemplates = datasources => [...rowListScreen(datasources)] const allTemplates = datasources => [
...rowListScreen(datasources),
...formScreen(datasources),
]
// Allows us to apply common behaviour to all create() functions // Allows us to apply common behaviour to all create() functions
const createTemplateOverride = template => () => { const createTemplateOverride = template => () => {
@ -19,6 +23,7 @@ export default datasources => {
}) })
const fromScratch = enrichTemplate(createFromScratchScreen) const fromScratch = enrichTemplate(createFromScratchScreen)
const tableTemplates = allTemplates(datasources).map(enrichTemplate) const tableTemplates = allTemplates(datasources).map(enrichTemplate)
return [ return [
fromScratch, fromScratch,
...tableTemplates.sort((templateA, templateB) => { ...tableTemplates.sort((templateA, templateB) => {

View File

@ -525,6 +525,38 @@
"barTitle": "Disable button", "barTitle": "Disable button",
"key": "disabled" "key": "disabled"
}, },
{
"type": "icon",
"label": "Icon",
"key": "icon"
},
{
"type": "select",
"label": "Gap",
"key": "gap",
"showInBar": true,
"barStyle": "picker",
"dependsOn": "icon",
"options": [
{
"label": "None",
"value": "N"
},
{
"label": "Small",
"value": "S"
},
{
"label": "Medium",
"value": "M"
},
{
"label": "Large",
"value": "L"
}
],
"defaultValue": "M"
},
{ {
"type": "event", "type": "event",
"label": "On click", "label": "On click",

View File

@ -13,9 +13,10 @@
export let size = "M" export let size = "M"
export let type = "cta" export let type = "cta"
export let quiet = false export let quiet = false
export let icon = null
export let gap = "M"
// For internal use only for now - not defined in the manifest // For internal use only for now - not defined in the manifest
export let icon = null
export let active = false export let active = false
const handleOnClick = async () => { const handleOnClick = async () => {
@ -47,7 +48,7 @@
{#key $component.editing} {#key $component.editing}
<button <button
class={`spectrum-Button spectrum-Button--size${size} spectrum-Button--${type}`} class={`spectrum-Button spectrum-Button--size${size} spectrum-Button--${type} gap-${gap}`}
class:spectrum-Button--quiet={quiet} class:spectrum-Button--quiet={quiet}
disabled={disabled || handlingOnClick} disabled={disabled || handlingOnClick}
use:styleable={$component.styles} use:styleable={$component.styles}
@ -58,15 +59,7 @@
class:active class:active
> >
{#if icon} {#if icon}
<svg <i class="{icon} {size}" />
class:hasText={componentText?.length > 0}
class="spectrum-Icon spectrum-Icon--size{size.toUpperCase()}"
focusable="false"
aria-hidden="true"
aria-label={icon}
>
<use xlink:href="#spectrum-icon-18-{icon}" />
</svg>
{/if} {/if}
{componentText} {componentText}
</button> </button>
@ -92,4 +85,13 @@
.active { .active {
color: var(--spectrum-global-color-blue-600); color: var(--spectrum-global-color-blue-600);
} }
.gap-S {
gap: 8px;
}
.gap-M {
gap: 16px;
}
.gap-L {
gap: 32px;
}
</style> </style>

View File

@ -20,7 +20,7 @@
wrap: true, wrap: true,
}} }}
> >
{#each buttons as { text, type, quiet, disabled, onClick, size }} {#each buttons as { text, type, quiet, disabled, onClick, size, icon, gap }}
<BlockComponent <BlockComponent
type="button" type="button"
props={{ props={{
@ -29,6 +29,8 @@
type, type,
quiet, quiet,
disabled, disabled,
icon,
gap,
size: size || "M", size: size || "M",
}} }}
/> />

View File

@ -92,9 +92,9 @@
{#if schemaLoaded} {#if schemaLoaded}
<Button <Button
onClick={openEditor} onClick={openEditor}
icon="Properties" icon="ri-filter-3-line"
text="Filter" text="Filter"
{size} size="XL"
type="secondary" type="secondary"
quiet quiet
active={filters?.length > 0} active={filters?.length > 0}

View File

@ -1,7 +1,7 @@
<script> <script>
import { CoreSelect, CoreMultiselect } from "@budibase/bbui" import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
import { fetchData, Utils } from "@budibase/frontend-core" import { fetchData, Utils } from "@budibase/frontend-core"
import { getContext } from "svelte" import { getContext, onMount } from "svelte"
import Field from "./Field.svelte" import Field from "./Field.svelte"
import { FieldTypes } from "../../../constants" import { FieldTypes } from "../../../constants"
@ -28,6 +28,7 @@
let tableDefinition let tableDefinition
let searchTerm let searchTerm
let open let open
let initialValue
$: type = $: type =
datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
@ -109,7 +110,11 @@
} }
$: forceFetchRows(filter) $: forceFetchRows(filter)
$: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue) $: debouncedFetchRows(
searchTerm,
primaryDisplay,
initialValue || defaultValue
)
const forceFetchRows = async () => { const forceFetchRows = async () => {
// if the filter has changed, then we need to reset the options, clear the selection, and re-fetch // if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
@ -127,9 +132,13 @@
if (allRowsFetched || !primaryDisplay) { if (allRowsFetched || !primaryDisplay) {
return return
} }
if (defaultVal && !optionsObj[defaultVal]) { // must be an array
if (defaultVal && !Array.isArray(defaultVal)) {
defaultVal = defaultVal.split(",")
}
if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
await fetch.update({ await fetch.update({
query: { equal: { _id: defaultVal } }, query: { oneOf: { _id: defaultVal } },
}) })
} }
@ -202,6 +211,16 @@
fetch.nextPage() fetch.nextPage()
} }
} }
onMount(() => {
// if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
if (fieldState?.value) {
initialValue =
fieldSchema?.relationshipType !== "one-to-many"
? flatten(fieldState?.value) ?? []
: flatten(fieldState?.value)?.[0]
}
})
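
// Illustrative walk-through of the default value handling above, with made-up
// row IDs: a stringified multi-row default is normalised to an array, and any
// IDs missing from the loaded options are resolved with a oneOf lookup instead
// of an equal match.
// const rawDefault = "ro_abc,ro_def"
// const ids = Array.isArray(rawDefault) ? rawDefault : rawDefault.split(",")
// -> fetch.update({ query: { oneOf: { _id: ["ro_abc", "ro_def"] } } })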
</script> </script>
<Field <Field

View File

@ -59,13 +59,13 @@
isReadonly: () => readonly, isReadonly: () => readonly,
getType: () => column.schema.type, getType: () => column.schema.type,
getValue: () => row[column.name], getValue: () => row[column.name],
setValue: (value, options = { save: true }) => { setValue: (value, options = { apply: true }) => {
validation.actions.setError(cellId, null) validation.actions.setError(cellId, null)
updateValue({ updateValue({
rowId: row._id, rowId: row._id,
column: column.name, column: column.name,
value, value,
save: options?.save, apply: options?.apply,
}) })
}, },
} }

View File

@ -217,14 +217,14 @@
const type = $focusedCellAPI.getType() const type = $focusedCellAPI.getType()
if (type === "number" && keyCodeIsNumber(keyCode)) { if (type === "number" && keyCodeIsNumber(keyCode)) {
// Update the value locally but don't save it yet // Update the value locally but don't save it yet
$focusedCellAPI.setValue(parseInt(key), { save: false }) $focusedCellAPI.setValue(parseInt(key), { apply: false })
$focusedCellAPI.focus() $focusedCellAPI.focus()
} else if ( } else if (
["string", "barcodeqr", "longform"].includes(type) && ["string", "barcodeqr", "longform"].includes(type) &&
(keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode)) (keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode))
) { ) {
// Update the value locally but don't save it yet // Update the value locally but don't save it yet
$focusedCellAPI.setValue(key, { save: false }) $focusedCellAPI.setValue(key, { apply: false })
$focusedCellAPI.focus() $focusedCellAPI.focus()
} }
} }

View File

@ -327,29 +327,31 @@ export const createActions = context => {
get(fetch)?.getInitialData() get(fetch)?.getInitialData()
} }
// Patches a row with some changes // Checks if a changeset for a row actually mutates the row or not
const updateRow = async (rowId, changes, options = { save: true }) => { const changesAreValid = (row, changes) => {
const columns = Object.keys(changes || {})
if (!row || !columns.length) {
return false
}
// Ensure there is at least 1 column that creates a difference
return columns.some(column => row[column] !== changes[column])
}
// Patches a row with some changes in local state, and returns whether a
// valid pending change was made or not
const stashRowChanges = (rowId, changes) => {
const $rows = get(rows) const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap) const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId] const index = $rowLookupMap[rowId]
const row = $rows[index] const row = $rows[index]
if (index == null || !Object.keys(changes || {}).length) {
return // Check this is a valid change
if (!row || !changesAreValid(row, changes)) {
return false
} }
// Abandon if no changes // Add change to cache
let same = true
for (let column of Object.keys(changes)) {
if (row[column] !== changes[column]) {
same = false
break
}
}
if (same) {
return
}
// Immediately update state so that the change is reflected
rowChangeCache.update(state => ({ rowChangeCache.update(state => ({
...state, ...state,
[rowId]: { [rowId]: {
@ -357,26 +359,30 @@ export const createActions = context => {
...changes, ...changes,
}, },
})) }))
return true
}
// Stop here if we don't want to persist the change // Saves any pending changes to a row
if (!options?.save) { const applyRowChanges = async rowId => {
const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId]
const row = $rows[index]
if (row == null) {
return return
} }
// Save change // Save change
try { try {
inProgressChanges.update(state => ({ // Mark as in progress
...state, inProgressChanges.update(state => ({ ...state, [rowId]: true }))
[rowId]: true,
}))
// Update row // Update row
const saved = await datasource.actions.updateRow({ const changes = get(rowChangeCache)[rowId]
...cleanRow(row), const newRow = { ...cleanRow(row), ...changes }
...get(rowChangeCache)[rowId], const saved = await datasource.actions.updateRow(newRow)
})
// Update state after a successful change // Update row state after a successful change
if (saved?._id) { if (saved?._id) {
rows.update(state => { rows.update(state => {
state[index] = saved state[index] = saved
@ -386,6 +392,8 @@ export const createActions = context => {
// Handle users table edge case // Handle users table edge case
await refreshRow(saved.id) await refreshRow(saved.id)
} }
// Wipe row change cache now that we've saved the row
rowChangeCache.update(state => { rowChangeCache.update(state => {
delete state[rowId] delete state[rowId]
return state return state
@ -393,15 +401,17 @@ export const createActions = context => {
} catch (error) { } catch (error) {
handleValidationError(rowId, error) handleValidationError(rowId, error)
} }
inProgressChanges.update(state => ({
...state, // Mark as completed
[rowId]: false, inProgressChanges.update(state => ({ ...state, [rowId]: false }))
}))
} }
// Updates a value of a row // Updates a value of a row
const updateValue = async ({ rowId, column, value, save = true }) => { const updateValue = async ({ rowId, column, value, apply = true }) => {
return await updateRow(rowId, { [column]: value }, { save }) const success = stashRowChanges(rowId, { [column]: value })
if (success && apply) {
await applyRowChanges(rowId)
}
} }
// Deletes an array of rows // Deletes an array of rows
@ -411,9 +421,7 @@ export const createActions = context => {
} }
// Actually delete rows // Actually delete rows
rowsToDelete.forEach(row => { rowsToDelete.forEach(row => delete row.__idx)
delete row.__idx
})
await datasource.actions.deleteRows(rowsToDelete) await datasource.actions.deleteRows(rowsToDelete)
// Update state // Update state
@ -433,7 +441,7 @@ export const createActions = context => {
newRow = newRows[i] newRow = newRows[i]
// Ensure we have a unique _id. // Ensure we have a unique _id.
// This means generating one for non DS+, overriting any that may already // This means generating one for non DS+, overwriting any that may already
// exist as we cannot allow duplicates. // exist as we cannot allow duplicates.
if (!$isDatasourcePlus) { if (!$isDatasourcePlus) {
newRow._id = Helpers.uuid() newRow._id = Helpers.uuid()
@ -494,7 +502,7 @@ export const createActions = context => {
duplicateRow, duplicateRow,
getRow, getRow,
updateValue, updateValue,
updateRow, applyRowChanges,
deleteRows, deleteRows,
hasRow, hasRow,
loadNextPage, loadNextPage,
@ -508,7 +516,14 @@ export const createActions = context => {
} }
export const initialise = context => { export const initialise = context => {
const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context const {
rowChangeCache,
inProgressChanges,
previousFocusedRowId,
previousFocusedCellId,
rows,
validation,
} = context
// Wipe the row change cache when changing row // Wipe the row change cache when changing row
previousFocusedRowId.subscribe(id => { previousFocusedRowId.subscribe(id => {
@ -519,4 +534,15 @@ export const initialise = context => {
}) })
} }
}) })
// Ensure any unsaved changes are saved when changing cell
previousFocusedCellId.subscribe(async id => {
const rowId = id?.split("-")[0]
const hasErrors = validation.actions.rowHasErrors(rowId)
const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0
const isSavingChanges = get(inProgressChanges)[rowId]
if (rowId && !hasErrors && hasChanges && !isSavingChanges) {
await rows.actions.applyRowChanges(rowId)
}
})
} }
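
Put together, the editing flow the grid now follows looks roughly like this (a sketch with a placeholder row ID, not code lifted from the store):

// typing into a cell only stashes the change in rowChangeCache
updateValue({ rowId: "row_1", column: "name", value: "Mike", apply: false })

// an explicit edit (apply defaults to true) stashes and saves in one call
updateValue({ rowId: "row_1", column: "name", value: "Mike" })

// moving focus to a different cell flushes any stashed, error-free changes
// via the previousFocusedCellId subscription -> applyRowChanges("row_1")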

View File

@ -16,6 +16,7 @@ export const createStores = context => {
const hoveredRowId = writable(null) const hoveredRowId = writable(null)
const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight) const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight)
const previousFocusedRowId = writable(null) const previousFocusedRowId = writable(null)
const previousFocusedCellId = writable(null)
const gridFocused = writable(false) const gridFocused = writable(false)
const isDragging = writable(false) const isDragging = writable(false)
const buttonColumnWidth = writable(0) const buttonColumnWidth = writable(0)
@ -48,6 +49,7 @@ export const createStores = context => {
focusedCellAPI, focusedCellAPI,
focusedRowId, focusedRowId,
previousFocusedRowId, previousFocusedRowId,
previousFocusedCellId,
hoveredRowId, hoveredRowId,
rowHeight, rowHeight,
gridFocused, gridFocused,
@ -129,6 +131,7 @@ export const initialise = context => {
const { const {
focusedRowId, focusedRowId,
previousFocusedRowId, previousFocusedRowId,
previousFocusedCellId,
rows, rows,
focusedCellId, focusedCellId,
selectedRows, selectedRows,
@ -181,6 +184,13 @@ export const initialise = context => {
lastFocusedRowId = id lastFocusedRowId = id
}) })
// Remember the last focused cell ID so that we can store the previous one
let lastFocusedCellId = null
focusedCellId.subscribe(id => {
previousFocusedCellId.set(lastFocusedCellId)
lastFocusedCellId = id
})
// Remove hovered row when a cell is selected // Remove hovered row when a cell is selected
focusedCellId.subscribe(cell => { focusedCellId.subscribe(cell => {
if (cell && get(hoveredRowId)) { if (cell && get(hoveredRowId)) {

View File

@ -1,8 +1,23 @@
import { writable, get } from "svelte/store" import { writable, get, derived } from "svelte/store"
// Normally we would break out actions into the explicit "createActions"
// function, but for validation all these actions are pure so can go into
// "createStores" instead to make dependency ordering simpler
export const createStores = () => { export const createStores = () => {
const validation = writable({}) const validation = writable({})
// Derive which rows have errors so that we can use that info later
const rowErrorMap = derived(validation, $validation => {
let map = {}
Object.entries($validation).forEach(([key, error]) => {
// Extract row ID from all errored cell IDs
if (error) {
map[key.split("-")[0]] = true
}
})
return map
})
const setError = (cellId, error) => { const setError = (cellId, error) => {
if (!cellId) { if (!cellId) {
return return
@ -13,11 +28,16 @@ export const createStores = () => {
})) }))
} }
const rowHasErrors = rowId => {
return get(rowErrorMap)[rowId]
}
return { return {
validation: { validation: {
...validation, ...validation,
actions: { actions: {
setError, setError,
rowHasErrors,
}, },
}, },
} }
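
A quick illustration of the new helper, assuming cell IDs of the form `${rowId}-${fieldName}` (which is what the key.split("-")[0] above relies on):

validation.actions.setError("row_1-name", "Required field")
validation.actions.rowHasErrors("row_1") // -> true
validation.actions.setError("row_1-name", null)
validation.actions.rowHasErrors("row_1") // -> undefined (no remaining errors)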

@ -1 +1 @@
Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781

View File

@ -152,7 +152,7 @@
"rimraf": "3.0.2", "rimraf": "3.0.2",
"supertest": "6.3.3", "supertest": "6.3.3",
"swagger-jsdoc": "6.1.0", "swagger-jsdoc": "6.1.0",
"testcontainers": "10.6.0", "testcontainers": "10.7.2",
"timekeeper": "2.2.0", "timekeeper": "2.2.0",
"ts-node": "10.8.1", "ts-node": "10.8.1",
"tsconfig-paths": "4.0.0", "tsconfig-paths": "4.0.0",

View File

@ -10,6 +10,11 @@ CREATE TABLE Persons (
City varchar(255), City varchar(255),
PRIMARY KEY (PersonID) PRIMARY KEY (PersonID)
); );
CREATE TABLE Person (
PersonID int NOT NULL AUTO_INCREMENT,
Name varchar(255),
PRIMARY KEY (PersonID)
);
CREATE TABLE Tasks ( CREATE TABLE Tasks (
TaskID int NOT NULL AUTO_INCREMENT, TaskID int NOT NULL AUTO_INCREMENT,
PersonID INT, PersonID INT,
@ -27,6 +32,7 @@ CREATE TABLE Products (
); );
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07'); INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11'); INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
INSERT INTO Person (Name) VALUES ('Elf');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01'); INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31'); INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00'); INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View File

@ -7,6 +7,10 @@ import {
GetResourcePermsResponse, GetResourcePermsResponse,
ResourcePermissionInfo, ResourcePermissionInfo,
GetDependantResourcesResponse, GetDependantResourcesResponse,
AddPermissionResponse,
AddPermissionRequest,
RemovePermissionRequest,
RemovePermissionResponse,
} from "@budibase/types" } from "@budibase/types"
import { getRoleParams } from "../../db/utils" import { getRoleParams } from "../../db/utils"
import { import {
@ -16,9 +20,9 @@ import {
import { removeFromArray } from "../../utilities" import { removeFromArray } from "../../utilities"
import sdk from "../../sdk" import sdk from "../../sdk"
const PermissionUpdateType = { const enum PermissionUpdateType {
REMOVE: "remove", REMOVE = "remove",
ADD: "add", ADD = "add",
} }
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
@ -39,7 +43,7 @@ async function updatePermissionOnRole(
resourceId, resourceId,
level, level,
}: { roleId: string; resourceId: string; level: PermissionLevel }, }: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: string updateType: PermissionUpdateType
) { ) {
const allowedAction = await sdk.permissions.resourceActionAllowed({ const allowedAction = await sdk.permissions.resourceActionAllowed({
resourceId, resourceId,
@ -107,11 +111,15 @@ async function updatePermissionOnRole(
} }
const response = await db.bulkDocs(docUpdates) const response = await db.bulkDocs(docUpdates)
return response.map((resp: any) => { return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version const version = docUpdates.find(role => role._id === resp.id)?.version
resp._id = roles.getExternalRoleID(resp.id, version) const _id = roles.getExternalRoleID(resp.id, version)
delete resp.id return {
return resp _id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
}) })
} }
@ -189,13 +197,14 @@ export async function getDependantResources(
} }
} }
export async function addPermission(ctx: UserCtx) { export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
ctx.body = await updatePermissionOnRole(ctx.params, PermissionUpdateType.ADD) const params: AddPermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
} }
export async function removePermission(ctx: UserCtx) { export async function removePermission(
ctx.body = await updatePermissionOnRole( ctx: UserCtx<void, RemovePermissionResponse>
ctx.params, ) {
PermissionUpdateType.REMOVE const params: RemovePermissionRequest = ctx.params
) ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
} }
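
For reference, each element of the array assigned to ctx.body above now has a fixed shape rather than being a mutated bulkDocs response (an illustrative type only; the real request/response types come from @budibase/types):

type PermissionUpdateResult = {
  _id: string // external role ID, version-aware
  rev?: string
  error?: string
  reason?: string
}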

View File

@ -17,10 +17,12 @@ import {
QueryPreview, QueryPreview,
QuerySchema, QuerySchema,
FieldType, FieldType,
type ExecuteQueryRequest, ExecuteQueryRequest,
type ExecuteQueryResponse, ExecuteQueryResponse,
type Row, Row,
QueryParameter, QueryParameter,
PreviewQueryRequest,
PreviewQueryResponse,
} from "@budibase/types" } from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core" import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
@ -134,14 +136,16 @@ function enrichParameters(
return requestParameters return requestParameters
} }
export async function preview(ctx: UserCtx) { export async function preview(
ctx: UserCtx<PreviewQueryRequest, PreviewQueryResponse>
) {
const { datasource, envVars } = await sdk.datasources.getWithEnvVars( const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
ctx.request.body.datasourceId ctx.request.body.datasourceId
) )
const query: QueryPreview = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does // preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query // this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query const { fields, parameters, queryVerb, transformer, queryId, schema } =
ctx.request.body
let existingSchema = schema let existingSchema = schema
if (queryId && !existingSchema) { if (queryId && !existingSchema) {
@ -266,9 +270,7 @@ export async function preview(ctx: UserCtx) {
}, },
} }
const { rows, keys, info, extra } = (await Runner.run( const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
inputs
)) as QueryResponse
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys) const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
// if existing schema, update to include any previous schema keys // if existing schema, update to include any previous schema keys
@ -281,7 +283,7 @@ export async function preview(ctx: UserCtx) {
} }
// remove configuration before sending event // remove configuration before sending event
delete datasource.config delete datasource.config
await events.query.previewed(datasource, query) await events.query.previewed(datasource, ctx.request.body)
ctx.body = { ctx.body = {
rows, rows,
nestedSchemaFields, nestedSchemaFields,
@ -295,7 +297,10 @@ export async function preview(ctx: UserCtx) {
} }
async function execute( async function execute(
ctx: UserCtx<ExecuteQueryRequest, ExecuteQueryResponse | Row[]>, ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
opts: any = { rowsOnly: false, isAutomation: false } opts: any = { rowsOnly: false, isAutomation: false }
) { ) {
const db = context.getAppDB() const db = context.getAppDB()
@ -350,18 +355,23 @@ async function execute(
} }
} }
export async function executeV1(ctx: UserCtx) { export async function executeV1(
ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
) {
return execute(ctx, { rowsOnly: true, isAutomation: false }) return execute(ctx, { rowsOnly: true, isAutomation: false })
} }
export async function executeV2( export async function executeV2(
ctx: UserCtx, ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
{ isAutomation }: { isAutomation?: boolean } = {} { isAutomation }: { isAutomation?: boolean } = {}
) { ) {
return execute(ctx, { rowsOnly: false, isAutomation }) return execute(ctx, { rowsOnly: false, isAutomation })
} }
const removeDynamicVariables = async (queryId: any) => { const removeDynamicVariables = async (queryId: string) => {
const db = context.getAppDB() const db = context.getAppDB()
const query = await db.get<Query>(queryId) const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId) const datasource = await sdk.datasources.get(query.datasourceId)
@ -384,7 +394,7 @@ const removeDynamicVariables = async (queryId: any) => {
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const db = context.getAppDB() const db = context.getAppDB()
const queryId = ctx.params.queryId const queryId = ctx.params.queryId as string
await removeDynamicVariables(queryId) await removeDynamicVariables(queryId)
const query = await db.get<Query>(queryId) const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId) const datasource = await sdk.datasources.get(query.datasourceId)

View File

@ -7,8 +7,14 @@ import {
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { getUserMetadataParams, InternalTables } from "../../db/utils" import { getUserMetadataParams, InternalTables } from "../../db/utils"
import { import {
AccessibleRolesResponse,
Database, Database,
DestroyRoleResponse,
FetchRolesResponse,
FindRoleResponse,
Role, Role,
SaveRoleRequest,
SaveRoleResponse,
UserCtx, UserCtx,
UserMetadata, UserMetadata,
UserRoles, UserRoles,
@ -25,43 +31,36 @@ async function updateRolesOnUserTable(
db: Database, db: Database,
roleId: string, roleId: string,
updateOption: string, updateOption: string,
roleVersion: string | undefined roleVersion?: string
) { ) {
const table = await sdk.tables.getTable(InternalTables.USER_METADATA) const table = await sdk.tables.getTable(InternalTables.USER_METADATA)
const schema = table.schema const constraints = table.schema.roleId?.constraints
if (!constraints) {
return
}
const updatedRoleId =
roleVersion === roles.RoleIDVersion.NAME
? roles.getExternalRoleID(roleId, roleVersion)
: roleId
const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
const remove = updateOption === UpdateRolesOptions.REMOVED const remove = updateOption === UpdateRolesOptions.REMOVED
let updated = false if (remove && indexOfRoleId !== -1) {
for (let prop of Object.keys(schema)) { constraints.inclusion!.splice(indexOfRoleId, 1)
if (prop === "roleId") { } else if (!remove && indexOfRoleId === -1) {
updated = true constraints.inclusion!.push(updatedRoleId)
const constraints = schema[prop].constraints!
const updatedRoleId =
roleVersion === roles.RoleIDVersion.NAME
? roles.getExternalRoleID(roleId, roleVersion)
: roleId
const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
if (remove && indexOfRoleId !== -1) {
constraints.inclusion!.splice(indexOfRoleId, 1)
} else if (!remove && indexOfRoleId === -1) {
constraints.inclusion!.push(updatedRoleId)
}
break
}
}
if (updated) {
await db.put(table)
} }
await db.put(table)
} }
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchRolesResponse>) {
ctx.body = await roles.getAllRoles() ctx.body = await roles.getAllRoles()
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx<void, FindRoleResponse>) {
ctx.body = await roles.getRole(ctx.params.roleId) ctx.body = await roles.getRole(ctx.params.roleId)
} }
export async function save(ctx: UserCtx) { export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
let { _id, name, inherits, permissionId, version } = ctx.request.body let { _id, name, inherits, permissionId, version } = ctx.request.body
let isCreate = false let isCreate = false
@ -107,11 +106,26 @@ export async function save(ctx: UserCtx) {
) )
role._rev = result.rev role._rev = result.rev
ctx.body = role ctx.body = role
const devDb = context.getDevAppDB()
const prodDb = context.getProdAppDB()
if (await prodDb.exists()) {
const replication = new dbCore.Replication({
source: devDb.name,
target: prodDb.name,
})
await replication.replicate({
filter: (doc: any, params: any) => {
return doc._id && doc._id.startsWith("role_")
},
})
}
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
let roleId = ctx.params.roleId let roleId = ctx.params.roleId as string
if (roles.isBuiltin(roleId)) { if (roles.isBuiltin(roleId)) {
ctx.throw(400, "Cannot delete builtin role.") ctx.throw(400, "Cannot delete builtin role.")
} else { } else {
@ -144,14 +158,18 @@ export async function destroy(ctx: UserCtx) {
ctx.status = 200 ctx.status = 200
} }
export async function accessible(ctx: UserCtx) { export async function accessible(ctx: UserCtx<void, AccessibleRolesResponse>) {
let roleId = ctx.user?.roleId let roleId = ctx.user?.roleId
if (!roleId) { if (!roleId) {
roleId = roles.BUILTIN_ROLE_IDS.PUBLIC roleId = roles.BUILTIN_ROLE_IDS.PUBLIC
} }
if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) { if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) {
const appId = context.getAppId() const appId = context.getAppId()
ctx.body = await roles.getAllRoleIds(appId) if (!appId) {
ctx.body = []
} else {
ctx.body = await roles.getAllRoleIds(appId)
}
} else { } else {
ctx.body = await roles.getUserRoleIdHierarchy(roleId!) ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
} }

View File

@ -63,7 +63,7 @@ export async function fetch(ctx: UserCtx) {
export async function clientFetch(ctx: UserCtx) { export async function clientFetch(ctx: UserCtx) {
const routing = await getRoutingStructure() const routing = await getRoutingStructure()
let roleId = ctx.user?.role?._id let roleId = ctx.user?.role?._id
const roleIds = await roles.getUserRoleIdHierarchy(roleId) const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []
for (let topLevel of Object.values(routing.routes) as any) { for (let topLevel of Object.values(routing.routes) as any) {
for (let subpathKey of Object.keys(topLevel.subpaths)) { for (let subpathKey of Object.keys(topLevel.subpaths)) {
let found = false let found = false

View File

@ -1,12 +1,27 @@
import { import {
QueryJson, Datasource,
SearchFilters,
Table,
Row,
DatasourcePlusQueryResponse, DatasourcePlusQueryResponse,
Operation,
QueryJson,
Row,
SearchFilters,
} from "@budibase/types" } from "@budibase/types"
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { getSQLClient } from "../../../sdk/app/rows/utils"
import { cloneDeep } from "lodash" import { cloneDeep } from "lodash"
import sdk from "../../../sdk"
import { makeExternalQuery } from "../../../integrations/base/query"
import { SqlClient } from "../../../integrations/utils"
const WRITE_OPERATIONS: Operation[] = [
Operation.CREATE,
Operation.UPDATE,
Operation.DELETE,
]
const DISABLED_WRITE_CLIENTS: SqlClient[] = [
SqlClient.MY_SQL,
SqlClient.MS_SQL,
SqlClient.ORACLE,
]
class CharSequence { class CharSequence {
static alphabet = "abcdefghijklmnopqrstuvwxyz" static alphabet = "abcdefghijklmnopqrstuvwxyz"
@ -43,6 +58,25 @@ export default class AliasTables {
this.charSeq = new CharSequence() this.charSeq = new CharSequence()
} }
isAliasingEnabled(json: QueryJson, datasource: Datasource) {
const fieldLength = json.resource?.fields?.length
if (!fieldLength || fieldLength <= 0) {
return false
}
try {
const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation)
const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
if (isWrite && isDisabledClient) {
return false
}
} catch (err) {
// if we can't get an SQL client, we can't alias
return false
}
return true
}
getAlias(tableName: string) { getAlias(tableName: string) {
if (this.aliases[tableName]) { if (this.aliases[tableName]) {
return this.aliases[tableName] return this.aliases[tableName]
@ -62,7 +96,11 @@ export default class AliasTables {
if (idx === -1 || idx > 1) { if (idx === -1 || idx > 1) {
return return
} }
return Math.abs(tableName.length - name.length) <= 2 // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
// then the idx of the table name will be 1, and we should allow for it ending in a closing
// character - otherwise it should be the full length if the index is zero
const allowedCharacterDiff = idx * 2
return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
}) })
if (foundTableName) { if (foundTableName) {
const aliasedTableName = tableName.replace( const aliasedTableName = tableName.replace(
@ -109,57 +147,57 @@ export default class AliasTables {
async queryWithAliasing( async queryWithAliasing(
json: QueryJson json: QueryJson
): Promise<DatasourcePlusQueryResponse> { ): Promise<DatasourcePlusQueryResponse> {
json = cloneDeep(json) const datasourceId = json.endpoint.datasourceId
const aliasTable = (table: Table) => ({ const datasource = await sdk.datasources.get(datasourceId)
...table,
name: this.getAlias(table.name), const aliasingEnabled = this.isAliasingEnabled(json, datasource)
}) if (aliasingEnabled) {
// run through the query json to update anywhere a table may be used json = cloneDeep(json)
if (json.resource?.fields) { // run through the query json to update anywhere a table may be used
json.resource.fields = json.resource.fields.map(field => if (json.resource?.fields) {
this.aliasField(field) json.resource.fields = json.resource.fields.map(field =>
) this.aliasField(field)
} )
if (json.filters) {
for (let [filterKey, filter] of Object.entries(json.filters)) {
if (typeof filter !== "object") {
continue
}
const aliasedFilters: typeof filter = {}
for (let key of Object.keys(filter)) {
aliasedFilters[this.aliasField(key)] = filter[key]
}
json.filters[filterKey as keyof SearchFilters] = aliasedFilters
} }
} if (json.filters) {
if (json.relationships) { for (let [filterKey, filter] of Object.entries(json.filters)) {
json.relationships = json.relationships.map(relationship => ({ if (typeof filter !== "object") {
...relationship, continue
aliases: this.aliasMap([ }
relationship.through, const aliasedFilters: typeof filter = {}
relationship.tableName, for (let key of Object.keys(filter)) {
json.endpoint.entityId, aliasedFilters[this.aliasField(key)] = filter[key]
]), }
})) json.filters[filterKey as keyof SearchFilters] = aliasedFilters
} }
if (json.meta?.table) {
json.meta.table = aliasTable(json.meta.table)
}
if (json.meta?.tables) {
const aliasedTables: Record<string, Table> = {}
for (let [tableName, table] of Object.entries(json.meta.tables)) {
aliasedTables[this.getAlias(tableName)] = aliasTable(table)
} }
json.meta.tables = aliasedTables if (json.meta?.table) {
this.getAlias(json.meta.table.name)
}
if (json.meta?.tables) {
Object.keys(json.meta.tables).forEach(tableName =>
this.getAlias(tableName)
)
}
if (json.relationships) {
json.relationships = json.relationships.map(relationship => ({
...relationship,
aliases: this.aliasMap([
relationship.through,
relationship.tableName,
json.endpoint.entityId,
]),
}))
}
// invert and return
const invertedTableAliases: Record<string, string> = {}
for (let [key, value] of Object.entries(this.tableAliases)) {
invertedTableAliases[value] = key
}
json.tableAliases = invertedTableAliases
} }
// invert and return const response = await makeExternalQuery(datasource, json)
const invertedTableAliases: Record<string, string> = {} if (Array.isArray(response) && aliasingEnabled) {
for (let [key, value] of Object.entries(this.tableAliases)) {
invertedTableAliases[value] = key
}
json.tableAliases = invertedTableAliases
const response = await getDatasourceAndQuery(json)
if (Array.isArray(response)) {
return this.reverse(response) return this.reverse(response)
} else { } else {
return response return response
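
A condensed restatement of the guard introduced above (illustrative only, and assuming an SQL client could be resolved for the datasource):

const aliasingAllowed = (json: QueryJson, sqlClient: SqlClient) =>
  !!json.resource?.fields?.length &&
  !(
    WRITE_OPERATIONS.includes(json.endpoint.operation) &&
    DISABLED_WRITE_CLIENTS.includes(sqlClient)
  )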

View File

@ -211,7 +211,7 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
} }
} }
export async function fetchEnrichedRow(ctx: any) { export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx) ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
} }

View File

@ -174,6 +174,7 @@ export const serveApp = async function (ctx: Ctx) {
if (!env.isJest()) { if (!env.isJest()) {
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins) const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
const { head, html, css } = AppComponent.render({ const { head, html, css } = AppComponent.render({
title: branding?.platformTitle || `${appInfo.name}`,
metaImage: metaImage:
branding?.metaImageUrl || branding?.metaImageUrl ||
"https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png", "https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png",

View File

@ -6,6 +6,7 @@ import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
Operation, Operation,
RenameColumn,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
return breakExternalTableId(table._id).datasourceId return breakExternalTableId(table._id).datasourceId
} }
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const inputs = ctx.request.body const inputs = ctx.request.body
const renaming = inputs?._rename const adding = inputs?._add
// can't do this right now // can't do this right now
delete inputs.rows delete inputs.rows
const tableId = ctx.request.body._id const tableId = ctx.request.body._id
@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const { datasource, table } = await sdk.tables.external.save( const { datasource, table } = await sdk.tables.external.save(
datasourceId!, datasourceId!,
inputs, inputs,
{ tableId, renaming } { tableId, renaming, adding }
) )
builderSocket?.emitDatasourceUpdate(ctx, datasource) builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table return table

View File

@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId const appId = ctx.appId
const table = ctx.request.body const table = ctx.request.body
const isImport = table.rows const isImport = table.rows
const renaming = ctx.request.body._rename
let savedTable = await pickApi({ table }).save(ctx) const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming)
if (!table._id) { if (!table._id) {
await events.table.created(savedTable) await events.table.created(savedTable)
savedTable = sdk.tables.enrichViewSchemas(savedTable) savedTable = sdk.tables.enrichViewSchemas(savedTable)

View File

@ -12,11 +12,12 @@ import {
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const { rows, ...rest } = ctx.request.body const { rows, ...rest } = ctx.request.body
let tableToSave: Table & { let tableToSave: Table = {
_rename?: RenameColumn
} = {
_id: generateTableID(), _id: generateTableID(),
...rest, ...rest,
// Ensure these fields are populated, even if not sent in the request // Ensure these fields are populated, even if not sent in the request
@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
tableToSave.views = {} tableToSave.views = {}
} }
const renaming = tableToSave._rename
delete tableToSave._rename
try { try {
const { table } = await sdk.tables.internal.save(tableToSave, { const { table } = await sdk.tables.internal.save(tableToSave, {
user: ctx.user, user: ctx.user,
rowsToImport: rows, rowsToImport: rows,
tableId: ctx.request.body._id, tableId: ctx.request.body._id,
renaming: renaming, renaming,
}) })
return table return table

View File

@ -1,6 +1,6 @@
import { generateUserFlagID, InternalTables } from "../../db/utils" import { generateUserFlagID, InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users" import { getFullUser } from "../../utilities/users"
import { context } from "@budibase/backend-core" import { cache, context } from "@budibase/backend-core"
import { import {
ContextUserMetadata, ContextUserMetadata,
Ctx, Ctx,

View File

@ -13,7 +13,7 @@ describe("/api/keys", () => {
describe("fetch", () => { describe("fetch", () => {
it("should allow fetching", async () => { it("should allow fetching", async () => {
await setup.switchToSelfHosted(async () => { await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const res = await request const res = await request
.get(`/api/keys`) .get(`/api/keys`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
@ -34,7 +34,7 @@ describe("/api/keys", () => {
describe("update", () => { describe("update", () => {
it("should allow updating a value", async () => { it("should allow updating a value", async () => {
await setup.switchToSelfHosted(async () => { await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const res = await request const res = await request
.put(`/api/keys/TEST`) .put(`/api/keys/TEST`)
.send({ .send({

View File

@ -16,8 +16,9 @@ import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils" import { AppStatus } from "../../../db/utils"
import { events, utils, context } from "@budibase/backend-core" import { events, utils, context } from "@budibase/backend-core"
import env from "../../../environment" import env from "../../../environment"
import type { App } from "@budibase/types" import { type App } from "@budibase/types"
import tk from "timekeeper" import tk from "timekeeper"
import * as uuid from "uuid"
describe("/applications", () => { describe("/applications", () => {
let config = setup.getConfig() let config = setup.getConfig()
@ -184,7 +185,7 @@ describe("/applications", () => {
it("app should not sync if production", async () => { it("app should not sync if production", async () => {
const { message } = await config.api.application.sync( const { message } = await config.api.application.sync(
app.appId.replace("_dev", ""), app.appId.replace("_dev", ""),
{ statusCode: 400 } { status: 400 }
) )
expect(message).toEqual( expect(message).toEqual(
@ -248,4 +249,93 @@ describe("/applications", () => {
expect(devLogs.data.length).toBe(0) expect(devLogs.data.length).toBe(0)
}) })
}) })
describe("permissions", () => {
it("should only return apps a user has access to", async () => {
let user = await config.createUser({
builder: { global: false },
admin: { global: false },
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(0)
})
user = await config.globalUser({
...user,
builder: {
apps: [config.getProdAppId()],
},
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(1)
})
})
it("should only return apps a user has access to through a custom role", async () => {
let user = await config.createUser({
builder: { global: false },
admin: { global: false },
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(0)
})
const role = await config.api.roles.save({
name: "Test",
inherits: "PUBLIC",
permissionId: "read_only",
version: "name",
})
user = await config.globalUser({
...user,
roles: {
[config.getProdAppId()]: role.name,
},
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(1)
})
})
it("should only return apps a user has access to through a custom role on a group", async () => {
let user = await config.createUser({
builder: { global: false },
admin: { global: false },
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(0)
})
const roleName = uuid.v4().replace(/-/g, "")
const role = await config.api.roles.save({
name: roleName,
inherits: "PUBLIC",
permissionId: "read_only",
version: "name",
})
const group = await config.createGroup(role._id!)
user = await config.globalUser({
...user,
userGroups: [group._id!],
})
await config.withUser(user, async () => {
const apps = await config.api.application.fetch()
expect(apps).toHaveLength(1)
})
})
})
}) })

View File

@ -29,7 +29,7 @@ describe("/api/applications/:appId/sync", () => {
let resp = (await config.api.attachment.process( let resp = (await config.api.attachment.process(
"ohno.exe", "ohno.exe",
Buffer.from([0]), Buffer.from([0]),
{ expectStatus: 400 } { status: 400 }
)) as unknown as APIError )) as unknown as APIError
expect(resp.message).toContain("invalid extension") expect(resp.message).toContain("invalid extension")
}) })
@ -40,7 +40,7 @@ describe("/api/applications/:appId/sync", () => {
let resp = (await config.api.attachment.process( let resp = (await config.api.attachment.process(
"OHNO.EXE", "OHNO.EXE",
Buffer.from([0]), Buffer.from([0]),
{ expectStatus: 400 } { status: 400 }
)) as unknown as APIError )) as unknown as APIError
expect(resp.message).toContain("invalid extension") expect(resp.message).toContain("invalid extension")
}) })
@ -51,7 +51,7 @@ describe("/api/applications/:appId/sync", () => {
undefined as any, undefined as any,
undefined as any, undefined as any,
{ {
expectStatus: 400, status: 400,
} }
)) as unknown as APIError )) as unknown as APIError
expect(resp.message).toContain("No file provided") expect(resp.message).toContain("No file provided")

View File

@ -19,11 +19,8 @@ describe("/backups", () => {
describe("/api/backups/export", () => { describe("/api/backups/export", () => {
it("should be able to export app", async () => { it("should be able to export app", async () => {
const { body, headers } = await config.api.backup.exportBasicBackup( const body = await config.api.backup.exportBasicBackup(config.getAppId()!)
config.getAppId()!
)
expect(body instanceof Buffer).toBe(true) expect(body instanceof Buffer).toBe(true)
expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1) expect(events.app.exported).toBeCalledTimes(1)
}) })
@ -38,15 +35,13 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => { it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
const { headers } = await config.api.backup.exportBasicBackup( await config.api.backup.exportBasicBackup(config.getAppId()!, {
config.getAppId()! headers: {
) "content-disposition": `attachment; filename="${
config.getApp().name
expect(headers["content-disposition"]).toEqual( }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`,
`attachment; filename="${ },
config.getApp().name })
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
)
}) })
}) })

View File

@ -45,7 +45,7 @@ describe("/permission", () => {
table = (await config.createTable()) as typeof table table = (await config.createTable()) as typeof table
row = await config.createRow() row = await config.createRow()
view = await config.api.viewV2.create({ tableId: table._id }) view = await config.api.viewV2.create({ tableId: table._id })
perms = await config.api.permission.set({ perms = await config.api.permission.add({
roleId: STD_ROLE_ID, roleId: STD_ROLE_ID,
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.READ, level: PermissionLevel.READ,
@ -88,13 +88,13 @@ describe("/permission", () => {
}) })
it("should get resource permissions with multiple roles", async () => { it("should get resource permissions with multiple roles", async () => {
perms = await config.api.permission.set({ perms = await config.api.permission.add({
roleId: HIGHER_ROLE_ID, roleId: HIGHER_ROLE_ID,
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.WRITE, level: PermissionLevel.WRITE,
}) })
const res = await config.api.permission.get(table._id) const res = await config.api.permission.get(table._id)
expect(res.body).toEqual({ expect(res).toEqual({
permissions: { permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID }, read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID }, write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },
@ -117,16 +117,19 @@ describe("/permission", () => {
level: PermissionLevel.READ, level: PermissionLevel.READ,
}) })
const response = await config.api.permission.set( await config.api.permission.add(
{ {
roleId: STD_ROLE_ID, roleId: STD_ROLE_ID,
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.EXECUTE, level: PermissionLevel.EXECUTE,
}, },
{ expectStatus: 403 } {
) status: 403,
expect(response.message).toEqual( body: {
"You are not allowed to 'read' the resource type 'datasource'" message:
"You are not allowed to 'read' the resource type 'datasource'",
},
}
) )
}) })
}) })
@ -138,9 +141,9 @@ describe("/permission", () => {
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.READ, level: PermissionLevel.READ,
}) })
expect(res.body[0]._id).toEqual(STD_ROLE_ID) expect(res[0]._id).toEqual(STD_ROLE_ID)
const permsRes = await config.api.permission.get(table._id) const permsRes = await config.api.permission.get(table._id)
expect(permsRes.body[STD_ROLE_ID]).toBeUndefined() expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
}) })
it("throw forbidden if the action is not allowed for the resource", async () => { it("throw forbidden if the action is not allowed for the resource", async () => {
@ -156,10 +159,13 @@ describe("/permission", () => {
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.EXECUTE, level: PermissionLevel.EXECUTE,
}, },
{ expectStatus: 403 } {
) status: 403,
expect(response.body.message).toEqual( body: {
"You are not allowed to 'read' the resource type 'datasource'" message:
"You are not allowed to 'read' the resource type 'datasource'",
},
}
) )
}) })
}) })
@ -181,10 +187,8 @@ describe("/permission", () => {
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()
const res = await config.api.viewV2.search(view.id, undefined, { const res = await config.api.viewV2.publicSearch(view.id)
usePublicUser: true, expect(res.rows[0]._id).toEqual(row._id)
})
expect(res.body.rows[0]._id).toEqual(row._id)
}) })
it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => { it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {
@ -196,14 +200,11 @@ describe("/permission", () => {
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()
await config.api.viewV2.search(view.id, undefined, { await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 })
expectStatus: 403,
usePublicUser: true,
})
}) })
it("should ignore the view permissions if the flag is not on", async () => { it("should ignore the view permissions if the flag is not on", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: STD_ROLE_ID, roleId: STD_ROLE_ID,
resourceId: view.id, resourceId: view.id,
level: PermissionLevel.READ, level: PermissionLevel.READ,
@ -216,15 +217,14 @@ describe("/permission", () => {
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()
await config.api.viewV2.search(view.id, undefined, { await config.api.viewV2.publicSearch(view.id, undefined, {
expectStatus: 403, status: 403,
usePublicUser: true,
}) })
}) })
it("should use the view permissions if the flag is on", async () => { it("should use the view permissions if the flag is on", async () => {
mocks.licenses.useViewPermissions() mocks.licenses.useViewPermissions()
await config.api.permission.set({ await config.api.permission.add({
roleId: STD_ROLE_ID, roleId: STD_ROLE_ID,
resourceId: view.id, resourceId: view.id,
level: PermissionLevel.READ, level: PermissionLevel.READ,
@ -237,10 +237,8 @@ describe("/permission", () => {
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()
const res = await config.api.viewV2.search(view.id, undefined, { const res = await config.api.viewV2.publicSearch(view.id)
usePublicUser: true, expect(res.rows[0]._id).toEqual(row._id)
})
expect(res.body.rows[0]._id).toEqual(row._id)
}) })
it("shouldn't allow writing from a public user", async () => { it("shouldn't allow writing from a public user", async () => {
@ -277,7 +275,7 @@ describe("/permission", () => {
const res = await config.api.permission.get(legacyView.name) const res = await config.api.permission.get(legacyView.name)
expect(res.body).toEqual({ expect(res).toEqual({
permissions: { permissions: {
read: { read: {
permissionType: "BASE", permissionType: "BASE",

View File

@ -157,7 +157,7 @@ describe("/queries", () => {
}) })
it("should find a query in cloud", async () => { it("should find a query in cloud", async () => {
await setup.switchToSelfHosted(async () => { await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const query = await config.createQuery() const query = await config.createQuery()
const res = await request const res = await request
.get(`/api/queries/${query._id}`) .get(`/api/queries/${query._id}`)
@ -397,15 +397,16 @@ describe("/queries", () => {
}) })
it("should fail with invalid integration type", async () => { it("should fail with invalid integration type", async () => {
const response = await config.api.datasource.create( const datasource: Datasource = {
{ ...basicDatasource().datasource,
...basicDatasource().datasource, source: "INVALID_INTEGRATION" as SourceName,
source: "INVALID_INTEGRATION" as SourceName, }
await config.api.datasource.create(datasource, {
status: 500,
body: {
message: "No datasource implementation found.",
}, },
{ expectStatus: 500, rawResponse: true } })
)
expect(response.body.message).toBe("No datasource implementation found.")
}) })
}) })

View File

@ -93,7 +93,7 @@ describe("/roles", () => {
it("should be able to get the role with a permission added", async () => { it("should be able to get the role with a permission added", async () => {
const table = await config.createTable() const table = await config.createTable()
await config.api.permission.set({ await config.api.permission.add({
roleId: BUILTIN_ROLE_IDS.POWER, roleId: BUILTIN_ROLE_IDS.POWER,
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.READ, level: PermissionLevel.READ,

View File

@ -7,6 +7,7 @@ import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { import {
AutoFieldSubType, AutoFieldSubType,
DeleteRow,
FieldSchema, FieldSchema,
FieldType, FieldType,
FieldTypeSubtypes, FieldTypeSubtypes,
@ -106,9 +107,6 @@ describe.each([
mocks.licenses.useCloudFree() mocks.licenses.useCloudFree()
}) })
const loadRow = (id: string, tbl_Id: string, status = 200) =>
config.api.row.get(tbl_Id, id, { expectStatus: status })
const getRowUsage = async () => { const getRowUsage = async () => {
const { total } = await config.doInContext(undefined, () => const { total } = await config.doInContext(undefined, () =>
quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
@ -235,7 +233,7 @@ describe.each([
const res = await config.api.row.get(tableId, existing._id!) const res = await config.api.row.get(tableId, existing._id!)
expect(res.body).toEqual({ expect(res).toEqual({
...existing, ...existing,
...defaultRowFields, ...defaultRowFields,
}) })
@ -265,7 +263,7 @@ describe.each([
await config.createRow() await config.createRow()
await config.api.row.get(tableId, "1234567", { await config.api.row.get(tableId, "1234567", {
expectStatus: 404, status: 404,
}) })
}) })
@ -395,7 +393,7 @@ describe.each([
const createdRow = await config.createRow(row) const createdRow = await config.createRow(row)
const id = createdRow._id! const id = createdRow._id!
const saved = (await loadRow(id, table._id!)).body const saved = await config.api.row.get(table._id!, id)
expect(saved.stringUndefined).toBe(undefined) expect(saved.stringUndefined).toBe(undefined)
expect(saved.stringNull).toBe(null) expect(saved.stringNull).toBe(null)
@ -476,8 +474,8 @@ describe.each([
) )
const row = await config.api.row.get(table._id!, createRowResponse._id!) const row = await config.api.row.get(table._id!, createRowResponse._id!)
expect(row.body.Story).toBeUndefined() expect(row.Story).toBeUndefined()
expect(row.body).toEqual({ expect(row).toEqual({
...defaultRowFields, ...defaultRowFields,
OrderID: 1111, OrderID: 1111,
Country: "Aussy", Country: "Aussy",
@ -524,10 +522,10 @@ describe.each([
expect(row.name).toEqual("Updated Name") expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description) expect(row.description).toEqual(existing.description)
const savedRow = await loadRow(row._id!, table._id!) const savedRow = await config.api.row.get(table._id!, row._id!)
expect(savedRow.body.description).toEqual(existing.description) expect(savedRow.description).toEqual(existing.description)
expect(savedRow.body.name).toEqual("Updated Name") expect(savedRow.name).toEqual("Updated Name")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -543,7 +541,7 @@ describe.each([
tableId: table._id!, tableId: table._id!,
name: 1, name: 1,
}, },
{ expectStatus: 400 } { status: 400 }
) )
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
@ -582,8 +580,8 @@ describe.each([
}) })
let getResp = await config.api.row.get(table._id!, row._id!) let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id) expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
let patchResp = await config.api.row.patch(table._id!, { let patchResp = await config.api.row.patch(table._id!, {
_id: row._id!, _id: row._id!,
@ -595,8 +593,8 @@ describe.each([
expect(patchResp.user2[0]._id).toEqual(user2._id) expect(patchResp.user2[0]._id).toEqual(user2._id)
getResp = await config.api.row.get(table._id!, row._id!) getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id) expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
}) })
it("should be able to update relationships when both columns are same name", async () => { it("should be able to update relationships when both columns are same name", async () => {
@ -609,7 +607,7 @@ describe.each([
description: "test", description: "test",
relationship: [row._id], relationship: [row._id],
}) })
row = (await config.api.row.get(table._id!, row._id!)).body row = await config.api.row.get(table._id!, row._id!)
expect(row.relationship.length).toBe(1) expect(row.relationship.length).toBe(1)
const resp = await config.api.row.patch(table._id!, { const resp = await config.api.row.patch(table._id!, {
_id: row._id!, _id: row._id!,
@ -632,8 +630,10 @@ describe.each([
const createdRow = await config.createRow() const createdRow = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, [createdRow]) const res = await config.api.row.bulkDelete(table._id!, {
expect(res.body[0]._id).toEqual(createdRow._id) rows: [createdRow],
})
expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
}) })
}) })
@ -682,10 +682,12 @@ describe.each([
const row2 = await config.createRow() const row2 = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, [row1, row2]) const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
})
expect(res.body.length).toEqual(2) expect(res.length).toEqual(2)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
}) })
@ -697,14 +699,12 @@ describe.each([
]) ])
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, [ const res = await config.api.row.bulkDelete(table._id!, {
row1, rows: [row1, row2._id!, { _id: row3._id }],
row2._id, })
{ _id: row3._id },
])
expect(res.body.length).toEqual(3) expect(res.length).toEqual(3)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 3) await assertRowUsage(rowUsage - 3)
}) })
@ -712,34 +712,36 @@ describe.each([
const row1 = await config.createRow() const row1 = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, row1) const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
expect(res.body.id).toEqual(row1._id) expect(res.id).toEqual(row1._id)
await loadRow(row1._id!, table._id!, 404) await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
}) })
it("Should ignore malformed/invalid delete requests", async () => { it("Should ignore malformed/invalid delete requests", async () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.row.delete( await config.api.row.delete(table._id!, { not: "valid" } as any, {
table._id!, status: 400,
{ not: "valid" }, body: {
{ expectStatus: 400 } message: "Invalid delete rows request",
) },
expect(res.body.message).toEqual("Invalid delete rows request") })
const res2 = await config.api.row.delete( await config.api.row.delete(table._id!, { rows: 123 } as any, {
table._id!, status: 400,
{ rows: 123 }, body: {
{ expectStatus: 400 } message: "Invalid delete rows request",
) },
expect(res2.body.message).toEqual("Invalid delete rows request") })
const res3 = await config.api.row.delete(table._id!, "invalid", { await config.api.row.delete(table._id!, "invalid" as any, {
expectStatus: 400, status: 400,
body: {
message: "Invalid delete rows request",
},
}) })
expect(res3.body.message).toEqual("Invalid delete rows request")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -757,16 +759,16 @@ describe.each([
const row = await config.createRow() const row = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.legacyView.get(table._id!) const rows = await config.api.legacyView.get(table._id!)
expect(res.body.length).toEqual(1) expect(rows.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(rows[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
it("should throw an error if view doesn't exist", async () => { it("should throw an error if view doesn't exist", async () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.legacyView.get("derp", { expectStatus: 404 }) await config.api.legacyView.get("derp", { status: 404 })
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -781,9 +783,9 @@ describe.each([
const row = await config.createRow() const row = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const res = await config.api.legacyView.get(view.name) const rows = await config.api.legacyView.get(view.name)
expect(res.body.length).toEqual(1) expect(rows.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id) expect(rows[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
@ -841,8 +843,8 @@ describe.each([
linkedTable._id!, linkedTable._id!,
secondRow._id! secondRow._id!
) )
expect(resBasic.body.link.length).toBe(1) expect(resBasic.link.length).toBe(1)
expect(resBasic.body.link[0]).toEqual({ expect(resBasic.link[0]).toEqual({
_id: firstRow._id, _id: firstRow._id,
primaryDisplay: firstRow.name, primaryDisplay: firstRow.name,
}) })
@ -852,10 +854,10 @@ describe.each([
linkedTable._id!, linkedTable._id!,
secondRow._id! secondRow._id!
) )
expect(resEnriched.body.link.length).toBe(1) expect(resEnriched.link.length).toBe(1)
expect(resEnriched.body.link[0]._id).toBe(firstRow._id) expect(resEnriched.link[0]._id).toBe(firstRow._id)
expect(resEnriched.body.link[0].name).toBe("Test Contact") expect(resEnriched.link[0].name).toBe("Test Contact")
expect(resEnriched.body.link[0].description).toBe("original description") expect(resEnriched.link[0].description).toBe("original description")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
}) })
@ -880,8 +882,7 @@ describe.each([
], ],
tableId: table._id, tableId: table._id,
}) })
// the environment needs configured for this await config.withEnv({ SELF_HOSTED: "true" }, async () => {
await setup.switchToSelfHosted(async () => {
return context.doInAppContext(config.getAppId(), async () => { return context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row]) const enriched = await outputProcessing(table, [row])
expect((enriched as Row[])[0].attachment[0].url).toBe( expect((enriched as Row[])[0].attachment[0].url).toBe(
@ -903,7 +904,7 @@ describe.each([
const res = await config.api.row.exportRows(table._id!, { const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!], rows: [existing._id!],
}) })
const results = JSON.parse(res.text) const results = JSON.parse(res)
expect(results.length).toEqual(1) expect(results.length).toEqual(1)
const row = results[0] const row = results[0]
@ -922,7 +923,7 @@ describe.each([
rows: [existing._id!], rows: [existing._id!],
columns: ["_id"], columns: ["_id"],
}) })
const results = JSON.parse(res.text) const results = JSON.parse(res)
expect(results.length).toEqual(1) expect(results.length).toEqual(1)
const row = results[0] const row = results[0]
@ -1000,7 +1001,7 @@ describe.each([
}) })
const row = await config.api.row.get(table._id!, newRow._id!) const row = await config.api.row.get(table._id!, newRow._id!)
expect(row.body).toEqual({ expect(row).toEqual({
name: data.name, name: data.name,
surname: data.surname, surname: data.surname,
address: data.address, address: data.address,
@ -1010,9 +1011,9 @@ describe.each([
id: newRow.id, id: newRow.id,
...defaultRowFields, ...defaultRowFields,
}) })
expect(row.body._viewId).toBeUndefined() expect(row._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined() expect(row.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.jobTitle).toBeUndefined()
}) })
}) })
@ -1042,7 +1043,7 @@ describe.each([
}) })
const row = await config.api.row.get(tableId, newRow._id!) const row = await config.api.row.get(tableId, newRow._id!)
expect(row.body).toEqual({ expect(row).toEqual({
...newRow, ...newRow,
name: newData.name, name: newData.name,
address: newData.address, address: newData.address,
@ -1051,9 +1052,9 @@ describe.each([
id: newRow.id, id: newRow.id,
...defaultRowFields, ...defaultRowFields,
}) })
expect(row.body._viewId).toBeUndefined() expect(row._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined() expect(row.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.jobTitle).toBeUndefined()
}) })
}) })
@ -1071,12 +1072,12 @@ describe.each([
const createdRow = await config.createRow() const createdRow = await config.createRow()
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.row.delete(view.id, [createdRow]) await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
await config.api.row.get(tableId, createdRow._id!, { await config.api.row.get(tableId, createdRow._id!, {
expectStatus: 404, status: 404,
}) })
}) })
@ -1097,17 +1098,17 @@ describe.each([
]) ])
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.row.delete(view.id, [rows[0], rows[2]]) await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
await config.api.row.get(tableId, rows[0]._id!, { await config.api.row.get(tableId, rows[0]._id!, {
expectStatus: 404, status: 404,
}) })
await config.api.row.get(tableId, rows[2]._id!, { await config.api.row.get(tableId, rows[2]._id!, {
expectStatus: 404, status: 404,
}) })
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) await config.api.row.get(tableId, rows[1]._id!, { status: 200 })
}) })
}) })
@ -1154,8 +1155,8 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
expect(response.body).toEqual({ expect(response).toEqual({
rows: expect.arrayContaining( rows: expect.arrayContaining(
rows.map(r => ({ rows.map(r => ({
_viewId: createViewResponse.id, _viewId: createViewResponse.id,
@ -1206,8 +1207,8 @@ describe.each([
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5) expect(response.rows).toHaveLength(5)
expect(response.body).toEqual({ expect(response).toEqual({
rows: expect.arrayContaining( rows: expect.arrayContaining(
expectedRows.map(r => ({ expectedRows.map(r => ({
_viewId: createViewResponse.id, _viewId: createViewResponse.id,
@ -1328,8 +1329,8 @@ describe.each([
createViewResponse.id createViewResponse.id
) )
expect(response.body.rows).toHaveLength(4) expect(response.rows).toHaveLength(4)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expected.map(name => expect.objectContaining({ name })) expected.map(name => expect.objectContaining({ name }))
) )
} }
@ -1357,8 +1358,8 @@ describe.each([
} }
) )
expect(response.body.rows).toHaveLength(4) expect(response.rows).toHaveLength(4)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expected.map(name => expect.objectContaining({ name })) expected.map(name => expect.objectContaining({ name }))
) )
} }
@ -1382,8 +1383,8 @@ describe.each([
}) })
const response = await config.api.viewV2.search(view.id) const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
expect(response.body.rows).toEqual( expect(response.rows).toEqual(
expect.arrayContaining( expect.arrayContaining(
rows.map(r => ({ rows.map(r => ({
...(isInternal ...(isInternal
@ -1402,7 +1403,7 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0) expect(response.rows).toHaveLength(0)
}) })
it("respects the limit parameter", async () => { it("respects the limit parameter", async () => {
@ -1417,7 +1418,7 @@ describe.each([
query: {}, query: {},
}) })
expect(response.body.rows).toHaveLength(limit) expect(response.rows).toHaveLength(limit)
}) })
it("can handle pagination", async () => { it("can handle pagination", async () => {
@ -1426,7 +1427,7 @@ describe.each([
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const allRows = (await config.api.viewV2.search(createViewResponse.id)) const allRows = (await config.api.viewV2.search(createViewResponse.id))
.body.rows .rows
const firstPageResponse = await config.api.viewV2.search( const firstPageResponse = await config.api.viewV2.search(
createViewResponse.id, createViewResponse.id,
@ -1436,7 +1437,7 @@ describe.each([
query: {}, query: {},
} }
) )
expect(firstPageResponse.body).toEqual({ expect(firstPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(0, 4)), rows: expect.arrayContaining(allRows.slice(0, 4)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
@ -1448,12 +1449,12 @@ describe.each([
{ {
paginate: true, paginate: true,
limit: 4, limit: 4,
bookmark: firstPageResponse.body.bookmark, bookmark: firstPageResponse.bookmark,
query: {}, query: {},
} }
) )
expect(secondPageResponse.body).toEqual({ expect(secondPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(4, 8)), rows: expect.arrayContaining(allRows.slice(4, 8)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
@ -1465,11 +1466,11 @@ describe.each([
{ {
paginate: true, paginate: true,
limit: 4, limit: 4,
bookmark: secondPageResponse.body.bookmark, bookmark: secondPageResponse.bookmark,
query: {}, query: {},
} }
) )
expect(lastPageResponse.body).toEqual({ expect(lastPageResponse).toEqual({
rows: expect.arrayContaining(allRows.slice(8)), rows: expect.arrayContaining(allRows.slice(8)),
totalRows: isInternal ? 10 : undefined, totalRows: isInternal ? 10 : undefined,
hasNextPage: false, hasNextPage: false,
@ -1489,7 +1490,7 @@ describe.each([
email: "joe@joe.com", email: "joe@joe.com",
roles: {}, roles: {},
}, },
{ expectStatus: 400 } { status: 400 }
) )
expect(response.message).toBe("Cannot create new user entry.") expect(response.message).toBe("Cannot create new user entry.")
}) })
@ -1516,58 +1517,52 @@ describe.each([
it("does not allow public users to fetch by default", async () => { it("does not allow public users to fetch by default", async () => {
await config.publish() await config.publish()
await config.api.viewV2.search(viewId, undefined, { await config.api.viewV2.publicSearch(viewId, undefined, {
expectStatus: 403, status: 403,
usePublicUser: true,
}) })
}) })
it("allow public users to fetch when permissions are explicit", async () => { it("allow public users to fetch when permissions are explicit", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: viewId, resourceId: viewId,
}) })
await config.publish() await config.publish()
const response = await config.api.viewV2.search(viewId, undefined, { const response = await config.api.viewV2.publicSearch(viewId)
usePublicUser: true,
})
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
}) })
it("allow public users to fetch when permissions are inherited", async () => { it("allow public users to fetch when permissions are inherited", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: tableId, resourceId: tableId,
}) })
await config.publish() await config.publish()
const response = await config.api.viewV2.search(viewId, undefined, { const response = await config.api.viewV2.publicSearch(viewId)
usePublicUser: true,
})
expect(response.body.rows).toHaveLength(10) expect(response.rows).toHaveLength(10)
}) })
it("respects inherited permissions, not allowing not public views from public tables", async () => { it("respects inherited permissions, not allowing not public views from public tables", async () => {
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: tableId, resourceId: tableId,
}) })
await config.api.permission.set({ await config.api.permission.add({
roleId: roles.BUILTIN_ROLE_IDS.POWER, roleId: roles.BUILTIN_ROLE_IDS.POWER,
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: viewId, resourceId: viewId,
}) })
await config.publish() await config.publish()
await config.api.viewV2.search(viewId, undefined, { await config.api.viewV2.publicSearch(viewId, undefined, {
usePublicUser: true, status: 403,
expectStatus: 403,
}) })
}) })
}) })
@ -1754,7 +1749,7 @@ describe.each([
} }
const row = await config.api.row.save(tableId, rowData) const row = await config.api.row.save(tableId, rowData)
const { body: retrieved } = await config.api.row.get(tableId, row._id!) const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({ expect(retrieved).toEqual({
name: rowData.name, name: rowData.name,
description: rowData.description, description: rowData.description,
@ -1781,7 +1776,7 @@ describe.each([
} }
const row = await config.api.row.save(tableId, rowData) const row = await config.api.row.save(tableId, rowData)
const { body: retrieved } = await config.api.row.get(tableId, row._id!) const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({ expect(retrieved).toEqual({
name: rowData.name, name: rowData.name,
description: rowData.description, description: rowData.description,
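
The row assertions above reflect the same rewrite: config.api.row.get now resolves to the parsed Row rather than a raw supertest response, bulk deletion moves to config.api.row.bulkDelete, and expected failures are declared through { status } instead of expectStatus. A rough sketch of what the reworked get could look like — the import paths, route, and helper wiring here are illustrative assumptions, not the real client implementation:

import { Row } from "@budibase/types"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"

// Hypothetical sketch: fold the status assertion into the call and return the
// typed body, so callers never touch res.body directly.
async function getRow(
  config: TestConfiguration,
  tableId: string,
  rowId: string,
  expectations?: { status?: number }
): Promise<Row> {
  const res = await config
    .getRequest()!
    .get(`/api/${tableId}/rows/${rowId}`) // assumed route shape
    .set(config.defaultHeaders())
    .expect(expectations?.status ?? 200)
  return res.body as Row
}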

View File

@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
const { basicTable } = setup.structures const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
describe("/tables", () => { describe("/tables", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -285,6 +286,35 @@ describe("/tables", () => {
expect(res.body.schema.roleId).toBeDefined() expect(res.body.schema.roleId).toBeDefined()
}) })
}) })
it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
_add: {
name: "NEW_COLUMN",
},
...basicTable(),
}
const response = await request
.post(`/api/tables`)
.send(saveTableRequest)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const expectedResponse = {
...saveTableRequest,
_rev: expect.stringMatching(/^\d-.+/),
_id: expect.stringMatching(/^ta_.+/),
createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {},
}
delete expectedResponse._add
expect(response.status).toBe(200)
expect(response.body).toEqual(expectedResponse)
})
}) })
describe("import", () => { describe("import", () => {
@ -663,8 +693,7 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const resp = await config.api.row.get(table._id!, testRow._id!) const migratedRow = await config.api.row.get(table._id!, testRow._id!)
const migratedRow = resp.body as Row
expect(migratedRow["user column"]).toBeDefined() expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined() expect(migratedRow["user relationship"]).not.toBeDefined()
@ -716,15 +745,13 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) const row1Migrated = await config.api.row.get(table._id!, row1._id!)
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id]) expect.arrayContaining([users[0]._id, users[1]._id])
) )
const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) const row2Migrated = await config.api.row.get(table._id!, row2._id!)
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[1]._id, users[2]._id]) expect.arrayContaining([users[1]._id, users[2]._id])
@ -773,15 +800,13 @@ describe("/tables", () => {
expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) const row1Migrated = await config.api.row.get(table._id!, row1._id!)
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id]) expect.arrayContaining([users[0]._id, users[1]._id])
) )
const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) const row2Migrated = await config.api.row.get(table._id!, row2._id!)
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
users[2]._id, users[2]._id,
@ -831,7 +856,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -846,7 +871,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -861,7 +886,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
@ -880,7 +905,7 @@ describe("/tables", () => {
subtype: FieldSubtype.USERS, subtype: FieldSubtype.USERS,
}, },
}, },
{ expectStatus: 400 } { status: 400 }
) )
}) })
}) })

View File

@ -90,7 +90,7 @@ describe("/users", () => {
}) })
await config.api.user.update( await config.api.user.update(
{ ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER }, { ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER },
{ expectStatus: 409 } { status: 409 }
) )
}) })
}) })

View File

@ -1,5 +1,4 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import env from "../../../../environment"
import supertest from "supertest" import supertest from "supertest"
export * as structures from "../../../../tests/utilities/structures" export * as structures from "../../../../tests/utilities/structures"
@ -47,10 +46,10 @@ export function delay(ms: number) {
} }
let request: supertest.SuperTest<supertest.Test> | undefined | null, let request: supertest.SuperTest<supertest.Test> | undefined | null,
config: TestConfig | null config: TestConfiguration | null
export function beforeAll() { export function beforeAll() {
config = new TestConfig() config = new TestConfiguration()
request = config.getRequest() request = config.getRequest()
} }
@ -77,21 +76,3 @@ export function getConfig() {
} }
return config! return config!
} }
export async function switchToSelfHosted(func: any) {
// self hosted stops any attempts to Dynamo
env._set("NODE_ENV", "production")
env._set("SELF_HOSTED", true)
let error
try {
await func()
} catch (err) {
error = err
}
env._set("NODE_ENV", "jest")
env._set("SELF_HOSTED", false)
// don't throw error until after reset
if (error) {
throw error
}
}
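
The helper removed here is superseded by config.withEnv, used throughout this diff as config.withEnv({ SELF_HOSTED: "true" }, async () => { ... }). A minimal sketch of the equivalent logic as a standalone function, assuming the same env._set mechanism the old helper relied on (the real TestConfiguration method may differ):

import env from "../../../../environment"

// Apply temporary environment overrides, run the callback, and always restore
// the previous values — mirroring the old helper's "don't throw error until
// after reset" behaviour via try/finally.
export async function withEnv<T>(
  overrides: Record<string, any>,
  fn: () => Promise<T>
): Promise<T> {
  const previous: Record<string, any> = {}
  for (const [key, value] of Object.entries(overrides)) {
    previous[key] = (env as any)[key]
    env._set(key, value)
  }
  try {
    return await fn()
  } finally {
    for (const [key, value] of Object.entries(previous)) {
      env._set(key, value)
    }
  }
}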

View File

@ -177,7 +177,7 @@ describe.each([
} }
await config.api.viewV2.create(newView, { await config.api.viewV2.create(newView, {
expectStatus: 201, status: 201,
}) })
}) })
}) })
@ -275,7 +275,7 @@ describe.each([
const tableId = table._id! const tableId = table._id!
await config.api.viewV2.update( await config.api.viewV2.update(
{ ...view, id: generator.guid() }, { ...view, id: generator.guid() },
{ expectStatus: 404 } { status: 404 }
) )
expect(await config.api.table.get(tableId)).toEqual( expect(await config.api.table.get(tableId)).toEqual(
@ -304,7 +304,7 @@ describe.each([
}, },
], ],
}, },
{ expectStatus: 404 } { status: 404 }
) )
expect(await config.api.table.get(tableId)).toEqual( expect(await config.api.table.get(tableId)).toEqual(
@ -326,12 +326,10 @@ describe.each([
...viewV1, ...viewV1,
}, },
{ {
expectStatus: 400, status: 400,
handleResponse: r => { body: {
expect(r.body).toEqual({ message: "Only views V2 can be updated",
message: "Only views V2 can be updated", status: 400,
status: 400,
})
}, },
} }
) )
@ -403,7 +401,7 @@ describe.each([
} as Record<string, FieldSchema>, } as Record<string, FieldSchema>,
}, },
{ {
expectStatus: 200, status: 200,
} }
) )
}) })

View File

@ -30,9 +30,9 @@ describe("migrations", () => {
const appId = config.getAppId() const appId = config.getAppId()
const response = await config.api.application.getRaw(appId) await config.api.application.get(appId, {
headersNotPresent: [Header.MIGRATING_APP],
expect(response.headers[Header.MIGRATING_APP]).toBeUndefined() })
}) })
it("accessing an app that has pending migrations will attach the migrating header", async () => { it("accessing an app that has pending migrations will attach the migrating header", async () => {
@ -46,8 +46,10 @@ describe("migrations", () => {
func: async () => {}, func: async () => {},
}) })
const response = await config.api.application.getRaw(appId) await config.api.application.get(appId, {
headers: {
expect(response.headers[Header.MIGRATING_APP]).toEqual(appId) [Header.MIGRATING_APP]: appId,
},
})
}) })
}) })
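
Header assertions move into the same expectations object: headers lists header/value pairs that must be present on the response, while headersNotPresent lists header names that must be absent (used above for Header.MIGRATING_APP). A small sketch of how a client could apply them, with the option names taken from the calls above and the rest assumed:

import supertest from "supertest"

// Hypothetical header checks; superagent lowercases header names on the
// response, hence the toLowerCase() when looking them up.
function checkHeaders(
  res: supertest.Response,
  expectations?: {
    headers?: Record<string, string>
    headersNotPresent?: string[]
  }
) {
  for (const [name, value] of Object.entries(expectations?.headers || {})) {
    expect(res.headers[name.toLowerCase()]).toEqual(value)
  }
  for (const name of expectations?.headersNotPresent || []) {
    expect(res.headers[name.toLowerCase()]).toBeUndefined()
  }
}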

View File

@ -24,7 +24,7 @@ describe("test the create row action", () => {
expect(res.id).toBeDefined() expect(res.id).toBeDefined()
expect(res.revision).toBeDefined() expect(res.revision).toBeDefined()
expect(res.success).toEqual(true) expect(res.success).toEqual(true)
const gottenRow = await config.getRow(table._id, res.id) const gottenRow = await config.api.row.get(table._id, res.id)
expect(gottenRow.name).toEqual("test") expect(gottenRow.name).toEqual("test")
expect(gottenRow.description).toEqual("test") expect(gottenRow.description).toEqual("test")
}) })

View File

@ -36,7 +36,7 @@ describe("test the update row action", () => {
it("should be able to run the action", async () => { it("should be able to run the action", async () => {
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs) const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
expect(res.success).toEqual(true) expect(res.success).toEqual(true)
const updatedRow = await config.getRow(table._id!, res.id) const updatedRow = await config.api.row.get(table._id!, res.id)
expect(updatedRow.name).toEqual("Updated name") expect(updatedRow.name).toEqual("Updated name")
expect(updatedRow.description).not.toEqual("") expect(updatedRow.description).not.toEqual("")
}) })
@ -87,8 +87,8 @@ describe("test the update row action", () => {
}) })
let getResp = await config.api.row.get(table._id!, row._id!) let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id) expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
rowId: row._id, rowId: row._id,
@ -103,8 +103,8 @@ describe("test the update row action", () => {
expect(stepResp.success).toEqual(true) expect(stepResp.success).toEqual(true)
getResp = await config.api.row.get(table._id!, row._id!) getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id) expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
}) })
it("should overwrite links if those links are not set and we ask it do", async () => { it("should overwrite links if those links are not set and we ask it do", async () => {
@ -140,8 +140,8 @@ describe("test the update row action", () => {
}) })
let getResp = await config.api.row.get(table._id!, row._id!) let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id) expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id) expect(getResp.user2[0]._id).toEqual(user2._id)
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
rowId: row._id, rowId: row._id,
@ -163,7 +163,7 @@ describe("test the update row action", () => {
expect(stepResp.success).toEqual(true) expect(stepResp.success).toEqual(true)
getResp = await config.api.row.get(table._id!, row._id!) getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id) expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2).toBeUndefined() expect(getResp.user2).toBeUndefined()
}) })
}) })

View File

@ -100,7 +100,7 @@ describe("test the link controller", () => {
const { _id } = await config.createRow( const { _id } = await config.createRow(
basicLinkedRow(t1._id!, row._id!, linkField) basicLinkedRow(t1._id!, row._id!, linkField)
) )
return config.getRow(t1._id!, _id!) return config.api.row.get(t1._id!, _id!)
} }
it("should be able to confirm if two table schemas are equal", async () => { it("should be able to confirm if two table schemas are equal", async () => {

View File

@ -0,0 +1,363 @@
import fetch from "node-fetch"
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
FieldType,
Table,
TableRequest,
TableSourceType,
} from "@budibase/types"
import _ from "lodash"
import { databaseTestProviders } from "../integrations/tests/utils"
import mysql from "mysql2/promise"
import { builderSocket } from "../websockets"
// @ts-ignore
fetch.mockSearch()
const config = setup.getConfig()!
jest.unmock("mysql2/promise")
jest.mock("../websockets", () => ({
clientAppSocket: jest.fn(),
gridAppSocket: jest.fn(),
initialise: jest.fn(),
builderSocket: {
emitTableUpdate: jest.fn(),
emitTableDeletion: jest.fn(),
emitDatasourceUpdate: jest.fn(),
emitDatasourceDeletion: jest.fn(),
emitScreenUpdate: jest.fn(),
emitAppMetadataUpdate: jest.fn(),
emitAppPublish: jest.fn(),
},
}))
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
mysqlDatasource: Datasource,
primaryMySqlTable: Table
beforeAll(async () => {
await config.init()
const apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, true)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterAll(async () => {
await databaseTestProviders.mysql.stop()
})
beforeEach(async () => {
primaryMySqlTable = await config.createTable({
name: uuidv4(),
type: "table",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
value: {
name: "value",
type: FieldType.NUMBER,
},
},
sourceId: mysqlDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
afterAll(config.end)
it("validate table schema", async () => {
const res = await makeRequest(
"get",
`/api/datasources/${mysqlDatasource._id}`
)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
database: "mysql",
host: mysqlDatasource.config!.host,
password: "--secret-value--",
port: mysqlDatasource.config!.port,
user: "root",
},
plus: true,
source: "MYSQL",
type: "datasource_plus",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
entities: expect.any(Object),
})
})
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: await databaseTestProviders.mysql.datasource(),
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.mysql.datasource()
await config.api.datasource.verify(
{
datasource: {
...dbConfig,
config: {
...dbConfig.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error:
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
},
}
)
})
})
describe("POST /api/datasources/info", () => {
it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", {
datasource: mysqlDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
describe("Integration compatibility with mysql search_path", () => {
let client: mysql.Connection, pathDatasource: Datasource
const database = "test1"
const database2 = "test-2"
beforeAll(async () => {
const dsConfig = await databaseTestProviders.mysql.datasource()
const dbConfig = dsConfig.config!
client = await mysql.createConnection(dbConfig)
await client.query(`CREATE DATABASE \`${database}\`;`)
await client.query(`CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = {
...dsConfig,
config: {
...dbConfig,
database,
},
}
pathDatasource = await config.api.datasource.create(pathConfig)
})
afterAll(async () => {
await client.query(`DROP DATABASE \`${database}\`;`)
await client.query(`DROP DATABASE \`${database2}\`;`)
await client.end()
})
it("discovers tables from any schema in search path", async () => {
await client.query(
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
expect.arrayContaining(["table1"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.query(
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
await client.query(
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${pathDatasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
)
expect(response.status).toBe(200)
expect(
response.body.datasource.entities[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
describe("POST /api/tables/", () => {
let client: mysql.Connection
const emitDatasourceUpdateMock = jest.fn()
beforeEach(async () => {
client = await mysql.createConnection(
(
await databaseTestProviders.mysql.datasource()
).config!
)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterEach(async () => {
await client.end()
})
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: "table",
sourceId: mysqlDatasource._id!,
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
},
},
_add: {
name: "new_column",
},
}
jest
.spyOn(builderSocket!, "emitDatasourceUpdate")
.mockImplementation(emitDatasourceUpdateMock)
await makeRequest("post", "/api/tables/", addColumnToTable)
const expectedTable: TableRequest = {
...addColumnToTable,
schema: {
id: {
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
presence: false,
},
externalType: "int unsigned",
},
new_column: {
type: FieldType.NUMBER,
name: "new_column",
autocolumn: false,
constraints: {
presence: false,
},
externalType: "float(8,2)",
},
},
created: true,
_id: `${mysqlDatasource._id}__table`,
}
delete expectedTable._add
expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1]
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
})
it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable)
let renameColumnOnTable: TableRequest = {
...primaryMySqlTable,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
externalType: "unsigned integer",
},
name: {
name: "name",
type: FieldType.STRING,
externalType: "text",
},
description: {
name: "description",
type: FieldType.STRING,
externalType: "text",
},
age: {
name: "age",
type: FieldType.NUMBER,
externalType: "float(8,2)",
},
},
}
const response = await makeRequest(
"post",
"/api/tables/",
renameColumnOnTable
)
mysqlDatasource = (
await makeRequest(
"post",
`/api/datasources/${mysqlDatasource._id}/schema`
)
).body.datasource
expect(response.status).toEqual(200)
expect(
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
).toEqual(["id", "name", "description", "age"])
})
})
})

View File

@ -398,7 +398,7 @@ describe("postgres integrations", () => {
expect(res.status).toBe(200) expect(res.status).toBe(200)
expect(res.body).toEqual(updatedRow) expect(res.body).toEqual(updatedRow)
const persistedRow = await config.getRow( const persistedRow = await config.api.row.get(
primaryPostgresTable._id!, primaryPostgresTable._id!,
row.id row.id
) )
@ -1040,28 +1040,37 @@ describe("postgres integrations", () => {
describe("POST /api/datasources/verify", () => { describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => { it("should be able to verify the connection", async () => {
const response = await config.api.datasource.verify({ await config.api.datasource.verify(
datasource: await databaseTestProviders.postgres.datasource(), {
}) datasource: await databaseTestProviders.postgres.datasource(),
expect(response.status).toBe(200) },
expect(response.body.connected).toBe(true) {
body: {
connected: true,
},
}
)
}) })
it("should state an invalid datasource cannot connect", async () => { it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.postgres.datasource() const dbConfig = await databaseTestProviders.postgres.datasource()
const response = await config.api.datasource.verify({ await config.api.datasource.verify(
datasource: { {
...dbConfig, datasource: {
config: { ...dbConfig,
...dbConfig.config, config: {
password: "wrongpassword", ...dbConfig.config,
password: "wrongpassword",
},
}, },
}, },
}) {
body: {
expect(response.status).toBe(200) connected: false,
expect(response.body.connected).toBe(false) error: 'password authentication failed for user "postgres"',
expect(response.body.error).toBeDefined() },
}
)
}) })
}) })

View File

@ -12,6 +12,8 @@ import {
} from "@budibase/types" } from "@budibase/types"
import environment from "../../environment" import environment from "../../environment"
type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
const envLimit = environment.SQL_MAX_ROWS const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS) ? parseInt(environment.SQL_MAX_ROWS)
: null : null
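The new `QueryFunction` alias replaces the loose `Function` type used for the query callbacks later in this file. A small sketch of why the stricter signature helps, assuming the `Operation` enum from `@budibase/types` and knex's bundled type definitions (the `logQuery` name is illustrative):

```ts
import { Knex } from "knex"
import { Operation } from "@budibase/types"

type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any

// Matches the expected signature, so tsc accepts it.
const logQuery: QueryFunction = (query, operation) => {
  // Knex.SqlNative carries the compiled SQL string and its bindings.
  console.log(operation, query.sql, query.bindings)
}

// With the old `queryFn: Function` typing a mismatched callback compiled
// silently; with `QueryFunction` it is rejected at compile time:
// const broken: QueryFunction = (sqlString: string) => sqlString
```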
@ -325,15 +327,18 @@ class InternalBuilder {
  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
    let { sort, paginate } = json
    const table = json.meta?.table
+   const aliases = json.tableAliases
+   const aliased =
+     table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
    if (sort && Object.keys(sort || {}).length > 0) {
      for (let [key, value] of Object.entries(sort)) {
        const direction =
          value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-       query = query.orderBy(`${table?.name}.${key}`, direction)
+       query = query.orderBy(`${aliased}.${key}`, direction)
      }
    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
      // @ts-ignore
-     query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
+     query = query.orderBy(`${aliased}.${table?.primary[0]}`)
    }
    return query
  }
@ -433,10 +438,12 @@ class InternalBuilder {
    aliases?: QueryJson["tableAliases"]
  ): Knex.QueryBuilder {
    const tableName = endpoint.entityId
-   const tableAliased = aliases?.[tableName]
-     ? `${tableName} as ${aliases?.[tableName]}`
-     : tableName
-   let query = knex(tableAliased)
+   const tableAlias = aliases?.[tableName]
+   let table: string | Record<string, string> = tableName
+   if (tableAlias) {
+     table = { [tableAlias]: tableName }
+   }
+   let query = knex(table)
    if (endpoint.schema) {
      query = query.withSchema(endpoint.schema)
    }
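For context on the two aliasing hunks above: passing the alias to knex as an object (`{ alias: tableName }`) lets knex treat both names as identifiers and escape them itself, rather than parsing a hand-built `"table as alias"` string, and once the FROM clause is aliased the sorting code has to reference the alias instead of the original table name. A quick sketch using knex without a live connection; the table and column names are made up for illustration:

```ts
import knex from "knex"

// No connection is needed just to inspect the generated SQL.
const qb = knex({ client: "pg" })

// Old style: alias embedded in a raw string that knex has to parse.
const legacy = qb("people as a").select("a.name").orderBy("a.name").toSQL()

// New style: alias expressed as an object, identifiers escaped by knex.
const aliased = qb({ a: "people" }).select("a.name").orderBy("a.name").toSQL()

console.log(legacy.sql)
// select "a"."name" from "people" as "a" order by "a"."name" asc
console.log(aliased.sql)
// select "a"."name" from "people" as "a" order by "a"."name" asc
```

Both forms produce equivalent SQL here; the object form simply removes the dependency on knex's string parsing for the alias.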
@ -622,7 +629,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    }
  }

- async getReturningRow(queryFn: Function, json: QueryJson) {
+ async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
    if (!json.extra || !json.extra.idFilter) {
      return {}
    }
@ -634,7 +641,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
      resource: {
        fields: [],
      },
-     filters: json.extra.idFilter,
+     filters: json.extra?.idFilter,
      paginate: {
        limit: 1,
      },
@ -663,7 +670,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
  // this function recreates the returning functionality of postgres
  async queryWithReturning(
    json: QueryJson,
-   queryFn: Function,
+   queryFn: QueryFunction,
    processFn: Function = (result: any) => result
  ) {
    const sqlClient = this.getSqlClient()

View File

@ -14,7 +14,12 @@ import firebase from "./firebase"
import redis from "./redis"
import snowflake from "./snowflake"
import oracle from "./oracle"
-import { SourceName, Integration, PluginType } from "@budibase/types"
+import {
+  SourceName,
+  Integration,
+  PluginType,
+  IntegrationBase,
+} from "@budibase/types"
import { getDatasourcePlugin } from "../utilities/fileSystem"
import env from "../environment"
import cloneDeep from "lodash/cloneDeep"
@ -40,25 +45,28 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
  [SourceName.BUDIBASE]: undefined,
}

-const INTEGRATIONS: Record<SourceName, any> = {
-  [SourceName.POSTGRES]: postgres.integration,
-  [SourceName.DYNAMODB]: dynamodb.integration,
-  [SourceName.MONGODB]: mongodb.integration,
-  [SourceName.ELASTICSEARCH]: elasticsearch.integration,
-  [SourceName.COUCHDB]: couchdb.integration,
-  [SourceName.SQL_SERVER]: sqlServer.integration,
-  [SourceName.S3]: s3.integration,
-  [SourceName.AIRTABLE]: airtable.integration,
-  [SourceName.MYSQL]: mysql.integration,
-  [SourceName.ARANGODB]: arangodb.integration,
-  [SourceName.REST]: rest.integration,
-  [SourceName.FIRESTORE]: firebase.integration,
-  [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
-  [SourceName.REDIS]: redis.integration,
-  [SourceName.SNOWFLAKE]: snowflake.integration,
-  [SourceName.ORACLE]: undefined,
-  [SourceName.BUDIBASE]: undefined,
-}
+type IntegrationBaseConstructor = new (...args: any[]) => IntegrationBase
+
+const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
+  {
+    [SourceName.POSTGRES]: postgres.integration,
+    [SourceName.DYNAMODB]: dynamodb.integration,
+    [SourceName.MONGODB]: mongodb.integration,
+    [SourceName.ELASTICSEARCH]: elasticsearch.integration,
+    [SourceName.COUCHDB]: couchdb.integration,
+    [SourceName.SQL_SERVER]: sqlServer.integration,
+    [SourceName.S3]: s3.integration,
+    [SourceName.AIRTABLE]: airtable.integration,
+    [SourceName.MYSQL]: mysql.integration,
+    [SourceName.ARANGODB]: arangodb.integration,
+    [SourceName.REST]: rest.integration,
+    [SourceName.FIRESTORE]: firebase.integration,
+    [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
+    [SourceName.REDIS]: redis.integration,
+    [SourceName.SNOWFLAKE]: snowflake.integration,
+    [SourceName.ORACLE]: undefined,
+    [SourceName.BUDIBASE]: undefined,
+  }

// optionally add oracle integration if the oracle binary can be installed
if (
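The point of the stricter record type above is that every registered value must now be a class constructor (or `undefined`), rather than `any`. A cut-down sketch of the idea; the `IntegrationBase` shape and the `PostgresIntegration` class here are illustrative stand-ins, not the real types from `@budibase/types`:

```ts
// Illustrative stand-in for the real IntegrationBase interface.
interface IntegrationBase {
  testConnection?(): Promise<{ connected: boolean; error?: string }>
}

type IntegrationBaseConstructor = new (...args: any[]) => IntegrationBase

class PostgresIntegration implements IntegrationBase {
  constructor(_config: Record<string, any>) {}
  async testConnection() {
    return { connected: true }
  }
}

const registry: Record<string, IntegrationBaseConstructor | undefined> = {
  postgres: PostgresIntegration, // a constructor - accepted
  oracle: undefined, // explicitly unavailable - accepted
  // rest: { run: () => {} },    // not a constructor - rejected by tsc
}

// Consumers can instantiate safely after a single undefined check.
const Integration = registry["postgres"]
if (Integration) {
  void new Integration({ host: "localhost", port: 5432 }).testConnection?.()
}
```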

View File

@ -1,9 +1,10 @@
-import { QueryJson } from "@budibase/types"
+import { Datasource, Operation, QueryJson, SourceName } from "@budibase/types"
import { join } from "path"
import Sql from "../base/sql"
import { SqlClient } from "../utils"
import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests"
+import { Knex } from "knex"

function multiline(sql: string) {
  return sql.replace(/\n/g, "").replace(/ +/g, " ")
@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
    })
  })
describe("returning (everything bar Postgres)", () => {
it("should be able to handle row returning", () => {
const queryJson = getJson("createSimple.json")
const SQL = new Sql(SqlClient.MS_SQL, limit)
let query = SQL._query(queryJson, { disableReturning: true })
expect(query).toEqual({
sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
bindings: [22, "Test"],
})
// now check returning
let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
SQL.getReturningRow((input: Knex.SqlNative) => {
returningQuery = input
}, queryJson)
expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
bindings: [1, "Test", 22],
})
})
})
describe("check max character aliasing", () => { describe("check max character aliasing", () => {
it("should handle over 'z' max character alias", () => { it("should handle over 'z' max character alias", () => {
const tableNames = [] const tableNames = []
@ -175,6 +198,114 @@ describe("Captures of real examples", () => {
    })
  })
describe("check aliasing is disabled/enabled", () => {
const tables = ["tableA", "tableB"]
function getDatasource(source: SourceName): Datasource {
return {
source,
type: "datasource",
isSQL: true,
}
}
function getQuery(op: Operation, fields: string[] = ["a"]): QueryJson {
return {
endpoint: { datasourceId: "", entityId: "", operation: op },
resource: {
fields,
},
}
}
it("should check for Postgres aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.POSTGRES)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(true)
})
it("should check for MS-SQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.SQL_SERVER)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should check for MySQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.MYSQL)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should check for Oracle aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.ORACLE)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})
it("should disable aliasing for non-SQL datasources", () => {
const aliasing = new AliasTables(tables)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), {
source: SourceName.GOOGLE_SHEETS,
type: "datasource",
isSQL: false,
})
)
})
it("should disable when no fields", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.POSTGRES)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ, []), datasource)
).toEqual(false)
})
})
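Taken together, the tests above pin down the aliasing policy: only SQL datasources with at least one requested field are aliased, Postgres allows aliasing for every operation, and MS-SQL, MySQL and Oracle allow it only for reads. A compact restatement of that decision table as code, a sketch of the behaviour the tests assert rather than the actual `isAliasingEnabled` implementation:

```ts
import { Operation, SourceName } from "@budibase/types"

// Sources the tests above only allow aliasing for on READ operations.
const READ_ONLY_ALIASING = [
  SourceName.SQL_SERVER,
  SourceName.MYSQL,
  SourceName.ORACLE,
]

function aliasingPolicy(
  source: SourceName,
  operation: Operation,
  opts: { isSQL: boolean; fields: string[] }
): boolean {
  // Non-SQL datasources and queries with no requested fields are never aliased.
  if (!opts.isSQL || opts.fields.length === 0) {
    return false
  }
  if (READ_ONLY_ALIASING.includes(source)) {
    return operation === Operation.READ
  }
  // Postgres aliases for every operation in the tests above.
  return true
}
```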
describe("check some edge cases", () => { describe("check some edge cases", () => {
const tableNames = ["hello", "world"] const tableNames = ["hello", "world"]

View File

@ -68,7 +68,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View File

@ -0,0 +1,64 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"resource": {
"fields": [
"a.name",
"a.age"
]
},
"filters": {},
"relationships": [],
"body": {
"name": "Test",
"age": 22
},
"extra": {
"idFilter": {
"equal": {
"name": "Test",
"age": 22
}
}
},
"meta": {
"table": {
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
"type": "table",
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"sourceType": "external",
"primary": [
"name",
"age"
],
"name": "people",
"schema": {
"name": {
"type": "string",
"externalType": "varchar",
"autocolumn": false,
"name": "name",
"constraints": {
"presence": true
}
},
"age": {
"type": "number",
"externalType": "int",
"autocolumn": false,
"name": "age",
"constraints": {
"presence": false
}
}
},
"primaryDisplay": "name"
}
},
"tableAliases": {
"people": "a"
}
}

View File

@ -58,7 +58,7 @@
"primary": [ "primary": [
"personid" "personid"
], ],
"name": "a", "name": "persons",
"schema": { "schema": {
"year": { "year": {
"type": "number", "type": "number",

View File

@ -34,7 +34,7 @@
"keypartone", "keypartone",
"keyparttwo" "keyparttwo"
], ],
"name": "a", "name": "compositetable",
"schema": { "schema": {
"keyparttwo": { "keyparttwo": {
"type": "string", "type": "string",

View File

@ -49,7 +49,7 @@
"primary": [ "primary": [
"taskid" "taskid"
], ],
"name": "a", "name": "tasks",
"schema": { "schema": {
"executorid": { "executorid": {
"type": "number", "type": "number",

View File

@ -63,7 +63,7 @@
"primary": [ "primary": [
"productid" "productid"
], ],
"name": "a", "name": "products",
"schema": { "schema": {
"productname": { "productname": {
"type": "string", "type": "string",

View File

@ -53,7 +53,7 @@
"primary": [ "primary": [
"productid" "productid"
], ],
"name": "a", "name": "products",
"schema": { "schema": {
"productname": { "productname": {
"type": "string", "type": "string",

Some files were not shown because too many files have changed in this diff.