Merge branch 'master' of github.com:Budibase/budibase into labday/sqs
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"version": "2.21.0",
|
||||
"version": "2.21.3",
|
||||
"npmClient": "yarn",
|
||||
"packages": [
|
||||
"packages/*",
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit ab324e35d855012bd0f49caa53c6dd765223c6fa
|
||||
Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
|
|
@ -67,7 +67,7 @@
|
|||
"@types/lodash": "4.14.200",
|
||||
"@types/node-fetch": "2.6.4",
|
||||
"@types/pouchdb": "6.4.0",
|
||||
"@types/redlock": "4.0.3",
|
||||
"@types/redlock": "4.0.7",
|
||||
"@types/semver": "7.3.7",
|
||||
"@types/tar-fs": "2.0.1",
|
||||
"@types/uuid": "8.3.4",
|
||||
|
@ -78,6 +78,7 @@
|
|||
"jest-serial-runner": "1.2.1",
|
||||
"pino-pretty": "10.0.0",
|
||||
"pouchdb-adapter-memory": "7.2.2",
|
||||
"testcontainers": "^10.7.2",
|
||||
"timekeeper": "2.2.0",
|
||||
"typescript": "5.2.2"
|
||||
},
|
||||
|
|
|
@ -6,7 +6,7 @@ import env from "../environment"
|
|||
import * as accounts from "../accounts"
|
||||
import { UserDB } from "../users"
|
||||
import { sdk } from "@budibase/shared-core"
|
||||
import { User } from "@budibase/types"
|
||||
import { User, UserMetadata } from "@budibase/types"
|
||||
|
||||
const EXPIRY_SECONDS = 3600
|
||||
|
||||
|
@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
|
|||
*/
|
||||
async function populateFromDB(userId: string, tenantId: string) {
|
||||
const db = tenancy.getTenantDB(tenantId)
|
||||
const user = await db.get<any>(userId)
|
||||
const user = await db.get<UserMetadata>(userId)
|
||||
user.budibaseAccess = true
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(user.email)
|
||||
|
|
|
@ -1,66 +1,57 @@
|
|||
import PouchDB from "pouchdb"
|
||||
import { getPouchDB, closePouchDB } from "./couch"
|
||||
import { DocumentType } from "../constants"
|
||||
|
||||
class Replication {
|
||||
source: any
|
||||
target: any
|
||||
replication: any
|
||||
source: PouchDB.Database
|
||||
target: PouchDB.Database
|
||||
|
||||
/**
|
||||
*
|
||||
* @param source - the DB you want to replicate or rollback to
|
||||
* @param target - the DB you want to replicate to, or rollback from
|
||||
*/
|
||||
constructor({ source, target }: any) {
|
||||
constructor({ source, target }: { source: string; target: string }) {
|
||||
this.source = getPouchDB(source)
|
||||
this.target = getPouchDB(target)
|
||||
}
|
||||
|
||||
close() {
|
||||
return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
|
||||
async close() {
|
||||
await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
|
||||
}
|
||||
|
||||
promisify(operation: any, opts = {}) {
|
||||
return new Promise(resolve => {
|
||||
operation(this.target, opts)
|
||||
.on("denied", function (err: any) {
|
||||
replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
|
||||
return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
|
||||
this.source.replicate
|
||||
.to(this.target, opts)
|
||||
.on("denied", function (err) {
|
||||
// a document failed to replicate (e.g. due to permissions)
|
||||
throw new Error(`Denied: Document failed to replicate ${err}`)
|
||||
})
|
||||
.on("complete", function (info: any) {
|
||||
.on("complete", function (info) {
|
||||
return resolve(info)
|
||||
})
|
||||
.on("error", function (err: any) {
|
||||
.on("error", function (err) {
|
||||
throw new Error(`Replication Error: ${err}`)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Two way replication operation, intended to be promise based.
|
||||
* @param opts - PouchDB replication options
|
||||
*/
|
||||
sync(opts = {}) {
|
||||
this.replication = this.promisify(this.source.sync, opts)
|
||||
return this.replication
|
||||
}
|
||||
appReplicateOpts(
|
||||
opts: PouchDB.Replication.ReplicateOptions = {}
|
||||
): PouchDB.Replication.ReplicateOptions {
|
||||
if (typeof opts.filter === "string") {
|
||||
return opts
|
||||
}
|
||||
|
||||
/**
|
||||
* One way replication operation, intended to be promise based.
|
||||
* @param opts - PouchDB replication options
|
||||
*/
|
||||
replicate(opts = {}) {
|
||||
this.replication = this.promisify(this.source.replicate.to, opts)
|
||||
return this.replication
|
||||
}
|
||||
const filter = opts.filter
|
||||
delete opts.filter
|
||||
|
||||
appReplicateOpts() {
|
||||
return {
|
||||
filter: (doc: any) => {
|
||||
...opts,
|
||||
filter: (doc: any, params: any) => {
|
||||
if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
|
||||
return false
|
||||
}
|
||||
return doc._id !== DocumentType.APP_METADATA
|
||||
if (doc._id === DocumentType.APP_METADATA) {
|
||||
return false
|
||||
}
|
||||
return filter ? filter(doc, params) : true
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -75,10 +66,6 @@ class Replication {
|
|||
// take the opportunity to remove deleted tombstones
|
||||
await this.replicate()
|
||||
}
|
||||
|
||||
cancel() {
|
||||
this.replication.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
export default Replication
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { APIError } from "@budibase/types"
|
||||
import * as errors from "../errors"
|
||||
import environment from "../environment"
|
||||
|
||||
export async function errorHandling(ctx: any, next: any) {
|
||||
try {
|
||||
|
@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
|
|||
console.error(err)
|
||||
}
|
||||
|
||||
const error = errors.getPublicError(err)
|
||||
const body: APIError = {
|
||||
let error: APIError = {
|
||||
message: err.message,
|
||||
status: status,
|
||||
validationErrors: err.validation,
|
||||
error,
|
||||
error: errors.getPublicError(err),
|
||||
}
|
||||
|
||||
ctx.body = body
|
||||
if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
|
||||
// @ts-ignore
|
||||
error.stack = err.stack
|
||||
}
|
||||
|
||||
ctx.body = error
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import env from "../environment"
|
||||
import Redis from "ioredis"
|
||||
import Redis, { Cluster } from "ioredis"
|
||||
// mock-redis doesn't have any typing
|
||||
let MockRedis: any | undefined
|
||||
if (env.MOCK_REDIS) {
|
||||
|
@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
|
|||
|
||||
// for testing just generate the client once
|
||||
let CLOSED = false
|
||||
let CLIENTS: { [key: number]: any } = {}
|
||||
const CLIENTS: Record<number, Redis> = {}
|
||||
let CONNECTED = false
|
||||
|
||||
// mock redis always connected
|
||||
|
@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
|
|||
CONNECTED = true
|
||||
}
|
||||
|
||||
function pickClient(selectDb: number): any {
|
||||
function pickClient(selectDb: number) {
|
||||
return CLIENTS[selectDb]
|
||||
}
|
||||
|
||||
|
@ -201,12 +201,15 @@ class RedisWrapper {
|
|||
key = `${db}${SEPARATOR}${key}`
|
||||
let stream
|
||||
if (CLUSTERED) {
|
||||
let node = this.getClient().nodes("master")
|
||||
let node = (this.getClient() as never as Cluster).nodes("master")
|
||||
stream = node[0].scanStream({ match: key + "*", count: 100 })
|
||||
} else {
|
||||
stream = this.getClient().scanStream({ match: key + "*", count: 100 })
|
||||
stream = (this.getClient() as Redis).scanStream({
|
||||
match: key + "*",
|
||||
count: 100,
|
||||
})
|
||||
}
|
||||
return promisifyStream(stream, this.getClient())
|
||||
return promisifyStream(stream, this.getClient() as any)
|
||||
}
|
||||
|
||||
async keys(pattern: string) {
|
||||
|
@ -221,14 +224,16 @@ class RedisWrapper {
|
|||
|
||||
async get(key: string) {
|
||||
const db = this._db
|
||||
let response = await this.getClient().get(addDbPrefix(db, key))
|
||||
const response = await this.getClient().get(addDbPrefix(db, key))
|
||||
// overwrite the prefixed key
|
||||
// @ts-ignore
|
||||
if (response != null && response.key) {
|
||||
// @ts-ignore
|
||||
response.key = key
|
||||
}
|
||||
// if its not an object just return the response
|
||||
try {
|
||||
return JSON.parse(response)
|
||||
return JSON.parse(response!)
|
||||
} catch (err) {
|
||||
return response
|
||||
}
|
||||
|
@ -274,13 +279,37 @@ class RedisWrapper {
|
|||
}
|
||||
}
|
||||
|
||||
async bulkStore(
|
||||
data: Record<string, any>,
|
||||
expirySeconds: number | null = null
|
||||
) {
|
||||
const client = this.getClient()
|
||||
|
||||
const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
|
||||
acc[addDbPrefix(this._db, key)] =
|
||||
typeof value === "object" ? JSON.stringify(value) : value
|
||||
return acc
|
||||
}, {} as Record<string, any>)
|
||||
|
||||
const pipeline = client.pipeline()
|
||||
pipeline.mset(dataToStore)
|
||||
|
||||
if (expirySeconds !== null) {
|
||||
for (const key of Object.keys(dataToStore)) {
|
||||
pipeline.expire(key, expirySeconds)
|
||||
}
|
||||
}
|
||||
|
||||
await pipeline.exec()
|
||||
}
|
||||
|
||||
async getTTL(key: string) {
|
||||
const db = this._db
|
||||
const prefixedKey = addDbPrefix(db, key)
|
||||
return this.getClient().ttl(prefixedKey)
|
||||
}
|
||||
|
||||
async setExpiry(key: string, expirySeconds: number | null) {
|
||||
async setExpiry(key: string, expirySeconds: number) {
|
||||
const db = this._db
|
||||
const prefixedKey = addDbPrefix(db, key)
|
||||
await this.getClient().expire(prefixedKey, expirySeconds)
|
||||
|
@ -295,6 +324,26 @@ class RedisWrapper {
|
|||
let items = await this.scan()
|
||||
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
|
||||
}
|
||||
|
||||
async increment(key: string) {
|
||||
const result = await this.getClient().incr(addDbPrefix(this._db, key))
|
||||
if (isNaN(result)) {
|
||||
throw new Error(`Redis ${key} does not contain a number`)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
async deleteIfValue(key: string, value: any) {
|
||||
const client = this.getClient()
|
||||
|
||||
const luaScript = `
|
||||
if redis.call('GET', KEYS[1]) == ARGV[1] then
|
||||
redis.call('DEL', KEYS[1])
|
||||
end
|
||||
`
|
||||
|
||||
await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
|
||||
}
|
||||
}
|
||||
|
||||
export default RedisWrapper
|
||||
|
|
|
@ -72,7 +72,7 @@ const OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {
|
|||
export async function newRedlock(opts: Redlock.Options = {}) {
|
||||
const options = { ...OPTIONS.DEFAULT, ...opts }
|
||||
const redisWrapper = await getLockClient()
|
||||
const client = redisWrapper.getClient()
|
||||
const client = redisWrapper.getClient() as any
|
||||
return new Redlock([client], options)
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,214 @@
|
|||
import { GenericContainer, StartedTestContainer } from "testcontainers"
|
||||
import { generator, structures } from "../../../tests"
|
||||
import RedisWrapper from "../redis"
|
||||
import { env } from "../.."
|
||||
|
||||
jest.setTimeout(30000)
|
||||
|
||||
describe("redis", () => {
|
||||
let redis: RedisWrapper
|
||||
let container: StartedTestContainer
|
||||
|
||||
beforeAll(async () => {
|
||||
const container = await new GenericContainer("redis")
|
||||
.withExposedPorts(6379)
|
||||
.start()
|
||||
|
||||
env._set(
|
||||
"REDIS_URL",
|
||||
`${container.getHost()}:${container.getMappedPort(6379)}`
|
||||
)
|
||||
env._set("MOCK_REDIS", 0)
|
||||
env._set("REDIS_PASSWORD", 0)
|
||||
})
|
||||
|
||||
afterAll(() => container?.stop())
|
||||
|
||||
beforeEach(async () => {
|
||||
redis = new RedisWrapper(structures.db.id())
|
||||
await redis.init()
|
||||
})
|
||||
|
||||
describe("store", () => {
|
||||
it("a basic value can be persisted", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
|
||||
await redis.store(key, value)
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
|
||||
it("objects can be persisted", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = { [generator.word()]: generator.word() }
|
||||
|
||||
await redis.store(key, value)
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
|
||||
describe("bulkStore", () => {
|
||||
function createRandomObject(
|
||||
keyLength: number,
|
||||
valueGenerator: () => any = () => generator.word()
|
||||
) {
|
||||
return generator
|
||||
.unique(() => generator.word(), keyLength)
|
||||
.reduce((acc, key) => {
|
||||
acc[key] = valueGenerator()
|
||||
return acc
|
||||
}, {} as Record<string, string>)
|
||||
}
|
||||
|
||||
it("a basic object can be persisted", async () => {
|
||||
const data = createRandomObject(10)
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("a complex object can be persisted", async () => {
|
||||
const data = {
|
||||
...createRandomObject(10, () => createRandomObject(5)),
|
||||
...createRandomObject(5),
|
||||
}
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(15)
|
||||
})
|
||||
|
||||
it("no TTL is set by default", async () => {
|
||||
const data = createRandomObject(10)
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
expect(await redis.getTTL(key)).toEqual(-1)
|
||||
}
|
||||
})
|
||||
|
||||
it("a bulk store can be persisted with TTL", async () => {
|
||||
const ttl = 500
|
||||
const data = createRandomObject(8)
|
||||
|
||||
await redis.bulkStore(data, ttl)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
expect(await redis.getTTL(key)).toEqual(ttl)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(8)
|
||||
})
|
||||
|
||||
it("setting a TTL of -1 will not persist the key", async () => {
|
||||
const ttl = -1
|
||||
const data = createRandomObject(5)
|
||||
|
||||
await redis.bulkStore(data, ttl)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toBe(null)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("increment", () => {
|
||||
it("can increment on a new key", async () => {
|
||||
const key = structures.uuid()
|
||||
const result = await redis.increment(key)
|
||||
expect(result).toBe(1)
|
||||
})
|
||||
|
||||
it("can increment multiple times", async () => {
|
||||
const key = structures.uuid()
|
||||
const results = [
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
]
|
||||
expect(results).toEqual([1, 2, 3, 4, 5])
|
||||
})
|
||||
|
||||
it("can increment on a new key", async () => {
|
||||
const key1 = structures.uuid()
|
||||
const key2 = structures.uuid()
|
||||
|
||||
const result1 = await redis.increment(key1)
|
||||
expect(result1).toBe(1)
|
||||
|
||||
const result2 = await redis.increment(key2)
|
||||
expect(result2).toBe(1)
|
||||
})
|
||||
|
||||
it("can increment multiple times in parallel", async () => {
|
||||
const key = structures.uuid()
|
||||
const results = await Promise.all(
|
||||
Array.from({ length: 100 }).map(() => redis.increment(key))
|
||||
)
|
||||
expect(results).toHaveLength(100)
|
||||
expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1))
|
||||
})
|
||||
|
||||
it("can increment existing set keys", async () => {
|
||||
const key = structures.uuid()
|
||||
await redis.store(key, 70)
|
||||
await redis.increment(key)
|
||||
|
||||
const result = await redis.increment(key)
|
||||
expect(result).toBe(72)
|
||||
})
|
||||
|
||||
it.each([
|
||||
generator.word(),
|
||||
generator.bool(),
|
||||
{ [generator.word()]: generator.word() },
|
||||
])("cannot increment if the store value is not a number", async value => {
|
||||
const key = structures.uuid()
|
||||
await redis.store(key, value)
|
||||
|
||||
await expect(redis.increment(key)).rejects.toThrowError(
|
||||
"ERR value is not an integer or out of range"
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("deleteIfValue", () => {
|
||||
it("can delete if the value matches", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
await redis.store(key, value)
|
||||
|
||||
await redis.deleteIfValue(key, value)
|
||||
|
||||
expect(await redis.get(key)).toBeNull()
|
||||
})
|
||||
|
||||
it("will not delete if the value does not matches", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
await redis.store(key, value)
|
||||
|
||||
await redis.deleteIfValue(key, generator.word())
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
|
|||
return cloneDeep(BUILTIN_ROLES)
|
||||
}
|
||||
|
||||
export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
|
||||
role => role._id
|
||||
)
|
||||
export function isBuiltin(role: string) {
|
||||
return getBuiltinRole(role) !== undefined
|
||||
}
|
||||
|
||||
export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
|
||||
role => role.name
|
||||
)
|
||||
|
||||
export function isBuiltin(role?: string) {
|
||||
return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
|
||||
export function getBuiltinRole(roleId: string): Role | undefined {
|
||||
const role = Object.values(BUILTIN_ROLES).find(role =>
|
||||
roleId.includes(role._id)
|
||||
)
|
||||
if (!role) {
|
||||
return undefined
|
||||
}
|
||||
return cloneDeep(role)
|
||||
}
|
||||
|
||||
/**
|
||||
* Works through the inheritance ranks to see how far up the builtin stack this ID is.
|
||||
*/
|
||||
export function builtinRoleToNumber(id?: string) {
|
||||
if (!id) {
|
||||
return 0
|
||||
}
|
||||
export function builtinRoleToNumber(id: string) {
|
||||
const builtins = getBuiltinRoles()
|
||||
const MAX = Object.values(builtins).length + 1
|
||||
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
|
||||
|
@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) {
|
|||
/**
|
||||
* Converts any role to a number, but has to be async to get the roles from db.
|
||||
*/
|
||||
export async function roleToNumber(id?: string) {
|
||||
export async function roleToNumber(id: string) {
|
||||
if (isBuiltin(id)) {
|
||||
return builtinRoleToNumber(id)
|
||||
}
|
||||
|
@ -131,7 +130,7 @@ export async function roleToNumber(id?: string) {
|
|||
defaultPublic: true,
|
||||
})) as RoleDoc[]
|
||||
for (let role of hierarchy) {
|
||||
if (isBuiltin(role?.inherits)) {
|
||||
if (role?.inherits && isBuiltin(role.inherits)) {
|
||||
return builtinRoleToNumber(role.inherits) + 1
|
||||
}
|
||||
}
|
||||
|
@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
|
|||
* @returns The role object, which may contain an "inherits" property.
|
||||
*/
|
||||
export async function getRole(
|
||||
roleId?: string,
|
||||
roleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc | undefined> {
|
||||
if (!roleId) {
|
||||
return undefined
|
||||
}
|
||||
let role: any = {}
|
||||
): Promise<RoleDoc> {
|
||||
// built in roles mostly come from the in-code implementation,
|
||||
// but can be extended by a doc stored about them (e.g. permissions)
|
||||
if (isBuiltin(roleId)) {
|
||||
role = cloneDeep(
|
||||
Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
|
||||
)
|
||||
} else {
|
||||
let role: RoleDoc | undefined = getBuiltinRole(roleId)
|
||||
if (!role) {
|
||||
// make sure has the prefix (if it has it then it won't be added)
|
||||
roleId = prefixRoleID(roleId)
|
||||
}
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const dbRole = await db.get(getDBRoleID(roleId))
|
||||
role = Object.assign(role, dbRole)
|
||||
const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
|
||||
role = Object.assign(role || {}, dbRole)
|
||||
// finalise the ID
|
||||
role._id = getExternalRoleID(role._id, role.version)
|
||||
role._id = getExternalRoleID(role._id!, role.version)
|
||||
} catch (err) {
|
||||
if (!isBuiltin(roleId) && opts?.defaultPublic) {
|
||||
return cloneDeep(BUILTIN_ROLES.PUBLIC)
|
||||
}
|
||||
// only throw an error if there is no role at all
|
||||
if (Object.keys(role).length === 0) {
|
||||
if (!role || Object.keys(role).length === 0) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
@ -200,7 +192,7 @@ export async function getRole(
|
|||
* Simple function to get all the roles based on the top level user role ID.
|
||||
*/
|
||||
async function getAllUserRoles(
|
||||
userRoleId?: string,
|
||||
userRoleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc[]> {
|
||||
// admins have access to all roles
|
||||
|
@ -226,7 +218,7 @@ async function getAllUserRoles(
|
|||
}
|
||||
|
||||
export async function getUserRoleIdHierarchy(
|
||||
userRoleId?: string
|
||||
userRoleId: string
|
||||
): Promise<string[]> {
|
||||
const roles = await getUserRoleHierarchy(userRoleId)
|
||||
return roles.map(role => role._id!)
|
||||
|
@ -241,7 +233,7 @@ export async function getUserRoleIdHierarchy(
|
|||
* highest level of access and the last being the lowest level.
|
||||
*/
|
||||
export async function getUserRoleHierarchy(
|
||||
userRoleId?: string,
|
||||
userRoleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
) {
|
||||
// special case, if they don't have a role then they are a public user
|
||||
|
@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
|
|||
return rolePerms
|
||||
}
|
||||
|
||||
export async function getAllRoleIds(appId?: string) {
|
||||
export async function getAllRoleIds(appId: string): Promise<string[]> {
|
||||
const roles = await getAllRoles(appId)
|
||||
return roles.map(role => role._id)
|
||||
return roles.map(role => role._id!)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -35,7 +35,10 @@ export default function positionDropdown(element, opts) {
|
|||
}
|
||||
|
||||
if (typeof customUpdate === "function") {
|
||||
styles = customUpdate(anchorBounds, elementBounds, styles)
|
||||
styles = customUpdate(anchorBounds, elementBounds, {
|
||||
...styles,
|
||||
offset: opts.offset,
|
||||
})
|
||||
} else {
|
||||
// Determine vertical styles
|
||||
if (align === "right-outside") {
|
||||
|
|
|
@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
|
|||
class AnalyticsHub {
|
||||
constructor() {
|
||||
this.clients = [posthog, intercom]
|
||||
this.initialised = false
|
||||
}
|
||||
|
||||
async activate() {
|
||||
// Check analytics are enabled
|
||||
const analyticsStatus = await API.getAnalyticsStatus()
|
||||
if (analyticsStatus.enabled) {
|
||||
this.clients.forEach(client => client.init())
|
||||
if (analyticsStatus.enabled && !this.initialised) {
|
||||
this.clients.forEach(client => {
|
||||
client.init()
|
||||
})
|
||||
this.initialised = true
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -164,9 +164,10 @@
|
|||
</div>
|
||||
{/if}
|
||||
<TourWrap
|
||||
tourStepKey={$builderStore.onboarding
|
||||
? TOUR_STEP_KEYS.BUILDER_USER_MANAGEMENT
|
||||
: TOUR_STEP_KEYS.FEATURE_USER_MANAGEMENT}
|
||||
stepKeys={[
|
||||
TOUR_STEP_KEYS.BUILDER_USER_MANAGEMENT,
|
||||
TOUR_STEP_KEYS.FEATURE_USER_MANAGEMENT,
|
||||
]}
|
||||
>
|
||||
<div class="app-action-button users">
|
||||
<div class="app-action" id="builder-app-users-button">
|
||||
|
@ -209,7 +210,7 @@
|
|||
<div bind:this={appActionPopoverAnchor}>
|
||||
<div class="app-action">
|
||||
<Icon name={isPublished ? "GlobeCheck" : "GlobeStrike"} />
|
||||
<TourWrap tourStepKey={TOUR_STEP_KEYS.BUILDER_APP_PUBLISH}>
|
||||
<TourWrap stepKeys={[TOUR_STEP_KEYS.BUILDER_APP_PUBLISH]}>
|
||||
<span class="publish-open" id="builder-app-publish-button">
|
||||
Publish
|
||||
<Icon
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
|
||||
import { Icon } from "@budibase/bbui"
|
||||
import { runtimeToReadableBinding } from "dataBinding"
|
||||
import { isJSBinding } from "@budibase/string-templates"
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
import { componentStore } from "stores/builder"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { createEventDispatcher, getContext } from "svelte"
|
||||
import { customPositionHandler } from "."
|
||||
import ComponentSettingsSection from "pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
|
||||
|
||||
export let anchor
|
||||
|
@ -54,25 +55,6 @@
|
|||
|
||||
dispatch("change", nestedComponentInstance)
|
||||
}
|
||||
|
||||
const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
|
||||
let { left, top } = cfg
|
||||
let percentageOffset = 30
|
||||
// left-outside
|
||||
left = anchorBounds.left - eleBounds.width - 18
|
||||
|
||||
// shift up from the anchor, if space allows
|
||||
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
|
||||
let defaultTop = anchorBounds.top - offsetPos
|
||||
|
||||
if (window.innerHeight - defaultTop < eleBounds.height) {
|
||||
top = window.innerHeight - eleBounds.height - 5
|
||||
} else {
|
||||
top = anchorBounds.top - offsetPos
|
||||
}
|
||||
|
||||
return { ...cfg, left, top }
|
||||
}
|
||||
</script>
|
||||
|
||||
<Icon
|
||||
|
@ -104,6 +86,7 @@
|
|||
showPopover={drawers.length === 0}
|
||||
clickOutsideOverride={drawers.length > 0}
|
||||
maxHeight={600}
|
||||
offset={18}
|
||||
handlePostionUpdate={customPositionHandler}
|
||||
>
|
||||
<span class="popover-wrap">
|
|
@ -0,0 +1,18 @@
|
|||
export const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
|
||||
let { left, top, offset } = cfg
|
||||
let percentageOffset = 30
|
||||
// left-outside
|
||||
left = anchorBounds.left - eleBounds.width - (offset || 5)
|
||||
|
||||
// shift up from the anchor, if space allows
|
||||
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
|
||||
let defaultTop = anchorBounds.top - offsetPos
|
||||
|
||||
if (window.innerHeight - defaultTop < eleBounds.height) {
|
||||
top = window.innerHeight - eleBounds.height - 5
|
||||
} else {
|
||||
top = anchorBounds.top - offsetPos
|
||||
}
|
||||
|
||||
return { ...cfg, left, top }
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
|
||||
import { Toggle, Icon } from "@budibase/bbui"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
|
|
|
@ -52,8 +52,8 @@
|
|||
_id: Helpers.uuid(),
|
||||
_component: componentType,
|
||||
_instanceName: `Step ${currentStep + 1}`,
|
||||
title: stepSettings.title ?? defaults.title,
|
||||
buttons: stepSettings.buttons || defaults.buttons,
|
||||
title: stepSettings.title ?? defaults?.title,
|
||||
buttons: stepSettings.buttons || defaults?.buttons,
|
||||
fields: stepSettings.fields,
|
||||
desc: stepSettings.desc,
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
|
||||
import { Toggle, Icon } from "@budibase/bbui"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
|
||||
import { Icon } from "@budibase/bbui"
|
||||
import { setContext } from "svelte"
|
||||
import { writable } from "svelte/store"
|
||||
|
|
|
@ -139,10 +139,22 @@
|
|||
{/each}
|
||||
</div>
|
||||
<div class="search-input">
|
||||
<div class="input-wrapper">
|
||||
<Input bind:value={searchTerm} thin placeholder="Search Icon" />
|
||||
<div class="input-wrapper" style={`width: ${value ? "425" : "510"}px`}>
|
||||
<Input
|
||||
bind:value={searchTerm}
|
||||
on:keyup={event => {
|
||||
if (event.key === "Enter") {
|
||||
searchForIcon()
|
||||
}
|
||||
}}
|
||||
thin
|
||||
placeholder="Search Icon"
|
||||
/>
|
||||
</div>
|
||||
<Button secondary on:click={searchForIcon}>Search</Button>
|
||||
{#if value}
|
||||
<Button primary on:click={() => (value = null)}>Clear</Button>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="page-area">
|
||||
<div class="pager">
|
||||
|
@ -239,6 +251,7 @@
|
|||
flex-flow: row nowrap;
|
||||
width: 100%;
|
||||
padding-right: 15px;
|
||||
gap: 10px;
|
||||
}
|
||||
.input-wrapper {
|
||||
width: 510px;
|
||||
|
|
|
@ -20,17 +20,23 @@
|
|||
export let bindings = []
|
||||
export let componentBindings = []
|
||||
export let nested = false
|
||||
export let highlighted = false
|
||||
export let propertyFocus = false
|
||||
export let info = null
|
||||
export let disableBindings = false
|
||||
export let wide
|
||||
|
||||
$: nullishValue = value == null || value === ""
|
||||
let highlightType
|
||||
|
||||
$: highlightedProp = $builderStore.highlightedSetting
|
||||
$: allBindings = getAllBindings(bindings, componentBindings, nested)
|
||||
$: safeValue = getSafeValue(value, defaultValue, allBindings)
|
||||
$: replaceBindings = val => readableToRuntimeBinding(allBindings, val)
|
||||
|
||||
$: if (!Array.isArray(value)) {
|
||||
highlightType =
|
||||
highlightedProp?.key === key ? `highlighted-${highlightedProp?.type}` : ""
|
||||
}
|
||||
|
||||
const getAllBindings = (bindings, componentBindings, nested) => {
|
||||
if (!nested) {
|
||||
return bindings
|
||||
|
@ -71,16 +77,17 @@
|
|||
}
|
||||
|
||||
onDestroy(() => {
|
||||
if (highlighted) {
|
||||
if (highlightedProp) {
|
||||
builderStore.highlightSetting(null)
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<div
|
||||
class="property-control"
|
||||
id={`${key}-prop-control-wrap`}
|
||||
class={`property-control ${highlightType}`}
|
||||
class:wide={!label || labelHidden || wide === true}
|
||||
class:highlighted={highlighted && nullishValue}
|
||||
class:highlighted={highlightType}
|
||||
class:property-focus={propertyFocus}
|
||||
>
|
||||
{#if label && !labelHidden}
|
||||
|
@ -115,6 +122,16 @@
|
|||
</div>
|
||||
|
||||
<style>
|
||||
.property-control.highlighted.highlighted-info {
|
||||
border-color: var(--spectrum-semantic-informative-color-background);
|
||||
}
|
||||
.property-control.highlighted.highlighted-error {
|
||||
border-color: var(--spectrum-global-color-static-red-600);
|
||||
}
|
||||
.property-control.highlighted.highlighted-warning {
|
||||
border-color: var(--spectrum-global-color-static-orange-700);
|
||||
}
|
||||
|
||||
.property-control {
|
||||
position: relative;
|
||||
display: grid;
|
||||
|
@ -132,6 +149,10 @@
|
|||
.property-control.highlighted {
|
||||
background: var(--spectrum-global-color-gray-300);
|
||||
border-color: var(--spectrum-global-color-static-red-600);
|
||||
margin-top: -3.5px;
|
||||
margin-bottom: -3.5px;
|
||||
padding-bottom: 3.5px;
|
||||
padding-top: 3.5px;
|
||||
}
|
||||
|
||||
.property-control.property-focus :global(input) {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<script>
|
||||
import { Popover, Layout, Heading, Body, Button, Link } from "@budibase/bbui"
|
||||
import { TOURS } from "./tours.js"
|
||||
import { TOURS, getCurrentStepIdx } from "./tours.js"
|
||||
import { goto, layout, isActive } from "@roxi/routify"
|
||||
import { builderStore } from "stores/builder"
|
||||
|
||||
|
@ -20,6 +20,13 @@
|
|||
|
||||
const updateTourStep = (targetStepKey, tourKey) => {
|
||||
if (!tourKey) {
|
||||
tourSteps = null
|
||||
tourStepIdx = null
|
||||
lastStep = null
|
||||
tourStep = null
|
||||
popoverAnchor = null
|
||||
popover = null
|
||||
skipping = false
|
||||
return
|
||||
}
|
||||
if (!tourSteps?.length) {
|
||||
|
@ -78,16 +85,6 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
const getCurrentStepIdx = (steps, tourStepKey) => {
|
||||
if (!steps?.length) {
|
||||
return
|
||||
}
|
||||
if (steps?.length && !tourStepKey) {
|
||||
return 0
|
||||
}
|
||||
return steps.findIndex(step => step.id === tourStepKey)
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if tourKey}
|
||||
|
@ -98,7 +95,9 @@
|
|||
anchor={popoverAnchor}
|
||||
maxWidth={300}
|
||||
dismissible={false}
|
||||
offset={15}
|
||||
offset={12}
|
||||
handlePostionUpdate={tourStep?.positionHandler}
|
||||
customZindex={3}
|
||||
>
|
||||
<div class="tour-content">
|
||||
<Layout noPadding gap="M">
|
||||
|
@ -119,7 +118,7 @@
|
|||
</Body>
|
||||
<div class="tour-footer">
|
||||
<div class="tour-navigation">
|
||||
{#if typeof tourOnSkip === "function"}
|
||||
{#if typeof tourOnSkip === "function" && !lastStep}
|
||||
<Link
|
||||
secondary
|
||||
quiet
|
||||
|
|
|
@ -1,39 +1,63 @@
|
|||
<script>
|
||||
import { tourHandler } from "./tourHandler"
|
||||
import { TOURS } from "./tours"
|
||||
import { TOURSBYSTEP, TOURS, getCurrentStepIdx } from "./tours"
|
||||
import { onMount, onDestroy } from "svelte"
|
||||
import { builderStore } from "stores/builder"
|
||||
|
||||
export let tourStepKey
|
||||
export let stepKeys = []
|
||||
|
||||
let currentTourStep
|
||||
let ready = false
|
||||
let registered = false
|
||||
let handler
|
||||
let registered = {}
|
||||
|
||||
const registerTourNode = (tourKey, stepKey) => {
|
||||
if (ready && !registered && tourKey) {
|
||||
currentTourStep = TOURS[tourKey].steps.find(step => step.id === stepKey)
|
||||
if (!currentTourStep) {
|
||||
return
|
||||
const step = TOURSBYSTEP[stepKey]
|
||||
if (ready && step && !registered[stepKey] && step?.tour === tourKey) {
|
||||
const elem = document.querySelector(step.query)
|
||||
registered[stepKey] = tourHandler(elem, stepKey)
|
||||
}
|
||||
}
|
||||
|
||||
const scrollToStep = () => {
|
||||
let tourStepIdx = getCurrentStepIdx(
|
||||
TOURS[tourKeyWatch]?.steps,
|
||||
tourStepKeyWatch
|
||||
)
|
||||
let currentStep = TOURS[tourKeyWatch]?.steps?.[tourStepIdx]
|
||||
if (currentStep?.scrollIntoView) {
|
||||
let currentNode = $builderStore.tourNodes?.[currentStep.id]
|
||||
if (currentNode) {
|
||||
currentNode.scrollIntoView({ behavior: "smooth", block: "center" })
|
||||
}
|
||||
const elem = document.querySelector(currentTourStep.query)
|
||||
handler = tourHandler(elem, stepKey)
|
||||
registered = true
|
||||
}
|
||||
}
|
||||
|
||||
$: tourKeyWatch = $builderStore.tourKey
|
||||
$: registerTourNode(tourKeyWatch, tourStepKey, ready)
|
||||
$: tourStepKeyWatch = $builderStore.tourStepKey
|
||||
$: if (tourKeyWatch || stepKeys || ready) {
|
||||
stepKeys.forEach(tourStepKey => {
|
||||
registerTourNode(tourKeyWatch, tourStepKey)
|
||||
})
|
||||
}
|
||||
$: scrollToStep(tourKeyWatch, tourStepKeyWatch)
|
||||
|
||||
onMount(() => {
|
||||
ready = true
|
||||
})
|
||||
|
||||
onDestroy(() => {
|
||||
if (handler) {
|
||||
Object.entries(registered).forEach(entry => {
|
||||
const handler = entry[1]
|
||||
const stepKey = entry[0]
|
||||
// Run step destroy, de-register nodes in the builderStore and local cache
|
||||
handler.destroy()
|
||||
}
|
||||
delete registered[stepKey]
|
||||
|
||||
// Check if the step is part of an active tour. End the tour if that is the case
|
||||
const step = TOURSBYSTEP[stepKey]
|
||||
if (step.tour === tourKeyWatch) {
|
||||
builderStore.setTour()
|
||||
}
|
||||
})
|
||||
})
|
||||
</script>
|
||||
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
<div>
|
||||
When faced with a sizable form, consider implementing a multi-step approach to
|
||||
enhance user experience.
|
||||
|
||||
<p>
|
||||
Breaking the form into multiple steps can significantly improve usability by
|
||||
making the process more digestible for your users.
|
||||
</p>
|
||||
</div>
|
|
@ -0,0 +1,17 @@
|
|||
<div>
|
||||
You can use bindings to set the Row ID on your form.
|
||||
<p>
|
||||
This will allow you to pull the correct information into your form and allow
|
||||
you to update!
|
||||
</p>
|
||||
<a href="https://docs.budibase.com/docs/form-block" target="_blank">
|
||||
How to pass a row ID using bindings
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
a {
|
||||
color: inherit;
|
||||
text-decoration: underline;
|
||||
}
|
||||
</style>
|
|
@ -1,3 +1,5 @@
|
|||
export { default as OnboardingData } from "./OnboardingData.svelte"
|
||||
export { default as OnboardingDesign } from "./OnboardingDesign.svelte"
|
||||
export { default as OnboardingPublish } from "./OnboardingPublish.svelte"
|
||||
export { default as NewViewUpdateFormRowId } from "./NewViewUpdateFormRowId.svelte"
|
||||
export { default as NewFormSteps } from "./NewFormSteps.svelte"
|
||||
|
|
|
@ -2,8 +2,15 @@ import { get } from "svelte/store"
|
|||
import { builderStore } from "stores/builder"
|
||||
import { auth } from "stores/portal"
|
||||
import analytics from "analytics"
|
||||
import { OnboardingData, OnboardingDesign, OnboardingPublish } from "./steps"
|
||||
import {
|
||||
OnboardingData,
|
||||
OnboardingDesign,
|
||||
OnboardingPublish,
|
||||
NewViewUpdateFormRowId,
|
||||
NewFormSteps,
|
||||
} from "./steps"
|
||||
import { API } from "api"
|
||||
import { customPositionHandler } from "components/design/settings/controls/EditComponentPopover"
|
||||
|
||||
const ONBOARDING_EVENT_PREFIX = "onboarding"
|
||||
|
||||
|
@ -14,11 +21,26 @@ export const TOUR_STEP_KEYS = {
|
|||
BUILDER_USER_MANAGEMENT: "builder-user-management",
|
||||
BUILDER_AUTOMATION_SECTION: "builder-automation-section",
|
||||
FEATURE_USER_MANAGEMENT: "feature-user-management",
|
||||
BUILDER_FORM_CREATE_STEPS: "builder-form-create-steps",
|
||||
BUILDER_FORM_VIEW_UPDATE_STEPS: "builder-form-view-update-steps",
|
||||
BUILDER_FORM_ROW_ID: "builder-form-row-id",
|
||||
}
|
||||
|
||||
export const TOUR_KEYS = {
|
||||
TOUR_BUILDER_ONBOARDING: "builder-onboarding",
|
||||
FEATURE_ONBOARDING: "feature-onboarding",
|
||||
BUILDER_FORM_CREATE: "builder-form-create",
|
||||
BUILDER_FORM_VIEW_UPDATE: "builder-form-view-update",
|
||||
}
|
||||
|
||||
export const getCurrentStepIdx = (steps, tourStepKey) => {
|
||||
if (!steps?.length) {
|
||||
return
|
||||
}
|
||||
if (steps?.length && !tourStepKey) {
|
||||
return 0
|
||||
}
|
||||
return steps.findIndex(step => step.id === tourStepKey)
|
||||
}
|
||||
|
||||
const endUserOnboarding = async ({ skipped = false } = {}) => {
|
||||
|
@ -37,13 +59,8 @@ const endUserOnboarding = async ({ skipped = false } = {}) => {
|
|||
// Update the cached user
|
||||
await auth.getSelf()
|
||||
|
||||
builderStore.update(state => ({
|
||||
...state,
|
||||
tourNodes: null,
|
||||
tourKey: null,
|
||||
tourStepKey: null,
|
||||
onboarding: false,
|
||||
}))
|
||||
builderStore.endBuilderOnboarding()
|
||||
builderStore.setTour()
|
||||
} catch (e) {
|
||||
console.error("Onboarding failed", e)
|
||||
return false
|
||||
|
@ -52,9 +69,29 @@ const endUserOnboarding = async ({ skipped = false } = {}) => {
|
|||
}
|
||||
}
|
||||
|
||||
const tourEvent = eventKey => {
|
||||
const endTour = async ({ key, skipped = false } = {}) => {
|
||||
const { tours = {} } = get(auth).user
|
||||
tours[key] = new Date().toISOString()
|
||||
|
||||
await API.updateSelf({
|
||||
tours,
|
||||
})
|
||||
|
||||
if (skipped) {
|
||||
tourEvent(key, skipped)
|
||||
}
|
||||
|
||||
// Update the cached user
|
||||
await auth.getSelf()
|
||||
|
||||
// Reset tour state
|
||||
builderStore.setTour()
|
||||
}
|
||||
|
||||
const tourEvent = (eventKey, skipped) => {
|
||||
analytics.captureEvent(`${ONBOARDING_EVENT_PREFIX}:${eventKey}`, {
|
||||
eventSource: EventSource.PORTAL,
|
||||
skipped,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -135,7 +172,71 @@ const getTours = () => {
|
|||
},
|
||||
],
|
||||
},
|
||||
[TOUR_KEYS.BUILDER_FORM_CREATE]: {
|
||||
steps: [
|
||||
{
|
||||
id: TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS,
|
||||
title: "Add multiple steps",
|
||||
layout: NewFormSteps,
|
||||
query: "#steps-prop-control-wrap",
|
||||
onComplete: () => {
|
||||
builderStore.highlightSetting()
|
||||
endTour({ key: TOUR_KEYS.BUILDER_FORM_CREATE })
|
||||
},
|
||||
onLoad: () => {
|
||||
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS)
|
||||
builderStore.highlightSetting("steps", "info")
|
||||
},
|
||||
positionHandler: customPositionHandler,
|
||||
align: "left-outside",
|
||||
},
|
||||
],
|
||||
},
|
||||
[TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE]: {
|
||||
steps: [
|
||||
{
|
||||
id: TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID,
|
||||
title: "Add row ID to update a row",
|
||||
layout: NewViewUpdateFormRowId,
|
||||
query: "#rowId-prop-control-wrap",
|
||||
onLoad: () => {
|
||||
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID)
|
||||
builderStore.highlightSetting("rowId", "info")
|
||||
},
|
||||
positionHandler: customPositionHandler,
|
||||
align: "left-outside",
|
||||
},
|
||||
{
|
||||
id: TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS,
|
||||
title: "Add multiple steps",
|
||||
layout: NewFormSteps,
|
||||
query: "#steps-prop-control-wrap",
|
||||
onComplete: () => {
|
||||
builderStore.highlightSetting()
|
||||
endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE })
|
||||
},
|
||||
onLoad: () => {
|
||||
tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS)
|
||||
builderStore.highlightSetting("steps", "info")
|
||||
},
|
||||
positionHandler: customPositionHandler,
|
||||
align: "left-outside",
|
||||
scrollIntoView: true,
|
||||
},
|
||||
],
|
||||
onSkip: async () => {
|
||||
builderStore.highlightSetting()
|
||||
endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE, skipped: true })
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const TOURS = getTours()
|
||||
export const TOURSBYSTEP = Object.keys(TOURS).reduce((acc, tour) => {
|
||||
TOURS[tour].steps.forEach(element => {
|
||||
acc[element.id] = element
|
||||
acc[element.id]["tour"] = tour
|
||||
})
|
||||
return acc
|
||||
}, {})
|
||||
|
|
|
@ -1146,7 +1146,7 @@ export const getAllStateVariables = () => {
|
|||
"@budibase/standard-components/multistepformblockstep"
|
||||
)
|
||||
|
||||
steps.forEach(step => {
|
||||
steps?.forEach(step => {
|
||||
parseComponentSettings(stepDefinition, step)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -96,7 +96,7 @@
|
|||
const release_date = new Date("2023-03-01T00:00:00.000Z")
|
||||
const onboarded = new Date($auth.user?.onboardedAt)
|
||||
if (onboarded < release_date) {
|
||||
builderStore.startTour(TOUR_KEYS.FEATURE_ONBOARDING)
|
||||
builderStore.setTour(TOUR_KEYS.FEATURE_ONBOARDING)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -144,7 +144,7 @@
|
|||
</span>
|
||||
<Tabs {selected} size="M">
|
||||
{#each $layout.children as { path, title }}
|
||||
<TourWrap tourStepKey={`builder-${title}-section`}>
|
||||
<TourWrap stepKeys={[`builder-${title}-section`]}>
|
||||
<Tab
|
||||
quiet
|
||||
selected={$isActive(path)}
|
||||
|
|
|
@ -16,6 +16,14 @@
|
|||
} from "dataBinding"
|
||||
import { ActionButton, notifications } from "@budibase/bbui"
|
||||
import { capitalise } from "helpers"
|
||||
import TourWrap from "components/portal/onboarding/TourWrap.svelte"
|
||||
import { TOUR_STEP_KEYS } from "components/portal/onboarding/tours.js"
|
||||
|
||||
const {
|
||||
BUILDER_FORM_CREATE_STEPS,
|
||||
BUILDER_FORM_VIEW_UPDATE_STEPS,
|
||||
BUILDER_FORM_ROW_ID,
|
||||
} = TOUR_STEP_KEYS
|
||||
|
||||
const onUpdateName = async value => {
|
||||
try {
|
||||
|
@ -46,7 +54,6 @@
|
|||
|
||||
$: id = $selectedComponent?._id
|
||||
$: id, (section = tabs[0])
|
||||
|
||||
$: componentName = getComponentName(componentInstance)
|
||||
</script>
|
||||
|
||||
|
@ -92,13 +99,21 @@
|
|||
</div>
|
||||
</span>
|
||||
{#if section == "settings"}
|
||||
<ComponentSettingsSection
|
||||
{componentInstance}
|
||||
{componentDefinition}
|
||||
{bindings}
|
||||
{componentBindings}
|
||||
{isScreen}
|
||||
/>
|
||||
<TourWrap
|
||||
stepKeys={[
|
||||
BUILDER_FORM_CREATE_STEPS,
|
||||
BUILDER_FORM_VIEW_UPDATE_STEPS,
|
||||
BUILDER_FORM_ROW_ID,
|
||||
]}
|
||||
>
|
||||
<ComponentSettingsSection
|
||||
{componentInstance}
|
||||
{componentDefinition}
|
||||
{bindings}
|
||||
{componentBindings}
|
||||
{isScreen}
|
||||
/>
|
||||
</TourWrap>
|
||||
{/if}
|
||||
{#if section == "styles"}
|
||||
<DesignSection
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<script>
|
||||
import { helpers } from "@budibase/shared-core"
|
||||
import { DetailSummary, notifications } from "@budibase/bbui"
|
||||
import { componentStore } from "stores/builder"
|
||||
import { componentStore, builderStore } from "stores/builder"
|
||||
import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
|
||||
import ResetFieldsButton from "components/design/settings/controls/ResetFieldsButton.svelte"
|
||||
import EjectBlockButton from "components/design/settings/controls/EjectBlockButton.svelte"
|
||||
|
@ -177,9 +177,7 @@
|
|||
defaultValue={setting.defaultValue}
|
||||
nested={setting.nested}
|
||||
onChange={val => updateSetting(setting, val)}
|
||||
highlighted={$componentStore.highlightedSettingKey ===
|
||||
setting.key}
|
||||
propertyFocus={$componentStore.propertyFocus === setting.key}
|
||||
propertyFocus={$builderStore.propertyFocus === setting.key}
|
||||
info={setting.info}
|
||||
disableBindings={setting.disableBindings}
|
||||
props={{
|
||||
|
|
|
@ -174,7 +174,7 @@
|
|||
} else if (type === "request-add-component") {
|
||||
toggleAddComponent()
|
||||
} else if (type === "highlight-setting") {
|
||||
builderStore.highlightSetting(data.setting)
|
||||
builderStore.highlightSetting(data.setting, "error")
|
||||
|
||||
// Also scroll setting into view
|
||||
const selector = `#${data.setting}-prop-control`
|
||||
|
|
|
@ -3,13 +3,23 @@
|
|||
import DatasourceModal from "./DatasourceModal.svelte"
|
||||
import ScreenRoleModal from "./ScreenRoleModal.svelte"
|
||||
import sanitizeUrl from "helpers/sanitizeUrl"
|
||||
import FormTypeModal from "./FormTypeModal.svelte"
|
||||
import { Modal, notifications } from "@budibase/bbui"
|
||||
import { screenStore, navigationStore, tables } from "stores/builder"
|
||||
import {
|
||||
screenStore,
|
||||
navigationStore,
|
||||
tables,
|
||||
builderStore,
|
||||
} from "stores/builder"
|
||||
import { auth } from "stores/portal"
|
||||
import { get } from "svelte/store"
|
||||
import getTemplates from "templates"
|
||||
import { Roles } from "constants/backend"
|
||||
import { capitalise } from "helpers"
|
||||
import { goto } from "@roxi/routify"
|
||||
import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
|
||||
import formScreen from "templates/formScreen"
|
||||
import rowListScreen from "templates/rowListScreen"
|
||||
|
||||
let mode
|
||||
let pendingScreen
|
||||
|
@ -18,12 +28,18 @@
|
|||
let screenDetailsModal
|
||||
let datasourceModal
|
||||
let screenAccessRoleModal
|
||||
let formTypeModal
|
||||
|
||||
// Cache variables for workflow
|
||||
let screenAccessRole = Roles.BASIC
|
||||
let selectedTemplates = null
|
||||
|
||||
let templates = null
|
||||
let screens = null
|
||||
|
||||
let selectedDatasources = null
|
||||
let blankScreenUrl = null
|
||||
let screenMode = null
|
||||
let formType = null
|
||||
|
||||
// Creates an array of screens, checking and sanitising their URLs
|
||||
const createScreens = async ({ screens, screenAccessRole }) => {
|
||||
|
@ -32,7 +48,7 @@
|
|||
}
|
||||
|
||||
try {
|
||||
let screenId
|
||||
let createdScreens = []
|
||||
|
||||
for (let screen of screens) {
|
||||
// Check we aren't clashing with an existing URL
|
||||
|
@ -56,7 +72,7 @@
|
|||
|
||||
// Create the screen
|
||||
const response = await screenStore.save(screen)
|
||||
screenId = response._id
|
||||
createdScreens.push(response)
|
||||
|
||||
// Add link in layout. We only ever actually create 1 screen now, even
|
||||
// for autoscreens, so it's always safe to do this.
|
||||
|
@ -66,9 +82,7 @@
|
|||
)
|
||||
}
|
||||
|
||||
// Go to new screen
|
||||
$goto(`./${screenId}`)
|
||||
screenStore.select(screenId)
|
||||
return createdScreens
|
||||
} catch (error) {
|
||||
console.error(error)
|
||||
notifications.error("Error creating screens")
|
||||
|
@ -104,13 +118,16 @@
|
|||
// Handler for NewScreenModal
|
||||
export const show = newMode => {
|
||||
mode = newMode
|
||||
selectedTemplates = null
|
||||
templates = null
|
||||
screens = null
|
||||
selectedDatasources = null
|
||||
blankScreenUrl = null
|
||||
screenMode = mode
|
||||
pendingScreen = null
|
||||
screenAccessRole = Roles.BASIC
|
||||
formType = null
|
||||
|
||||
if (mode === "table" || mode === "grid") {
|
||||
if (mode === "table" || mode === "grid" || mode === "form") {
|
||||
datasourceModal.show()
|
||||
} else if (mode === "blank") {
|
||||
let templates = getTemplates($tables.list)
|
||||
|
@ -125,19 +142,26 @@
|
|||
}
|
||||
|
||||
// Handler for DatasourceModal confirmation, move to screen access select
|
||||
const confirmScreenDatasources = async ({ templates }) => {
|
||||
selectedTemplates = templates
|
||||
screenAccessRoleModal.show()
|
||||
const confirmScreenDatasources = async ({ datasources }) => {
|
||||
selectedDatasources = datasources
|
||||
if (screenMode === "form") {
|
||||
formTypeModal.show()
|
||||
} else {
|
||||
screenAccessRoleModal.show()
|
||||
}
|
||||
}
|
||||
|
||||
// Handler for Datasource Screen Creation
|
||||
const completeDatasourceScreenCreation = async () => {
|
||||
const screens = selectedTemplates.map(template => {
|
||||
templates = rowListScreen(selectedDatasources, mode)
|
||||
|
||||
const screens = templates.map(template => {
|
||||
let screenTemplate = template.create()
|
||||
screenTemplate.autoTableId = template.resourceId
|
||||
return screenTemplate
|
||||
})
|
||||
await createScreens({ screens, screenAccessRole })
|
||||
const createdScreens = await createScreens({ screens, screenAccessRole })
|
||||
loadNewScreen(createdScreens)
|
||||
}
|
||||
|
||||
const confirmScreenBlank = async ({ screenUrl }) => {
|
||||
|
@ -154,7 +178,54 @@
|
|||
return
|
||||
}
|
||||
pendingScreen.routing.route = screenUrl
|
||||
await createScreens({ screens: [pendingScreen], screenAccessRole })
|
||||
const createdScreens = await createScreens({
|
||||
screens: [pendingScreen],
|
||||
screenAccessRole,
|
||||
})
|
||||
loadNewScreen(createdScreens)
|
||||
}
|
||||
|
||||
const onConfirmFormType = () => {
|
||||
screenAccessRoleModal.show()
|
||||
}
|
||||
|
||||
const loadNewScreen = createdScreens => {
|
||||
const lastScreen = createdScreens.slice(-1)[0]
|
||||
|
||||
// Go to new screen
|
||||
if (lastScreen?.props?._children.length) {
|
||||
// Focus on the main component for the streen type
|
||||
const mainComponent = lastScreen?.props?._children?.[0]._id
|
||||
$goto(`./${lastScreen._id}/${mainComponent}`)
|
||||
} else {
|
||||
$goto(`./${lastScreen._id}`)
|
||||
}
|
||||
|
||||
screenStore.select(lastScreen._id)
|
||||
}
|
||||
|
||||
const confirmFormScreenCreation = async () => {
|
||||
templates = formScreen(selectedDatasources, { actionType: formType })
|
||||
screens = templates.map(template => {
|
||||
let screenTemplate = template.create()
|
||||
return screenTemplate
|
||||
})
|
||||
const createdScreens = await createScreens({ screens, screenAccessRole })
|
||||
|
||||
if (formType === "Update" || formType === "Create") {
|
||||
const associatedTour =
|
||||
formType === "Update"
|
||||
? TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE
|
||||
: TOUR_KEYS.BUILDER_FORM_CREATE
|
||||
|
||||
const tourRequired = !$auth?.user?.tours?.[associatedTour]
|
||||
if (tourRequired) {
|
||||
builderStore.setTour(associatedTour)
|
||||
}
|
||||
}
|
||||
|
||||
// Go to new screen
|
||||
loadNewScreen(createdScreens)
|
||||
}
|
||||
|
||||
// Submit screen config for creation.
|
||||
|
@ -164,6 +235,8 @@
|
|||
screenUrl: blankScreenUrl,
|
||||
screenAccessRole,
|
||||
})
|
||||
} else if (screenMode === "form") {
|
||||
confirmFormScreenCreation()
|
||||
} else {
|
||||
completeDatasourceScreenCreation()
|
||||
}
|
||||
|
@ -179,19 +252,18 @@
|
|||
</script>
|
||||
|
||||
<Modal bind:this={datasourceModal} autoFocus={false}>
|
||||
<DatasourceModal
|
||||
{mode}
|
||||
onConfirm={confirmScreenDatasources}
|
||||
initialScreens={!selectedTemplates ? [] : [...selectedTemplates]}
|
||||
/>
|
||||
<DatasourceModal {mode} onConfirm={confirmScreenDatasources} />
|
||||
</Modal>
|
||||
|
||||
<Modal bind:this={screenAccessRoleModal}>
|
||||
<ScreenRoleModal
|
||||
onConfirm={confirmScreenCreation}
|
||||
onCancel={roleSelectBack}
|
||||
onConfirm={() => {
|
||||
confirmScreenCreation()
|
||||
}}
|
||||
bind:screenAccessRole
|
||||
onCancel={roleSelectBack}
|
||||
screenUrl={blankScreenUrl}
|
||||
confirmText={screenMode === "form" ? "Confirm" : "Done"}
|
||||
/>
|
||||
</Modal>
|
||||
|
||||
|
@ -201,3 +273,17 @@
|
|||
initialUrl={blankScreenUrl}
|
||||
/>
|
||||
</Modal>
|
||||
|
||||
<Modal bind:this={formTypeModal}>
|
||||
<FormTypeModal
|
||||
onConfirm={onConfirmFormType}
|
||||
onCancel={() => {
|
||||
formTypeModal.hide()
|
||||
datasourceModal.show()
|
||||
}}
|
||||
on:select={e => {
|
||||
formType = e.detail
|
||||
}}
|
||||
type={formType}
|
||||
/>
|
||||
</Modal>
|
||||
|
|
|
@ -4,37 +4,33 @@
|
|||
import ICONS from "components/backend/DatasourceNavigator/icons"
|
||||
import { IntegrationNames } from "constants"
|
||||
import { onMount } from "svelte"
|
||||
import rowListScreen from "templates/rowListScreen"
|
||||
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte"
|
||||
|
||||
export let mode
|
||||
export let onCancel
|
||||
export let onConfirm
|
||||
export let initialScreens = []
|
||||
|
||||
let selectedScreens = [...initialScreens]
|
||||
let selectedSources = []
|
||||
|
||||
$: filteredSources = $datasources.list?.filter(datasource => {
|
||||
return datasource.source !== IntegrationNames.REST && datasource["entities"]
|
||||
})
|
||||
|
||||
const toggleSelection = datasource => {
|
||||
const { resourceId } = datasource
|
||||
if (selectedScreens.find(s => s.resourceId === resourceId)) {
|
||||
selectedScreens = selectedScreens.filter(
|
||||
screen => screen.resourceId !== resourceId
|
||||
const exists = selectedSources.find(
|
||||
d => d.resourceId === datasource.resourceId
|
||||
)
|
||||
if (exists) {
|
||||
selectedSources = selectedSources.filter(
|
||||
d => d.resourceId === datasource.resourceId
|
||||
)
|
||||
} else {
|
||||
selectedScreens = [
|
||||
...selectedScreens,
|
||||
rowListScreen([datasource], mode)[0],
|
||||
]
|
||||
selectedSources = [...selectedSources, datasource]
|
||||
}
|
||||
}
|
||||
|
||||
const confirmDatasourceSelection = async () => {
|
||||
await onConfirm({
|
||||
templates: selectedScreens,
|
||||
datasources: selectedSources,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -54,7 +50,7 @@
|
|||
cancelText="Back"
|
||||
onConfirm={confirmDatasourceSelection}
|
||||
{onCancel}
|
||||
disabled={!selectedScreens.length}
|
||||
disabled={!selectedSources.length}
|
||||
size="L"
|
||||
>
|
||||
<Body size="S">
|
||||
|
@ -85,8 +81,8 @@
|
|||
resourceId: table._id,
|
||||
type: "table",
|
||||
}}
|
||||
{@const selected = selectedScreens.find(
|
||||
screen => screen.resourceId === tableDS.resourceId
|
||||
{@const selected = selectedSources.find(
|
||||
datasource => datasource.resourceId === tableDS.resourceId
|
||||
)}
|
||||
<DatasourceTemplateRow
|
||||
on:click={() => toggleSelection(tableDS)}
|
||||
|
@ -103,7 +99,7 @@
|
|||
tableId: view.tableId,
|
||||
type: "viewV2",
|
||||
}}
|
||||
{@const selected = selectedScreens.find(
|
||||
{@const selected = selectedSources.find(
|
||||
x => x.resourceId === viewDS.resourceId
|
||||
)}
|
||||
<DatasourceTemplateRow
|
||||
|
|
|
@ -0,0 +1,123 @@
|
|||
<script>
|
||||
import { ModalContent, Layout, Body, Icon } from "@budibase/bbui"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
export let onCancel = () => {}
|
||||
export let onConfirm = () => {}
|
||||
export let type
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
</script>
|
||||
|
||||
<span>
|
||||
<ModalContent
|
||||
title="Select form type"
|
||||
confirmText="Done"
|
||||
cancelText="Back"
|
||||
{onConfirm}
|
||||
{onCancel}
|
||||
disabled={!type}
|
||||
size="L"
|
||||
>
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<Layout noPadding gap="S">
|
||||
<div
|
||||
class="form-type"
|
||||
class:selected={type === "Create"}
|
||||
on:click={() => {
|
||||
dispatch("select", "Create")
|
||||
}}
|
||||
>
|
||||
<div class="form-type-wrap">
|
||||
<div class="form-type-content">
|
||||
<Body noPadding>Create a new row</Body>
|
||||
<Body size="S">
|
||||
For capturing and storing new data from your users
|
||||
</Body>
|
||||
</div>
|
||||
{#if type === "Create"}
|
||||
<span class="form-type-check">
|
||||
<Icon size="S" name="CheckmarkCircle" />
|
||||
</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
class="form-type"
|
||||
class:selected={type === "Update"}
|
||||
on:click={() => {
|
||||
dispatch("select", "Update")
|
||||
}}
|
||||
>
|
||||
<div class="form-type-wrap">
|
||||
<div class="form-type-content">
|
||||
<Body noPadding>Update an existing row</Body>
|
||||
<Body size="S">For viewing and updating existing data</Body>
|
||||
</div>
|
||||
{#if type === "Update"}
|
||||
<span class="form-type-check">
|
||||
<Icon size="S" name="CheckmarkCircle" />
|
||||
</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
class="form-type"
|
||||
class:selected={type === "View"}
|
||||
on:click={() => {
|
||||
dispatch("select", "View")
|
||||
}}
|
||||
>
|
||||
<div class="form-type-wrap">
|
||||
<div class="form-type-content">
|
||||
<Body noPadding>View an existing row</Body>
|
||||
<Body size="S">For a read only view of your data</Body>
|
||||
</div>
|
||||
{#if type === "View"}
|
||||
<span class="form-type-check">
|
||||
<Icon size="S" name="CheckmarkCircle" />
|
||||
</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</Layout>
|
||||
</ModalContent>
|
||||
</span>
|
||||
|
||||
<style>
|
||||
.form-type {
|
||||
cursor: pointer;
|
||||
gap: var(--spacing-s);
|
||||
padding: var(--spacing-m) var(--spacing-xl);
|
||||
background: var(--spectrum-alias-background-color-secondary);
|
||||
transition: 0.3s all;
|
||||
border: 1px solid var(--spectrum-global-color-gray-300);
|
||||
border-radius: 4px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.selected,
|
||||
.form-type:hover {
|
||||
background: var(--spectrum-alias-background-color-tertiary);
|
||||
}
|
||||
.form-type-wrap {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.form-type :global(p:nth-child(2)) {
|
||||
color: var(--grey-6);
|
||||
}
|
||||
.form-type-check {
|
||||
margin-left: auto;
|
||||
}
|
||||
.form-type-check :global(.spectrum-Icon) {
|
||||
color: var(--spectrum-global-color-green-600);
|
||||
}
|
||||
.form-type-content {
|
||||
gap: var(--spacing-s);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
</style>
|
|
@ -9,6 +9,7 @@
|
|||
export let onCancel
|
||||
export let screenUrl
|
||||
export let screenAccessRole
|
||||
export let confirmText = "Done"
|
||||
|
||||
let error
|
||||
|
||||
|
@ -40,7 +41,7 @@
|
|||
|
||||
<ModalContent
|
||||
title="Access"
|
||||
confirmText="Done"
|
||||
{confirmText}
|
||||
cancelText="Back"
|
||||
{onConfirm}
|
||||
{onCancel}
|
||||
|
|
[Binary image changes: three existing screenshots moved with sizes unchanged (32 KiB, 24 KiB, 26 KiB); one new 22 KiB image added (images/form.png, per the import changes in the hunk below)]
|
@ -1,9 +1,10 @@
|
|||
<script>
|
||||
import { Body } from "@budibase/bbui"
|
||||
import CreationPage from "components/common/CreationPage.svelte"
|
||||
import blankImage from "./blank.png"
|
||||
import tableImage from "./table.png"
|
||||
import gridImage from "./grid.png"
|
||||
import blankImage from "./images/blank.png"
|
||||
import tableImage from "./images/table.png"
|
||||
import gridImage from "./images/grid.png"
|
||||
import formImage from "./images/form.png"
|
||||
import CreateScreenModal from "./CreateScreenModal.svelte"
|
||||
import { screenStore } from "stores/builder"
|
||||
|
||||
|
@ -56,6 +57,16 @@
|
|||
<Body size="XS">View and manipulate rows on a grid</Body>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card" on:click={() => createScreenModal.show("form")}>
|
||||
<div class="image">
|
||||
<img alt="" src={formImage} />
|
||||
</div>
|
||||
<div class="text">
|
||||
<Body size="S">Form</Body>
|
||||
<Body size="XS">Capture data from your users</Body>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</CreationPage>
|
||||
</div>
|
||||
|
|
|
@ -29,6 +29,7 @@ export const INITIAL_APP_META_STATE = {
|
|||
initialised: false,
|
||||
hasAppPackage: false,
|
||||
usedPlugins: null,
|
||||
automations: {},
|
||||
routes: {},
|
||||
}
|
||||
|
||||
|
@ -63,6 +64,7 @@ export class AppMetaStore extends BudiStore {
|
|||
...app.features,
|
||||
},
|
||||
initialised: true,
|
||||
automations: app.automations || {},
|
||||
hasAppPackage: true,
|
||||
}))
|
||||
}
|
||||
|
|
|
@ -2,12 +2,11 @@ import { get } from "svelte/store"
|
|||
import { createBuilderWebsocket } from "./websocket.js"
|
||||
import { BuilderSocketEvent } from "@budibase/shared-core"
|
||||
import BudiStore from "./BudiStore"
|
||||
import { previewStore } from "./preview.js"
|
||||
import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
|
||||
|
||||
export const INITIAL_BUILDER_STATE = {
|
||||
previousTopNavPath: {},
|
||||
highlightedSettingKey: null,
|
||||
highlightedSetting: null,
|
||||
propertyFocus: null,
|
||||
builderSidePanel: false,
|
||||
onboarding: false,
|
||||
|
@ -26,7 +25,6 @@ export class BuilderStore extends BudiStore {
|
|||
this.reset = this.reset.bind(this)
|
||||
this.highlightSetting = this.highlightSetting.bind(this)
|
||||
this.propertyFocus = this.propertyFocus.bind(this)
|
||||
this.hover = this.hover.bind(this)
|
||||
this.hideBuilderSidePanel = this.hideBuilderSidePanel.bind(this)
|
||||
this.showBuilderSidePanel = this.showBuilderSidePanel.bind(this)
|
||||
this.setPreviousTopNavPath = this.setPreviousTopNavPath.bind(this)
|
||||
|
@ -58,10 +56,10 @@ export class BuilderStore extends BudiStore {
|
|||
this.websocket = null
|
||||
}
|
||||
|
||||
highlightSetting(key) {
|
||||
highlightSetting(key, type) {
|
||||
this.update(state => ({
|
||||
...state,
|
||||
highlightedSettingKey: key,
|
||||
highlightedSetting: key ? { key, type: type || "info" } : null,
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -135,25 +133,20 @@ export class BuilderStore extends BudiStore {
|
|||
}))
|
||||
}
|
||||
|
||||
startTour(tourKey) {
|
||||
endBuilderOnboarding() {
|
||||
this.update(state => ({
|
||||
...state,
|
||||
tourKey: tourKey,
|
||||
onboarding: false,
|
||||
}))
|
||||
}
|
||||
|
||||
hover(componentId, notifyClient = true) {
|
||||
const store = get(this.store)
|
||||
if (componentId === store.hoveredComponentId) {
|
||||
return
|
||||
}
|
||||
this.update(state => {
|
||||
state.hoveredComponentId = componentId
|
||||
return state
|
||||
})
|
||||
if (notifyClient) {
|
||||
previewStore.sendEvent("hover-component", componentId)
|
||||
}
|
||||
setTour(tourKey) {
|
||||
this.update(state => ({
|
||||
...state,
|
||||
tourStepKey: null,
|
||||
tourNodes: null,
|
||||
tourKey: tourKey,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
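The builder store change above replaces the old highlightedSettingKey string with a highlightedSetting object that also carries a severity, defaulting to "info" and clearing to null when no key is passed. A standalone TypeScript sketch of the new shape, outside the Svelte store wrapper and purely for illustration:

type HighlightType = "info" | "error"
interface HighlightedSetting {
  key: string
  type: HighlightType
}

let highlightedSetting: HighlightedSetting | null = null

// mirrors highlightSetting(key, type) from the diff above
function highlightSetting(key?: string, type?: HighlightType) {
  highlightedSetting = key ? { key, type: type || "info" } : null
}

highlightSetting("url", "error") // { key: "url", type: "error" }
highlightSetting("url")          // { key: "url", type: "info" } (default)
highlightSetting()               // null - highlight cleared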
@ -147,6 +147,12 @@ export function createTablesStore() {
|
|||
if (indexes) {
|
||||
draft.indexes = indexes
|
||||
}
|
||||
// Add object to indicate if column is being added
|
||||
if (draft.schema[field.name] === undefined) {
|
||||
draft._add = {
|
||||
name: field.name,
|
||||
}
|
||||
}
|
||||
draft.schema = {
|
||||
...draft.schema,
|
||||
[field.name]: cloneDeep(field),
|
||||
|
|
|
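The tables store hunk above tags the draft table with an _add marker whenever the saved field name is not already present in the schema, so the backend can distinguish a newly added column from an edit to an existing one. A small sketch of that check in isolation; the draft and field shapes are simplified assumptions based on the surrounding diff:

interface FieldSchema { name: string; type?: string }
interface DraftTable {
  schema: Record<string, FieldSchema>
  _add?: { name: string }
}

// mark the draft when the column does not exist yet, then merge the field in
function saveField(draft: DraftTable, field: FieldSchema): DraftTable {
  if (draft.schema[field.name] === undefined) {
    draft._add = { name: field.name }
  }
  draft.schema = { ...draft.schema, [field.name]: { ...field } } // cloneDeep in the real store
  return draft
}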
@ -88,14 +88,42 @@ describe("Builder store", () => {
|
|||
)
|
||||
})
|
||||
|
||||
it("Sync a highlighted setting key to state", ctx => {
|
||||
expect(ctx.test.store.highlightedSettingKey).toBeNull()
|
||||
it("Sync a highlighted setting key to state. Default to info type", ctx => {
|
||||
expect(ctx.test.store.highlightedSetting).toBeNull()
|
||||
|
||||
ctx.test.builderStore.highlightSetting("testing")
|
||||
|
||||
expect(ctx.test.store).toStrictEqual({
|
||||
...INITIAL_BUILDER_STATE,
|
||||
highlightedSettingKey: "testing",
|
||||
highlightedSetting: {
|
||||
key: "testing",
|
||||
type: "info",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("Sync a highlighted setting key to state. Use provided type", ctx => {
|
||||
expect(ctx.test.store.highlightedSetting).toBeNull()
|
||||
|
||||
ctx.test.builderStore.highlightSetting("testing", "error")
|
||||
|
||||
expect(ctx.test.store).toStrictEqual({
|
||||
...INITIAL_BUILDER_STATE,
|
||||
highlightedSetting: {
|
||||
key: "testing",
|
||||
type: "error",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("Sync a highlighted setting key to state. Unset when no value is passed", ctx => {
|
||||
expect(ctx.test.store.highlightedSetting).toBeNull()
|
||||
|
||||
ctx.test.builderStore.highlightSetting("testing", "error")
|
||||
ctx.test.builderStore.highlightSetting()
|
||||
|
||||
expect(ctx.test.store).toStrictEqual({
|
||||
...INITIAL_BUILDER_STATE,
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
import { Screen } from "./Screen"
|
||||
import { Component } from "./Component"
|
||||
import sanitizeUrl from "helpers/sanitizeUrl"
|
||||
|
||||
export const FORM_TEMPLATE = "FORM_TEMPLATE"
|
||||
export const formUrl = datasource => sanitizeUrl(`/${datasource.label}-form`)
|
||||
|
||||
// Mode not really necessary
|
||||
export default function (datasources, config) {
|
||||
if (!Array.isArray(datasources)) {
|
||||
return []
|
||||
}
|
||||
return datasources.map(datasource => {
|
||||
return {
|
||||
name: `${datasource.label} - Form`,
|
||||
create: () => createScreen(datasource, config),
|
||||
id: FORM_TEMPLATE,
|
||||
resourceId: datasource.resourceId,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const generateMultistepFormBlock = (dataSource, { actionType } = {}) => {
|
||||
const multistepFormBlock = new Component(
|
||||
"@budibase/standard-components/multistepformblock"
|
||||
)
|
||||
multistepFormBlock
|
||||
.customProps({
|
||||
actionType,
|
||||
dataSource,
|
||||
steps: [{}],
|
||||
})
|
||||
.instanceName(`${dataSource.label} - Multistep Form block`)
|
||||
return multistepFormBlock
|
||||
}
|
||||
|
||||
const createScreen = (datasource, config) => {
|
||||
return new Screen()
|
||||
.route(formUrl(datasource))
|
||||
.instanceName(`${datasource.label} - Form`)
|
||||
.addChild(generateMultistepFormBlock(datasource, config))
|
||||
.json()
|
||||
}
|
|
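The new formScreen template above produces one template entry per datasource, and each entry builds a screen whose only child is a multi-step form block routed at a sanitised "/<label>-form" URL. A hedged usage sketch; the import path, the selectedDatasources value and the actionType config are assumptions for illustration:

import formScreen, { formUrl, FORM_TEMPLATE } from "./formScreen" // path assumed

const selectedDatasources = [{ label: "Persons", resourceId: "table_persons" }]

// build "Create" form templates for the selected datasources
const templates = formScreen(selectedDatasources, { actionType: "Create" })

templates.forEach(template => {
  // each entry carries the shared template id plus the datasource it targets
  console.log(template.id === FORM_TEMPLATE, template.resourceId)
  console.log(formUrl(selectedDatasources[0])) // sanitised "/Persons-form" style route
  console.log(template.create()) // Screen JSON containing a single multistepformblock
})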
@ -1,7 +1,11 @@
|
|||
import rowListScreen from "./rowListScreen"
|
||||
import createFromScratchScreen from "./createFromScratchScreen"
|
||||
import formScreen from "./formScreen"
|
||||
|
||||
const allTemplates = datasources => [...rowListScreen(datasources)]
|
||||
const allTemplates = datasources => [
|
||||
...rowListScreen(datasources),
|
||||
...formScreen(datasources),
|
||||
]
|
||||
|
||||
// Allows us to apply common behaviour to all create() functions
|
||||
const createTemplateOverride = template => () => {
|
||||
|
@ -19,6 +23,7 @@ export default datasources => {
|
|||
})
|
||||
const fromScratch = enrichTemplate(createFromScratchScreen)
|
||||
const tableTemplates = allTemplates(datasources).map(enrichTemplate)
|
||||
|
||||
return [
|
||||
fromScratch,
|
||||
...tableTemplates.sort((templateA, templateB) => {
|
||||
|
|
|
@ -525,6 +525,38 @@
|
|||
"barTitle": "Disable button",
|
||||
"key": "disabled"
|
||||
},
|
||||
{
|
||||
"type": "icon",
|
||||
"label": "Icon",
|
||||
"key": "icon"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Gap",
|
||||
"key": "gap",
|
||||
"showInBar": true,
|
||||
"barStyle": "picker",
|
||||
"dependsOn": "icon",
|
||||
"options": [
|
||||
{
|
||||
"label": "None",
|
||||
"value": "N"
|
||||
},
|
||||
{
|
||||
"label": "Small",
|
||||
"value": "S"
|
||||
},
|
||||
{
|
||||
"label": "Medium",
|
||||
"value": "M"
|
||||
},
|
||||
{
|
||||
"label": "Large",
|
||||
"value": "L"
|
||||
}
|
||||
],
|
||||
"defaultValue": "M"
|
||||
},
|
||||
{
|
||||
"type": "event",
|
||||
"label": "On click",
|
||||
|
|
|
@ -13,9 +13,10 @@
|
|||
export let size = "M"
|
||||
export let type = "cta"
|
||||
export let quiet = false
|
||||
export let icon = null
|
||||
export let gap = "M"
|
||||
|
||||
// For internal use only for now - not defined in the manifest
|
||||
export let icon = null
|
||||
export let active = false
|
||||
|
||||
const handleOnClick = async () => {
|
||||
|
@ -47,7 +48,7 @@
|
|||
|
||||
{#key $component.editing}
|
||||
<button
|
||||
class={`spectrum-Button spectrum-Button--size${size} spectrum-Button--${type}`}
|
||||
class={`spectrum-Button spectrum-Button--size${size} spectrum-Button--${type} gap-${gap}`}
|
||||
class:spectrum-Button--quiet={quiet}
|
||||
disabled={disabled || handlingOnClick}
|
||||
use:styleable={$component.styles}
|
||||
|
@ -58,15 +59,7 @@
|
|||
class:active
|
||||
>
|
||||
{#if icon}
|
||||
<svg
|
||||
class:hasText={componentText?.length > 0}
|
||||
class="spectrum-Icon spectrum-Icon--size{size.toUpperCase()}"
|
||||
focusable="false"
|
||||
aria-hidden="true"
|
||||
aria-label={icon}
|
||||
>
|
||||
<use xlink:href="#spectrum-icon-18-{icon}" />
|
||||
</svg>
|
||||
<i class="{icon} {size}" />
|
||||
{/if}
|
||||
{componentText}
|
||||
</button>
|
||||
|
@ -92,4 +85,13 @@
|
|||
.active {
|
||||
color: var(--spectrum-global-color-blue-600);
|
||||
}
|
||||
.gap-S {
|
||||
gap: 8px;
|
||||
}
|
||||
.gap-M {
|
||||
gap: 16px;
|
||||
}
|
||||
.gap-L {
|
||||
gap: 32px;
|
||||
}
|
||||
</style>
|
||||
|
|
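The button changes above add a Gap option to the manifest (None, Small, Medium, Large), thread it through the button block as a prop, and interpolate it into the class list so the gap-S/M/L rules below apply. A tiny illustration of the mapping, with values taken from this diff:

// manifest value -> rendered class -> CSS gap
//   "S" -> gap-S -> 8px,  "M" -> gap-M -> 16px,  "L" -> gap-L -> 32px
//   "N" -> gap-N has no rule, so the button keeps its default spacing
const gap = "M"
const classes = `spectrum-Button spectrum-Button--sizeM spectrum-Button--cta gap-${gap}`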
|
@ -20,7 +20,7 @@
|
|||
wrap: true,
|
||||
}}
|
||||
>
|
||||
{#each buttons as { text, type, quiet, disabled, onClick, size }}
|
||||
{#each buttons as { text, type, quiet, disabled, onClick, size, icon, gap }}
|
||||
<BlockComponent
|
||||
type="button"
|
||||
props={{
|
||||
|
@ -29,6 +29,8 @@
|
|||
type,
|
||||
quiet,
|
||||
disabled,
|
||||
icon,
|
||||
gap,
|
||||
size: size || "M",
|
||||
}}
|
||||
/>
|
||||
|
|
|
@ -92,9 +92,9 @@
|
|||
{#if schemaLoaded}
|
||||
<Button
|
||||
onClick={openEditor}
|
||||
icon="Properties"
|
||||
icon="ri-filter-3-line"
|
||||
text="Filter"
|
||||
{size}
|
||||
size="XL"
|
||||
type="secondary"
|
||||
quiet
|
||||
active={filters?.length > 0}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<script>
|
||||
import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
|
||||
import { fetchData, Utils } from "@budibase/frontend-core"
|
||||
import { getContext } from "svelte"
|
||||
import { getContext, onMount } from "svelte"
|
||||
import Field from "./Field.svelte"
|
||||
import { FieldTypes } from "../../../constants"
|
||||
|
||||
|
@ -28,6 +28,7 @@
|
|||
let tableDefinition
|
||||
let searchTerm
|
||||
let open
|
||||
let initialValue
|
||||
|
||||
$: type =
|
||||
datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
|
||||
|
@ -109,7 +110,11 @@
|
|||
}
|
||||
|
||||
$: forceFetchRows(filter)
|
||||
$: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
|
||||
$: debouncedFetchRows(
|
||||
searchTerm,
|
||||
primaryDisplay,
|
||||
initialValue || defaultValue
|
||||
)
|
||||
|
||||
const forceFetchRows = async () => {
|
||||
// if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
|
||||
|
@ -127,9 +132,13 @@
|
|||
if (allRowsFetched || !primaryDisplay) {
|
||||
return
|
||||
}
|
||||
if (defaultVal && !optionsObj[defaultVal]) {
|
||||
// default values may arrive as a comma separated string - normalise to an array
|
||||
if (defaultVal && !Array.isArray(defaultVal)) {
|
||||
defaultVal = defaultVal.split(",")
|
||||
}
|
||||
if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
|
||||
await fetch.update({
|
||||
query: { equal: { _id: defaultVal } },
|
||||
query: { oneOf: { _id: defaultVal } },
|
||||
})
|
||||
}
|
||||
|
||||
|
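The hunk above changes how relationship defaults are resolved: a comma separated default is first normalised into an array, and the lookup switches from an equal filter on a single _id to a oneOf filter so every missing option is fetched in one query. A hedged sketch of that normalisation; the fetch.update call shape is taken from the diff, the rest is illustrative:

function normaliseDefault(defaultVal?: string | string[]): string[] {
  if (!defaultVal) {
    return []
  }
  // a comma separated string becomes an array of row IDs
  return Array.isArray(defaultVal) ? defaultVal : defaultVal.split(",")
}

async function ensureDefaultsLoaded(
  defaultVal: string | string[] | undefined,
  optionsObj: Record<string, unknown>,
  fetch: { update: (opts: { query: unknown }) => Promise<void> }
) {
  const ids = normaliseDefault(defaultVal)
  // only refetch when at least one default row is missing from the loaded options
  if (ids.length && ids.some(id => !optionsObj[id])) {
    await fetch.update({ query: { oneOf: { _id: ids } } })
  }
}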
@ -202,6 +211,16 @@
|
|||
fetch.nextPage()
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
// if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
|
||||
if (fieldState?.value) {
|
||||
initialValue =
|
||||
fieldSchema?.relationshipType !== "one-to-many"
|
||||
? flatten(fieldState?.value) ?? []
|
||||
: flatten(fieldState?.value)?.[0]
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<Field
|
||||
|
|
|
@ -59,13 +59,13 @@
|
|||
isReadonly: () => readonly,
|
||||
getType: () => column.schema.type,
|
||||
getValue: () => row[column.name],
|
||||
setValue: (value, options = { save: true }) => {
|
||||
setValue: (value, options = { apply: true }) => {
|
||||
validation.actions.setError(cellId, null)
|
||||
updateValue({
|
||||
rowId: row._id,
|
||||
column: column.name,
|
||||
value,
|
||||
save: options?.save,
|
||||
apply: options?.apply,
|
||||
})
|
||||
},
|
||||
}
|
||||
|
|
|
@ -217,14 +217,14 @@
|
|||
const type = $focusedCellAPI.getType()
|
||||
if (type === "number" && keyCodeIsNumber(keyCode)) {
|
||||
// Update the value locally but don't save it yet
|
||||
$focusedCellAPI.setValue(parseInt(key), { save: false })
|
||||
$focusedCellAPI.setValue(parseInt(key), { apply: false })
|
||||
$focusedCellAPI.focus()
|
||||
} else if (
|
||||
["string", "barcodeqr", "longform"].includes(type) &&
|
||||
(keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode))
|
||||
) {
|
||||
// Update the value locally but don't save it yet
|
||||
$focusedCellAPI.setValue(key, { save: false })
|
||||
$focusedCellAPI.setValue(key, { apply: false })
|
||||
$focusedCellAPI.focus()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -327,29 +327,31 @@ export const createActions = context => {
|
|||
get(fetch)?.getInitialData()
|
||||
}
|
||||
|
||||
// Patches a row with some changes
|
||||
const updateRow = async (rowId, changes, options = { save: true }) => {
|
||||
// Checks if a changeset for a row actually mutates the row or not
|
||||
const changesAreValid = (row, changes) => {
|
||||
const columns = Object.keys(changes || {})
|
||||
if (!row || !columns.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Ensure there is at least 1 column that creates a difference
|
||||
return columns.some(column => row[column] !== changes[column])
|
||||
}
|
||||
|
||||
// Patches a row with some changes in local state, and returns whether a
|
||||
// valid pending change was made or not
|
||||
const stashRowChanges = (rowId, changes) => {
|
||||
const $rows = get(rows)
|
||||
const $rowLookupMap = get(rowLookupMap)
|
||||
const index = $rowLookupMap[rowId]
|
||||
const row = $rows[index]
|
||||
if (index == null || !Object.keys(changes || {}).length) {
|
||||
return
|
||||
|
||||
// Check this is a valid change
|
||||
if (!row || !changesAreValid(row, changes)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Abandon if no changes
|
||||
let same = true
|
||||
for (let column of Object.keys(changes)) {
|
||||
if (row[column] !== changes[column]) {
|
||||
same = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if (same) {
|
||||
return
|
||||
}
|
||||
|
||||
// Immediately update state so that the change is reflected
|
||||
// Add change to cache
|
||||
rowChangeCache.update(state => ({
|
||||
...state,
|
||||
[rowId]: {
|
||||
|
@ -357,26 +359,30 @@ export const createActions = context => {
|
|||
...changes,
|
||||
},
|
||||
}))
|
||||
return true
|
||||
}
|
||||
|
||||
// Stop here if we don't want to persist the change
|
||||
if (!options?.save) {
|
||||
// Saves any pending changes to a row
|
||||
const applyRowChanges = async rowId => {
|
||||
const $rows = get(rows)
|
||||
const $rowLookupMap = get(rowLookupMap)
|
||||
const index = $rowLookupMap[rowId]
|
||||
const row = $rows[index]
|
||||
if (row == null) {
|
||||
return
|
||||
}
|
||||
|
||||
// Save change
|
||||
try {
|
||||
inProgressChanges.update(state => ({
|
||||
...state,
|
||||
[rowId]: true,
|
||||
}))
|
||||
// Mark as in progress
|
||||
inProgressChanges.update(state => ({ ...state, [rowId]: true }))
|
||||
|
||||
// Update row
|
||||
const saved = await datasource.actions.updateRow({
|
||||
...cleanRow(row),
|
||||
...get(rowChangeCache)[rowId],
|
||||
})
|
||||
const changes = get(rowChangeCache)[rowId]
|
||||
const newRow = { ...cleanRow(row), ...changes }
|
||||
const saved = await datasource.actions.updateRow(newRow)
|
||||
|
||||
// Update state after a successful change
|
||||
// Update row state after a successful change
|
||||
if (saved?._id) {
|
||||
rows.update(state => {
|
||||
state[index] = saved
|
||||
|
@ -386,6 +392,8 @@ export const createActions = context => {
|
|||
// Handle users table edge case
|
||||
await refreshRow(saved.id)
|
||||
}
|
||||
|
||||
// Wipe row change cache now that we've saved the row
|
||||
rowChangeCache.update(state => {
|
||||
delete state[rowId]
|
||||
return state
|
||||
|
@ -393,15 +401,17 @@ export const createActions = context => {
|
|||
} catch (error) {
|
||||
handleValidationError(rowId, error)
|
||||
}
|
||||
inProgressChanges.update(state => ({
|
||||
...state,
|
||||
[rowId]: false,
|
||||
}))
|
||||
|
||||
// Mark as completed
|
||||
inProgressChanges.update(state => ({ ...state, [rowId]: false }))
|
||||
}
|
||||
|
||||
// Updates a value of a row
|
||||
const updateValue = async ({ rowId, column, value, save = true }) => {
|
||||
return await updateRow(rowId, { [column]: value }, { save })
|
||||
const updateValue = async ({ rowId, column, value, apply = true }) => {
|
||||
const success = stashRowChanges(rowId, { [column]: value })
|
||||
if (success && apply) {
|
||||
await applyRowChanges(rowId)
|
||||
}
|
||||
}
|
||||
|
||||
// Deletes an array of rows
|
||||
|
@ -411,9 +421,7 @@ export const createActions = context => {
|
|||
}
|
||||
|
||||
// Actually delete rows
|
||||
rowsToDelete.forEach(row => {
|
||||
delete row.__idx
|
||||
})
|
||||
rowsToDelete.forEach(row => delete row.__idx)
|
||||
await datasource.actions.deleteRows(rowsToDelete)
|
||||
|
||||
// Update state
|
||||
|
@ -433,7 +441,7 @@ export const createActions = context => {
|
|||
newRow = newRows[i]
|
||||
|
||||
// Ensure we have a unique _id.
|
||||
// This means generating one for non DS+, overriting any that may already
|
||||
// This means generating one for non DS+, overwriting any that may already
|
||||
// exist as we cannot allow duplicates.
|
||||
if (!$isDatasourcePlus) {
|
||||
newRow._id = Helpers.uuid()
|
||||
|
@ -494,7 +502,7 @@ export const createActions = context => {
|
|||
duplicateRow,
|
||||
getRow,
|
||||
updateValue,
|
||||
updateRow,
|
||||
applyRowChanges,
|
||||
deleteRows,
|
||||
hasRow,
|
||||
loadNextPage,
|
||||
|
@ -508,7 +516,14 @@ export const createActions = context => {
|
|||
}
|
||||
|
||||
export const initialise = context => {
|
||||
const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
|
||||
const {
|
||||
rowChangeCache,
|
||||
inProgressChanges,
|
||||
previousFocusedRowId,
|
||||
previousFocusedCellId,
|
||||
rows,
|
||||
validation,
|
||||
} = context
|
||||
|
||||
// Wipe the row change cache when changing row
|
||||
previousFocusedRowId.subscribe(id => {
|
||||
|
@ -519,4 +534,15 @@ export const initialise = context => {
|
|||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Ensure any unsaved changes are saved when changing cell
|
||||
previousFocusedCellId.subscribe(async id => {
|
||||
const rowId = id?.split("-")[0]
|
||||
const hasErrors = validation.actions.rowHasErrors(rowId)
|
||||
const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0
|
||||
const isSavingChanges = get(inProgressChanges)[rowId]
|
||||
if (rowId && !hasErrors && hasChanges && !isSavingChanges) {
|
||||
await rows.actions.applyRowChanges(rowId)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
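Taken together, the rows store changes above split the old save-on-every-edit path in two: stashRowChanges only records a pending change in rowChangeCache and reports whether it actually mutates the row, applyRowChanges persists the cached changes and clears them, and updateValue glues the two together via the new apply flag, while the previousFocusedCellId subscription flushes pending changes when the user moves to another cell. A condensed TypeScript sketch of that flow, with the store shapes simplified for illustration:

type Row = Record<string, any> & { _id: string }

const rowChangeCache = new Map<string, Record<string, any>>()

// record a change locally; report false if it would not alter the row
function stashRowChanges(row: Row, changes: Record<string, any>): boolean {
  const mutates = Object.keys(changes).some(col => row[col] !== changes[col])
  if (!mutates) {
    return false
  }
  rowChangeCache.set(row._id, { ...rowChangeCache.get(row._id), ...changes })
  return true
}

// persist whatever is cached for a row, then clear its cache entry
async function applyRowChanges(row: Row, save: (row: Row) => Promise<Row>) {
  const changes = rowChangeCache.get(row._id)
  if (!changes) {
    return
  }
  await save({ ...row, ...changes })
  rowChangeCache.delete(row._id)
}

// updateValue: always stash, persist immediately only when apply is true
async function updateValue(
  row: Row,
  column: string,
  value: any,
  save: (row: Row) => Promise<Row>,
  apply = true
) {
  if (stashRowChanges(row, { [column]: value }) && apply) {
    await applyRowChanges(row, save)
  }
}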
@ -16,6 +16,7 @@ export const createStores = context => {
|
|||
const hoveredRowId = writable(null)
|
||||
const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight)
|
||||
const previousFocusedRowId = writable(null)
|
||||
const previousFocusedCellId = writable(null)
|
||||
const gridFocused = writable(false)
|
||||
const isDragging = writable(false)
|
||||
const buttonColumnWidth = writable(0)
|
||||
|
@ -48,6 +49,7 @@ export const createStores = context => {
|
|||
focusedCellAPI,
|
||||
focusedRowId,
|
||||
previousFocusedRowId,
|
||||
previousFocusedCellId,
|
||||
hoveredRowId,
|
||||
rowHeight,
|
||||
gridFocused,
|
||||
|
@ -129,6 +131,7 @@ export const initialise = context => {
|
|||
const {
|
||||
focusedRowId,
|
||||
previousFocusedRowId,
|
||||
previousFocusedCellId,
|
||||
rows,
|
||||
focusedCellId,
|
||||
selectedRows,
|
||||
|
@ -181,6 +184,13 @@ export const initialise = context => {
|
|||
lastFocusedRowId = id
|
||||
})
|
||||
|
||||
// Remember the last focused cell ID so that we can store the previous one
|
||||
let lastFocusedCellId = null
|
||||
focusedCellId.subscribe(id => {
|
||||
previousFocusedCellId.set(lastFocusedCellId)
|
||||
lastFocusedCellId = id
|
||||
})
|
||||
|
||||
// Remove hovered row when a cell is selected
|
||||
focusedCellId.subscribe(cell => {
|
||||
if (cell && get(hoveredRowId)) {
|
||||
|
|
|
@ -1,8 +1,23 @@
|
|||
import { writable, get } from "svelte/store"
|
||||
import { writable, get, derived } from "svelte/store"
|
||||
|
||||
// Normally we would break out actions into the explicit "createActions"
|
||||
// function, but for validation all these actions are pure so can go into
|
||||
// "createStores" instead to make dependency ordering simpler
|
||||
export const createStores = () => {
|
||||
const validation = writable({})
|
||||
|
||||
// Derive which rows have errors so that we can use that info later
|
||||
const rowErrorMap = derived(validation, $validation => {
|
||||
let map = {}
|
||||
Object.entries($validation).forEach(([key, error]) => {
|
||||
// Extract row ID from all errored cell IDs
|
||||
if (error) {
|
||||
map[key.split("-")[0]] = true
|
||||
}
|
||||
})
|
||||
return map
|
||||
})
|
||||
|
||||
const setError = (cellId, error) => {
|
||||
if (!cellId) {
|
||||
return
|
||||
|
@ -13,11 +28,16 @@ export const createStores = () => {
|
|||
}))
|
||||
}
|
||||
|
||||
const rowHasErrors = rowId => {
|
||||
return get(rowErrorMap)[rowId]
|
||||
}
|
||||
|
||||
return {
|
||||
validation: {
|
||||
...validation,
|
||||
actions: {
|
||||
setError,
|
||||
rowHasErrors,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
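The validation store above derives a row-level error map from cell-level errors by exploiting the cell ID format, where everything before the first dash is the row ID. A standalone sketch of that derivation; the ID format comes from the diff, the store wiring is reduced to a plain function:

// cell IDs look like `${rowId}-${columnName}`
const validation: Record<string, string | null> = {
  "row1-name": "Required",
  "row1-age": null,
  "row2-name": null,
}

function buildRowErrorMap(state: Record<string, string | null>) {
  const map: Record<string, boolean> = {}
  for (const [cellId, error] of Object.entries(state)) {
    if (error) {
      map[cellId.split("-")[0]] = true
    }
  }
  return map
}

buildRowErrorMap(validation) // { row1: true } -> rowHasErrors("row1") === true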
|
@ -1 +1 @@
|
|||
Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec
|
||||
Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781
|
|
@ -152,7 +152,7 @@
|
|||
"rimraf": "3.0.2",
|
||||
"supertest": "6.3.3",
|
||||
"swagger-jsdoc": "6.1.0",
|
||||
"testcontainers": "10.6.0",
|
||||
"testcontainers": "10.7.2",
|
||||
"timekeeper": "2.2.0",
|
||||
"ts-node": "10.8.1",
|
||||
"tsconfig-paths": "4.0.0",
|
||||
|
|
|
@ -10,6 +10,11 @@ CREATE TABLE Persons (
|
|||
City varchar(255),
|
||||
PRIMARY KEY (PersonID)
|
||||
);
|
||||
CREATE TABLE Person (
|
||||
PersonID int NOT NULL AUTO_INCREMENT,
|
||||
Name varchar(255),
|
||||
PRIMARY KEY (PersonID)
|
||||
);
|
||||
CREATE TABLE Tasks (
|
||||
TaskID int NOT NULL AUTO_INCREMENT,
|
||||
PersonID INT,
|
||||
|
@ -27,6 +32,7 @@ CREATE TABLE Products (
|
|||
);
|
||||
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
|
||||
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
|
||||
INSERT INTO Person (Name) VALUES ('Elf');
|
||||
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
|
||||
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
|
||||
INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');
|
||||
|
|
|
@ -7,6 +7,10 @@ import {
|
|||
GetResourcePermsResponse,
|
||||
ResourcePermissionInfo,
|
||||
GetDependantResourcesResponse,
|
||||
AddPermissionResponse,
|
||||
AddPermissionRequest,
|
||||
RemovePermissionRequest,
|
||||
RemovePermissionResponse,
|
||||
} from "@budibase/types"
|
||||
import { getRoleParams } from "../../db/utils"
|
||||
import {
|
||||
|
@ -16,9 +20,9 @@ import {
|
|||
import { removeFromArray } from "../../utilities"
|
||||
import sdk from "../../sdk"
|
||||
|
||||
const PermissionUpdateType = {
|
||||
REMOVE: "remove",
|
||||
ADD: "add",
|
||||
const enum PermissionUpdateType {
|
||||
REMOVE = "remove",
|
||||
ADD = "add",
|
||||
}
|
||||
|
||||
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
|
||||
|
@ -39,7 +43,7 @@ async function updatePermissionOnRole(
|
|||
resourceId,
|
||||
level,
|
||||
}: { roleId: string; resourceId: string; level: PermissionLevel },
|
||||
updateType: string
|
||||
updateType: PermissionUpdateType
|
||||
) {
|
||||
const allowedAction = await sdk.permissions.resourceActionAllowed({
|
||||
resourceId,
|
||||
|
@ -107,11 +111,15 @@ async function updatePermissionOnRole(
|
|||
}
|
||||
|
||||
const response = await db.bulkDocs(docUpdates)
|
||||
return response.map((resp: any) => {
|
||||
return response.map(resp => {
|
||||
const version = docUpdates.find(role => role._id === resp.id)?.version
|
||||
resp._id = roles.getExternalRoleID(resp.id, version)
|
||||
delete resp.id
|
||||
return resp
|
||||
const _id = roles.getExternalRoleID(resp.id, version)
|
||||
return {
|
||||
_id,
|
||||
rev: resp.rev,
|
||||
error: resp.error,
|
||||
reason: resp.reason,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -189,13 +197,14 @@ export async function getDependantResources(
|
|||
}
|
||||
}
|
||||
|
||||
export async function addPermission(ctx: UserCtx) {
|
||||
ctx.body = await updatePermissionOnRole(ctx.params, PermissionUpdateType.ADD)
|
||||
export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
|
||||
const params: AddPermissionRequest = ctx.params
|
||||
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
|
||||
}
|
||||
|
||||
export async function removePermission(ctx: UserCtx) {
|
||||
ctx.body = await updatePermissionOnRole(
|
||||
ctx.params,
|
||||
PermissionUpdateType.REMOVE
|
||||
)
|
||||
export async function removePermission(
|
||||
ctx: UserCtx<void, RemovePermissionResponse>
|
||||
) {
|
||||
const params: RemovePermissionRequest = ctx.params
|
||||
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
|
||||
}
|
||||
|
|
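Two things change in the permissions controller above: the loose string constants become a const enum, and the bulkDocs response is mapped into an explicitly shaped object instead of being mutated in place. A hedged sketch of both; the docUpdates and getExternalRoleID shapes are assumptions based on the diff:

const enum PermissionUpdateType {
  REMOVE = "remove",
  ADD = "add",
}

interface BulkDocsResult {
  id: string
  rev?: string
  error?: string
  reason?: string
}

function mapPermissionResponse(
  response: BulkDocsResult[],
  docUpdates: { _id: string; version?: string }[],
  getExternalRoleID: (id: string, version?: string) => string
) {
  return response.map(resp => {
    const version = docUpdates.find(role => role._id === resp.id)?.version
    // build a fresh object rather than mutating the CouchDB response
    return {
      _id: getExternalRoleID(resp.id, version),
      rev: resp.rev,
      error: resp.error,
      reason: resp.reason,
    }
  })
}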
|
@ -17,10 +17,12 @@ import {
|
|||
QueryPreview,
|
||||
QuerySchema,
|
||||
FieldType,
|
||||
type ExecuteQueryRequest,
|
||||
type ExecuteQueryResponse,
|
||||
type Row,
|
||||
ExecuteQueryRequest,
|
||||
ExecuteQueryResponse,
|
||||
Row,
|
||||
QueryParameter,
|
||||
PreviewQueryRequest,
|
||||
PreviewQueryResponse,
|
||||
} from "@budibase/types"
|
||||
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
|
||||
|
||||
|
@ -134,14 +136,16 @@ function enrichParameters(
|
|||
return requestParameters
|
||||
}
|
||||
|
||||
export async function preview(ctx: UserCtx) {
|
||||
export async function preview(
|
||||
ctx: UserCtx<PreviewQueryRequest, PreviewQueryResponse>
|
||||
) {
|
||||
const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
|
||||
ctx.request.body.datasourceId
|
||||
)
|
||||
const query: QueryPreview = ctx.request.body
|
||||
// preview may not have a queryId as it hasn't been saved, but if it does
|
||||
// this stops dynamic variables from calling the same query
|
||||
const { fields, parameters, queryVerb, transformer, queryId, schema } = query
|
||||
const { fields, parameters, queryVerb, transformer, queryId, schema } =
|
||||
ctx.request.body
|
||||
|
||||
let existingSchema = schema
|
||||
if (queryId && !existingSchema) {
|
||||
|
@ -266,9 +270,7 @@ export async function preview(ctx: UserCtx) {
|
|||
},
|
||||
}
|
||||
|
||||
const { rows, keys, info, extra } = (await Runner.run(
|
||||
inputs
|
||||
)) as QueryResponse
|
||||
const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
|
||||
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
|
||||
|
||||
// if existing schema, update to include any previous schema keys
|
||||
|
@ -281,7 +283,7 @@ export async function preview(ctx: UserCtx) {
|
|||
}
|
||||
// remove configuration before sending event
|
||||
delete datasource.config
|
||||
await events.query.previewed(datasource, query)
|
||||
await events.query.previewed(datasource, ctx.request.body)
|
||||
ctx.body = {
|
||||
rows,
|
||||
nestedSchemaFields,
|
||||
|
@ -295,7 +297,10 @@ export async function preview(ctx: UserCtx) {
|
|||
}
|
||||
|
||||
async function execute(
|
||||
ctx: UserCtx<ExecuteQueryRequest, ExecuteQueryResponse | Row[]>,
|
||||
ctx: UserCtx<
|
||||
ExecuteQueryRequest,
|
||||
ExecuteQueryResponse | Record<string, any>[]
|
||||
>,
|
||||
opts: any = { rowsOnly: false, isAutomation: false }
|
||||
) {
|
||||
const db = context.getAppDB()
|
||||
|
@ -350,18 +355,23 @@ async function execute(
|
|||
}
|
||||
}
|
||||
|
||||
export async function executeV1(ctx: UserCtx) {
|
||||
export async function executeV1(
|
||||
ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
|
||||
) {
|
||||
return execute(ctx, { rowsOnly: true, isAutomation: false })
|
||||
}
|
||||
|
||||
export async function executeV2(
|
||||
ctx: UserCtx,
|
||||
ctx: UserCtx<
|
||||
ExecuteQueryRequest,
|
||||
ExecuteQueryResponse | Record<string, any>[]
|
||||
>,
|
||||
{ isAutomation }: { isAutomation?: boolean } = {}
|
||||
) {
|
||||
return execute(ctx, { rowsOnly: false, isAutomation })
|
||||
}
|
||||
|
||||
const removeDynamicVariables = async (queryId: any) => {
|
||||
const removeDynamicVariables = async (queryId: string) => {
|
||||
const db = context.getAppDB()
|
||||
const query = await db.get<Query>(queryId)
|
||||
const datasource = await sdk.datasources.get(query.datasourceId)
|
||||
|
@ -384,7 +394,7 @@ const removeDynamicVariables = async (queryId: any) => {
|
|||
|
||||
export async function destroy(ctx: UserCtx) {
|
||||
const db = context.getAppDB()
|
||||
const queryId = ctx.params.queryId
|
||||
const queryId = ctx.params.queryId as string
|
||||
await removeDynamicVariables(queryId)
|
||||
const query = await db.get<Query>(queryId)
|
||||
const datasource = await sdk.datasources.get(query.datasourceId)
|
||||
|
|
|
@ -7,8 +7,14 @@ import {
|
|||
} from "@budibase/backend-core"
|
||||
import { getUserMetadataParams, InternalTables } from "../../db/utils"
|
||||
import {
|
||||
AccessibleRolesResponse,
|
||||
Database,
|
||||
DestroyRoleResponse,
|
||||
FetchRolesResponse,
|
||||
FindRoleResponse,
|
||||
Role,
|
||||
SaveRoleRequest,
|
||||
SaveRoleResponse,
|
||||
UserCtx,
|
||||
UserMetadata,
|
||||
UserRoles,
|
||||
|
@ -25,43 +31,36 @@ async function updateRolesOnUserTable(
|
|||
db: Database,
|
||||
roleId: string,
|
||||
updateOption: string,
|
||||
roleVersion: string | undefined
|
||||
roleVersion?: string
|
||||
) {
|
||||
const table = await sdk.tables.getTable(InternalTables.USER_METADATA)
|
||||
const schema = table.schema
|
||||
const constraints = table.schema.roleId?.constraints
|
||||
if (!constraints) {
|
||||
return
|
||||
}
|
||||
const updatedRoleId =
|
||||
roleVersion === roles.RoleIDVersion.NAME
|
||||
? roles.getExternalRoleID(roleId, roleVersion)
|
||||
: roleId
|
||||
const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
|
||||
const remove = updateOption === UpdateRolesOptions.REMOVED
|
||||
let updated = false
|
||||
for (let prop of Object.keys(schema)) {
|
||||
if (prop === "roleId") {
|
||||
updated = true
|
||||
const constraints = schema[prop].constraints!
|
||||
const updatedRoleId =
|
||||
roleVersion === roles.RoleIDVersion.NAME
|
||||
? roles.getExternalRoleID(roleId, roleVersion)
|
||||
: roleId
|
||||
const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
|
||||
if (remove && indexOfRoleId !== -1) {
|
||||
constraints.inclusion!.splice(indexOfRoleId, 1)
|
||||
} else if (!remove && indexOfRoleId === -1) {
|
||||
constraints.inclusion!.push(updatedRoleId)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if (updated) {
|
||||
await db.put(table)
|
||||
if (remove && indexOfRoleId !== -1) {
|
||||
constraints.inclusion!.splice(indexOfRoleId, 1)
|
||||
} else if (!remove && indexOfRoleId === -1) {
|
||||
constraints.inclusion!.push(updatedRoleId)
|
||||
}
|
||||
await db.put(table)
|
||||
}
|
||||
|
||||
export async function fetch(ctx: UserCtx) {
|
||||
export async function fetch(ctx: UserCtx<void, FetchRolesResponse>) {
|
||||
ctx.body = await roles.getAllRoles()
|
||||
}
|
||||
|
||||
export async function find(ctx: UserCtx) {
|
||||
export async function find(ctx: UserCtx<void, FindRoleResponse>) {
|
||||
ctx.body = await roles.getRole(ctx.params.roleId)
|
||||
}
|
||||
|
||||
export async function save(ctx: UserCtx) {
|
||||
export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
|
||||
const db = context.getAppDB()
|
||||
let { _id, name, inherits, permissionId, version } = ctx.request.body
|
||||
let isCreate = false
|
||||
|
@ -107,11 +106,26 @@ export async function save(ctx: UserCtx) {
|
|||
)
|
||||
role._rev = result.rev
|
||||
ctx.body = role
|
||||
|
||||
const devDb = context.getDevAppDB()
|
||||
const prodDb = context.getProdAppDB()
|
||||
|
||||
if (await prodDb.exists()) {
|
||||
const replication = new dbCore.Replication({
|
||||
source: devDb.name,
|
||||
target: prodDb.name,
|
||||
})
|
||||
await replication.replicate({
|
||||
filter: (doc: any, params: any) => {
|
||||
return doc._id && doc._id.startsWith("role_")
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function destroy(ctx: UserCtx) {
|
||||
export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
|
||||
const db = context.getAppDB()
|
||||
let roleId = ctx.params.roleId
|
||||
let roleId = ctx.params.roleId as string
|
||||
if (roles.isBuiltin(roleId)) {
|
||||
ctx.throw(400, "Cannot delete builtin role.")
|
||||
} else {
|
||||
|
@ -144,14 +158,18 @@ export async function destroy(ctx: UserCtx) {
|
|||
ctx.status = 200
|
||||
}
|
||||
|
||||
export async function accessible(ctx: UserCtx) {
|
||||
export async function accessible(ctx: UserCtx<void, AccessibleRolesResponse>) {
|
||||
let roleId = ctx.user?.roleId
|
||||
if (!roleId) {
|
||||
roleId = roles.BUILTIN_ROLE_IDS.PUBLIC
|
||||
}
|
||||
if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) {
|
||||
const appId = context.getAppId()
|
||||
ctx.body = await roles.getAllRoleIds(appId)
|
||||
if (!appId) {
|
||||
ctx.body = []
|
||||
} else {
|
||||
ctx.body = await roles.getAllRoleIds(appId)
|
||||
}
|
||||
} else {
|
||||
ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
|
||||
}
|
||||
|
|
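The role save handler above now replicates role documents from the development app database into the published app database whenever a production copy exists, using a filtered replication so only role_ documents are copied. A minimal sketch of that step; the import alias and function wrapper are assumptions, the Replication options mirror the diff:

import { db as dbCore } from "@budibase/backend-core" // import alias assumed

async function syncRolesToProd(devDbName: string, prodDbName: string) {
  const replication = new dbCore.Replication({
    source: devDbName,
    target: prodDbName,
  })
  try {
    // only copy documents whose _id starts with the role prefix
    await replication.replicate({
      filter: (doc: { _id?: string }) => !!doc._id?.startsWith("role_"),
    })
  } finally {
    await replication.close()
  }
}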
|
@ -63,7 +63,7 @@ export async function fetch(ctx: UserCtx) {
|
|||
export async function clientFetch(ctx: UserCtx) {
|
||||
const routing = await getRoutingStructure()
|
||||
let roleId = ctx.user?.role?._id
|
||||
const roleIds = await roles.getUserRoleIdHierarchy(roleId)
|
||||
const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []
|
||||
for (let topLevel of Object.values(routing.routes) as any) {
|
||||
for (let subpathKey of Object.keys(topLevel.subpaths)) {
|
||||
let found = false
|
||||
|
|
|
@ -1,12 +1,27 @@
|
|||
import {
|
||||
QueryJson,
|
||||
SearchFilters,
|
||||
Table,
|
||||
Row,
|
||||
Datasource,
|
||||
DatasourcePlusQueryResponse,
|
||||
Operation,
|
||||
QueryJson,
|
||||
Row,
|
||||
SearchFilters,
|
||||
} from "@budibase/types"
|
||||
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
|
||||
import { getSQLClient } from "../../../sdk/app/rows/utils"
|
||||
import { cloneDeep } from "lodash"
|
||||
import sdk from "../../../sdk"
|
||||
import { makeExternalQuery } from "../../../integrations/base/query"
|
||||
import { SqlClient } from "../../../integrations/utils"
|
||||
|
||||
const WRITE_OPERATIONS: Operation[] = [
|
||||
Operation.CREATE,
|
||||
Operation.UPDATE,
|
||||
Operation.DELETE,
|
||||
]
|
||||
const DISABLED_WRITE_CLIENTS: SqlClient[] = [
|
||||
SqlClient.MY_SQL,
|
||||
SqlClient.MS_SQL,
|
||||
SqlClient.ORACLE,
|
||||
]
|
||||
|
||||
class CharSequence {
|
||||
static alphabet = "abcdefghijklmnopqrstuvwxyz"
|
||||
|
@ -43,6 +58,25 @@ export default class AliasTables {
|
|||
this.charSeq = new CharSequence()
|
||||
}
|
||||
|
||||
isAliasingEnabled(json: QueryJson, datasource: Datasource) {
|
||||
const fieldLength = json.resource?.fields?.length
|
||||
if (!fieldLength || fieldLength <= 0) {
|
||||
return false
|
||||
}
|
||||
try {
|
||||
const sqlClient = getSQLClient(datasource)
|
||||
const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation)
|
||||
const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
|
||||
if (isWrite && isDisabledClient) {
|
||||
return false
|
||||
}
|
||||
} catch (err) {
|
||||
// if we can't get an SQL client, we can't alias
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
getAlias(tableName: string) {
|
||||
if (this.aliases[tableName]) {
|
||||
return this.aliases[tableName]
|
||||
|
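The isAliasingEnabled check above turns aliasing off when the query selects no fields, when the SQL client cannot be resolved, or when the operation is a write against a client where aliased writes are disabled (MySQL, MS SQL and Oracle in this diff). A condensed sketch of the same decision; the literal client strings are placeholders rather than the real SqlClient enum values:

type SqlClientName = string
type OperationName = "CREATE" | "READ" | "UPDATE" | "DELETE"

const WRITE_OPERATIONS: OperationName[] = ["CREATE", "UPDATE", "DELETE"]
const DISABLED_WRITE_CLIENTS: SqlClientName[] = ["mysql", "mssql", "oracle"] // placeholder names

function aliasingEnabled(
  fieldCount: number,
  operation: OperationName,
  getClient: () => SqlClientName // may throw when the datasource has no SQL client
): boolean {
  if (fieldCount <= 0) {
    return false
  }
  try {
    const client = getClient()
    if (WRITE_OPERATIONS.includes(operation) && DISABLED_WRITE_CLIENTS.includes(client)) {
      return false
    }
  } catch (err) {
    return false // no SQL client means no aliasing
  }
  return true
}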
@ -62,7 +96,11 @@ export default class AliasTables {
|
|||
if (idx === -1 || idx > 1) {
|
||||
return
|
||||
}
|
||||
return Math.abs(tableName.length - name.length) <= 2
|
||||
// this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
|
||||
// then the idx of the table name will be 1, and we should allow for it ending in a closing
|
||||
// character - otherwise it should be the full length if the index is zero
|
||||
const allowedCharacterDiff = idx * 2
|
||||
return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
|
||||
})
|
||||
if (foundTableName) {
|
||||
const aliasedTableName = tableName.replace(
|
||||
|
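To make the allowance above concrete: for an unwrapped field such as persons.name the table name must start at index 0 and match exactly, while for a wrapped field such as "persons".name or [persons].name the table name starts at index 1 and may be followed by a closing quote or bracket, so a length difference of up to idx * 2 = 2 characters is tolerated. A tiny illustrative check with made-up names:

const field = `"persons".name`
const tableName = "persons"
const [tablePart] = field.split(".")     // `"persons"`
const idx = tablePart.indexOf(tableName) // 1 -> wrapped in quotes
const allowedCharacterDiff = idx * 2     // 2
const matches =
  Math.abs(tableName.length - tablePart.length) <= allowedCharacterDiff // true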
@ -109,57 +147,57 @@ export default class AliasTables {
|
|||
async queryWithAliasing(
|
||||
json: QueryJson
|
||||
): Promise<DatasourcePlusQueryResponse> {
|
||||
json = cloneDeep(json)
|
||||
const aliasTable = (table: Table) => ({
|
||||
...table,
|
||||
name: this.getAlias(table.name),
|
||||
})
|
||||
// run through the query json to update anywhere a table may be used
|
||||
if (json.resource?.fields) {
|
||||
json.resource.fields = json.resource.fields.map(field =>
|
||||
this.aliasField(field)
|
||||
)
|
||||
}
|
||||
if (json.filters) {
|
||||
for (let [filterKey, filter] of Object.entries(json.filters)) {
|
||||
if (typeof filter !== "object") {
|
||||
continue
|
||||
}
|
||||
const aliasedFilters: typeof filter = {}
|
||||
for (let key of Object.keys(filter)) {
|
||||
aliasedFilters[this.aliasField(key)] = filter[key]
|
||||
}
|
||||
json.filters[filterKey as keyof SearchFilters] = aliasedFilters
|
||||
const datasourceId = json.endpoint.datasourceId
|
||||
const datasource = await sdk.datasources.get(datasourceId)
|
||||
|
||||
const aliasingEnabled = this.isAliasingEnabled(json, datasource)
|
||||
if (aliasingEnabled) {
|
||||
json = cloneDeep(json)
|
||||
// run through the query json to update anywhere a table may be used
|
||||
if (json.resource?.fields) {
|
||||
json.resource.fields = json.resource.fields.map(field =>
|
||||
this.aliasField(field)
|
||||
)
|
||||
}
|
||||
}
|
||||
if (json.relationships) {
|
||||
json.relationships = json.relationships.map(relationship => ({
|
||||
...relationship,
|
||||
aliases: this.aliasMap([
|
||||
relationship.through,
|
||||
relationship.tableName,
|
||||
json.endpoint.entityId,
|
||||
]),
|
||||
}))
|
||||
}
|
||||
if (json.meta?.table) {
|
||||
json.meta.table = aliasTable(json.meta.table)
|
||||
}
|
||||
if (json.meta?.tables) {
|
||||
const aliasedTables: Record<string, Table> = {}
|
||||
for (let [tableName, table] of Object.entries(json.meta.tables)) {
|
||||
aliasedTables[this.getAlias(tableName)] = aliasTable(table)
|
||||
if (json.filters) {
|
||||
for (let [filterKey, filter] of Object.entries(json.filters)) {
|
||||
if (typeof filter !== "object") {
|
||||
continue
|
||||
}
|
||||
const aliasedFilters: typeof filter = {}
|
||||
for (let key of Object.keys(filter)) {
|
||||
aliasedFilters[this.aliasField(key)] = filter[key]
|
||||
}
|
||||
json.filters[filterKey as keyof SearchFilters] = aliasedFilters
|
||||
}
|
||||
}
|
||||
json.meta.tables = aliasedTables
|
||||
if (json.meta?.table) {
|
||||
this.getAlias(json.meta.table.name)
|
||||
}
|
||||
if (json.meta?.tables) {
|
||||
Object.keys(json.meta.tables).forEach(tableName =>
|
||||
this.getAlias(tableName)
|
||||
)
|
||||
}
|
||||
if (json.relationships) {
|
||||
json.relationships = json.relationships.map(relationship => ({
|
||||
...relationship,
|
||||
aliases: this.aliasMap([
|
||||
relationship.through,
|
||||
relationship.tableName,
|
||||
json.endpoint.entityId,
|
||||
]),
|
||||
}))
|
||||
}
|
||||
// invert and return
|
||||
const invertedTableAliases: Record<string, string> = {}
|
||||
for (let [key, value] of Object.entries(this.tableAliases)) {
|
||||
invertedTableAliases[value] = key
|
||||
}
|
||||
json.tableAliases = invertedTableAliases
|
||||
}
|
||||
// invert and return
|
||||
const invertedTableAliases: Record<string, string> = {}
|
||||
for (let [key, value] of Object.entries(this.tableAliases)) {
|
||||
invertedTableAliases[value] = key
|
||||
}
|
||||
json.tableAliases = invertedTableAliases
|
||||
const response = await getDatasourceAndQuery(json)
|
||||
if (Array.isArray(response)) {
|
||||
const response = await makeExternalQuery(datasource, json)
|
||||
if (Array.isArray(response) && aliasingEnabled) {
|
||||
return this.reverse(response)
|
||||
} else {
|
||||
return response
|
||||
|
|
|
@ -211,7 +211,7 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
|
|||
}
|
||||
}
|
||||
|
||||
export async function fetchEnrichedRow(ctx: any) {
|
||||
export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
|
||||
const tableId = utils.getTableId(ctx)
|
||||
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
|
||||
}
|
||||
|
|
|
@ -174,6 +174,7 @@ export const serveApp = async function (ctx: Ctx) {
|
|||
if (!env.isJest()) {
|
||||
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
|
||||
const { head, html, css } = AppComponent.render({
|
||||
title: branding?.platformTitle || `${appInfo.name}`,
|
||||
metaImage:
|
||||
branding?.metaImageUrl ||
|
||||
"https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png",
|
||||
|
|
|
@ -6,6 +6,7 @@ import {
|
|||
BulkImportRequest,
|
||||
BulkImportResponse,
|
||||
Operation,
|
||||
RenameColumn,
|
||||
SaveTableRequest,
|
||||
SaveTableResponse,
|
||||
Table,
|
||||
|
@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
|
|||
return breakExternalTableId(table._id).datasourceId
|
||||
}
|
||||
|
||||
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
||||
export async function save(
|
||||
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
|
||||
renaming?: RenameColumn
|
||||
) {
|
||||
const inputs = ctx.request.body
|
||||
const renaming = inputs?._rename
|
||||
const adding = inputs?._add
|
||||
// can't do this right now
|
||||
delete inputs.rows
|
||||
const tableId = ctx.request.body._id
|
||||
|
@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
|||
const { datasource, table } = await sdk.tables.external.save(
|
||||
datasourceId!,
|
||||
inputs,
|
||||
{ tableId, renaming }
|
||||
{ tableId, renaming, adding }
|
||||
)
|
||||
builderSocket?.emitDatasourceUpdate(ctx, datasource)
|
||||
return table
|
||||
|
|
|
@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
|||
const appId = ctx.appId
|
||||
const table = ctx.request.body
|
||||
const isImport = table.rows
|
||||
const renaming = ctx.request.body._rename
|
||||
|
||||
let savedTable = await pickApi({ table }).save(ctx)
|
||||
const api = pickApi({ table })
|
||||
// do not pass _rename or _add if saving to CouchDB
|
||||
if (api === internal) {
|
||||
delete ctx.request.body._add
|
||||
delete ctx.request.body._rename
|
||||
}
|
||||
let savedTable = await api.save(ctx, renaming)
|
||||
if (!table._id) {
|
||||
await events.table.created(savedTable)
|
||||
savedTable = sdk.tables.enrichViewSchemas(savedTable)
|
||||
|
|
|
@ -12,11 +12,12 @@ import {
|
|||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
||||
export async function save(
|
||||
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
|
||||
renaming?: RenameColumn
|
||||
) {
|
||||
const { rows, ...rest } = ctx.request.body
|
||||
let tableToSave: Table & {
|
||||
_rename?: RenameColumn
|
||||
} = {
|
||||
let tableToSave: Table = {
|
||||
_id: generateTableID(),
|
||||
...rest,
|
||||
// Ensure these fields are populated, even if not sent in the request
|
||||
|
@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
|||
tableToSave.views = {}
|
||||
}
|
||||
|
||||
const renaming = tableToSave._rename
|
||||
delete tableToSave._rename
|
||||
|
||||
try {
|
||||
const { table } = await sdk.tables.internal.save(tableToSave, {
|
||||
user: ctx.user,
|
||||
rowsToImport: rows,
|
||||
tableId: ctx.request.body._id,
|
||||
renaming: renaming,
|
||||
renaming,
|
||||
})
|
||||
|
||||
return table
|
||||
|
|
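Across the three table-save hunks above, responsibility for the _rename and _add markers moves up into the shared controller: it reads renaming from the request body, strips both markers before delegating to the internal (CouchDB) implementation, and passes renaming as an explicit argument, with adding handled separately for external tables. A compressed sketch of that dispatch; the RenameColumn field names and the api shape are assumptions:

interface RenameColumn { old: string; updated: string } // field names assumed
interface SaveTableBody {
  _id?: string
  _rename?: RenameColumn
  _add?: { name: string }
  [key: string]: unknown
}
type TableApi = { save: (body: SaveTableBody, renaming?: RenameColumn) => Promise<unknown> }

async function saveTable(
  body: SaveTableBody,
  internal: TableApi,
  external: TableApi,
  isExternal: boolean
) {
  const renaming = body._rename
  const api = isExternal ? external : internal
  if (api === internal) {
    // CouchDB tables no longer receive the markers on the request body
    delete body._add
    delete body._rename
  }
  return api.save(body, renaming)
}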
|
@ -1,6 +1,6 @@
|
|||
import { generateUserFlagID, InternalTables } from "../../db/utils"
|
||||
import { getFullUser } from "../../utilities/users"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { cache, context } from "@budibase/backend-core"
|
||||
import {
|
||||
ContextUserMetadata,
|
||||
Ctx,
|
||||
|
|
|
@ -13,7 +13,7 @@ describe("/api/keys", () => {
|
|||
|
||||
describe("fetch", () => {
|
||||
it("should allow fetching", async () => {
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
const res = await request
|
||||
.get(`/api/keys`)
|
||||
.set(config.defaultHeaders())
|
||||
|
@ -34,7 +34,7 @@ describe("/api/keys", () => {
|
|||
|
||||
describe("update", () => {
|
||||
it("should allow updating a value", async () => {
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
const res = await request
|
||||
.put(`/api/keys/TEST`)
|
||||
.send({
|
||||
|
|
|
@ -16,8 +16,9 @@ import * as setup from "./utilities"
|
|||
import { AppStatus } from "../../../db/utils"
|
||||
import { events, utils, context } from "@budibase/backend-core"
|
||||
import env from "../../../environment"
|
||||
import type { App } from "@budibase/types"
|
||||
import { type App } from "@budibase/types"
|
||||
import tk from "timekeeper"
|
||||
import * as uuid from "uuid"
|
||||
|
||||
describe("/applications", () => {
|
||||
let config = setup.getConfig()
|
||||
|
@ -184,7 +185,7 @@ describe("/applications", () => {
|
|||
it("app should not sync if production", async () => {
|
||||
const { message } = await config.api.application.sync(
|
||||
app.appId.replace("_dev", ""),
|
||||
{ statusCode: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
|
||||
expect(message).toEqual(
|
||||
|
@ -248,4 +249,93 @@ describe("/applications", () => {
|
|||
expect(devLogs.data.length).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("permissions", () => {
|
||||
it("should only return apps a user has access to", async () => {
|
||||
let user = await config.createUser({
|
||||
builder: { global: false },
|
||||
admin: { global: false },
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(0)
|
||||
})
|
||||
|
||||
user = await config.globalUser({
|
||||
...user,
|
||||
builder: {
|
||||
apps: [config.getProdAppId()],
|
||||
},
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
it("should only return apps a user has access to through a custom role", async () => {
|
||||
let user = await config.createUser({
|
||||
builder: { global: false },
|
||||
admin: { global: false },
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(0)
|
||||
})
|
||||
|
||||
const role = await config.api.roles.save({
|
||||
name: "Test",
|
||||
inherits: "PUBLIC",
|
||||
permissionId: "read_only",
|
||||
version: "name",
|
||||
})
|
||||
|
||||
user = await config.globalUser({
|
||||
...user,
|
||||
roles: {
|
||||
[config.getProdAppId()]: role.name,
|
||||
},
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
it.only("should only return apps a user has access to through a custom role on a group", async () => {
|
||||
let user = await config.createUser({
|
||||
builder: { global: false },
|
||||
admin: { global: false },
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(0)
|
||||
})
|
||||
|
||||
const roleName = uuid.v4().replace(/-/g, "")
|
||||
const role = await config.api.roles.save({
|
||||
name: roleName,
|
||||
inherits: "PUBLIC",
|
||||
permissionId: "read_only",
|
||||
version: "name",
|
||||
})
|
||||
|
||||
const group = await config.createGroup(role._id!)
|
||||
|
||||
user = await config.globalUser({
|
||||
...user,
|
||||
userGroups: [group._id!],
|
||||
})
|
||||
|
||||
await config.withUser(user, async () => {
|
||||
const apps = await config.api.application.fetch()
|
||||
expect(apps).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -29,7 +29,7 @@ describe("/api/applications/:appId/sync", () => {
|
|||
let resp = (await config.api.attachment.process(
|
||||
"ohno.exe",
|
||||
Buffer.from([0]),
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)) as unknown as APIError
|
||||
expect(resp.message).toContain("invalid extension")
|
||||
})
|
||||
|
@ -40,7 +40,7 @@ describe("/api/applications/:appId/sync", () => {
|
|||
let resp = (await config.api.attachment.process(
|
||||
"OHNO.EXE",
|
||||
Buffer.from([0]),
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)) as unknown as APIError
|
||||
expect(resp.message).toContain("invalid extension")
|
||||
})
|
||||
|
@ -51,7 +51,7 @@ describe("/api/applications/:appId/sync", () => {
|
|||
undefined as any,
|
||||
undefined as any,
|
||||
{
|
||||
expectStatus: 400,
|
||||
status: 400,
|
||||
}
|
||||
)) as unknown as APIError
|
||||
expect(resp.message).toContain("No file provided")
|
||||
|
|
|
@ -19,11 +19,8 @@ describe("/backups", () => {
|
|||
|
||||
describe("/api/backups/export", () => {
|
||||
it("should be able to export app", async () => {
|
||||
const { body, headers } = await config.api.backup.exportBasicBackup(
|
||||
config.getAppId()!
|
||||
)
|
||||
const body = await config.api.backup.exportBasicBackup(config.getAppId()!)
|
||||
expect(body instanceof Buffer).toBe(true)
|
||||
expect(headers["content-type"]).toEqual("application/gzip")
|
||||
expect(events.app.exported).toBeCalledTimes(1)
|
||||
})
|
||||
|
||||
|
@ -38,15 +35,13 @@ describe("/backups", () => {
|
|||
it("should infer the app name from the app", async () => {
|
||||
tk.freeze(mocks.date.MOCK_DATE)
|
||||
|
||||
const { headers } = await config.api.backup.exportBasicBackup(
|
||||
config.getAppId()!
|
||||
)
|
||||
|
||||
expect(headers["content-disposition"]).toEqual(
|
||||
`attachment; filename="${
|
||||
config.getApp().name
|
||||
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
|
||||
)
|
||||
await config.api.backup.exportBasicBackup(config.getAppId()!, {
|
||||
headers: {
|
||||
"content-disposition": `attachment; filename="${
|
||||
config.getApp().name
|
||||
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -45,7 +45,7 @@ describe("/permission", () => {
|
|||
table = (await config.createTable()) as typeof table
|
||||
row = await config.createRow()
|
||||
view = await config.api.viewV2.create({ tableId: table._id })
|
||||
perms = await config.api.permission.set({
|
||||
perms = await config.api.permission.add({
|
||||
roleId: STD_ROLE_ID,
|
||||
resourceId: table._id,
|
||||
level: PermissionLevel.READ,
|
||||
|
@ -88,13 +88,13 @@ describe("/permission", () => {
|
|||
})
|
||||
|
||||
it("should get resource permissions with multiple roles", async () => {
|
||||
perms = await config.api.permission.set({
|
||||
perms = await config.api.permission.add({
|
||||
roleId: HIGHER_ROLE_ID,
|
||||
resourceId: table._id,
|
||||
level: PermissionLevel.WRITE,
|
||||
})
|
||||
const res = await config.api.permission.get(table._id)
|
||||
expect(res.body).toEqual({
|
||||
expect(res).toEqual({
|
||||
permissions: {
|
||||
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
|
||||
write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },
|
||||
|
@ -117,16 +117,19 @@ describe("/permission", () => {
|
|||
level: PermissionLevel.READ,
|
||||
})
|
||||
|
||||
const response = await config.api.permission.set(
|
||||
await config.api.permission.add(
|
||||
{
|
||||
roleId: STD_ROLE_ID,
|
||||
resourceId: table._id,
|
||||
level: PermissionLevel.EXECUTE,
|
||||
},
|
||||
{ expectStatus: 403 }
|
||||
)
|
||||
expect(response.message).toEqual(
|
||||
"You are not allowed to 'read' the resource type 'datasource'"
|
||||
{
|
||||
status: 403,
|
||||
body: {
|
||||
message:
|
||||
"You are not allowed to 'read' the resource type 'datasource'",
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
@ -138,9 +141,9 @@ describe("/permission", () => {
|
|||
resourceId: table._id,
|
||||
level: PermissionLevel.READ,
|
||||
})
|
||||
expect(res.body[0]._id).toEqual(STD_ROLE_ID)
|
||||
expect(res[0]._id).toEqual(STD_ROLE_ID)
|
||||
const permsRes = await config.api.permission.get(table._id)
|
||||
expect(permsRes.body[STD_ROLE_ID]).toBeUndefined()
|
||||
expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
|
||||
})
|
||||
|
||||
it("throw forbidden if the action is not allowed for the resource", async () => {
|
||||
|
@ -156,10 +159,13 @@ describe("/permission", () => {
|
|||
resourceId: table._id,
|
||||
level: PermissionLevel.EXECUTE,
|
||||
},
|
||||
{ expectStatus: 403 }
|
||||
)
|
||||
expect(response.body.message).toEqual(
|
||||
"You are not allowed to 'read' the resource type 'datasource'"
|
||||
{
|
||||
status: 403,
|
||||
body: {
|
||||
message:
|
||||
"You are not allowed to 'read' the resource type 'datasource'",
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
@ -181,10 +187,8 @@ describe("/permission", () => {
|
|||
// replicate changes before checking permissions
|
||||
await config.publish()
|
||||
|
||||
const res = await config.api.viewV2.search(view.id, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
expect(res.body.rows[0]._id).toEqual(row._id)
|
||||
const res = await config.api.viewV2.publicSearch(view.id)
|
||||
expect(res.rows[0]._id).toEqual(row._id)
|
||||
})
|
||||
|
||||
it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {
|
||||
|
@ -196,14 +200,11 @@ describe("/permission", () => {
|
|||
// replicate changes before checking permissions
|
||||
await config.publish()
|
||||
|
||||
await config.api.viewV2.search(view.id, undefined, {
|
||||
expectStatus: 403,
|
||||
usePublicUser: true,
|
||||
})
|
||||
await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 })
|
||||
})
|
||||
|
||||
it("should ignore the view permissions if the flag is not on", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: STD_ROLE_ID,
|
||||
resourceId: view.id,
|
||||
level: PermissionLevel.READ,
|
||||
|
@ -216,15 +217,14 @@ describe("/permission", () => {
|
|||
// replicate changes before checking permissions
|
||||
await config.publish()
|
||||
|
||||
await config.api.viewV2.search(view.id, undefined, {
|
||||
expectStatus: 403,
|
||||
usePublicUser: true,
|
||||
await config.api.viewV2.publicSearch(view.id, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
|
||||
it("should use the view permissions if the flag is on", async () => {
|
||||
mocks.licenses.useViewPermissions()
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: STD_ROLE_ID,
|
||||
resourceId: view.id,
|
||||
level: PermissionLevel.READ,
|
||||
|
@ -237,10 +237,8 @@ describe("/permission", () => {
|
|||
// replicate changes before checking permissions
|
||||
await config.publish()
|
||||
|
||||
const res = await config.api.viewV2.search(view.id, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
expect(res.body.rows[0]._id).toEqual(row._id)
|
||||
const res = await config.api.viewV2.publicSearch(view.id)
|
||||
expect(res.rows[0]._id).toEqual(row._id)
|
||||
})
|
||||
|
||||
it("shouldn't allow writing from a public user", async () => {
|
||||
|
@ -277,7 +275,7 @@ describe("/permission", () => {
|
|||
|
||||
const res = await config.api.permission.get(legacyView.name)
|
||||
|
||||
expect(res.body).toEqual({
|
||||
expect(res).toEqual({
|
||||
permissions: {
|
||||
read: {
|
||||
permissionType: "BASE",
|
||||
|
|
|
@ -157,7 +157,7 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
it("should find a query in cloud", async () => {
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
|
@ -397,15 +397,16 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
it("should fail with invalid integration type", async () => {
|
||||
const response = await config.api.datasource.create(
|
||||
{
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION" as SourceName,
|
||||
const datasource: Datasource = {
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION" as SourceName,
|
||||
}
|
||||
await config.api.datasource.create(datasource, {
|
||||
status: 500,
|
||||
body: {
|
||||
message: "No datasource implementation found.",
|
||||
},
|
||||
{ expectStatus: 500, rawResponse: true }
|
||||
)
|
||||
|
||||
expect(response.body.message).toBe("No datasource implementation found.")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -93,7 +93,7 @@ describe("/roles", () => {
|
|||
|
||||
it("should be able to get the role with a permission added", async () => {
|
||||
const table = await config.createTable()
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: BUILTIN_ROLE_IDS.POWER,
|
||||
resourceId: table._id,
|
||||
level: PermissionLevel.READ,
|
||||
|
|
|
@ -7,6 +7,7 @@ import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
|
|||
import { quotas } from "@budibase/pro"
|
||||
import {
|
||||
AutoFieldSubType,
|
||||
DeleteRow,
|
||||
FieldSchema,
|
||||
FieldType,
|
||||
FieldTypeSubtypes,
|
||||
|
@ -106,9 +107,6 @@ describe.each([
|
|||
mocks.licenses.useCloudFree()
|
||||
})
|
||||
|
||||
const loadRow = (id: string, tbl_Id: string, status = 200) =>
|
||||
config.api.row.get(tbl_Id, id, { expectStatus: status })
|
||||
|
||||
const getRowUsage = async () => {
|
||||
const { total } = await config.doInContext(undefined, () =>
|
||||
quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
|
||||
|
@ -235,7 +233,7 @@ describe.each([
|
|||
|
||||
const res = await config.api.row.get(tableId, existing._id!)
|
||||
|
||||
expect(res.body).toEqual({
|
||||
expect(res).toEqual({
|
||||
...existing,
|
||||
...defaultRowFields,
|
||||
})
|
||||
|
@ -265,7 +263,7 @@ describe.each([
|
|||
await config.createRow()
|
||||
|
||||
await config.api.row.get(tableId, "1234567", {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -395,7 +393,7 @@ describe.each([
|
|||
const createdRow = await config.createRow(row)
|
||||
const id = createdRow._id!
|
||||
|
||||
const saved = (await loadRow(id, table._id!)).body
|
||||
const saved = await config.api.row.get(table._id!, id)
|
||||
|
||||
expect(saved.stringUndefined).toBe(undefined)
|
||||
expect(saved.stringNull).toBe(null)
|
||||
|
@ -476,8 +474,8 @@ describe.each([
|
|||
)
|
||||
|
||||
const row = await config.api.row.get(table._id!, createRowResponse._id!)
|
||||
expect(row.body.Story).toBeUndefined()
|
||||
expect(row.body).toEqual({
|
||||
expect(row.Story).toBeUndefined()
|
||||
expect(row).toEqual({
|
||||
...defaultRowFields,
|
||||
OrderID: 1111,
|
||||
Country: "Aussy",
|
||||
|
@ -524,10 +522,10 @@ describe.each([
|
|||
expect(row.name).toEqual("Updated Name")
|
||||
expect(row.description).toEqual(existing.description)
|
||||
|
||||
const savedRow = await loadRow(row._id!, table._id!)
|
||||
const savedRow = await config.api.row.get(table._id!, row._id!)
|
||||
|
||||
expect(savedRow.body.description).toEqual(existing.description)
|
||||
expect(savedRow.body.name).toEqual("Updated Name")
|
||||
expect(savedRow.description).toEqual(existing.description)
|
||||
expect(savedRow.name).toEqual("Updated Name")
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
||||
|
@ -543,7 +541,7 @@ describe.each([
|
|||
tableId: table._id!,
|
||||
name: 1,
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
|
@ -582,8 +580,8 @@ describe.each([
|
|||
})
|
||||
|
||||
let getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let patchResp = await config.api.row.patch(table._id!, {
|
||||
_id: row._id!,
|
||||
|
@ -595,8 +593,8 @@ describe.each([
|
|||
expect(patchResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
})
|
||||
|
||||
it("should be able to update relationships when both columns are same name", async () => {
|
||||
|
@ -609,7 +607,7 @@ describe.each([
|
|||
description: "test",
|
||||
relationship: [row._id],
|
||||
})
|
||||
row = (await config.api.row.get(table._id!, row._id!)).body
|
||||
row = await config.api.row.get(table._id!, row._id!)
|
||||
expect(row.relationship.length).toBe(1)
|
||||
const resp = await config.api.row.patch(table._id!, {
|
||||
_id: row._id!,
|
||||
|
@ -632,8 +630,10 @@ describe.each([
|
|||
const createdRow = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.row.delete(table._id!, [createdRow])
|
||||
expect(res.body[0]._id).toEqual(createdRow._id)
|
||||
const res = await config.api.row.bulkDelete(table._id!, {
|
||||
rows: [createdRow],
|
||||
})
|
||||
expect(res[0]._id).toEqual(createdRow._id)
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
})
|
||||
})
|
||||
|
@ -682,10 +682,12 @@ describe.each([
|
|||
const row2 = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.row.delete(table._id!, [row1, row2])
|
||||
const res = await config.api.row.bulkDelete(table._id!, {
|
||||
rows: [row1, row2],
|
||||
})
|
||||
|
||||
expect(res.body.length).toEqual(2)
|
||||
await loadRow(row1._id!, table._id!, 404)
|
||||
expect(res.length).toEqual(2)
|
||||
await config.api.row.get(table._id!, row1._id!, { status: 404 })
|
||||
await assertRowUsage(rowUsage - 2)
|
||||
})
|
||||
|
||||
|
@ -697,14 +699,12 @@ describe.each([
|
|||
])
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.row.delete(table._id!, [
|
||||
row1,
|
||||
row2._id,
|
||||
{ _id: row3._id },
|
||||
])
|
||||
const res = await config.api.row.bulkDelete(table._id!, {
|
||||
rows: [row1, row2._id!, { _id: row3._id }],
|
||||
})
|
||||
|
||||
expect(res.body.length).toEqual(3)
|
||||
await loadRow(row1._id!, table._id!, 404)
|
||||
expect(res.length).toEqual(3)
|
||||
await config.api.row.get(table._id!, row1._id!, { status: 404 })
|
||||
await assertRowUsage(rowUsage - 3)
|
||||
})
|
||||
|
||||
|
@ -712,34 +712,36 @@ describe.each([
|
|||
const row1 = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.row.delete(table._id!, row1)
|
||||
const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
|
||||
|
||||
expect(res.body.id).toEqual(row1._id)
|
||||
await loadRow(row1._id!, table._id!, 404)
|
||||
expect(res.id).toEqual(row1._id)
|
||||
await config.api.row.get(table._id!, row1._id!, { status: 404 })
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
})
|
||||
|
||||
it("Should ignore malformed/invalid delete requests", async () => {
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.row.delete(
|
||||
table._id!,
|
||||
{ not: "valid" },
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
expect(res.body.message).toEqual("Invalid delete rows request")
|
||||
|
||||
const res2 = await config.api.row.delete(
|
||||
table._id!,
|
||||
{ rows: 123 },
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
expect(res2.body.message).toEqual("Invalid delete rows request")
|
||||
|
||||
const res3 = await config.api.row.delete(table._id!, "invalid", {
|
||||
expectStatus: 400,
|
||||
await config.api.row.delete(table._id!, { not: "valid" } as any, {
|
||||
status: 400,
|
||||
body: {
|
||||
message: "Invalid delete rows request",
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.row.delete(table._id!, { rows: 123 } as any, {
|
||||
status: 400,
|
||||
body: {
|
||||
message: "Invalid delete rows request",
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.row.delete(table._id!, "invalid" as any, {
|
||||
status: 400,
|
||||
body: {
|
||||
message: "Invalid delete rows request",
|
||||
},
|
||||
})
|
||||
expect(res3.body.message).toEqual("Invalid delete rows request")
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
@ -757,16 +759,16 @@ describe.each([
|
|||
const row = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.legacyView.get(table._id!)
|
||||
expect(res.body.length).toEqual(1)
|
||||
expect(res.body[0]._id).toEqual(row._id)
|
||||
const rows = await config.api.legacyView.get(table._id!)
|
||||
expect(rows.length).toEqual(1)
|
||||
expect(rows[0]._id).toEqual(row._id)
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
||||
it("should throw an error if view doesn't exist", async () => {
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.legacyView.get("derp", { expectStatus: 404 })
|
||||
await config.api.legacyView.get("derp", { status: 404 })
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
@ -781,9 +783,9 @@ describe.each([
|
|||
const row = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.legacyView.get(view.name)
|
||||
expect(res.body.length).toEqual(1)
|
||||
expect(res.body[0]._id).toEqual(row._id)
|
||||
const rows = await config.api.legacyView.get(view.name)
|
||||
expect(rows.length).toEqual(1)
|
||||
expect(rows[0]._id).toEqual(row._id)
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
@ -841,8 +843,8 @@ describe.each([
|
|||
linkedTable._id!,
|
||||
secondRow._id!
|
||||
)
|
||||
expect(resBasic.body.link.length).toBe(1)
|
||||
expect(resBasic.body.link[0]).toEqual({
|
||||
expect(resBasic.link.length).toBe(1)
|
||||
expect(resBasic.link[0]).toEqual({
|
||||
_id: firstRow._id,
|
||||
primaryDisplay: firstRow.name,
|
||||
})
|
||||
|
@ -852,10 +854,10 @@ describe.each([
|
|||
linkedTable._id!,
|
||||
secondRow._id!
|
||||
)
|
||||
expect(resEnriched.body.link.length).toBe(1)
|
||||
expect(resEnriched.body.link[0]._id).toBe(firstRow._id)
|
||||
expect(resEnriched.body.link[0].name).toBe("Test Contact")
|
||||
expect(resEnriched.body.link[0].description).toBe("original description")
|
||||
expect(resEnriched.link.length).toBe(1)
|
||||
expect(resEnriched.link[0]._id).toBe(firstRow._id)
|
||||
expect(resEnriched.link[0].name).toBe("Test Contact")
|
||||
expect(resEnriched.link[0].description).toBe("original description")
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
})
|
||||
|
@ -880,8 +882,7 @@ describe.each([
|
|||
],
|
||||
tableId: table._id,
|
||||
})
|
||||
// the environment needs configured for this
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
return context.doInAppContext(config.getAppId(), async () => {
|
||||
const enriched = await outputProcessing(table, [row])
|
||||
expect((enriched as Row[])[0].attachment[0].url).toBe(
|
||||
|
@ -903,7 +904,7 @@ describe.each([
|
|||
const res = await config.api.row.exportRows(table._id!, {
|
||||
rows: [existing._id!],
|
||||
})
|
||||
const results = JSON.parse(res.text)
|
||||
const results = JSON.parse(res)
|
||||
expect(results.length).toEqual(1)
|
||||
const row = results[0]
|
||||
|
||||
|
@ -922,7 +923,7 @@ describe.each([
|
|||
rows: [existing._id!],
|
||||
columns: ["_id"],
|
||||
})
|
||||
const results = JSON.parse(res.text)
|
||||
const results = JSON.parse(res)
|
||||
expect(results.length).toEqual(1)
|
||||
const row = results[0]
|
||||
|
||||
|
@ -1000,7 +1001,7 @@ describe.each([
|
|||
})
|
||||
|
||||
const row = await config.api.row.get(table._id!, newRow._id!)
|
||||
expect(row.body).toEqual({
|
||||
expect(row).toEqual({
|
||||
name: data.name,
|
||||
surname: data.surname,
|
||||
address: data.address,
|
||||
|
@ -1010,9 +1011,9 @@ describe.each([
|
|||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row.body._viewId).toBeUndefined()
|
||||
expect(row.body.age).toBeUndefined()
|
||||
expect(row.body.jobTitle).toBeUndefined()
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1042,7 +1043,7 @@ describe.each([
|
|||
})
|
||||
|
||||
const row = await config.api.row.get(tableId, newRow._id!)
|
||||
expect(row.body).toEqual({
|
||||
expect(row).toEqual({
|
||||
...newRow,
|
||||
name: newData.name,
|
||||
address: newData.address,
|
||||
|
@ -1051,9 +1052,9 @@ describe.each([
|
|||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row.body._viewId).toBeUndefined()
|
||||
expect(row.body.age).toBeUndefined()
|
||||
expect(row.body.jobTitle).toBeUndefined()
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1071,12 +1072,12 @@ describe.each([
|
|||
const createdRow = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.delete(view.id, [createdRow])
|
||||
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
|
||||
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
|
||||
await config.api.row.get(tableId, createdRow._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1097,17 +1098,17 @@ describe.each([
|
|||
])
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.delete(view.id, [rows[0], rows[2]])
|
||||
await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
|
||||
|
||||
await assertRowUsage(rowUsage - 2)
|
||||
|
||||
await config.api.row.get(tableId, rows[0]._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[2]._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
|
||||
await config.api.row.get(tableId, rows[1]._id!, { status: 200 })
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1154,8 +1155,8 @@ describe.each([
|
|||
const createViewResponse = await config.createView()
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.body).toEqual({
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
_viewId: createViewResponse.id,
|
||||
|
@ -1206,8 +1207,8 @@ describe.each([
|
|||
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(5)
|
||||
expect(response.body).toEqual({
|
||||
expect(response.rows).toHaveLength(5)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
expectedRows.map(r => ({
|
||||
_viewId: createViewResponse.id,
|
||||
|
@ -1328,8 +1329,8 @@ describe.each([
|
|||
createViewResponse.id
|
||||
)
|
||||
|
||||
expect(response.body.rows).toHaveLength(4)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
|
@ -1357,8 +1358,8 @@ describe.each([
|
|||
}
|
||||
)
|
||||
|
||||
expect(response.body.rows).toHaveLength(4)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
|
@ -1382,8 +1383,8 @@ describe.each([
|
|||
})
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response.rows).toEqual(
|
||||
expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
...(isInternal
|
||||
|
@ -1402,7 +1403,7 @@ describe.each([
|
|||
const createViewResponse = await config.createView()
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(0)
|
||||
expect(response.rows).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("respects the limit parameter", async () => {
|
||||
|
@ -1417,7 +1418,7 @@ describe.each([
|
|||
query: {},
|
||||
})
|
||||
|
||||
expect(response.body.rows).toHaveLength(limit)
|
||||
expect(response.rows).toHaveLength(limit)
|
||||
})
|
||||
|
||||
it("can handle pagination", async () => {
|
||||
|
@ -1426,7 +1427,7 @@ describe.each([
|
|||
|
||||
const createViewResponse = await config.createView()
|
||||
const allRows = (await config.api.viewV2.search(createViewResponse.id))
|
||||
.body.rows
|
||||
.rows
|
||||
|
||||
const firstPageResponse = await config.api.viewV2.search(
|
||||
createViewResponse.id,
|
||||
|
@ -1436,7 +1437,7 @@ describe.each([
|
|||
query: {},
|
||||
}
|
||||
)
|
||||
expect(firstPageResponse.body).toEqual({
|
||||
expect(firstPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(0, 4)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
|
@ -1448,12 +1449,12 @@ describe.each([
|
|||
{
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: firstPageResponse.body.bookmark,
|
||||
bookmark: firstPageResponse.bookmark,
|
||||
|
||||
query: {},
|
||||
}
|
||||
)
|
||||
expect(secondPageResponse.body).toEqual({
|
||||
expect(secondPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(4, 8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
|
@ -1465,11 +1466,11 @@ describe.each([
|
|||
{
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: secondPageResponse.body.bookmark,
|
||||
bookmark: secondPageResponse.bookmark,
|
||||
query: {},
|
||||
}
|
||||
)
|
||||
expect(lastPageResponse.body).toEqual({
|
||||
expect(lastPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: false,
|
||||
|
@ -1489,7 +1490,7 @@ describe.each([
|
|||
email: "joe@joe.com",
|
||||
roles: {},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
expect(response.message).toBe("Cannot create new user entry.")
|
||||
})
|
||||
|
@ -1516,58 +1517,52 @@ describe.each([
|
|||
|
||||
it("does not allow public users to fetch by default", async () => {
|
||||
await config.publish()
|
||||
await config.api.viewV2.search(viewId, undefined, {
|
||||
expectStatus: 403,
|
||||
usePublicUser: true,
|
||||
await config.api.viewV2.publicSearch(viewId, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are explicit", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: viewId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
const response = await config.api.viewV2.publicSearch(viewId)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are inherited", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: tableId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
const response = await config.api.viewV2.publicSearch(viewId)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("respects inherited permissions, not allowing not public views from public tables", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: tableId,
|
||||
})
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.POWER,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: viewId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
expectStatus: 403,
|
||||
await config.api.viewV2.publicSearch(viewId, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
@ -1754,7 +1749,7 @@ describe.each([
|
|||
}
|
||||
const row = await config.api.row.save(tableId, rowData)
|
||||
|
||||
const { body: retrieved } = await config.api.row.get(tableId, row._id!)
|
||||
const retrieved = await config.api.row.get(tableId, row._id!)
|
||||
expect(retrieved).toEqual({
|
||||
name: rowData.name,
|
||||
description: rowData.description,
|
||||
|
@ -1781,7 +1776,7 @@ describe.each([
|
|||
}
|
||||
const row = await config.api.row.save(tableId, rowData)
|
||||
|
||||
const { body: retrieved } = await config.api.row.get(tableId, row._id!)
|
||||
const retrieved = await config.api.row.get(tableId, row._id!)
|
||||
expect(retrieved).toEqual({
|
||||
name: rowData.name,
|
||||
description: rowData.description,
|
||||
|
|
|
@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
|
|||
tk.freeze(mocks.date.MOCK_DATE)
|
||||
|
||||
const { basicTable } = setup.structures
|
||||
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
|
||||
|
||||
describe("/tables", () => {
|
||||
let request = setup.getRequest()
|
||||
|
@ -285,6 +286,35 @@ describe("/tables", () => {
|
|||
expect(res.body.schema.roleId).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
it("should add a new column for an internal DB table", async () => {
|
||||
const saveTableRequest: SaveTableRequest = {
|
||||
_add: {
|
||||
name: "NEW_COLUMN",
|
||||
},
|
||||
...basicTable(),
|
||||
}
|
||||
|
||||
const response = await request
|
||||
.post(`/api/tables`)
|
||||
.send(saveTableRequest)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
|
||||
const expectedResponse = {
|
||||
...saveTableRequest,
|
||||
_rev: expect.stringMatching(/^\d-.+/),
|
||||
_id: expect.stringMatching(/^ta_.+/),
|
||||
createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
|
||||
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
|
||||
views: {},
|
||||
}
|
||||
delete expectedResponse._add
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body).toEqual(expectedResponse)
|
||||
})
|
||||
})
|
||||
|
||||
describe("import", () => {
|
||||
|
@ -663,8 +693,7 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const resp = await config.api.row.get(table._id!, testRow._id!)
|
||||
const migratedRow = resp.body as Row
|
||||
const migratedRow = await config.api.row.get(table._id!, testRow._id!)
|
||||
|
||||
expect(migratedRow["user column"]).toBeDefined()
|
||||
expect(migratedRow["user relationship"]).not.toBeDefined()
|
||||
|
@ -716,15 +745,13 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
const row1Migrated = await config.api.row.get(table._id!, row1._id!)
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
const row2Migrated = await config.api.row.get(table._id!, row2._id!)
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[1]._id, users[2]._id])
|
||||
|
@ -773,15 +800,13 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
const row1Migrated = await config.api.row.get(table._id!, row1._id!)
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
const row2Migrated = await config.api.row.get(table._id!, row2._id!)
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
|
||||
users[2]._id,
|
||||
|
@ -831,7 +856,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -846,7 +871,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -861,7 +886,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -880,7 +905,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -90,7 +90,7 @@ describe("/users", () => {
|
|||
})
|
||||
await config.api.user.update(
|
||||
{ ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER },
|
||||
{ expectStatus: 409 }
|
||||
{ status: 409 }
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import TestConfig from "../../../../tests/utilities/TestConfiguration"
|
||||
import env from "../../../../environment"
|
||||
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
|
||||
import supertest from "supertest"
|
||||
|
||||
export * as structures from "../../../../tests/utilities/structures"
|
||||
|
@ -47,10 +46,10 @@ export function delay(ms: number) {
|
|||
}
|
||||
|
||||
let request: supertest.SuperTest<supertest.Test> | undefined | null,
|
||||
config: TestConfig | null
|
||||
config: TestConfiguration | null
|
||||
|
||||
export function beforeAll() {
|
||||
config = new TestConfig()
|
||||
config = new TestConfiguration()
|
||||
request = config.getRequest()
|
||||
}
|
||||
|
||||
|
@ -77,21 +76,3 @@ export function getConfig() {
|
|||
}
|
||||
return config!
|
||||
}
|
||||
|
||||
export async function switchToSelfHosted(func: any) {
|
||||
// self hosted stops any attempts to Dynamo
|
||||
env._set("NODE_ENV", "production")
|
||||
env._set("SELF_HOSTED", true)
|
||||
let error
|
||||
try {
|
||||
await func()
|
||||
} catch (err) {
|
||||
error = err
|
||||
}
|
||||
env._set("NODE_ENV", "jest")
|
||||
env._set("SELF_HOSTED", false)
|
||||
// don't throw error until after reset
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
|
|
@ -177,7 +177,7 @@ describe.each([
|
|||
}
|
||||
|
||||
await config.api.viewV2.create(newView, {
|
||||
expectStatus: 201,
|
||||
status: 201,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
@ -275,7 +275,7 @@ describe.each([
|
|||
const tableId = table._id!
|
||||
await config.api.viewV2.update(
|
||||
{ ...view, id: generator.guid() },
|
||||
{ expectStatus: 404 }
|
||||
{ status: 404 }
|
||||
)
|
||||
|
||||
expect(await config.api.table.get(tableId)).toEqual(
|
||||
|
@ -304,7 +304,7 @@ describe.each([
|
|||
},
|
||||
],
|
||||
},
|
||||
{ expectStatus: 404 }
|
||||
{ status: 404 }
|
||||
)
|
||||
|
||||
expect(await config.api.table.get(tableId)).toEqual(
|
||||
|
@ -326,12 +326,10 @@ describe.each([
|
|||
...viewV1,
|
||||
},
|
||||
{
|
||||
expectStatus: 400,
|
||||
handleResponse: r => {
|
||||
expect(r.body).toEqual({
|
||||
message: "Only views V2 can be updated",
|
||||
status: 400,
|
||||
})
|
||||
status: 400,
|
||||
body: {
|
||||
message: "Only views V2 can be updated",
|
||||
status: 400,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
@ -403,7 +401,7 @@ describe.each([
|
|||
} as Record<string, FieldSchema>,
|
||||
},
|
||||
{
|
||||
expectStatus: 200,
|
||||
status: 200,
|
||||
}
|
||||
)
|
||||
})
|
||||
|
|
|
@ -30,9 +30,9 @@ describe("migrations", () => {
|
|||
|
||||
const appId = config.getAppId()
|
||||
|
||||
const response = await config.api.application.getRaw(appId)
|
||||
|
||||
expect(response.headers[Header.MIGRATING_APP]).toBeUndefined()
|
||||
await config.api.application.get(appId, {
|
||||
headersNotPresent: [Header.MIGRATING_APP],
|
||||
})
|
||||
})
|
||||
|
||||
it("accessing an app that has pending migrations will attach the migrating header", async () => {
|
||||
|
@ -46,8 +46,10 @@ describe("migrations", () => {
|
|||
func: async () => {},
|
||||
})
|
||||
|
||||
const response = await config.api.application.getRaw(appId)
|
||||
|
||||
expect(response.headers[Header.MIGRATING_APP]).toEqual(appId)
|
||||
await config.api.application.get(appId, {
|
||||
headers: {
|
||||
[Header.MIGRATING_APP]: appId,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -24,7 +24,7 @@ describe("test the create row action", () => {
|
|||
expect(res.id).toBeDefined()
|
||||
expect(res.revision).toBeDefined()
|
||||
expect(res.success).toEqual(true)
|
||||
const gottenRow = await config.getRow(table._id, res.id)
|
||||
const gottenRow = await config.api.row.get(table._id, res.id)
|
||||
expect(gottenRow.name).toEqual("test")
|
||||
expect(gottenRow.description).toEqual("test")
|
||||
})
|
||||
|
|
|
@ -36,7 +36,7 @@ describe("test the update row action", () => {
|
|||
it("should be able to run the action", async () => {
|
||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
|
||||
expect(res.success).toEqual(true)
|
||||
const updatedRow = await config.getRow(table._id!, res.id)
|
||||
const updatedRow = await config.api.row.get(table._id!, res.id)
|
||||
expect(updatedRow.name).toEqual("Updated name")
|
||||
expect(updatedRow.description).not.toEqual("")
|
||||
})
|
||||
|
@ -87,8 +87,8 @@ describe("test the update row action", () => {
|
|||
})
|
||||
|
||||
let getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
|
@ -103,8 +103,8 @@ describe("test the update row action", () => {
|
|||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
})
|
||||
|
||||
it("should overwrite links if those links are not set and we ask it do", async () => {
|
||||
|
@ -140,8 +140,8 @@ describe("test the update row action", () => {
|
|||
})
|
||||
|
||||
let getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
|
@ -163,7 +163,7 @@ describe("test the update row action", () => {
|
|||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.body.user2).toBeUndefined()
|
||||
expect(getResp.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user2).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
|
|
@ -100,7 +100,7 @@ describe("test the link controller", () => {
|
|||
const { _id } = await config.createRow(
|
||||
basicLinkedRow(t1._id!, row._id!, linkField)
|
||||
)
|
||||
return config.getRow(t1._id!, _id!)
|
||||
return config.api.row.get(t1._id!, _id!)
|
||||
}
|
||||
|
||||
it("should be able to confirm if two table schemas are equal", async () => {
|
||||
|
|
|
@ -0,0 +1,363 @@
|
|||
import fetch from "node-fetch"
|
||||
import {
|
||||
generateMakeRequest,
|
||||
MakeRequestResponse,
|
||||
} from "../api/routes/public/tests/utils"
|
||||
import { v4 as uuidv4 } from "uuid"
|
||||
import * as setup from "../api/routes/tests/utilities"
|
||||
import {
|
||||
Datasource,
|
||||
FieldType,
|
||||
Table,
|
||||
TableRequest,
|
||||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
import _ from "lodash"
|
||||
import { databaseTestProviders } from "../integrations/tests/utils"
|
||||
import mysql from "mysql2/promise"
|
||||
import { builderSocket } from "../websockets"
|
||||
// @ts-ignore
|
||||
fetch.mockSearch()
|
||||
|
||||
const config = setup.getConfig()!
|
||||
|
||||
jest.unmock("mysql2/promise")
|
||||
jest.mock("../websockets", () => ({
|
||||
clientAppSocket: jest.fn(),
|
||||
gridAppSocket: jest.fn(),
|
||||
initialise: jest.fn(),
|
||||
builderSocket: {
|
||||
emitTableUpdate: jest.fn(),
|
||||
emitTableDeletion: jest.fn(),
|
||||
emitDatasourceUpdate: jest.fn(),
|
||||
emitDatasourceDeletion: jest.fn(),
|
||||
emitScreenUpdate: jest.fn(),
|
||||
emitAppMetadataUpdate: jest.fn(),
|
||||
emitAppPublish: jest.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
describe("mysql integrations", () => {
|
||||
let makeRequest: MakeRequestResponse,
|
||||
mysqlDatasource: Datasource,
|
||||
primaryMySqlTable: Table
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
const apiKey = await config.generateApiKey()
|
||||
|
||||
makeRequest = generateMakeRequest(apiKey, true)
|
||||
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.mysql.stop()
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
primaryMySqlTable = await config.createTable({
|
||||
name: uuidv4(),
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
name: "id",
|
||||
type: FieldType.AUTO,
|
||||
autocolumn: true,
|
||||
},
|
||||
name: {
|
||||
name: "name",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
description: {
|
||||
name: "description",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
value: {
|
||||
name: "value",
|
||||
type: FieldType.NUMBER,
|
||||
},
|
||||
},
|
||||
sourceId: mysqlDatasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
})
|
||||
|
||||
afterAll(config.end)
|
||||
|
||||
it("validate table schema", async () => {
|
||||
const res = await makeRequest(
|
||||
"get",
|
||||
`/api/datasources/${mysqlDatasource._id}`
|
||||
)
|
||||
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual({
|
||||
config: {
|
||||
database: "mysql",
|
||||
host: mysqlDatasource.config!.host,
|
||||
password: "--secret-value--",
|
||||
port: mysqlDatasource.config!.port,
|
||||
user: "root",
|
||||
},
|
||||
plus: true,
|
||||
source: "MYSQL",
|
||||
type: "datasource_plus",
|
||||
_id: expect.any(String),
|
||||
_rev: expect.any(String),
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
entities: expect.any(Object),
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/datasources/verify", () => {
|
||||
it("should be able to verify the connection", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.mysql.datasource(),
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.mysql.datasource()
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: false,
|
||||
error:
|
||||
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/datasources/info", () => {
|
||||
it("should fetch information about mysql datasource", async () => {
|
||||
const primaryName = primaryMySqlTable.name
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: mysqlDatasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("Integration compatibility with mysql search_path", () => {
|
||||
let client: mysql.Connection, pathDatasource: Datasource
|
||||
const database = "test1"
|
||||
const database2 = "test-2"
|
||||
|
||||
beforeAll(async () => {
|
||||
const dsConfig = await databaseTestProviders.mysql.datasource()
|
||||
const dbConfig = dsConfig.config!
|
||||
|
||||
client = await mysql.createConnection(dbConfig)
|
||||
await client.query(`CREATE DATABASE \`${database}\`;`)
|
||||
await client.query(`CREATE DATABASE \`${database2}\`;`)
|
||||
|
||||
const pathConfig: any = {
|
||||
...dsConfig,
|
||||
config: {
|
||||
...dbConfig,
|
||||
database,
|
||||
},
|
||||
}
|
||||
pathDatasource = await config.api.datasource.create(pathConfig)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await client.query(`DROP DATABASE \`${database}\`;`)
|
||||
await client.query(`DROP DATABASE \`${database2}\`;`)
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("discovers tables from any schema in search path", async () => {
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: pathDatasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
expect(response.body.tableNames).toEqual(
|
||||
expect.arrayContaining(["table1"])
|
||||
)
|
||||
})
|
||||
|
||||
it("does not mix columns from different tables", async () => {
|
||||
const repeated_table_name = "table_same_name"
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
|
||||
)
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
|
||||
)
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${pathDatasource._id}/schema`,
|
||||
{
|
||||
tablesFilter: [repeated_table_name],
|
||||
}
|
||||
)
|
||||
expect(response.status).toBe(200)
|
||||
expect(
|
||||
response.body.datasource.entities[repeated_table_name].schema
|
||||
).toBeDefined()
|
||||
const schema =
|
||||
response.body.datasource.entities[repeated_table_name].schema
|
||||
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/tables/", () => {
|
||||
let client: mysql.Connection
|
||||
const emitDatasourceUpdateMock = jest.fn()
|
||||
|
||||
beforeEach(async () => {
|
||||
client = await mysql.createConnection(
|
||||
(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
).config!
|
||||
)
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
|
||||
const addColumnToTable: TableRequest = {
|
||||
type: "table",
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
name: "table",
|
||||
sourceId: mysqlDatasource._id!,
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
type: FieldType.AUTO,
|
||||
name: "id",
|
||||
autocolumn: true,
|
||||
},
|
||||
new_column: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "new_column",
|
||||
},
|
||||
},
|
||||
_add: {
|
||||
name: "new_column",
|
||||
},
|
||||
}
|
||||
|
||||
jest
|
||||
.spyOn(builderSocket!, "emitDatasourceUpdate")
|
||||
.mockImplementation(emitDatasourceUpdateMock)
|
||||
|
||||
await makeRequest("post", "/api/tables/", addColumnToTable)
|
||||
|
||||
const expectedTable: TableRequest = {
|
||||
...addColumnToTable,
|
||||
schema: {
|
||||
id: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "id",
|
||||
autocolumn: true,
|
||||
constraints: {
|
||||
presence: false,
|
||||
},
|
||||
externalType: "int unsigned",
|
||||
},
|
||||
new_column: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "new_column",
|
||||
autocolumn: false,
|
||||
constraints: {
|
||||
presence: false,
|
||||
},
|
||||
externalType: "float(8,2)",
|
||||
},
|
||||
},
|
||||
created: true,
|
||||
_id: `${mysqlDatasource._id}__table`,
|
||||
}
|
||||
delete expectedTable._add
|
||||
|
||||
expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
|
||||
const emittedDatasource: Datasource =
|
||||
emitDatasourceUpdateMock.mock.calls[0][1]
|
||||
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
|
||||
})
|
||||
|
||||
it("will rename a column", async () => {
|
||||
await makeRequest("post", "/api/tables/", primaryMySqlTable)
|
||||
|
||||
let renameColumnOnTable: TableRequest = {
|
||||
...primaryMySqlTable,
|
||||
schema: {
|
||||
id: {
|
||||
name: "id",
|
||||
type: FieldType.AUTO,
|
||||
autocolumn: true,
|
||||
externalType: "unsigned integer",
|
||||
},
|
||||
name: {
|
||||
name: "name",
|
||||
type: FieldType.STRING,
|
||||
externalType: "text",
|
||||
},
|
||||
description: {
|
||||
name: "description",
|
||||
type: FieldType.STRING,
|
||||
externalType: "text",
|
||||
},
|
||||
age: {
|
||||
name: "age",
|
||||
type: FieldType.NUMBER,
|
||||
externalType: "float(8,2)",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
"/api/tables/",
|
||||
renameColumnOnTable
|
||||
)
|
||||
mysqlDatasource = (
|
||||
await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${mysqlDatasource._id}/schema`
|
||||
)
|
||||
).body.datasource
|
||||
|
||||
expect(response.status).toEqual(200)
|
||||
expect(
|
||||
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
|
||||
).toEqual(["id", "name", "description", "age"])
|
||||
})
|
||||
})
|
||||
})
|
|
@ -398,7 +398,7 @@ describe("postgres integrations", () => {
|
|||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual(updatedRow)
|
||||
|
||||
const persistedRow = await config.getRow(
|
||||
const persistedRow = await config.api.row.get(
|
||||
primaryPostgresTable._id!,
|
||||
row.id
|
||||
)
|
||||
|
@ -1040,28 +1040,37 @@ describe("postgres integrations", () => {
|
|||
|
||||
describe("POST /api/datasources/verify", () => {
|
||||
it("should be able to verify the connection", async () => {
|
||||
const response = await config.api.datasource.verify({
|
||||
datasource: await databaseTestProviders.postgres.datasource(),
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.connected).toBe(true)
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.postgres.datasource(),
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.postgres.datasource()
|
||||
const response = await config.api.datasource.verify({
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.connected).toBe(false)
|
||||
expect(response.body.error).toBeDefined()
|
||||
{
|
||||
body: {
|
||||
connected: false,
|
||||
error: 'password authentication failed for user "postgres"',
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -12,6 +12,8 @@ import {
|
|||
} from "@budibase/types"
|
||||
import environment from "../../environment"
|
||||
|
||||
type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
|
||||
|
||||
const envLimit = environment.SQL_MAX_ROWS
|
||||
? parseInt(environment.SQL_MAX_ROWS)
|
||||
: null
|
||||
|
@ -325,15 +327,18 @@ class InternalBuilder {
|
|||
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
|
||||
let { sort, paginate } = json
|
||||
const table = json.meta?.table
|
||||
const aliases = json.tableAliases
|
||||
const aliased =
|
||||
table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
|
||||
if (sort && Object.keys(sort || {}).length > 0) {
|
||||
for (let [key, value] of Object.entries(sort)) {
|
||||
const direction =
|
||||
value.direction === SortDirection.ASCENDING ? "asc" : "desc"
|
||||
query = query.orderBy(`${table?.name}.${key}`, direction)
|
||||
query = query.orderBy(`${aliased}.${key}`, direction)
|
||||
}
|
||||
} else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
|
||||
// @ts-ignore
|
||||
query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
|
||||
query = query.orderBy(`${aliased}.${table?.primary[0]}`)
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
@ -433,10 +438,12 @@ class InternalBuilder {
|
|||
aliases?: QueryJson["tableAliases"]
|
||||
): Knex.QueryBuilder {
|
||||
const tableName = endpoint.entityId
|
||||
const tableAliased = aliases?.[tableName]
|
||||
? `${tableName} as ${aliases?.[tableName]}`
|
||||
: tableName
|
||||
let query = knex(tableAliased)
|
||||
const tableAlias = aliases?.[tableName]
|
||||
let table: string | Record<string, string> = tableName
|
||||
if (tableAlias) {
|
||||
table = { [tableAlias]: tableName }
|
||||
}
|
||||
let query = knex(table)
|
||||
if (endpoint.schema) {
|
||||
query = query.withSchema(endpoint.schema)
|
||||
}
|
||||
|
@ -622,7 +629,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
async getReturningRow(queryFn: Function, json: QueryJson) {
|
||||
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
|
||||
if (!json.extra || !json.extra.idFilter) {
|
||||
return {}
|
||||
}
|
||||
|
@ -634,7 +641,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
resource: {
|
||||
fields: [],
|
||||
},
|
||||
filters: json.extra.idFilter,
|
||||
filters: json.extra?.idFilter,
|
||||
paginate: {
|
||||
limit: 1,
|
||||
},
|
||||
|
@ -663,7 +670,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
// this function recreates the returning functionality of postgres
|
||||
async queryWithReturning(
|
||||
json: QueryJson,
|
||||
queryFn: Function,
|
||||
queryFn: QueryFunction,
|
||||
processFn: Function = (result: any) => result
|
||||
) {
|
||||
const sqlClient = this.getSqlClient()
|
||||
|
|
|
@ -14,7 +14,12 @@ import firebase from "./firebase"
|
|||
import redis from "./redis"
|
||||
import snowflake from "./snowflake"
|
||||
import oracle from "./oracle"
|
||||
import { SourceName, Integration, PluginType } from "@budibase/types"
|
||||
import {
|
||||
SourceName,
|
||||
Integration,
|
||||
PluginType,
|
||||
IntegrationBase,
|
||||
} from "@budibase/types"
|
||||
import { getDatasourcePlugin } from "../utilities/fileSystem"
|
||||
import env from "../environment"
|
||||
import cloneDeep from "lodash/cloneDeep"
|
||||
|
@ -40,25 +45,28 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
|
|||
[SourceName.BUDIBASE]: undefined,
|
||||
}
|
||||
|
||||
const INTEGRATIONS: Record<SourceName, any> = {
|
||||
[SourceName.POSTGRES]: postgres.integration,
|
||||
[SourceName.DYNAMODB]: dynamodb.integration,
|
||||
[SourceName.MONGODB]: mongodb.integration,
|
||||
[SourceName.ELASTICSEARCH]: elasticsearch.integration,
|
||||
[SourceName.COUCHDB]: couchdb.integration,
|
||||
[SourceName.SQL_SERVER]: sqlServer.integration,
|
||||
[SourceName.S3]: s3.integration,
|
||||
[SourceName.AIRTABLE]: airtable.integration,
|
||||
[SourceName.MYSQL]: mysql.integration,
|
||||
[SourceName.ARANGODB]: arangodb.integration,
|
||||
[SourceName.REST]: rest.integration,
|
||||
[SourceName.FIRESTORE]: firebase.integration,
|
||||
[SourceName.GOOGLE_SHEETS]: googlesheets.integration,
|
||||
[SourceName.REDIS]: redis.integration,
|
||||
[SourceName.SNOWFLAKE]: snowflake.integration,
|
||||
[SourceName.ORACLE]: undefined,
|
||||
[SourceName.BUDIBASE]: undefined,
|
||||
}
|
||||
type IntegrationBaseConstructor = new (...args: any[]) => IntegrationBase
|
||||
|
||||
const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
|
||||
{
|
||||
[SourceName.POSTGRES]: postgres.integration,
|
||||
[SourceName.DYNAMODB]: dynamodb.integration,
|
||||
[SourceName.MONGODB]: mongodb.integration,
|
||||
[SourceName.ELASTICSEARCH]: elasticsearch.integration,
|
||||
[SourceName.COUCHDB]: couchdb.integration,
|
||||
[SourceName.SQL_SERVER]: sqlServer.integration,
|
||||
[SourceName.S3]: s3.integration,
|
||||
[SourceName.AIRTABLE]: airtable.integration,
|
||||
[SourceName.MYSQL]: mysql.integration,
|
||||
[SourceName.ARANGODB]: arangodb.integration,
|
||||
[SourceName.REST]: rest.integration,
|
||||
[SourceName.FIRESTORE]: firebase.integration,
|
||||
[SourceName.GOOGLE_SHEETS]: googlesheets.integration,
|
||||
[SourceName.REDIS]: redis.integration,
|
||||
[SourceName.SNOWFLAKE]: snowflake.integration,
|
||||
[SourceName.ORACLE]: undefined,
|
||||
[SourceName.BUDIBASE]: undefined,
|
||||
}
|
||||
|
||||
// optionally add oracle integration if the oracle binary can be installed
|
||||
if (
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
import { QueryJson } from "@budibase/types"
|
||||
import { Datasource, Operation, QueryJson, SourceName } from "@budibase/types"
|
||||
import { join } from "path"
|
||||
import Sql from "../base/sql"
|
||||
import { SqlClient } from "../utils"
|
||||
import AliasTables from "../../api/controllers/row/alias"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import { Knex } from "knex"
|
||||
|
||||
function multiline(sql: string) {
|
||||
return sql.replace(/\n/g, "").replace(/ +/g, " ")
|
||||
|
@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
})
})

describe("returning (everything bar Postgres)", () => {
it("should be able to handle row returning", () => {
const queryJson = getJson("createSimple.json")
const SQL = new Sql(SqlClient.MS_SQL, limit)
let query = SQL._query(queryJson, { disableReturning: true })
expect(query).toEqual({
sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
bindings: [22, "Test"],
})

// now check returning
let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
SQL.getReturningRow((input: Knex.SqlNative) => {
returningQuery = input
}, queryJson)
expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
bindings: [1, "Test", 22],
})
})
})

describe("check max character aliasing", () => {
it("should handle over 'z' max character alias", () => {
const tableNames = []
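The test above captures the two-step pattern a driver without RETURNING support has to follow: build the insert with disableReturning, then ask getReturningRow for the follow-up select that reads the created row back. Restated as a usage sketch (queryJson and limit come from the test context; execute stands in for a hypothetical driver call):

// Sketch only - `execute` is not a real helper, it represents whatever runs SQL.
const sql = new Sql(SqlClient.MS_SQL, limit)
const insert = sql._query(queryJson, { disableReturning: true })

let followUp: Knex.SqlNative = { sql: "", bindings: [] }
sql.getReturningRow((built: Knex.SqlNative) => {
  followUp = built
}, queryJson)

// await execute(insert)               // 1. run the insert itself
// const row = await execute(followUp) // 2. then read back the created row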
@ -175,6 +198,114 @@ describe("Captures of real examples", () => {
})
})

describe("check aliasing is disabled/enabled", () => {
const tables = ["tableA", "tableB"]

function getDatasource(source: SourceName): Datasource {
return {
source,
type: "datasource",
isSQL: true,
}
}

function getQuery(op: Operation, fields: string[] = ["a"]): QueryJson {
return {
endpoint: { datasourceId: "", entityId: "", operation: op },
resource: {
fields,
},
}
}

it("should check for Postgres aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.POSTGRES)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(true)
})

it("should check for MS-SQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.SQL_SERVER)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})

it("should check for MySQL aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.MYSQL)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})

it("should check for Oracle aliased status", () => {
const aliasing = new AliasTables(tables)
const datasource = getDatasource(SourceName.ORACLE)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
).toEqual(true)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
).toEqual(false)
expect(
aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
).toEqual(false)
})

it("should disable aliasing for non-SQL datasources", () => {
|
||||
const aliasing = new AliasTables(tables)
|
||||
expect(
|
||||
aliasing.isAliasingEnabled(getQuery(Operation.READ), {
|
||||
source: SourceName.GOOGLE_SHEETS,
|
||||
type: "datasource",
|
||||
isSQL: false,
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it("should disable when no fields", () => {
|
||||
const aliasing = new AliasTables(tables)
|
||||
const datasource = getDatasource(SourceName.POSTGRES)
|
||||
expect(
|
||||
aliasing.isAliasingEnabled(getQuery(Operation.READ, []), datasource)
|
||||
).toEqual(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("check some edge cases", () => {
|
||||
const tableNames = ["hello", "world"]
|
||||
|
||||
|
|
|
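Taken together, these expectations pin down the aliasing policy: never for non-SQL datasources or empty field lists, every operation for Postgres, and reads only for MS-SQL, MySQL and Oracle. A sketch that would satisfy them (the shipped AliasTables.isAliasingEnabled may be structured differently):

// Sketch consistent with the tests above - not necessarily the real implementation.
function isAliasingEnabled(json: QueryJson, datasource: Datasource): boolean {
  const fields = json.resource?.fields || []
  if (!datasource.isSQL || fields.length === 0) {
    return false
  }
  // Postgres supports aliasing on every operation; the others only on reads.
  if (datasource.source === SourceName.POSTGRES) {
    return true
  }
  return json.endpoint.operation === Operation.READ
}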
@ -68,7 +68,7 @@
"primary": [
"personid"
],
"name": "a",
"name": "persons",
"schema": {
"year": {
"type": "number",
@ -0,0 +1,64 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"resource": {
"fields": [
"a.name",
"a.age"
]
},
"filters": {},
"relationships": [],
"body": {
"name": "Test",
"age": 22
},
"extra": {
"idFilter": {
"equal": {
"name": "Test",
"age": 22
}
}
},
"meta": {
"table": {
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
"type": "table",
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"sourceType": "external",
"primary": [
"name",
"age"
],
"name": "people",
"schema": {
"name": {
"type": "string",
"externalType": "varchar",
"autocolumn": false,
"name": "name",
"constraints": {
"presence": true
}
},
"age": {
"type": "number",
"externalType": "int",
"autocolumn": false,
"name": "age",
"constraints": {
"presence": false
}
}
},
"primaryDisplay": "name"
}
},
"tableAliases": {
"people": "a"
}
}
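This is the createSimple.json capture that the returning test loads via getJson. The helper itself is not part of the diff; a minimal loader consistent with how the tests call it could look like this (the fixture directory name is an assumption):

import { readFileSync } from "fs"
import { join } from "path"
import { QueryJson } from "@budibase/types"

// Hypothetical loader - the path and helper shape are assumptions for illustration.
function getJson(name: string): QueryJson {
  const path = join(__dirname, "sqlQueryJson", name)
  return JSON.parse(readFileSync(path, "utf8"))
}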
@ -58,7 +58,7 @@
"primary": [
"personid"
],
"name": "a",
"name": "persons",
"schema": {
"year": {
"type": "number",
@ -34,7 +34,7 @@
"keypartone",
"keyparttwo"
],
"name": "a",
"name": "compositetable",
"schema": {
"keyparttwo": {
"type": "string",
@ -49,7 +49,7 @@
"primary": [
"taskid"
],
"name": "a",
"name": "tasks",
"schema": {
"executorid": {
"type": "number",
@ -63,7 +63,7 @@
"primary": [
"productid"
],
"name": "a",
"name": "products",
"schema": {
"productname": {
"type": "string",
@ -53,7 +53,7 @@
"primary": [
"productid"
],
"name": "a",
"name": "products",
"schema": {
"productname": {
"type": "string",