Merge branch 'master' of github.com:budibase/budibase into reorganise-row-tests-3

Commit 07c6dcc0c0 by Sam Rose, 2024-03-20 16:01:29 +00:00
262 changed files with 2157 additions and 2185 deletions

View File

@@ -34,18 +34,43 @@
   },
   {
     "files": ["**/*.ts"],
+    "excludedFiles": ["qa-core/**"],
     "parser": "@typescript-eslint/parser",
+    "plugins": ["@typescript-eslint"],
     "extends": ["eslint:recommended"],
+    "globals": {
+      "NodeJS": true
+    },
     "rules": {
       "no-unused-vars": "off",
-      "no-inner-declarations": "off",
-      "no-case-declarations": "off",
-      "no-useless-escape": "off",
-      "no-undef": "off",
-      "no-prototype-builtins": "off",
-      "local-rules/no-budibase-imports": "error",
+      "@typescript-eslint/no-unused-vars": "error",
+      "local-rules/no-budibase-imports": "error"
+    }
+  },
+  {
+    "files": ["**/*.spec.ts"],
+    "excludedFiles": ["qa-core/**"],
+    "parser": "@typescript-eslint/parser",
+    "plugins": ["jest", "@typescript-eslint"],
+    "extends": ["eslint:recommended", "plugin:jest/recommended"],
+    "env": {
+      "jest/globals": true
+    },
+    "globals": {
+      "NodeJS": true
+    },
+    "rules": {
+      "no-unused-vars": "off",
+      "@typescript-eslint/no-unused-vars": "error",
       "local-rules/no-test-com": "error",
-      "local-rules/email-domain-example-com": "error"
+      "local-rules/email-domain-example-com": "error",
+      "no-console": "warn",
+      // We have a lot of tests that don't have assertions, they use our test
+      // API client that does the assertions for them
+      "jest/expect-expect": "off",
+      // We do this in some tests where the behaviour of internal tables
+      // differs to external, but the API is broadly the same
+      "jest/no-conditional-expect": "off"
     }
   },
   {

View File

@@ -140,7 +140,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | ingress.className | string | `""` | What ingress class to use. |
 | ingress.enabled | bool | `true` | Whether to create an Ingress resource pointing to the Budibase proxy. |
 | ingress.hosts | list | `[]` | Standard hosts block for the Ingress resource. Defaults to pointing to the Budibase proxy. |
-| nameOverride | string | `""` | Override the name of the deploymen. Defaults to {{ .Chart.Name }}. |
+| nameOverride | string | `""` | Override the name of the deployment. Defaults to {{ .Chart.Name }}. |
 | service.port | int | `10000` | Port to expose on the service. |
 | service.type | string | `"ClusterIP"` | Service type for the service that points to the main Budibase proxy pod. |
 | serviceAccount.annotations | object | `{}` | Annotations to add to the service account |

View File

@@ -1,6 +1,6 @@
 # -- Passed to all pods created by this chart. Should not ordinarily need to be changed.
 imagePullSecrets: []
-# -- Override the name of the deploymen. Defaults to {{ .Chart.Name }}.
+# -- Override the name of the deployment. Defaults to {{ .Chart.Name }}.
 nameOverride: ""
 serviceAccount:

View File

@@ -7,11 +7,12 @@ module.exports = {
         if (
           /^@budibase\/[^/]+\/.*$/.test(importPath) &&
-          importPath !== "@budibase/backend-core/tests"
+          importPath !== "@budibase/backend-core/tests" &&
+          importPath !== "@budibase/string-templates/test/utils"
         ) {
           context.report({
             node,
-            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
+            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests and @budibase/string-templates/test/utils.`,
           })
         }
       },
@@ -24,11 +25,9 @@ module.exports = {
     docs: {
       description:
         "disallow the use of 'test.com' in strings and replace it with 'example.com'",
-      category: "Possible Errors",
-      recommended: false,
     },
-    schema: [], // no options
-    fixable: "code", // Indicates that this rule supports automatic fixing
+    schema: [],
+    fixable: "code",
   },
   create: function (context) {
     return {
@@ -57,8 +56,6 @@ module.exports = {
     docs: {
       description:
         "enforce using the example.com domain for generator.email calls",
-      category: "Possible Errors",
-      recommended: false,
     },
     fixable: "code",
     schema: [],
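The custom rules edited above follow the standard ESLint rule-module shape: a meta block (where docs.category and docs.recommended are optional documentation fields, which is why they can simply be dropped) and a create function returning AST visitor callbacks. A minimal sketch of that shape, using a hypothetical rule that is not part of this commit:

// hypothetical-rule.js, for illustration only
module.exports = {
  meta: {
    type: "problem",
    docs: {
      description: "disallow the use of 'test.com' in string literals",
    },
    schema: [], // the rule takes no options
    fixable: "code", // lets ESLint apply the fix below with --fix
  },
  create: function (context) {
    return {
      Literal(node) {
        if (typeof node.value === "string" && node.value.includes("test.com")) {
          context.report({
            node,
            message: "Use 'example.com' instead of 'test.com'.",
            fix: fixer =>
              fixer.replaceText(
                node,
                JSON.stringify(node.value.replace(/test\.com/g, "example.com"))
              ),
          })
        }
      },
    }
  },
}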

View File

@@ -12,8 +12,6 @@ COPY .yarnrc .
 COPY packages/server/package.json packages/server/package.json
 COPY packages/worker/package.json packages/worker/package.json
-# string-templates does not get bundled during the esbuild process, so we want to use the local version
-COPY packages/string-templates/package.json packages/string-templates/package.json
 COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
@@ -26,7 +24,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
 RUN echo '' > scripts/syncProPackage.js
 RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
 RUN ./scripts/removeWorkspaceDependencies.sh package.json
-RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
+RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production --frozen-lockfile
 # copy the actual code
 COPY packages/server/dist packages/server/dist
@@ -35,7 +33,6 @@ COPY packages/server/client packages/server/client
 COPY packages/server/builder packages/server/builder
 COPY packages/worker/dist packages/worker/dist
 COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
-COPY packages/string-templates packages/string-templates
 FROM budibase/couchdb:v3.3.3 as runner
@@ -100,9 +97,6 @@ COPY --from=build /app/node_modules /node_modules
 COPY --from=build /app/package.json /package.json
 COPY --from=build /app/packages/server /app
 COPY --from=build /app/packages/worker /worker
-COPY --from=build /app/packages/string-templates /string-templates
-RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
 EXPOSE 80

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.22.0",
+  "version": "2.22.7",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -12,6 +12,7 @@
     "esbuild-node-externals": "^1.8.0",
     "eslint": "^8.52.0",
     "eslint-plugin-import": "^2.29.0",
+    "eslint-plugin-jest": "^27.9.0",
     "eslint-plugin-local-rules": "^2.0.0",
     "eslint-plugin-svelte": "^2.34.0",
     "husky": "^8.0.3",
@@ -25,6 +26,7 @@
     "svelte": "^4.2.10",
     "svelte-eslint-parser": "^0.33.1",
     "typescript": "5.2.2",
+    "typescript-eslint": "^7.3.1",
     "yargs": "^17.7.2"
   },
   "scripts": {

@@ -1 +1 @@
-Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
+Subproject commit f5b467b6b1c55c48847545db41be7b1c035e167a

View File

@@ -4,10 +4,10 @@ set -e
 if [[ -n $CI ]]
 then
   # --runInBand performs better in ci where resources are limited
-  echo "jest --coverage --runInBand --forceExit"
-  jest --coverage --runInBand --forceExit
+  echo "jest --coverage --runInBand --forceExit $@"
+  jest --coverage --runInBand --forceExit $@
 else
   # --maxWorkers performs better in development
-  echo "jest --coverage --detectOpenHandles"
-  jest --coverage --detectOpenHandles
+  echo "jest --coverage --forceExit --detectOpenHandles $@"
+  jest --coverage --forceExit --detectOpenHandles $@
 fi

View File

@@ -133,7 +133,7 @@ export async function refreshOAuthToken(
   configId?: string
 ): Promise<RefreshResponse> {
   switch (providerType) {
-    case SSOProviderType.OIDC:
+    case SSOProviderType.OIDC: {
       if (!configId) {
         return { err: { data: "OIDC config id not provided" } }
       }
@@ -142,13 +142,15 @@ export async function refreshOAuthToken(
         return { err: { data: "OIDC configuration not found" } }
       }
       return refreshOIDCAccessToken(oidcConfig, refreshToken)
-    case SSOProviderType.GOOGLE:
+    }
+    case SSOProviderType.GOOGLE: {
       let googleConfig = await configs.getGoogleConfig()
       if (!googleConfig) {
        return { err: { data: "Google configuration not found" } }
       }
       return refreshGoogleAccessToken(googleConfig, refreshToken)
+    }
   }
 }
 // TODO: Refactor to use user save function instead to prevent the need for
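The braces added around each case above give block scope to the let/const declarations inside that case; the no-case-declarations rule that used to be switched off in the root ESLint config exists for exactly this reason. A minimal TypeScript sketch of the pattern, with made-up names:

function describeProvider(provider: "oidc" | "google"): string {
  switch (provider) {
    case "oidc": {
      // Without the braces this declaration would be scoped to the whole
      // switch statement and collide with the one in the next case.
      const label = "OpenID Connect"
      return label
    }
    case "google": {
      const label = "Google OAuth"
      return label
    }
  }
}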

View File

@@ -8,7 +8,7 @@ describe("platformLogout", () => {
     await testEnv.withTenant(async () => {
       const ctx = structures.koa.newContext()
       await auth.platformLogout({ ctx, userId: "test" })
-      expect(events.auth.logout).toBeCalledTimes(1)
+      expect(events.auth.logout).toHaveBeenCalledTimes(1)
     })
   })
 })
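This and several later test files swap Jest's shorthand matchers for their canonical names; toBeCalledTimes, toBeCalledWith and toBeCalled are aliases of the toHaveBeen... forms, and a rule such as jest/no-alias-methods from the newly added eslint-plugin-jest flags the alias spellings. Behaviour is identical, for example:

const logout = jest.fn()
logout()
expect(logout).toBeCalledTimes(1) // alias form
expect(logout).toHaveBeenCalledTimes(1) // equivalent canonical form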

View File

@@ -129,7 +129,7 @@ export default class BaseCache {
     }
   }
-  async bustCache(key: string, opts = { client: null }) {
+  async bustCache(key: string) {
     const client = await this.getClient()
     try {
       await client.delete(generateTenantKey(key))

View File

@@ -1,6 +1,6 @@
 import { AnyDocument, Database } from "@budibase/types"
-import { JobQueue, createQueue } from "../queue"
+import { JobQueue, Queue, createQueue } from "../queue"
 import * as dbUtils from "../db"
 interface ProcessDocMessage {
@@ -12,18 +12,26 @@ interface ProcessDocMessage {
 const PERSIST_MAX_ATTEMPTS = 100
 let processor: DocWritethroughProcessor | undefined
-export const docWritethroughProcessorQueue = createQueue<ProcessDocMessage>(
+export class DocWritethroughProcessor {
+  private static _queue: Queue
+  public static get queue() {
+    if (!DocWritethroughProcessor._queue) {
+      DocWritethroughProcessor._queue = createQueue<ProcessDocMessage>(
         JobQueue.DOC_WRITETHROUGH_QUEUE,
         {
           jobOptions: {
             attempts: PERSIST_MAX_ATTEMPTS,
           },
         }
       )
+    }
+    return DocWritethroughProcessor._queue
+  }
-class DocWritethroughProcessor {
   init() {
-    docWritethroughProcessorQueue.process(async message => {
+    DocWritethroughProcessor.queue.process(async message => {
       try {
         await this.persistToDb(message.data)
       } catch (err: any) {
@@ -76,7 +84,7 @@ export class DocWritethrough {
   }
   async patch(data: Record<string, any>) {
-    await docWritethroughProcessorQueue.add({
+    await DocWritethroughProcessor.queue.add({
       dbName: this.db.name,
       docId: this.docId,
       data,
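The refactor above turns the module-level queue constant into a lazily created static property, so the underlying queue is only constructed the first time DocWritethroughProcessor.queue is read and tests can reach it through the class. A stripped-down sketch of the same lazy static getter pattern, with hypothetical types standing in for the real queue:

type Handler<T> = (message: { data: T }) => Promise<void>

// Hypothetical stand-in for the real queue, just to show the shape.
class SimpleQueue<T> {
  private handlers: Handler<T>[] = []
  process(handler: Handler<T>) {
    this.handlers.push(handler)
  }
  async add(data: T) {
    for (const handler of this.handlers) {
      await handler({ data })
    }
  }
}

class Processor {
  private static _queue: SimpleQueue<string> | undefined

  // Created on first access, then reused for every later call.
  static get queue() {
    if (!Processor._queue) {
      Processor._queue = new SimpleQueue<string>()
    }
    return Processor._queue
  }

  init() {
    Processor.queue.process(async message => {
      console.log("processing", message.data)
    })
    return this
  }
}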

View File

@@ -1,5 +1,5 @@
 import * as utils from "../utils"
-import { Duration, DurationType } from "../utils"
+import { Duration } from "../utils"
 import env from "../environment"
 import { getTenantId } from "../context"
 import * as redis from "../redis/init"

View File

@@ -6,7 +6,7 @@ import { getDB } from "../../db"
 import {
   DocWritethrough,
-  docWritethroughProcessorQueue,
+  DocWritethroughProcessor,
   init,
 } from "../docWritethrough"
@@ -15,7 +15,7 @@ import InMemoryQueue from "../../queue/inMemoryQueue"
 const initialTime = Date.now()
 async function waitForQueueCompletion() {
-  const queue: InMemoryQueue = docWritethroughProcessorQueue as never
+  const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
   await queue.waitForCompletion()
 }
@@ -235,11 +235,11 @@ describe("docWritethrough", () => {
           return acc
         }, {})
       }
-      const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add")
+      const queueMessageSpy = jest.spyOn(DocWritethroughProcessor.queue, "add")
       await config.doInTenant(async () => {
         let patches = await parallelPatch(5)
-        expect(queueMessageSpy).toBeCalledTimes(5)
+        expect(queueMessageSpy).toHaveBeenCalledTimes(5)
         await waitForQueueCompletion()
         expect(await db.get(documentId)).toEqual(
@@ -247,7 +247,7 @@ describe("docWritethrough", () => {
         )
         patches = { ...patches, ...(await parallelPatch(40)) }
-        expect(queueMessageSpy).toBeCalledTimes(45)
+        expect(queueMessageSpy).toHaveBeenCalledTimes(45)
         await waitForQueueCompletion()
         expect(await db.get(documentId)).toEqual(
@@ -255,7 +255,7 @@ describe("docWritethrough", () => {
         )
         patches = { ...patches, ...(await parallelPatch(10)) }
-        expect(queueMessageSpy).toBeCalledTimes(55)
+        expect(queueMessageSpy).toHaveBeenCalledTimes(55)
         await waitForQueueCompletion()
         expect(await db.get(documentId)).toEqual(
@@ -265,6 +265,7 @@ describe("docWritethrough", () => {
     })
     // This is not yet supported
+    // eslint-disable-next-line jest/no-disabled-tests
     it.skip("patches will execute in order", async () => {
       let incrementalValue = 0
       const keyToOverride = generator.word()

View File

@@ -55,8 +55,8 @@ describe("user cache", () => {
         })),
       })
-      expect(UserDB.bulkGet).toBeCalledTimes(1)
-      expect(UserDB.bulkGet).toBeCalledWith(userIdsToRequest)
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledWith(userIdsToRequest)
     })
     it("on a second all, all of them are retrieved from cache", async () => {
@@ -82,7 +82,7 @@ describe("user cache", () => {
         ),
       })
-      expect(UserDB.bulkGet).toBeCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
     })
     it("when some users are cached, only the missing ones are retrieved from db", async () => {
@@ -110,8 +110,8 @@ describe("user cache", () => {
         ),
       })
-      expect(UserDB.bulkGet).toBeCalledTimes(1)
-      expect(UserDB.bulkGet).toBeCalledWith([
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledWith([
         userIdsToRequest[1],
         userIdsToRequest[2],
         userIdsToRequest[4],

View File

@@ -8,7 +8,7 @@ const DEFAULT_WRITE_RATE_MS = 10000
 let CACHE: BaseCache | null = null
 interface CacheItem<T extends Document> {
-  doc: any
+  doc: T
   lastWrite: number
 }

View File

@@ -246,7 +246,7 @@ describe("context", () => {
         context.doInAppMigrationContext(db.generateAppID(), async () => {
           await otherContextCall()
         })
-      ).rejects.toThrowError(
+      ).rejects.toThrow(
         "The context cannot be changed, a migration is currently running"
       )
     }

View File

@@ -10,10 +10,6 @@ interface SearchResponse<T> {
   totalRows: number
 }
-interface PaginatedSearchResponse<T> extends SearchResponse<T> {
-  hasNextPage: boolean
-}
 export type SearchParams<T> = {
   tableId?: string
   sort?: string
@@ -247,7 +243,7 @@ export class QueryBuilder<T> {
     }
     // Escape characters
     if (!this.#noEscaping && escape && originalType === "string") {
-      value = `${value}`.replace(/[ \/#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
+      value = `${value}`.replace(/[ /#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
     }
     // Wrap in quotes

View File

@@ -34,12 +34,12 @@ export async function createUserIndex() {
       }
       let idxKey = prev != null ? `${prev}.${key}` : key
       if (typeof input[key] === "string") {
+        // @ts-expect-error index is available in a CouchDB map function
         // eslint-disable-next-line no-undef
-        // @ts-ignore
         index(idxKey, input[key].toLowerCase(), { facet: true })
       } else if (typeof input[key] !== "object") {
+        // @ts-expect-error index is available in a CouchDB map function
         // eslint-disable-next-line no-undef
-        // @ts-ignore
         index(idxKey, input[key], { facet: true })
       } else {
         idx(input[key], idxKey)
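The switch from @ts-ignore to @ts-expect-error above is not purely cosmetic: @ts-expect-error itself becomes a compile error when the following line no longer produces one, so stale suppressions get noticed. For example:

// @ts-expect-error deliberately assigning the wrong type
const n: number = "not a number"

// If the assignment were corrected, TypeScript would then report:
// error TS2578: Unused '@ts-expect-error' directive.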

View File

@@ -17,13 +17,8 @@ export function init(processors: ProcessorMap) {
   // if not processing in this instance, kick it off
   if (!processingPromise) {
     processingPromise = asyncEventQueue.process(async job => {
-      const { event, identity, properties, timestamp } = job.data
-      await documentProcessor.processEvent(
-        event,
-        identity,
-        properties,
-        timestamp
-      )
+      const { event, identity, properties } = job.data
+      await documentProcessor.processEvent(event, identity, properties)
     })
   }
 }

View File

@@ -1,7 +1,6 @@
 import {
   Event,
   Identity,
-  Group,
   IdentityType,
   AuditLogQueueEvent,
   AuditLogFn,
@@ -79,11 +78,11 @@ export default class AuditLogsProcessor implements EventProcessor {
     }
   }
-  async identify(identity: Identity, timestamp?: string | number) {
+  async identify() {
     // no-op
   }
-  async identifyGroup(group: Group, timestamp?: string | number) {
+  async identifyGroup() {
     // no-op
   }

View File

@@ -8,8 +8,7 @@ export default class LoggingProcessor implements EventProcessor {
   async processEvent(
     event: Event,
     identity: Identity,
-    properties: any,
-    timestamp?: string
+    properties: any
   ): Promise<void> {
     if (skipLogging) {
       return
@@ -17,14 +16,14 @@ export default class LoggingProcessor implements EventProcessor {
     console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)
   }
-  async identify(identity: Identity, timestamp?: string | number) {
+  async identify(identity: Identity) {
     if (skipLogging) {
       return
     }
     console.log(`[audit] identified`, identity)
   }
-  async identifyGroup(group: Group, timestamp?: string | number) {
+  async identifyGroup(group: Group) {
     if (skipLogging) {
       return
     }

View File

@@ -14,12 +14,7 @@ export default class DocumentUpdateProcessor implements EventProcessor {
     this.processors = processors
   }
-  async processEvent(
-    event: Event,
-    identity: Identity,
-    properties: any,
-    timestamp?: string | number
-  ) {
+  async processEvent(event: Event, identity: Identity, properties: any) {
     const tenantId = identity.realTenantId
     const docId = getDocumentId(event, properties)
     if (!tenantId || !docId) {

View File

@@ -10,6 +10,18 @@ import { formats } from "dd-trace/ext"
 import { localFileDestination } from "../system"
+function isPlainObject(obj: any) {
+  return typeof obj === "object" && obj !== null && !(obj instanceof Error)
+}
+function isError(obj: any) {
+  return obj instanceof Error
+}
+function isMessage(obj: any) {
+  return typeof obj === "string"
+}
 // LOGGER
 let pinoInstance: pino.Logger | undefined
@@ -71,23 +83,11 @@ if (!env.DISABLE_PINO_LOGGER) {
     err?: Error
   }
-  function isPlainObject(obj: any) {
-    return typeof obj === "object" && obj !== null && !(obj instanceof Error)
-  }
-  function isError(obj: any) {
-    return obj instanceof Error
-  }
-  function isMessage(obj: any) {
-    return typeof obj === "string"
-  }
   /**
    * Backwards compatibility between console logging statements
    * and pino logging requirements.
    */
-  function getLogParams(args: any[]): [MergingObject, string] {
+  const getLogParams = (args: any[]): [MergingObject, string] => {
     let error = undefined
     let objects: any[] = []
     let message = ""

View File

@@ -11,7 +11,6 @@ export const buildMatcherRegex = (
   return patterns.map(pattern => {
     let route = pattern.route
     const method = pattern.method
-    const strict = pattern.strict ? pattern.strict : false
     // if there is a param in the route
     // use a wildcard pattern
@@ -24,24 +23,17 @@
       }
     }
-    return { regex: new RegExp(route), method, strict, route }
+    return { regex: new RegExp(route), method, route }
   })
 }
 export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
-  return options.find(({ regex, method, strict, route }) => {
-    let urlMatch
-    if (strict) {
-      urlMatch = ctx.request.url === route
-    } else {
-      urlMatch = regex.test(ctx.request.url)
-    }
+  return options.find(({ regex, method }) => {
+    const urlMatch = regex.test(ctx.request.url)
     const methodMatch =
       method === "ALL"
        ? true
        : ctx.request.method.toLowerCase() === method.toLowerCase()
     return urlMatch && methodMatch
   })
 }
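With the strict option removed, matching always goes through the compiled regex. The behaviour being relied on is roughly the sketch below, which is a hypothetical illustration only; the exact param-to-wildcard rewrite lives in code not shown in this diff:

// turn "/api/tests/:testId" into a wildcard-style pattern before compiling
const route = "/api/tests/:testId".replace(/:[^/]+/g, ".*")
const matcher = { regex: new RegExp(route), method: "GET" }

const urlMatch = matcher.regex.test("/api/tests/123")
const methodMatch =
  matcher.method === "ALL" ||
  "get".toLowerCase() === matcher.method.toLowerCase()
console.log(urlMatch && methodMatch) // true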

View File

@@ -3,7 +3,7 @@ import { Cookie } from "../../../constants"
 import * as configs from "../../../configs"
 import * as cache from "../../../cache"
 import * as utils from "../../../utils"
-import { UserCtx, SSOProfile, DatasourceAuthCookie } from "@budibase/types"
+import { UserCtx, SSOProfile } from "@budibase/types"
 import { ssoSaveUserNoOp } from "../sso/sso"
 const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy

View File

@@ -5,7 +5,6 @@ import * as context from "../../../context"
 import fetch from "node-fetch"
 import {
   SaveSSOUserFunction,
-  SaveUserOpts,
   SSOAuthDetails,
   SSOUser,
   User,
@@ -14,10 +13,8 @@ import {
 // no-op function for user save
 // - this allows datasource auth and access token refresh to work correctly
 // - prefer no-op over an optional argument to ensure function is provided to login flows
-export const ssoSaveUserNoOp: SaveSSOUserFunction = (
-  user: SSOUser,
-  opts: SaveUserOpts
-) => Promise.resolve(user)
+export const ssoSaveUserNoOp: SaveSSOUserFunction = (user: SSOUser) =>
+  Promise.resolve(user)
 /**
  * Common authentication logic for third parties. e.g. OAuth, OIDC.

View File

@@ -114,11 +114,11 @@ describe("sso", () => {
       // tenant id added
       ssoUser.tenantId = context.getTenantId()
-      expect(mockSaveUser).toBeCalledWith(ssoUser, {
+      expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
         hashPassword: false,
         requirePassword: false,
       })
-      expect(mockDone).toBeCalledWith(null, ssoUser)
+      expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
     })
   })
 })
@@ -159,11 +159,11 @@ describe("sso", () => {
       // existing id preserved
       ssoUser._id = existingUser._id
-      expect(mockSaveUser).toBeCalledWith(ssoUser, {
+      expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
         hashPassword: false,
         requirePassword: false,
       })
-      expect(mockDone).toBeCalledWith(null, ssoUser)
+      expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
     })
   })
@@ -187,11 +187,11 @@ describe("sso", () => {
       // existing id preserved
       ssoUser._id = existingUser._id
-      expect(mockSaveUser).toBeCalledWith(ssoUser, {
+      expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
         hashPassword: false,
         requirePassword: false,
       })
-      expect(mockDone).toBeCalledWith(null, ssoUser)
+      expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
     })
   })
 })

View File

@@ -24,13 +24,13 @@ function buildUserCtx(user: ContextUser) {
 }
 function passed(throwFn: jest.Func, nextFn: jest.Func) {
-  expect(throwFn).not.toBeCalled()
-  expect(nextFn).toBeCalled()
+  expect(throwFn).not.toHaveBeenCalled()
+  expect(nextFn).toHaveBeenCalled()
 }
 function threw(throwFn: jest.Func) {
   // cant check next, the throw function doesn't actually throw - so it still continues
-  expect(throwFn).toBeCalled()
+  expect(throwFn).toHaveBeenCalled()
 }
 describe("adminOnly middleware", () => {

View File

@@ -34,23 +34,6 @@ describe("matchers", () => {
     expect(!!matchers.matches(ctx, built)).toBe(true)
   })
-  it("doesn't wildcard path with strict", () => {
-    const pattern = [
-      {
-        route: "/api/tests",
-        method: "POST",
-        strict: true,
-      },
-    ]
-    const ctx = structures.koa.newContext()
-    ctx.request.url = "/api/tests/id/something/else"
-    ctx.request.method = "POST"
-    const built = matchers.buildMatcherRegex(pattern)
-    expect(!!matchers.matches(ctx, built)).toBe(false)
-  })
   it("matches with param", () => {
     const pattern = [
       {
@@ -67,23 +50,6 @@ describe("matchers", () => {
     expect(!!matchers.matches(ctx, built)).toBe(true)
   })
-  // TODO: Support the below behaviour
-  // Strict does not work when a param is present
-  // it("matches with param with strict", () => {
-  //   const pattern = [{
-  //     route: "/api/tests/:testId",
-  //     method: "GET",
-  //     strict: true
-  //   }]
-  //   const ctx = structures.koa.newContext()
-  //   ctx.request.url = "/api/tests/id"
-  //   ctx.request.method = "GET"
-  //
-  //   const built = matchers.buildMatcherRegex(pattern)
-  //
-  //   expect(!!matchers.matches(ctx, built)).toBe(true)
-  // })
   it("doesn't match by path", () => {
     const pattern = [
       {

View File

@@ -45,10 +45,6 @@ export const runMigration = async (
   options: MigrationOptions = {}
 ) => {
   const migrationType = migration.type
-  let tenantId: string | undefined
-  if (migrationType !== MigrationType.INSTALLATION) {
-    tenantId = context.getTenantId()
-  }
   const migrationName = migration.name
   const silent = migration.silent

View File

@@ -126,7 +126,7 @@ describe("app", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(tenantId => {
+      await testEnv.withTenant(() => {
         const url = getAppFileUrl()
         expect(url).toBe(
           "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
@@ -136,7 +136,7 @@ describe("app", () => {
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(tenantId => {
+      await testEnv.withTenant(() => {
         const url = getAppFileUrl()
         expect(url).toBe(
           "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
@@ -146,7 +146,7 @@ describe("app", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(tenantId => {
+      await testEnv.withTenant(() => {
         const url = getAppFileUrl()
         // omit rest of signed params
         expect(

View File

@@ -3,7 +3,7 @@ import { DBTestConfiguration } from "../../../tests/extra"
 import * as tenants from "../tenants"
 describe("tenants", () => {
-  const config = new DBTestConfiguration()
+  new DBTestConfiguration()
   describe("addTenant", () => {
     it("concurrently adds multiple tenants safely", async () => {

View File

@@ -39,7 +39,7 @@ class InMemoryQueue implements Partial<Queue> {
   _opts?: QueueOptions
   _messages: JobMessage[]
   _queuedJobIds: Set<string>
-  _emitter: EventEmitter
+  _emitter: NodeJS.EventEmitter
   _runCount: number
   _addCount: number
@@ -166,7 +166,7 @@ class InMemoryQueue implements Partial<Queue> {
     return []
   }
-  // eslint-disable-next-line no-unused-vars
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
   async removeJobs(pattern: string) {
     // no-op
   }

View File

@@ -132,7 +132,7 @@ function logging(queue: Queue, jobQueue: JobQueue) {
       // A Job is waiting to be processed as soon as a worker is idling.
       console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))
     })
-    .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {
+    .on(BullEvent.ACTIVE, async (job: Job) => {
       // A job has started. You can use `jobPromise.cancel()`` to abort it.
       await doInJobContext(job, () => {
         console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))

View File

@@ -40,6 +40,7 @@ export async function shutdown() {
   if (inviteClient) await inviteClient.finish()
   if (passwordResetClient) await passwordResetClient.finish()
   if (socketClient) await socketClient.finish()
+  if (docWritethroughClient) await docWritethroughClient.finish()
 }
 process.on("exit", async () => {

View File

@@ -120,7 +120,7 @@ describe("redis", () => {
       await redis.bulkStore(data, ttl)
-      for (const [key, value] of Object.entries(data)) {
+      for (const key of Object.keys(data)) {
         expect(await redis.get(key)).toBe(null)
       }
@@ -147,17 +147,6 @@ describe("redis", () => {
       expect(results).toEqual([1, 2, 3, 4, 5])
     })
-    it("can increment on a new key", async () => {
-      const key1 = structures.uuid()
-      const key2 = structures.uuid()
-      const result1 = await redis.increment(key1)
-      expect(result1).toBe(1)
-      const result2 = await redis.increment(key2)
-      expect(result2).toBe(1)
-    })
     it("can increment multiple times in parallel", async () => {
       const key = structures.uuid()
       const results = await Promise.all(
@@ -184,7 +173,7 @@ describe("redis", () => {
       const key = structures.uuid()
       await redis.store(key, value)
-      await expect(redis.increment(key)).rejects.toThrowError(
+      await expect(redis.increment(key)).rejects.toThrow(
         "ERR value is not an integer or out of range"
       )
     })

View File

@@ -96,8 +96,8 @@ describe("redlockImpl", () => {
           task: mockTask,
           executionTimeMs: lockTtl * 2,
         })
-      ).rejects.toThrowError(
-        `Unable to fully release the lock on resource \"lock:${config.tenantId}_persist_writethrough\".`
+      ).rejects.toThrow(
+        `Unable to fully release the lock on resource "lock:${config.tenantId}_persist_writethrough".`
       )
     }
   )

View File

@@ -158,8 +158,8 @@ describe("getTenantIDFromCtx", () => {
       ],
     }
     expect(getTenantIDFromCtx(ctx, mockOpts)).toBeUndefined()
-    expect(ctx.throw).toBeCalledTimes(1)
-    expect(ctx.throw).toBeCalledWith(403, "Tenant id not set")
+    expect(ctx.throw).toHaveBeenCalledTimes(1)
+    expect(ctx.throw).toHaveBeenCalledWith(403, "Tenant id not set")
   })
   it("returns undefined if allowNoTenant is true", () => {

View File

@@ -45,7 +45,7 @@ describe("Users", () => {
       ...{ _id: groupId, roles: { app1: "ADMIN" } },
     }
     const users: User[] = []
-    for (const _ of Array.from({ length: usersInGroup })) {
+    for (let i = 0; i < usersInGroup; i++) {
       const userId = `us_${generator.guid()}`
       const user: User = structures.users.user({
         _id: userId,

View File

@@ -3,7 +3,7 @@ import { generator } from "./generator"
 export function userGroup(): UserGroup {
   return {
-    name: generator.word(),
+    name: generator.guid(),
     icon: generator.word(),
     color: generator.word(),
   }

View File

@@ -39,19 +39,23 @@ const handleClick = event => {
       return
     }
+    if (handler.allowedType && event.type !== handler.allowedType) {
+      return
+    }
     handler.callback?.(event)
   })
 }
 document.documentElement.addEventListener("click", handleClick, true)
-document.documentElement.addEventListener("contextmenu", handleClick, true)
+document.documentElement.addEventListener("mousedown", handleClick, true)
 /**
  * Adds or updates a click handler
  */
-const updateHandler = (id, element, anchor, callback) => {
+const updateHandler = (id, element, anchor, callback, allowedType) => {
   let existingHandler = clickHandlers.find(x => x.id === id)
   if (!existingHandler) {
-    clickHandlers.push({ id, element, anchor, callback })
+    clickHandlers.push({ id, element, anchor, callback, allowedType })
   } else {
     existingHandler.callback = callback
   }
@@ -77,7 +81,8 @@ export default (element, opts) => {
   const update = newOpts => {
     const callback = newOpts?.callback || newOpts
     const anchor = newOpts?.anchor || element
-    updateHandler(id, element, anchor, callback)
+    const allowedType = newOpts?.allowedType || "click"
+    updateHandler(id, element, anchor, callback, allowedType)
   }
   update(opts)
   return {

View File

@@ -197,7 +197,9 @@
       >
         <Icon name="ChevronRight" />
       </div>
+      {#if maximum !== 1}
         <div class="footer">File {selectedImageIdx + 1} of {fileCount}</div>
+      {/if}
     </div>
   {:else if value?.length}
     {#each value as file}

View File

@@ -470,7 +470,7 @@
     newError.name = `Column name already in use.`
   }
-  if (fieldInfo.type === "auto" && !fieldInfo.subtype) {
+  if (fieldInfo.type === FieldType.AUTO && !fieldInfo.subtype) {
     newError.subtype = `Auto Column requires a type`
   }
@@ -531,18 +531,18 @@
   }}
 />
-{#if editableColumn.type === "string"}
+{#if editableColumn.type === FieldType.STRING}
   <Input
     type="number"
     label="Max Length"
     bind:value={editableColumn.constraints.length.maximum}
   />
-{:else if editableColumn.type === "options"}
+{:else if editableColumn.type === FieldType.OPTIONS}
   <OptionSelectDnD
     bind:constraints={editableColumn.constraints}
     bind:optionColors={editableColumn.optionColors}
   />
-{:else if editableColumn.type === "longform"}
+{:else if editableColumn.type === FieldType.LONGFORM}
   <div>
     <div class="tooltip-alignment">
       <Label size="M">Formatting</Label>
@@ -560,12 +560,12 @@
       text="Enable rich text support (markdown)"
     />
   </div>
-{:else if editableColumn.type === "array"}
+{:else if editableColumn.type === FieldType.ARRAY}
   <OptionSelectDnD
     bind:constraints={editableColumn.constraints}
     bind:optionColors={editableColumn.optionColors}
   />
-{:else if editableColumn.type === "datetime" && !editableColumn.autocolumn}
+{:else if editableColumn.type === FieldType.DATETIME && !editableColumn.autocolumn}
   <div class="split-label">
     <div class="label-length">
       <Label size="M">Earliest</Label>
@@ -604,7 +604,7 @@
     </div>
   {/if}
   <Toggle bind:value={editableColumn.dateOnly} text="Date only" />
-{:else if editableColumn.type === "number" && !editableColumn.autocolumn}
+{:else if editableColumn.type === FieldType.NUMBER && !editableColumn.autocolumn}
   <div class="split-label">
     <div class="label-length">
       <Label size="M">Min Value</Label>
@@ -629,7 +629,7 @@
     />
   </div>
 </div>
-{:else if editableColumn.type === "link"}
+{:else if editableColumn.type === FieldType.LINK}
   <RelationshipSelector
     bind:relationshipPart1
     bind:relationshipPart2
@@ -703,6 +703,24 @@
     thin
     text="Allow multiple users"
   />
+{:else if editableColumn.type === FieldType.ATTACHMENT}
+  <Toggle
+    value={editableColumn.constraints?.length?.maximum !== 1}
+    on:change={e => {
+      if (!e.detail) {
+        editableColumn.constraints ??= { length: {} }
+        editableColumn.constraints.length ??= {}
+        editableColumn.constraints.length.maximum = 1
+        editableColumn.constraints.length.message =
+          "cannot contain multiple files"
+      } else {
+        delete editableColumn.constraints?.length?.maximum
+        delete editableColumn.constraints?.length?.message
+      }
+    }}
+    thin
+    text="Allow multiple"
+  />
 {/if}
 {#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn}
   <Select

View File

@@ -279,3 +279,11 @@ export const buildContextTreeLookupMap = rootComponent => {
   })
   return map
 }
+
+// Get a flat list of ids for all descendants of a component
+export const getChildIdsForComponent = component => {
+  return [
+    component._id,
+    ...(component?._children ?? []).map(getChildIdsForComponent).flat(1),
+  ]
+}
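The new getChildIdsForComponent helper flattens a component subtree into a list of ids; the keyboard shortcuts further down use it for Ctrl+ArrowRight/Left. For a hypothetical tree it behaves like this:

const tree = {
  _id: "root",
  _children: [{ _id: "a", _children: [{ _id: "a1" }] }, { _id: "b" }],
}
// getChildIdsForComponent(tree) returns ["root", "a", "a1", "b"]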

View File

@@ -129,10 +129,7 @@
   filteredUsers = $usersFetch.rows
     .filter(user => user.email !== $auth.user.email)
     .map(user => {
-      const isAdminOrGlobalBuilder = sdk.users.isAdminOrGlobalBuilder(
-        user,
-        prodAppId
-      )
+      const isAdminOrGlobalBuilder = sdk.users.isAdminOrGlobalBuilder(user)
       const isAppBuilder = user.builder?.apps?.includes(prodAppId)
       let role
       if (isAdminOrGlobalBuilder) {

View File

@@ -24,6 +24,13 @@
     navigationStore,
   } from "stores/builder"
   import { DefaultAppTheme } from "constants"
+  import BarButtonList from "/src/components/design/settings/controls/BarButtonList.svelte"
+
+  $: alignmentOptions = [
+    { value: "Left", barIcon: "TextAlignLeft" },
+    { value: "Center", barIcon: "TextAlignCenter" },
+    { value: "Right", barIcon: "TextAlignRight" },
+  ]
   $: screenRouteOptions = $screenStore.screens
     .map(screen => screen.routing?.route)
@@ -46,6 +53,10 @@
       notifications.error("Error updating navigation settings")
     }
   }
+
+  const updateTextAlign = textAlignValue => {
+    navigationStore.syncAppNavigation({ textAlign: textAlignValue })
+  }
 </script>
 <Panel
@@ -133,6 +144,15 @@
     on:change={e => update("title", e.detail)}
     updateOnChange={false}
   />
+  <div class="label">
+    <Label size="M">Text align</Label>
+  </div>
+  <BarButtonList
+    options={alignmentOptions}
+    value={$navigationStore.textAlign}
+    onChange={updateTextAlign}
+  />
 {/if}
 <div class="label">
   <Label>Background</Label>

View File

@@ -10,6 +10,7 @@
     navigationStore,
     selectedScreen,
     hoverStore,
+    componentTreeNodesStore,
     snippets,
   } from "stores/builder"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
@@ -132,6 +133,7 @@
       error = event.error || "An unknown error occurred"
     } else if (type === "select-component" && data.id) {
       componentStore.select(data.id)
+      componentTreeNodesStore.makeNodeVisible(data.id)
     } else if (type === "hover-component") {
       hoverStore.hover(data.id, false)
     } else if (type === "update-prop") {

View File

@@ -4,12 +4,12 @@
     selectedScreen,
     componentStore,
     selectedComponent,
+    componentTreeNodesStore,
   } from "stores/builder"
-  import { findComponent } from "helpers/components"
+  import { findComponent, getChildIdsForComponent } from "helpers/components"
   import { goto, isActive } from "@roxi/routify"
   import { notifications } from "@budibase/bbui"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
-  import componentTreeNodesStore from "stores/portal/componentTreeNodesStore"
   let confirmDeleteDialog
   let confirmEjectDialog
@@ -63,38 +63,25 @@
       componentStore.selectNext()
     },
     ["ArrowRight"]: component => {
-      componentTreeNodesStore.expandNode(component._id)
+      componentTreeNodesStore.expandNodes([component._id])
     },
     ["ArrowLeft"]: component => {
-      componentTreeNodesStore.collapseNode(component._id)
+      // Select the collapsing root component to ensure the currently selected component is not
+      // hidden in a collapsed node
+      componentStore.select(component._id)
+      componentTreeNodesStore.collapseNodes([component._id])
     },
     ["Ctrl+ArrowRight"]: component => {
-      componentTreeNodesStore.expandNode(component._id)
-      const expandChildren = component => {
-        const children = component._children ?? []
-        children.forEach(child => {
-          componentTreeNodesStore.expandNode(child._id)
-          expandChildren(child)
-        })
-      }
-      expandChildren(component)
+      const childIds = getChildIdsForComponent(component)
+      componentTreeNodesStore.expandNodes(childIds)
     },
     ["Ctrl+ArrowLeft"]: component => {
-      componentTreeNodesStore.collapseNode(component._id)
-      const collapseChildren = component => {
-        const children = component._children ?? []
-        children.forEach(child => {
-          componentTreeNodesStore.collapseNode(child._id)
-          collapseChildren(child)
-        })
-      }
-      collapseChildren(component)
+      // Select the collapsing root component to ensure the currently selected component is not
+      // hidden in a collapsed node
+      componentStore.select(component._id)
+      const childIds = getChildIdsForComponent(component)
+      componentTreeNodesStore.collapseNodes(childIds)
     },
     ["Escape"]: () => {
       if ($isActive(`./:componentId/new`)) {

View File

@@ -7,8 +7,8 @@
     componentStore,
     userSelectedResourceMap,
     selectedComponent,
-    selectedComponentPath,
     hoverStore,
+    componentTreeNodesStore,
   } from "stores/builder"
   import {
     findComponentPath,
@@ -17,7 +17,6 @@
   } from "helpers/components"
   import { get } from "svelte/store"
   import { dndStore } from "./dndStore"
-  import componentTreeNodesStore from "stores/portal/componentTreeNodesStore"
   export let components = []
   export let level = 0
@@ -64,14 +63,11 @@
     }
   }
-  const isOpen = (component, selectedComponentPath, openNodes) => {
+  const isOpen = component => {
     if (!component?._children?.length) {
       return false
     }
-    if (selectedComponentPath.slice(0, -1).includes(component._id)) {
-      return true
-    }
-    return openNodes[`nodeOpen-${component._id}`]
+    return componentTreeNodesStore.isNodeExpanded(component._id)
   }
   const isChildOfSelectedComponent = component => {
@@ -83,6 +79,11 @@
     return findComponentPath($selectedComponent, component._id)?.length > 0
   }
+  const handleIconClick = componentId => {
+    componentStore.select(componentId)
+    componentTreeNodesStore.toggleNode(componentId)
+  }
+
   const hover = hoverStore.hover
 </script>
@@ -90,7 +91,7 @@
 <!-- svelte-ignore a11y-click-events-have-key-events -->
 <ul>
   {#each filteredComponents || [] as component, index (component._id)}
-    {@const opened = isOpen(component, $selectedComponentPath, openNodes)}
+    {@const opened = isOpen(component, openNodes)}
     <li
       on:click|stopPropagation={() => {
         componentStore.select(component._id)
@@ -104,7 +105,7 @@
       on:dragend={dndStore.actions.reset}
       on:dragstart={() => dndStore.actions.dragstart(component)}
      on:dragover={dragover(component, index)}
-      on:iconClick={() => componentTreeNodesStore.toggleNode(component._id)}
+      on:iconClick={() => handleIconClick(component._id)}
      on:drop={onDrop}
      hovering={$hoverStore.componentId === component._id}
      on:mouseenter={() => hover(component._id)}

View File

@@ -85,7 +85,7 @@
   }
   const automationErrorMessage = appId => {
-    const app = enrichedApps.find(app => app.devId === appId)
+    const app = $enrichedApps.find(app => app.devId === appId)
     const errors = automationErrors[appId]
     return `${app.name} - Automation error (${errorCount(errors)})`
   }

View File

@@ -0,0 +1,67 @@
import { get } from "svelte/store"
import { createSessionStorageStore } from "@budibase/frontend-core"
import { selectedScreen as selectedScreenStore } from "./screens"
import { findComponentPath } from "helpers/components"

const baseStore = createSessionStorageStore("openNodes", {})

const toggleNode = componentId => {
  baseStore.update(openNodes => {
    openNodes[`nodeOpen-${componentId}`] = !openNodes[`nodeOpen-${componentId}`]
    return openNodes
  })
}

const expandNodes = componentIds => {
  baseStore.update(openNodes => {
    const newNodes = Object.fromEntries(
      componentIds.map(id => [`nodeOpen-${id}`, true])
    )
    return { ...openNodes, ...newNodes }
  })
}

const collapseNodes = componentIds => {
  baseStore.update(openNodes => {
    const newNodes = Object.fromEntries(
      componentIds.map(id => [`nodeOpen-${id}`, false])
    )
    return { ...openNodes, ...newNodes }
  })
}

// Will ensure all parents of a node are expanded so that it is visible in the tree
const makeNodeVisible = componentId => {
  const selectedScreen = get(selectedScreenStore)
  const path = findComponentPath(selectedScreen.props, componentId)
  const componentIds = path.map(component => component._id)
  baseStore.update(openNodes => {
    const newNodes = Object.fromEntries(
      componentIds.map(id => [`nodeOpen-${id}`, true])
    )
    return { ...openNodes, ...newNodes }
  })
}

const isNodeExpanded = componentId => {
  const openNodes = get(baseStore)
  return !!openNodes[`nodeOpen-${componentId}`]
}

const store = {
  subscribe: baseStore.subscribe,
  toggleNode,
  expandNodes,
  makeNodeVisible,
  collapseNodes,
  isNodeExpanded,
}

export default store
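The new store keeps the component tree's open/closed state in session storage under nodeOpen-<componentId> keys. Elsewhere in this commit it is consumed roughly like the sketch below (the component ids are made up):

import { componentTreeNodesStore } from "stores/builder"

// Expand a node and its children (Ctrl+ArrowRight in the component tree)
componentTreeNodesStore.expandNodes(["component-a", "component-a-child"])

// Collapse a node (ArrowLeft), then check its state when rendering the tree
componentTreeNodesStore.collapseNodes(["component-a"])
const open = componentTreeNodesStore.isNodeExpanded("component-a") // false

// Expand every ancestor of a component so that it is visible in the tree
componentTreeNodesStore.makeNodeVisible("component-a-child")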

View File

@@ -19,6 +19,7 @@ import {
   appStore,
   previewStore,
   tables,
+  componentTreeNodesStore,
 } from "stores/builder/index"
 import { buildFormSchema, getSchemaForDatasource } from "dataBinding"
 import {
@@ -29,7 +30,6 @@ import {
 } from "constants/backend"
 import BudiStore from "../BudiStore"
 import { Utils } from "@budibase/frontend-core"
-import componentTreeNodesStore from "stores/portal/componentTreeNodesStore"
 export const INITIAL_COMPONENTS_STATE = {
   components: {},
@@ -653,8 +653,11 @@ export class ComponentStore extends BudiStore {
     this.update(state => {
       state.selectedScreenId = targetScreenId
       state.selectedComponentId = newComponentId
       return state
     })
+    componentTreeNodesStore.makeNodeVisible(newComponentId)
   }
   getPrevious() {
@@ -663,7 +666,6 @@ export class ComponentStore extends BudiStore {
     const screen = get(selectedScreen)
     const parent = findComponentParent(screen.props, componentId)
     const index = parent?._children.findIndex(x => x._id === componentId)
-    const componentTreeNodes = get(componentTreeNodesStore)
     // Check for screen and navigation component edge cases
     const screenComponentId = `${screen._id}-screen`
@@ -680,16 +682,16 @@ export class ComponentStore extends BudiStore {
     // If we have siblings above us, choose the sibling or a descendant
     if (index > 0) {
-      // If sibling before us accepts children, select a descendant
+      // If sibling before us accepts children, and is not collapsed, select a descendant
       const previousSibling = parent._children[index - 1]
       if (
         previousSibling._children?.length &&
-        componentTreeNodes[`nodeOpen-${previousSibling._id}`]
+        componentTreeNodesStore.isNodeExpanded(previousSibling._id)
       ) {
         let target = previousSibling
         while (
           target._children?.length &&
-          componentTreeNodes[`nodeOpen-${target._id}`]
+          componentTreeNodesStore.isNodeExpanded(target._id)
         ) {
           target = target._children[target._children.length - 1]
         }
@@ -711,7 +713,6 @@ export class ComponentStore extends BudiStore {
     const screen = get(selectedScreen)
     const parent = findComponentParent(screen.props, componentId)
     const index = parent?._children.findIndex(x => x._id === componentId)
-    const componentTreeNodes = get(componentTreeNodesStore)
     // Check for screen and navigation component edge cases
     const screenComponentId = `${screen._id}-screen`
@@ -720,11 +721,11 @@ export class ComponentStore extends BudiStore {
       return navComponentId
     }
-    // If we have children, select first child
+    // If we have children, select first child, and the node is not collapsed
     if (
       component._children?.length &&
       (state.selectedComponentId === navComponentId ||
-        componentTreeNodes[`nodeOpen-${component._id}`])
+        componentTreeNodesStore.isNodeExpanded(component._id))
     ) {
       return component._children[0]._id
     } else if (!parent) {
@@ -803,7 +804,10 @@ export class ComponentStore extends BudiStore {
     // sibling
     const previousSibling = parent._children[index - 1]
     const definition = this.getDefinition(previousSibling._component)
-    if (definition.hasChildren) {
+    if (
+      definition.hasChildren &&
+      componentTreeNodesStore.isNodeExpanded(previousSibling._id)
+    ) {
       previousSibling._children.push(originalComponent)
     }
@@ -852,10 +856,13 @@ export class ComponentStore extends BudiStore {
     // Move below the next sibling if we are not the last sibling
     if (index < parent._children.length) {
-      // If the next sibling has children, become the first child
+      // If the next sibling has children, and is not collapsed, become the first child
       const nextSibling = parent._children[index]
      const definition = this.getDefinition(nextSibling._component)
-      if (definition.hasChildren) {
+      if (
+        definition.hasChildren &&
+        componentTreeNodesStore.isNodeExpanded(nextSibling._id)
+      ) {
        nextSibling._children.splice(0, 0, originalComponent)
      }
@@ -1151,13 +1158,3 @@ export const selectedComponent = derived(
     return clone
   }
 )
-
-export const selectedComponentPath = derived(
-  [componentStore, selectedScreen],
-  ([$store, $selectedScreen]) => {
-    return findComponentPath(
-      $selectedScreen?.props,
-      $store.selectedComponentId
-    ).map(component => component._id)
-  }
-)

View File

@ -1,10 +1,6 @@
import { layoutStore } from "./layouts.js" import { layoutStore } from "./layouts.js"
import { appStore } from "./app.js" import { appStore } from "./app.js"
import { import { componentStore, selectedComponent } from "./components"
componentStore,
selectedComponent,
selectedComponentPath,
} from "./components"
import { navigationStore } from "./navigation.js" import { navigationStore } from "./navigation.js"
import { themeStore } from "./theme.js" import { themeStore } from "./theme.js"
import { screenStore, selectedScreen, sortedScreens } from "./screens.js" import { screenStore, selectedScreen, sortedScreens } from "./screens.js"
@ -31,8 +27,10 @@ import { integrations } from "./integrations"
import { sortedIntegrations } from "./sortedIntegrations" import { sortedIntegrations } from "./sortedIntegrations"
import { queries } from "./queries" import { queries } from "./queries"
import { flags } from "./flags" import { flags } from "./flags"
import componentTreeNodesStore from "./componentTreeNodes"
export { export {
componentTreeNodesStore,
layoutStore, layoutStore,
appStore, appStore,
componentStore, componentStore,
@ -51,7 +49,6 @@ export {
isOnlyUser, isOnlyUser,
deploymentStore, deploymentStore,
selectedComponent, selectedComponent,
selectedComponentPath,
tables, tables,
views, views,
viewsV2, viewsV2,

View File

@ -11,6 +11,7 @@ export const INITIAL_NAVIGATION_STATE = {
hideLogo: null, hideLogo: null,
logoUrl: null, logoUrl: null,
hideTitle: null, hideTitle: null,
textAlign: "Left",
navBackground: null, navBackground: null,
navWidth: null, navWidth: null,
navTextColor: null, navTextColor: null,

View File

@ -1,36 +0,0 @@
import { createSessionStorageStore } from "@budibase/frontend-core"
const baseStore = createSessionStorageStore("openNodes", {})
const toggleNode = componentId => {
baseStore.update(openNodes => {
openNodes[`nodeOpen-${componentId}`] = !openNodes[`nodeOpen-${componentId}`]
return openNodes
})
}
const expandNode = componentId => {
baseStore.update(openNodes => {
openNodes[`nodeOpen-${componentId}`] = true
return openNodes
})
}
const collapseNode = componentId => {
baseStore.update(openNodes => {
openNodes[`nodeOpen-${componentId}`] = false
return openNodes
})
}
const store = {
subscribe: baseStore.subscribe,
toggleNode,
expandNode,
collapseNode,
}
export default store

View File

@ -4,6 +4,16 @@
"composite": true, "composite": true,
"declaration": true, "declaration": true,
"sourceMap": true, "sourceMap": true,
"baseUrl": "." "baseUrl": ".",
"paths": {
"assets/*": ["./assets/*"],
"@budibase/*": [
"../*/src/index.ts",
"../*/src/index.js",
"../*",
"../../node_modules/@budibase/*"
],
"*": ["./src/*"]
}
} }
} }
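A rough illustration of what the added paths block does for this package (the import specifiers below are examples, not lines from the repo): with baseUrl set, `@budibase/*` imports are tried against sibling package sources before falling back to node_modules, and bare specifiers resolve from this package's src directory.

// Tried in order: ../backend-core/src/index.ts, ../backend-core/src/index.js,
// ../backend-core, ../../node_modules/@budibase/backend-core
import { db } from "@budibase/backend-core"

// "*": ["./src/*"] lets absolute-style imports resolve from ./src
import { helpers } from "components/someModule" // hypothetical module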

View File

@ -11,11 +11,13 @@
"types": ["node", "jest"], "types": ["node", "jest"],
"outDir": "dist", "outDir": "dist",
"skipLibCheck": true, "skipLibCheck": true,
"baseUrl": ".",
"paths": { "paths": {
"@budibase/types": ["../types/src"], "@budibase/types": ["../types/src"],
"@budibase/backend-core": ["../backend-core/src"], "@budibase/backend-core": ["../backend-core/src"],
"@budibase/backend-core/*": ["../backend-core/*"], "@budibase/backend-core/*": ["../backend-core/*"],
"@budibase/shared-core": ["../shared-core/src"] "@budibase/shared-core": ["../shared-core/src"],
"@budibase/string-templates": ["../string-templates/src"]
} }
}, },
"include": ["src/**/*"], "include": ["src/**/*"],

View File

@ -1,16 +1,8 @@
{ {
"extends": "./tsconfig.build.json", "extends": "./tsconfig.build.json",
"compilerOptions": {
"composite": true,
"declaration": true,
"sourceMap": true,
"baseUrl": ".",
"resolveJsonModule": true
},
"ts-node": { "ts-node": {
"require": ["tsconfig-paths/register"], "require": ["tsconfig-paths/register"],
"swc": true "swc": true
}, },
"include": ["src/**/*", "package.json"],
"exclude": ["node_modules", "dist"] "exclude": ["node_modules", "dist"]
} }

View File

@ -36,6 +36,7 @@
export let pageWidth export let pageWidth
export let logoLinkUrl export let logoLinkUrl
export let openLogoLinkInNewTab export let openLogoLinkInNewTab
export let textAlign
export let embedded = false export let embedded = false
@ -226,7 +227,7 @@
{/if} {/if}
{/if} {/if}
{#if !hideTitle && title} {#if !hideTitle && title}
<Heading size="S">{title}</Heading> <Heading size="S" {textAlign}>{title}</Heading>
{/if} {/if}
</div> </div>
{#if !embedded} {#if !embedded}
@ -290,7 +291,10 @@
<div <div
id="side-panel-container" id="side-panel-container"
class:open={$sidePanelStore.open} class:open={$sidePanelStore.open}
use:clickOutside={autoCloseSidePanel ? sidePanelStore.actions.close : null} use:clickOutside={{
callback: autoCloseSidePanel ? sidePanelStore.actions.close : null,
allowedType: "mousedown",
}}
class:builder={$builderStore.inBuilder} class:builder={$builderStore.inBuilder}
> >
<div class="side-panel-header"> <div class="side-panel-header">

View File

@ -0,0 +1,86 @@
<script>
import BlockComponent from "components/BlockComponent.svelte"
import { FieldType } from "@budibase/types"
export let field
export let schema
export let order
const FieldTypeToComponentMap = {
string: "stringfield",
number: "numberfield",
bigint: "bigintfield",
options: "optionsfield",
array: "multifieldselect",
boolean: "booleanfield",
longform: "longformfield",
datetime: "datetimefield",
attachment: "attachmentfield",
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
bb_reference: "bbreferencefield",
}
const getFieldSchema = field => {
const fieldSchemaName = field.field || field.name
if (!fieldSchemaName || !schema?.[fieldSchemaName]) {
return null
}
return schema[fieldSchemaName]
}
const getComponentForField = field => {
const fieldSchema = getFieldSchema(field)
if (!fieldSchema) {
return null
}
const { type } = fieldSchema
return FieldTypeToComponentMap[type]
}
const getPropsForField = field => {
let fieldProps = field._component
? {
...field,
}
: {
field: field.name,
label: field.name,
placeholder: field.name,
_instanceName: field.name,
}
fieldProps = {
...getPropsByType(field),
...fieldProps,
}
return fieldProps
}
function getPropsByType(field) {
const propsMapByType = {
[FieldType.ATTACHMENT]: (_field, schema) => {
return {
maximum: schema?.constraints?.length?.maximum,
}
},
}
const fieldSchema = getFieldSchema(field)
const mapper = propsMapByType[fieldSchema.type]
if (mapper) {
return mapper(field, fieldSchema)
}
}
</script>
{#if getComponentForField(field) && field.active}
<BlockComponent
type={getComponentForField(field)}
props={getPropsForField(field)}
{order}
interactive
name={field?.field}
/>
{/if}
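One note on the new component: getPropsByType only special-cases attachment fields for now, pulling the upload limit out of the column constraints. A hedged example of the resulting props (the schema shape is assumed from the code above):

const schema = {
  photos: {
    type: "attachment",
    constraints: { length: { maximum: 3 } },
  },
}
// For a field named "photos", getPropsForField would merge in { maximum: 3 },
// presumably capping the number of files the generated attachmentfield accepts.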

View File

@ -6,6 +6,7 @@
import { Utils } from "@budibase/frontend-core" import { Utils } from "@budibase/frontend-core"
import FormBlockWrapper from "./form/FormBlockWrapper.svelte" import FormBlockWrapper from "./form/FormBlockWrapper.svelte"
import { get, writable } from "svelte/store" import { get, writable } from "svelte/store"
import FormBlockComponent from "./FormBlockComponent.svelte"
export let actionType export let actionType
export let rowId export let rowId
@ -23,22 +24,6 @@
const currentStep = writable(1) const currentStep = writable(1)
setContext("current-step", currentStep) setContext("current-step", currentStep)
const FieldTypeToComponentMap = {
string: "stringfield",
number: "numberfield",
bigint: "bigintfield",
options: "optionsfield",
array: "multifieldselect",
boolean: "booleanfield",
longform: "longformfield",
datetime: "datetimefield",
attachment: "attachmentfield",
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
bb_reference: "bbreferencefield",
}
let schema let schema
$: fetchSchema(dataSource) $: fetchSchema(dataSource)
@ -78,27 +63,6 @@
currentStep.set(newStep + 1) currentStep.set(newStep + 1)
} }
const getPropsForField = field => {
if (field._component) {
return field
}
return {
field: field.name,
label: field.name,
placeholder: field.name,
_instanceName: field.name,
}
}
const getComponentForField = field => {
const fieldSchemaName = field.field || field.name
if (!fieldSchemaName || !schema?.[fieldSchemaName]) {
return null
}
const type = schema[fieldSchemaName].type
return FieldTypeToComponentMap[type]
}
const fetchSchema = async () => { const fetchSchema = async () => {
schema = (await fetchDatasourceSchema(dataSource)) || {} schema = (await fetchDatasourceSchema(dataSource)) || {}
} }
@ -111,6 +75,7 @@
.filter(field => !field.autocolumn) .filter(field => !field.autocolumn)
.map(field => ({ .map(field => ({
name: field.name, name: field.name,
active: true,
})) }))
} }
@ -205,15 +170,7 @@
class:mobile={$context.device.mobile} class:mobile={$context.device.mobile}
> >
{#each step.fields as field, fieldIdx (`${field.field || field.name}_${fieldIdx}`)} {#each step.fields as field, fieldIdx (`${field.field || field.name}_${fieldIdx}`)}
{#if getComponentForField(field)} <FormBlockComponent {field} {schema} order={fieldIdx} />
<BlockComponent
type={getComponentForField(field)}
props={getPropsForField(field)}
order={fieldIdx}
interactive
name={field.field}
/>
{/if}
{/each} {/each}
</div> </div>
</BlockComponent> </BlockComponent>

View File

@ -2,6 +2,7 @@
import BlockComponent from "components/BlockComponent.svelte" import BlockComponent from "components/BlockComponent.svelte"
import Placeholder from "components/app/Placeholder.svelte" import Placeholder from "components/app/Placeholder.svelte"
import { getContext } from "svelte" import { getContext } from "svelte"
import FormBlockComponent from "../FormBlockComponent.svelte"
export let dataSource export let dataSource
export let actionType export let actionType
@ -14,49 +15,11 @@
export let buttonPosition = "bottom" export let buttonPosition = "bottom"
export let schema export let schema
const FieldTypeToComponentMap = {
string: "stringfield",
number: "numberfield",
bigint: "bigintfield",
options: "optionsfield",
array: "multifieldselect",
boolean: "booleanfield",
longform: "longformfield",
datetime: "datetimefield",
attachment: "attachmentfield",
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
bb_reference: "bbreferencefield",
}
const context = getContext("context") const context = getContext("context")
let formId let formId
$: renderHeader = buttons || title $: renderHeader = buttons || title
const getComponentForField = field => {
const fieldSchemaName = field.field || field.name
if (!fieldSchemaName || !schema?.[fieldSchemaName]) {
return null
}
const type = schema[fieldSchemaName].type
return FieldTypeToComponentMap[type]
}
const getPropsForField = field => {
let fieldProps = field._component
? {
...field,
}
: {
field: field.name,
label: field.name,
placeholder: field.name,
_instanceName: field.name,
}
return fieldProps
}
</script> </script>
{#if fields?.length} {#if fields?.length}
@ -132,15 +95,7 @@
<BlockComponent type="container"> <BlockComponent type="container">
<div class="form-block fields" class:mobile={$context.device.mobile}> <div class="form-block fields" class:mobile={$context.device.mobile}>
{#each fields as field, idx} {#each fields as field, idx}
{#if getComponentForField(field) && field.active} <FormBlockComponent {field} {schema} order={idx} />
<BlockComponent
type={getComponentForField(field)}
props={getPropsForField(field)}
order={idx}
interactive
name={field?.field}
/>
{/if}
{/each} {/each}
</div> </div>
</BlockComponent> </BlockComponent>

View File

@ -9,6 +9,7 @@
export let api export let api
export let invertX = false export let invertX = false
export let invertY = false export let invertY = false
export let schema
const { API, notifications } = getContext("grid") const { API, notifications } = getContext("grid")
const imageExtensions = ["png", "tiff", "gif", "raw", "jpg", "jpeg"] const imageExtensions = ["png", "tiff", "gif", "raw", "jpg", "jpeg"]
@ -97,6 +98,7 @@
{value} {value}
compact compact
on:change={e => onChange(e.detail)} on:change={e => onChange(e.detail)}
maximum={schema.constraints?.length?.maximum}
{processFiles} {processFiles}
{deleteAttachments} {deleteAttachments}
{handleFileTooLarge} {handleFileTooLarge}

@ -1 +1 @@
Subproject commit c4c98ae70f2e936009250893898ecf11f4ddf2c3 Subproject commit dd748e045ffdbc6662c5d2b76075f01d65a96a2f

View File

@ -41,17 +41,9 @@ COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
WORKDIR /string-templates
COPY packages/string-templates/package.json package.json
RUN ../scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000
COPY packages/string-templates .
WORKDIR /app WORKDIR /app
COPY packages/server/package.json . COPY packages/server/package.json .
COPY packages/server/dist/yarn.lock . COPY packages/server/dist/yarn.lock .
RUN cd ../string-templates && yarn link && cd - && yarn link @budibase/string-templates
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh RUN chmod +x ./scripts/removeWorkspaceDependencies.sh

View File

@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FirebaseMock { module FirebaseMock {
const firebase: any = {} const firebase: any = {}

View File

@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module SendgridMock { module SendgridMock {
class Email { class Email {
constructor() { constructor() {

View File

@ -1,8 +1,5 @@
module AirtableMock { class Airtable {
function Airtable() { base = jest.fn()
// @ts-ignore
this.base = jest.fn()
}
module.exports = Airtable
} }
module.exports = Airtable

View File

@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module ArangoMock { module ArangoMock {
const arangodb: any = {} const arangodb: any = {}

View File

@ -1,25 +1,19 @@
import fs from "fs" import fs from "fs"
import { join } from "path" import { join } from "path"
module AwsMock { const response = (body: any, extra?: any) => () => ({
const aws: any = {}
const response = (body: any, extra?: any) => () => ({
promise: () => body, promise: () => body,
...extra, ...extra,
}) })
function DocumentClient() { class DocumentClient {
// @ts-ignore put = jest.fn(response({}))
this.put = jest.fn(response({})) query = jest.fn(
// @ts-ignore
this.query = jest.fn(
response({ response({
Items: [], Items: [],
}) })
) )
// @ts-ignore scan = jest.fn(
this.scan = jest.fn(
response({ response({
Items: [ Items: [
{ {
@ -28,57 +22,41 @@ module AwsMock {
], ],
}) })
) )
// @ts-ignore get = jest.fn(response({}))
this.get = jest.fn(response({})) update = jest.fn(response({}))
// @ts-ignore delete = jest.fn(response({}))
this.update = jest.fn(response({})) }
// @ts-ignore
this.delete = jest.fn(response({}))
}
function S3() { class S3 {
// @ts-ignore listObjects = jest.fn(
this.listObjects = jest.fn(
response({ response({
Contents: [], Contents: [],
}) })
) )
createBucket = jest.fn(
// @ts-ignore
this.createBucket = jest.fn(
response({ response({
Contents: {}, Contents: {},
}) })
) )
deleteObjects = jest.fn(
// @ts-ignore
this.deleteObjects = jest.fn(
response({ response({
Contents: {}, Contents: {},
}) })
) )
getSignedUrl = jest.fn((operation, params) => {
// @ts-ignore
this.getSignedUrl = (operation, params) => {
return `http://example.com/${params.Bucket}/${params.Key}` return `http://example.com/${params.Bucket}/${params.Key}`
} })
headBucket = jest.fn(
// @ts-ignore
this.headBucket = jest.fn(
response({ response({
Contents: {}, Contents: {},
}) })
) )
upload = jest.fn(
// @ts-ignore
this.upload = jest.fn(
response({ response({
Contents: {}, Contents: {},
}) })
) )
getObject = jest.fn(
// @ts-ignore
this.getObject = jest.fn(
response( response(
{ {
Body: "", Body: "",
@ -86,17 +64,18 @@ module AwsMock {
{ {
createReadStream: jest createReadStream: jest
.fn() .fn()
.mockReturnValue( .mockReturnValue(fs.createReadStream(join(__dirname, "aws-sdk.ts"))),
fs.createReadStream(join(__dirname, "aws-sdk.ts"))
),
} }
) )
) )
} }
aws.DynamoDB = { DocumentClient } module.exports = {
aws.S3 = S3 DynamoDB: {
aws.config = { update: jest.fn() } DocumentClient,
},
module.exports = aws S3,
config: {
update: jest.fn(),
},
} }
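A hedged sketch of how a test sees the rewritten mock (assuming Jest resolves `aws-sdk` to this manual mock, as the integration tests appear to):

it("records calls against the mocked DocumentClient", async () => {
  const AWS = require("aws-sdk") // resolves to the class-based mock above
  const docClient = new AWS.DynamoDB.DocumentClient()

  await docClient.put({ TableName: "t", Item: { id: 1 } }).promise()

  // put is a jest.fn wrapping response({}), so calls are recorded and resolve to {}
  expect(docClient.put).toHaveBeenCalledTimes(1)
})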

View File

@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module MongoMock { module MongoMock {
const mongodb: any = {} const mongodb: any = {}

View File

@ -1,24 +1,17 @@
module MsSqlMock { module.exports = {
const mssql: any = {} ConnectionPool: jest.fn(() => ({
connect: jest.fn(() => ({
mssql.query = jest.fn(() => ({ request: jest.fn(() => ({
query: jest.fn(sql => ({ recordset: [sql] })),
})),
})),
})),
query: jest.fn(() => ({
recordset: [ recordset: [
{ {
a: "string", a: "string",
b: 1, b: 1,
}, },
], ],
}))
// mssql.connect = jest.fn(() => ({ recordset: [] }))
mssql.ConnectionPool = jest.fn(() => ({
connect: jest.fn(() => ({
request: jest.fn(() => ({
query: jest.fn(sql => ({ recordset: [sql] })),
})), })),
})),
}))
module.exports = mssql
} }

View File

@ -1,14 +1,11 @@
module MySQLMock { const client = {
const mysql: any = {}
const client = {
connect: jest.fn(), connect: jest.fn(),
query: jest.fn((query, bindings, fn) => { query: jest.fn((query, bindings, fn) => {
fn(null, []) fn(null, [])
}), }),
} }
mysql.createConnection = jest.fn(() => client) module.exports = {
createConnection: jest.fn(() => client),
module.exports = mysql client,
} }

View File

@ -1,3 +1,4 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module MySQLMock { module MySQLMock {
const mysql: any = {} const mysql: any = {}

View File

@ -1,6 +1,7 @@
// @ts-ignore // @ts-ignore
import fs from "fs" import fs from "fs"
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FetchMock { module FetchMock {
// @ts-ignore // @ts-ignore
const fetch = jest.requireActual("node-fetch") const fetch = jest.requireActual("node-fetch")

View File

@ -1,31 +1,21 @@
module OracleDbMock { const executeMock = jest.fn(() => ({
// mock execute
const execute = jest.fn(() => ({
rows: [ rows: [
{ {
a: "string", a: "string",
b: 1, b: 1,
}, },
], ],
})) }))
const close = jest.fn() const closeMock = jest.fn()
// mock connection class Connection {
function Connection() {} execute = executeMock
Connection.prototype.execute = execute close = closeMock
Connection.prototype.close = close }
// mock oracledb module.exports = {
const oracleDb: any = {} getConnection: jest.fn(() => new Connection()),
oracleDb.getConnection = jest.fn(() => { executeMock,
// @ts-ignore closeMock,
return new Connection()
})
// expose mocks
oracleDb.executeMock = execute
oracleDb.closeMock = close
module.exports = oracleDb
} }

View File

@ -1,30 +1,25 @@
module PgMock { const query = jest.fn(() => ({
const pg: any = {}
const query = jest.fn(() => ({
rows: [ rows: [
{ {
a: "string", a: "string",
b: 1, b: 1,
}, },
], ],
})) }))
// constructor class Client {
function Client() {} query = query
end = jest.fn(cb => {
Client.prototype.query = query
Client.prototype.end = jest.fn(cb => {
if (cb) cb() if (cb) cb()
}) })
Client.prototype.connect = jest.fn() connect = jest.fn()
Client.prototype.release = jest.fn() release = jest.fn()
}
const on = jest.fn()
const on = jest.fn()
pg.Client = Client
pg.queryMock = query module.exports = {
pg.on = on Client,
queryMock: query,
module.exports = pg on,
} }

View File

@ -30,6 +30,8 @@ const baseConfig: Config.InitialProjectOptions = {
"@budibase/backend-core": "<rootDir>/../backend-core/src", "@budibase/backend-core": "<rootDir>/../backend-core/src",
"@budibase/shared-core": "<rootDir>/../shared-core/src", "@budibase/shared-core": "<rootDir>/../shared-core/src",
"@budibase/types": "<rootDir>/../types/src", "@budibase/types": "<rootDir>/../types/src",
"@budibase/string-templates/(.*)": ["<rootDir>/../string-templates/$1"],
"@budibase/string-templates": ["<rootDir>/../string-templates/src"],
}, },
} }
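A short note on the two new mappings: the subpath pattern is listed first so it wins for deep imports, while the bare package name maps straight to the package's src entry point - for example the import used by the query controller later in this diff:

// "@budibase/string-templates"       -> <rootDir>/../string-templates/src
// "@budibase/string-templates/(.*)"  -> <rootDir>/../string-templates/$1
import { findHBSBlocks } from "@budibase/string-templates"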

View File

@ -175,6 +175,10 @@
] ]
}, },
"build": { "build": {
"inputs": [
"{projectRoot}/builder",
"{projectRoot}/client"
],
"outputs": [ "outputs": [
"{projectRoot}/builder", "{projectRoot}/builder",
"{projectRoot}/client", "{projectRoot}/client",

View File

@ -26,7 +26,6 @@ import {
env as envCore, env as envCore,
ErrorCode, ErrorCode,
events, events,
HTTPError,
migrations, migrations,
objectStore, objectStore,
roles, roles,

View File

@ -39,19 +39,21 @@ export async function create(ctx: any) {
let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000) let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
switch (source) { switch (source) {
case PluginSource.NPM: case PluginSource.NPM: {
const { metadata: metadataNpm, directory: directoryNpm } = const { metadata: metadataNpm, directory: directoryNpm } =
await npmUpload(url, name) await npmUpload(url, name)
metadata = metadataNpm metadata = metadataNpm
directory = directoryNpm directory = directoryNpm
break break
case PluginSource.GITHUB: }
case PluginSource.GITHUB: {
const { metadata: metadataGithub, directory: directoryGithub } = const { metadata: metadataGithub, directory: directoryGithub } =
await githubUpload(url, name, githubToken) await githubUpload(url, name, githubToken)
metadata = metadataGithub metadata = metadataGithub
directory = directoryGithub directory = directoryGithub
break break
case PluginSource.URL: }
case PluginSource.URL: {
const headersObj = headers || {} const headersObj = headers || {}
const { metadata: metadataUrl, directory: directoryUrl } = const { metadata: metadataUrl, directory: directoryUrl } =
await urlUpload(url, name, headersObj) await urlUpload(url, name, headersObj)
@ -59,6 +61,7 @@ export async function create(ctx: any) {
directory = directoryUrl directory = directoryUrl
break break
} }
}
pluginCore.validate(metadata?.schema) pluginCore.validate(metadata?.schema)
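The braces added around each case body give the `const` destructurings their own block scope, which is what ESLint's no-case-declarations rule (part of eslint:recommended) expects; a minimal, repo-agnostic illustration:

function label(kind: string): string {
  switch (kind) {
    case "npm": {
      const prefix = "pkg" // block-scoped to this case, so no-case-declarations is satisfied
      return `${prefix}:${kind}`
    }
    default:
      return kind
  }
}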

View File

@ -109,13 +109,14 @@ export class OpenAPI2 extends OpenAPISource {
for (let param of allParams) { for (let param of allParams) {
if (parameterNotRef(param)) { if (parameterNotRef(param)) {
switch (param.in) { switch (param.in) {
case "query": case "query": {
let prefix = "" let prefix = ""
if (queryString) { if (queryString) {
prefix = "&" prefix = "&"
} }
queryString = `${queryString}${prefix}${param.name}={{${param.name}}}` queryString = `${queryString}${prefix}${param.name}={{${param.name}}}`
break break
}
case "header": case "header":
headers[param.name] = `{{${param.name}}}` headers[param.name] = `{{${param.name}}}`
break break
@ -125,7 +126,7 @@ export class OpenAPI2 extends OpenAPISource {
case "formData": case "formData":
// future enhancement // future enhancement
break break
case "body": case "body": {
// set the request body to the example provided // set the request body to the example provided
// future enhancement: generate an example from the schema // future enhancement: generate an example from the schema
let bodyParam: OpenAPIV2.InBodyParameterObject = let bodyParam: OpenAPIV2.InBodyParameterObject =
@ -136,6 +137,7 @@ export class OpenAPI2 extends OpenAPISource {
} }
break break
} }
}
// add the parameter if it can be bound in our config // add the parameter if it can be bound in our config
if (["query", "header", "path"].includes(param.in)) { if (["query", "header", "path"].includes(param.in)) {

View File

@ -161,13 +161,14 @@ export class OpenAPI3 extends OpenAPISource {
for (let param of allParams) { for (let param of allParams) {
if (parameterNotRef(param)) { if (parameterNotRef(param)) {
switch (param.in) { switch (param.in) {
case "query": case "query": {
let prefix = "" let prefix = ""
if (queryString) { if (queryString) {
prefix = "&" prefix = "&"
} }
queryString = `${queryString}${prefix}${param.name}={{${param.name}}}` queryString = `${queryString}${prefix}${param.name}={{${param.name}}}`
break break
}
case "header": case "header":
headers[param.name] = `{{${param.name}}}` headers[param.name] = `{{${param.name}}}`
break break

View File

@ -14,22 +14,35 @@ import {
SessionCookie, SessionCookie,
JsonFieldSubType, JsonFieldSubType,
QueryResponse, QueryResponse,
QueryPreview,
QuerySchema, QuerySchema,
FieldType, FieldType,
ExecuteQueryRequest, ExecuteQueryRequest,
ExecuteQueryResponse, ExecuteQueryResponse,
Row,
QueryParameter, QueryParameter,
PreviewQueryRequest, PreviewQueryRequest,
PreviewQueryResponse, PreviewQueryResponse,
} from "@budibase/types" } from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core" import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates"
const Runner = new Thread(ThreadType.QUERY, { const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT, timeoutMs: env.QUERY_THREAD_TIMEOUT,
}) })
function validateQueryInputs(parameters: Record<string, string>) {
for (let entry of Object.entries(parameters)) {
const [key, value] = entry
if (typeof value !== "string") {
continue
}
if (findHBSBlocks(value).length !== 0) {
throw new Error(
`Parameter '${key}' input contains a handlebars binding - this is not allowed.`
)
}
}
}
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.queries.fetch() ctx.body = await sdk.queries.fetch()
} }
@ -123,10 +136,10 @@ function getAuthConfig(ctx: UserCtx) {
function enrichParameters( function enrichParameters(
queryParameters: QueryParameter[], queryParameters: QueryParameter[],
requestParameters: { [key: string]: string } = {} requestParameters: Record<string, string> = {}
): { ): Record<string, string> {
[key: string]: string // first check parameters are all valid
} { validateQueryInputs(requestParameters)
// make sure parameters are fully enriched with defaults // make sure parameters are fully enriched with defaults
for (let parameter of queryParameters) { for (let parameter of queryParameters) {
if (!requestParameters[parameter.name]) { if (!requestParameters[parameter.name]) {
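A hedged sketch of the behaviour the new guard introduces (parameter names are made up; validateQueryInputs is private to the controller and normally runs via enrichParameters):

validateQueryInputs({ limit: "10" }) // no handlebars blocks, passes

try {
  validateQueryInputs({ name: "{{ user.firstName }}" })
} catch (err) {
  // throws: "Parameter 'name' input contains a handlebars binding - this is not allowed."
}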

View File

@ -116,7 +116,7 @@ export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
target: prodDb.name, target: prodDb.name,
}) })
await replication.replicate({ await replication.replicate({
filter: (doc: any, params: any) => { filter: (doc: any) => {
return doc._id && doc._id.startsWith("role_") return doc._id && doc._id.startsWith("role_")
}, },
}) })

View File

@ -7,13 +7,11 @@ import {
FilterType, FilterType,
IncludeRelationship, IncludeRelationship,
ManyToManyRelationshipFieldMetadata, ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata,
Operation, Operation,
PaginationJson, PaginationJson,
RelationshipFieldMetadata, RelationshipFieldMetadata,
RelationshipsJson, RelationshipsJson,
RelationshipType,
Row, Row,
SearchFilters, SearchFilters,
SortJson, SortJson,
@ -717,7 +715,7 @@ export class ExternalRequest<T extends Operation> {
const rows = related[key]?.rows || [] const rows = related[key]?.rows || []
function relationshipMatchPredicate({ const relationshipMatchPredicate = ({
row, row,
linkPrimary, linkPrimary,
linkSecondary, linkSecondary,
@ -725,7 +723,7 @@ export class ExternalRequest<T extends Operation> {
row: Row row: Row
linkPrimary: string linkPrimary: string
linkSecondary?: string linkSecondary?: string
}) { }) => {
const matchesPrimaryLink = const matchesPrimaryLink =
row[linkPrimary] === relationship.id || row[linkPrimary] === relationship.id ||
row[linkPrimary] === body?.[linkPrimary] row[linkPrimary] === body?.[linkPrimary]

View File

@ -23,6 +23,12 @@ const DISABLED_WRITE_CLIENTS: SqlClient[] = [
SqlClient.ORACLE, SqlClient.ORACLE,
] ]
const DISABLED_OPERATIONS: Operation[] = [
Operation.CREATE_TABLE,
Operation.UPDATE_TABLE,
Operation.DELETE_TABLE,
]
class CharSequence { class CharSequence {
static alphabet = "abcdefghijklmnopqrstuvwxyz" static alphabet = "abcdefghijklmnopqrstuvwxyz"
counters: number[] counters: number[]
@ -59,13 +65,18 @@ export default class AliasTables {
} }
isAliasingEnabled(json: QueryJson, datasource: Datasource) { isAliasingEnabled(json: QueryJson, datasource: Datasource) {
const operation = json.endpoint.operation
const fieldLength = json.resource?.fields?.length const fieldLength = json.resource?.fields?.length
if (!fieldLength || fieldLength <= 0) { if (
!fieldLength ||
fieldLength <= 0 ||
DISABLED_OPERATIONS.includes(operation)
) {
return false return false
} }
try { try {
const sqlClient = getSQLClient(datasource) const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation) const isWrite = WRITE_OPERATIONS.includes(operation)
const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient) const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
if (isWrite && isDisabledClient) { if (isWrite && isDisabledClient) {
return false return false

View File

@ -1,4 +1,3 @@
import { quotas } from "@budibase/pro"
import { import {
UserCtx, UserCtx,
ViewV2, ViewV2,

View File

@ -1,6 +1,6 @@
import { generateUserFlagID, InternalTables } from "../../db/utils" import { generateUserFlagID, InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users" import { getFullUser } from "../../utilities/users"
import { cache, context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { import {
ContextUserMetadata, ContextUserMetadata,
Ctx, Ctx,

View File

@ -24,7 +24,7 @@ async function parseSchema(view: CreateViewRequest) {
icon: schemaValue.icon, icon: schemaValue.icon,
} }
Object.entries(fieldSchema) Object.entries(fieldSchema)
.filter(([_, val]) => val === undefined) .filter(([, val]) => val === undefined)
.forEach(([key]) => { .forEach(([key]) => {
delete fieldSchema[key as keyof UIFieldMetadata] delete fieldSchema[key as keyof UIFieldMetadata]
}) })
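The `([, val])` form elides the unused first tuple element instead of binding it to `_`, which keeps @typescript-eslint/no-unused-vars quiet; a standalone illustration with a made-up schema object:

const fieldSchema: Record<string, unknown> = { icon: undefined, order: 1 }

Object.entries(fieldSchema)
  .filter(([, val]) => val === undefined) // skip the key, keep only the value
  .forEach(([key]) => {
    delete fieldSchema[key]
  })
// fieldSchema is now { order: 1 }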

View File

@ -33,7 +33,6 @@ export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public" export { default as publicRoutes } from "./public"
const appBackupRoutes = pro.appBackups const appBackupRoutes = pro.appBackups
const scheduleRoutes = pro.schedules
const environmentVariableRoutes = pro.environmentVariables const environmentVariableRoutes = pro.environmentVariables
export const mainRoutes: Router[] = [ export const mainRoutes: Router[] = [
@ -65,7 +64,6 @@ export const mainRoutes: Router[] = [
pluginRoutes, pluginRoutes,
opsRoutes, opsRoutes,
debugRoutes, debugRoutes,
scheduleRoutes,
environmentVariableRoutes, environmentVariableRoutes,
// these need to be handled last as they still use /api/:tableId // these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this // this could be breaking as koa may recognise other routes as this

View File

@ -81,6 +81,7 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
{ {
"config": {}, "config": {},
"createdAt": "2020-01-01T00:00:00.000Z", "createdAt": "2020-01-01T00:00:00.000Z",
"isSQL": true,
"name": "Test", "name": "Test",
"source": "POSTGRES", "source": "POSTGRES",
"type": "datasource", "type": "datasource",

View File

@ -16,7 +16,7 @@ describe("/applications/:appId/import", () => {
it("should be able to perform import", async () => { it("should be able to perform import", async () => {
const appId = config.getAppId() const appId = config.getAppId()
const res = await request await request
.post(`/api/applications/${appId}/import`) .post(`/api/applications/${appId}/import`)
.field("encryptionPassword", PASSWORD) .field("encryptionPassword", PASSWORD)
.attach("appExport", path.join(__dirname, "assets", "export.tar.gz")) .attach("appExport", path.join(__dirname, "assets", "export.tar.gz"))
@ -25,8 +25,8 @@ describe("/applications/:appId/import", () => {
.expect(200) .expect(200)
const appPackage = await config.api.application.get(appId!) const appPackage = await config.api.application.get(appId!)
expect(appPackage.navigation?.links?.length).toBe(2) expect(appPackage.navigation?.links?.length).toBe(2)
expect(expect(appPackage.navigation?.links?.[0].url).toBe("/blank")) expect(appPackage.navigation?.links?.[0].url).toBe("/blank")
expect(expect(appPackage.navigation?.links?.[1].url).toBe("/derp")) expect(appPackage.navigation?.links?.[1].url).toBe("/derp")
const screens = await config.api.screen.list() const screens = await config.api.screen.list()
expect(screens.length).toBe(2) expect(screens.length).toBe(2)
expect(screens[0].routing.route).toBe("/derp") expect(screens[0].routing.route).toBe("/derp")

View File

@ -2,7 +2,6 @@ import * as setup from "./utilities"
import { roles, db as dbCore } from "@budibase/backend-core" import { roles, db as dbCore } from "@budibase/backend-core"
describe("/api/applications/:appId/sync", () => { describe("/api/applications/:appId/sync", () => {
let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let app let app

View File

@ -19,6 +19,7 @@ import env from "../../../environment"
import { type App } from "@budibase/types" import { type App } from "@budibase/types"
import tk from "timekeeper" import tk from "timekeeper"
import * as uuid from "uuid" import * as uuid from "uuid"
import { structures } from "@budibase/backend-core/tests"
describe("/applications", () => { describe("/applications", () => {
let config = setup.getConfig() let config = setup.getConfig()
@ -30,7 +31,9 @@ describe("/applications", () => {
beforeEach(async () => { beforeEach(async () => {
app = await config.api.application.create({ name: utils.newid() }) app = await config.api.application.create({ name: utils.newid() })
const deployment = await config.api.application.publish(app.appId) const deployment = await config.api.application.publish(app.appId)
expect(deployment.status).toBe("SUCCESS") if (deployment.status !== "SUCCESS") {
throw new Error("Failed to publish app")
}
jest.clearAllMocks() jest.clearAllMocks()
}) })
@ -128,7 +131,7 @@ describe("/applications", () => {
it("creates empty app", async () => { it("creates empty app", async () => {
const app = await config.api.application.create({ name: utils.newid() }) const app = await config.api.application.create({ name: utils.newid() })
expect(app._id).toBeDefined() expect(app._id).toBeDefined()
expect(events.app.created).toBeCalledTimes(1) expect(events.app.created).toHaveBeenCalledTimes(1)
}) })
it("creates app from template", async () => { it("creates app from template", async () => {
@ -139,8 +142,8 @@ describe("/applications", () => {
templateString: "{}", templateString: "{}",
}) })
expect(app._id).toBeDefined() expect(app._id).toBeDefined()
expect(events.app.created).toBeCalledTimes(1) expect(events.app.created).toHaveBeenCalledTimes(1)
expect(events.app.templateImported).toBeCalledTimes(1) expect(events.app.templateImported).toHaveBeenCalledTimes(1)
}) })
it("creates app from file", async () => { it("creates app from file", async () => {
@ -150,8 +153,8 @@ describe("/applications", () => {
templateFile: "src/api/routes/tests/data/export.txt", templateFile: "src/api/routes/tests/data/export.txt",
}) })
expect(app._id).toBeDefined() expect(app._id).toBeDefined()
expect(events.app.created).toBeCalledTimes(1) expect(events.app.created).toHaveBeenCalledTimes(1)
expect(events.app.fileImported).toBeCalledTimes(1) expect(events.app.fileImported).toHaveBeenCalledTimes(1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -181,8 +184,8 @@ describe("/applications", () => {
expect(app.navigation!.navTextColor).toBe( expect(app.navigation!.navTextColor).toBe(
"var(--spectrum-global-color-gray-50)" "var(--spectrum-global-color-gray-50)"
) )
expect(events.app.created).toBeCalledTimes(1) expect(events.app.created).toHaveBeenCalledTimes(1)
expect(events.app.fileImported).toBeCalledTimes(1) expect(events.app.fileImported).toHaveBeenCalledTimes(1)
}) })
it("should reject with a known name", async () => { it("should reject with a known name", async () => {
@ -228,32 +231,32 @@ describe("/applications", () => {
name: "TEST_APP", name: "TEST_APP",
}) })
expect(updatedApp._rev).toBeDefined() expect(updatedApp._rev).toBeDefined()
expect(events.app.updated).toBeCalledTimes(1) expect(events.app.updated).toHaveBeenCalledTimes(1)
}) })
}) })
describe("publish", () => { describe("publish", () => {
it("should publish app with dev app ID", async () => { it("should publish app with dev app ID", async () => {
await config.api.application.publish(app.appId) await config.api.application.publish(app.appId)
expect(events.app.published).toBeCalledTimes(1) expect(events.app.published).toHaveBeenCalledTimes(1)
}) })
it("should publish app with prod app ID", async () => { it("should publish app with prod app ID", async () => {
await config.api.application.publish(app.appId.replace("_dev", "")) await config.api.application.publish(app.appId.replace("_dev", ""))
expect(events.app.published).toBeCalledTimes(1) expect(events.app.published).toHaveBeenCalledTimes(1)
}) })
}) })
describe("manage client library version", () => { describe("manage client library version", () => {
it("should be able to update the app client library version", async () => { it("should be able to update the app client library version", async () => {
await config.api.application.updateClient(app.appId) await config.api.application.updateClient(app.appId)
expect(events.app.versionUpdated).toBeCalledTimes(1) expect(events.app.versionUpdated).toHaveBeenCalledTimes(1)
}) })
it("should be able to revert the app client library version", async () => { it("should be able to revert the app client library version", async () => {
await config.api.application.updateClient(app.appId) await config.api.application.updateClient(app.appId)
await config.api.application.revertClient(app.appId) await config.api.application.revertClient(app.appId)
expect(events.app.versionReverted).toBeCalledTimes(1) expect(events.app.versionReverted).toHaveBeenCalledTimes(1)
}) })
}) })
@ -310,26 +313,26 @@ describe("/applications", () => {
describe("unpublish", () => { describe("unpublish", () => {
it("should unpublish app with dev app ID", async () => { it("should unpublish app with dev app ID", async () => {
await config.api.application.unpublish(app.appId) await config.api.application.unpublish(app.appId)
expect(events.app.unpublished).toBeCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
it("should unpublish app with prod app ID", async () => { it("should unpublish app with prod app ID", async () => {
await config.api.application.unpublish(app.appId.replace("_dev", "")) await config.api.application.unpublish(app.appId.replace("_dev", ""))
expect(events.app.unpublished).toBeCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
}) })
describe("delete", () => { describe("delete", () => {
it("should delete published app and dev apps with dev app ID", async () => { it("should delete published app and dev apps with dev app ID", async () => {
await config.api.application.delete(app.appId) await config.api.application.delete(app.appId)
expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toBeCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
it("should delete published app and dev app with prod app ID", async () => { it("should delete published app and dev app with prod app ID", async () => {
await config.api.application.delete(app.appId.replace("_dev", "")) await config.api.application.delete(app.appId.replace("_dev", ""))
expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toBeCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
}) })
@ -346,7 +349,7 @@ describe("/applications", () => {
} }
) )
expect(events.app.duplicated).toBeCalled() expect(events.app.duplicated).toHaveBeenCalled()
expect(resp.duplicateAppId).toBeDefined() expect(resp.duplicateAppId).toBeDefined()
expect(resp.sourceAppId).toEqual(app.appId) expect(resp.sourceAppId).toEqual(app.appId)
expect(resp.duplicateAppId).not.toEqual(app.appId) expect(resp.duplicateAppId).not.toEqual(app.appId)
@ -354,7 +357,7 @@ describe("/applications", () => {
it("should reject an unknown app id with a 404", async () => { it("should reject an unknown app id with a 404", async () => {
await config.api.application.duplicateApp( await config.api.application.duplicateApp(
app.appId.slice(0, -1) + "a", structures.db.id(),
{ {
name: "to-dupe 123", name: "to-dupe 123",
url: "/to-dupe-123", url: "/to-dupe-123",
@ -366,7 +369,7 @@ describe("/applications", () => {
}) })
it("should reject with a known name", async () => { it("should reject with a known name", async () => {
const resp = await config.api.application.duplicateApp( await config.api.application.duplicateApp(
app.appId, app.appId,
{ {
name: app.name, name: app.name,
@ -374,11 +377,11 @@ describe("/applications", () => {
}, },
{ body: { message: "App name is already in use." }, status: 400 } { body: { message: "App name is already in use." }, status: 400 }
) )
expect(events.app.duplicated).not.toBeCalled() expect(events.app.duplicated).not.toHaveBeenCalled()
}) })
it("should reject with a known url", async () => { it("should reject with a known url", async () => {
const resp = await config.api.application.duplicateApp( await config.api.application.duplicateApp(
app.appId, app.appId,
{ {
name: "this is fine", name: "this is fine",
@ -386,7 +389,7 @@ describe("/applications", () => {
}, },
{ body: { message: "App URL is already in use." }, status: 400 } { body: { message: "App URL is already in use." }, status: 400 }
) )
expect(events.app.duplicated).not.toBeCalled() expect(events.app.duplicated).not.toHaveBeenCalled()
}) })
}) })

View File

@ -95,8 +95,8 @@ describe("/automations", () => {
expect(res.body.message).toEqual("Automation created successfully") expect(res.body.message).toEqual("Automation created successfully")
expect(res.body.automation.name).toEqual("My Automation") expect(res.body.automation.name).toEqual("My Automation")
expect(res.body.automation._id).not.toEqual(null) expect(res.body.automation._id).not.toEqual(null)
expect(events.automation.created).toBeCalledTimes(1) expect(events.automation.created).toHaveBeenCalledTimes(1)
expect(events.automation.stepCreated).not.toBeCalled() expect(events.automation.stepCreated).not.toHaveBeenCalled()
}) })
it("creates an automation with steps", async () => { it("creates an automation with steps", async () => {
@ -114,8 +114,8 @@ describe("/automations", () => {
expect(res.body.message).toEqual("Automation created successfully") expect(res.body.message).toEqual("Automation created successfully")
expect(res.body.automation.name).toEqual("My Automation") expect(res.body.automation.name).toEqual("My Automation")
expect(res.body.automation._id).not.toEqual(null) expect(res.body.automation._id).not.toEqual(null)
expect(events.automation.created).toBeCalledTimes(1) expect(events.automation.created).toHaveBeenCalledTimes(1)
expect(events.automation.stepCreated).toBeCalledTimes(2) expect(events.automation.stepCreated).toHaveBeenCalledTimes(2)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -158,7 +158,7 @@ describe("/automations", () => {
automation = await config.createAutomation(automation) automation = await config.createAutomation(automation)
await setup.delay(500) await setup.delay(500)
const res = await testAutomation(config, automation) const res = await testAutomation(config, automation)
expect(events.automation.tested).toBeCalledTimes(1) expect(events.automation.tested).toHaveBeenCalledTimes(1)
// this looks a bit mad but we don't actually have a way to wait for a response from the automation to // this looks a bit mad but we don't actually have a way to wait for a response from the automation to
// know that it has finished all of its actions - this is currently the best way // know that it has finished all of its actions - this is currently the best way
// also when this runs in CI it is very temper-mental so for now trying to make run stable by repeating until it works // also when this runs in CI it is very temper-mental so for now trying to make run stable by repeating until it works
@ -265,10 +265,10 @@ describe("/automations", () => {
`Automation ${automation._id} updated successfully.` `Automation ${automation._id} updated successfully.`
) )
// events // events
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.stepCreated).not.toBeCalled() expect(events.automation.stepCreated).not.toHaveBeenCalled()
expect(events.automation.stepDeleted).not.toBeCalled() expect(events.automation.stepDeleted).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).not.toBeCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled()
}) })
it("updates a automations name using POST request", async () => { it("updates a automations name using POST request", async () => {
@ -293,10 +293,10 @@ describe("/automations", () => {
`Automation ${automation._id} updated successfully.` `Automation ${automation._id} updated successfully.`
) )
// events // events
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.stepCreated).not.toBeCalled() expect(events.automation.stepCreated).not.toHaveBeenCalled()
expect(events.automation.stepDeleted).not.toBeCalled() expect(events.automation.stepDeleted).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).not.toBeCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled()
}) })
it("updates an automation trigger", async () => { it("updates an automation trigger", async () => {
@ -310,10 +310,10 @@ describe("/automations", () => {
await update(automation) await update(automation)
// events // events
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.stepCreated).not.toBeCalled() expect(events.automation.stepCreated).not.toHaveBeenCalled()
expect(events.automation.stepDeleted).not.toBeCalled() expect(events.automation.stepDeleted).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).toBeCalledTimes(1) expect(events.automation.triggerUpdated).toHaveBeenCalledTimes(1)
}) })
it("adds automation steps", async () => { it("adds automation steps", async () => {
@ -327,10 +327,10 @@ describe("/automations", () => {
await update(automation) await update(automation)
// events // events
expect(events.automation.stepCreated).toBeCalledTimes(2) expect(events.automation.stepCreated).toHaveBeenCalledTimes(2)
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.stepDeleted).not.toBeCalled() expect(events.automation.stepDeleted).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).not.toBeCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled()
}) })
it("removes automation steps", async () => { it("removes automation steps", async () => {
@ -344,10 +344,10 @@ describe("/automations", () => {
await update(automation) await update(automation)
// events // events
expect(events.automation.stepDeleted).toBeCalledTimes(2) expect(events.automation.stepDeleted).toHaveBeenCalledTimes(2)
expect(events.automation.stepCreated).not.toBeCalled() expect(events.automation.stepCreated).not.toHaveBeenCalled()
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).not.toBeCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled()
}) })
it("adds and removes automation steps", async () => { it("adds and removes automation steps", async () => {
@ -360,10 +360,10 @@ describe("/automations", () => {
await update(automation) await update(automation)
// events // events
expect(events.automation.stepCreated).toBeCalledTimes(2) expect(events.automation.stepCreated).toHaveBeenCalledTimes(2)
expect(events.automation.stepDeleted).toBeCalledTimes(1) expect(events.automation.stepDeleted).toHaveBeenCalledTimes(1)
expect(events.automation.created).not.toBeCalled() expect(events.automation.created).not.toHaveBeenCalled()
expect(events.automation.triggerUpdated).not.toBeCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled()
}) })
}) })
@ -400,7 +400,7 @@ describe("/automations", () => {
.expect(200) .expect(200)
expect(res.body.id).toEqual(automation._id) expect(res.body.id).toEqual(automation._id)
expect(events.automation.deleted).toBeCalledTimes(1) expect(events.automation.deleted).toHaveBeenCalledTimes(1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {

View File

@ -21,7 +21,7 @@ describe("/backups", () => {
it("should be able to export app", async () => { it("should be able to export app", async () => {
const body = await config.api.backup.exportBasicBackup(config.getAppId()!) const body = await config.api.backup.exportBasicBackup(config.getAppId()!)
expect(body instanceof Buffer).toBe(true) expect(body instanceof Buffer).toBe(true)
expect(events.app.exported).toBeCalledTimes(1) expect(events.app.exported).toHaveBeenCalledTimes(1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {

View File

@ -40,7 +40,7 @@ describe("/datasources", () => {
expect(res.body.datasource.name).toEqual("Test") expect(res.body.datasource.name).toEqual("Test")
expect(res.body.errors).toEqual({}) expect(res.body.errors).toEqual({})
expect(events.datasource.created).toBeCalledTimes(1) expect(events.datasource.created).toHaveBeenCalledTimes(1)
}) })
}) })
@ -56,7 +56,7 @@ describe("/datasources", () => {
expect(res.body.datasource.name).toEqual("Updated Test") expect(res.body.datasource.name).toEqual("Updated Test")
expect(res.body.errors).toBeUndefined() expect(res.body.errors).toBeUndefined()
expect(events.datasource.updated).toBeCalledTimes(1) expect(events.datasource.updated).toHaveBeenCalledTimes(1)
}) })
describe("dynamic variables", () => { describe("dynamic variables", () => {
@ -196,7 +196,7 @@ describe("/datasources", () => {
.expect(200) .expect(200)
expect(res.body.length).toEqual(1) expect(res.body.length).toEqual(1)
expect(events.datasource.deleted).toBeCalledTimes(1) expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {

Some files were not shown because too many files have changed in this diff.