Merge branch 'master' into chore/node-20

Adria Navarro 2024-01-03 14:15:21 +01:00, committed by GitHub
commit 7413fd96a8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
58 changed files with 1595 additions and 1841 deletions

View File

@@ -16,8 +16,8 @@ jobs:
           days-before-pr-stale: 7
           stale-issue-label: stale
           exempt-pr-labels: pinned,security,roadmap
           days-before-pr-close: 7
-          days-before-issue-close: 30
+
       - uses: actions/stale@v8
         with:
@@ -26,6 +26,7 @@ jobs:
           days-before-stale: 30
           only-issue-labels: bug,High priority
           stale-issue-label: warn
+          days-before-close: 30
       - uses: actions/stale@v8
         with:
@@ -34,6 +35,7 @@ jobs:
           days-before-stale: 90
           only-issue-labels: bug,Medium priority
           stale-issue-label: warn
+          days-before-close: 30
       - uses: actions/stale@v8
         with:
@@ -43,5 +45,4 @@ jobs:
           stale-issue-label: stale
           only-issue-labels: bug
           stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for six months."
           days-before-close: 30
-

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.13.50",
+  "version": "2.13.53",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -18,14 +18,15 @@ export enum TTL {
   ONE_DAY = 86400,
 }

-function performExport(funcName: string) {
-  // @ts-ignore
-  return (...args: any) => GENERIC[funcName](...args)
-}
-
-export const keys = performExport("keys")
-export const get = performExport("get")
-export const store = performExport("store")
-export const destroy = performExport("delete")
-export const withCache = performExport("withCache")
-export const bustCache = performExport("bustCache")
+export const keys = (...args: Parameters<typeof GENERIC.keys>) =>
+  GENERIC.keys(...args)
+export const get = (...args: Parameters<typeof GENERIC.get>) =>
+  GENERIC.get(...args)
+export const store = (...args: Parameters<typeof GENERIC.store>) =>
+  GENERIC.store(...args)
+export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
+  GENERIC.delete(...args)
+export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
+  GENERIC.withCache(...args)
+export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
+  GENERIC.bustCache(...args)
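
The rewrite above replaces the stringly-typed performExport helper (which lost all type information and needed a @ts-ignore) with wrappers built on TypeScript's Parameters<typeof fn> utility type, so each re-export inherits the wrapped method's exact signature. A minimal sketch of the pattern, against a hypothetical client object:

// Hypothetical client - the point is that each wrapper stays in sync
// with the wrapped method's parameter list at compile time.
const client = {
  get: (key: string): string | undefined => undefined,
  store: (key: string, value: string, ttl?: number): void => {},
}

export const get = (...args: Parameters<typeof client.get>) =>
  client.get(...args)
export const store = (...args: Parameters<typeof client.store>) =>
  client.store(...args)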

View File

@@ -1,6 +1,6 @@
 import * as redis from "../redis/init"
 import * as utils from "../utils"
-import { Duration, DurationType } from "../utils"
+import { Duration } from "../utils"

 const TTL_SECONDS = Duration.fromHours(1).toSeconds()
@@ -32,7 +32,18 @@ export async function getCode(code: string): Promise<PasswordReset> {
   const client = await redis.getPasswordResetClient()
   const value = (await client.get(code)) as PasswordReset | undefined
   if (!value) {
-    throw "Provided information is not valid, cannot reset password - please try again."
+    throw new Error(
+      "Provided information is not valid, cannot reset password - please try again."
+    )
   }
   return value
 }
+
+/**
+ * Given a reset code this will invalidate it.
+ * @param code The code provided via the email link.
+ */
+export async function invalidateCode(code: string): Promise<void> {
+  const client = await redis.getPasswordResetClient()
+  await client.delete(code)
+}
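
The new invalidateCode makes reset codes single-use: once the password has been changed, the code is deleted from Redis and the emailed link cannot be replayed. A sketch of the intended flow, assuming the stored PasswordReset record carries the user's id (updateUserPassword is a hypothetical helper):

// Validate the code, apply the change, then burn the code.
async function resetPassword(code: string, newPassword: string) {
  const reset = await getCode(code) // throws if unknown or expired
  await updateUserPassword(reset.userId, newPassword) // hypothetical helper
  await invalidateCode(code) // the same link can no longer be reused
}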

View File

@@ -134,7 +134,7 @@ export async function doInContext(appId: string, task: any): Promise<any> {
 }

 export async function doInTenant<T>(
-  tenantId: string | null,
+  tenantId: string | undefined,
   task: () => T
 ): Promise<T> {
   // make sure default always selected in single tenancy

View File

@@ -17,6 +17,7 @@ import { directCouchUrlCall } from "./utils"
 import { getPouchDB } from "./pouchDB"
 import { WriteStream, ReadStream } from "fs"
 import { newid } from "../../docIds/newid"
+import { DDInstrumentedDatabase } from "../instrumentation"

 function buildNano(couchInfo: { url: string; cookie: string }) {
   return Nano({
@@ -35,7 +36,8 @@ export function DatabaseWithConnection(
   connection: string,
   opts?: DatabaseOpts
 ) {
-  return new DatabaseImpl(dbName, opts, connection)
+  const db = new DatabaseImpl(dbName, opts, connection)
+  return new DDInstrumentedDatabase(db)
 }

 export class DatabaseImpl implements Database {

View File

@@ -1,8 +1,9 @@
 import { directCouchQuery, DatabaseImpl } from "./couch"
 import { CouchFindOptions, Database, DatabaseOpts } from "@budibase/types"
+import { DDInstrumentedDatabase } from "./instrumentation"

 export function getDB(dbName: string, opts?: DatabaseOpts): Database {
-  return new DatabaseImpl(dbName, opts)
+  return new DDInstrumentedDatabase(new DatabaseImpl(dbName, opts))
 }

 // we have to use a callback for this so that we can close
// we have to use a callback for this so that we can close // we have to use a callback for this so that we can close

View File

@@ -0,0 +1,156 @@
import {
DocumentScope,
DocumentDestroyResponse,
DocumentInsertResponse,
DocumentBulkResponse,
OkResponse,
} from "@budibase/nano"
import {
AllDocsResponse,
AnyDocument,
Database,
DatabaseDumpOpts,
DatabasePutOpts,
DatabaseQueryOpts,
Document,
} from "@budibase/types"
import tracer from "dd-trace"
import { Writable } from "stream"
export class DDInstrumentedDatabase implements Database {
constructor(private readonly db: Database) {}
get name(): string {
return this.db.name
}
exists(): Promise<boolean> {
return tracer.trace("db.exists", span => {
span?.addTags({ db_name: this.name })
return this.db.exists()
})
}
checkSetup(): Promise<DocumentScope<any>> {
return tracer.trace("db.checkSetup", span => {
span?.addTags({ db_name: this.name })
return this.db.checkSetup()
})
}
get<T extends Document>(id?: string | undefined): Promise<T> {
return tracer.trace("db.get", span => {
span?.addTags({ db_name: this.name, doc_id: id })
return this.db.get(id)
})
}
getMultiple<T extends Document>(
ids: string[],
opts?: { allowMissing?: boolean | undefined } | undefined
): Promise<T[]> {
return tracer.trace("db.getMultiple", span => {
span?.addTags({
db_name: this.name,
num_docs: ids.length,
allow_missing: opts?.allowMissing,
})
return this.db.getMultiple(ids, opts)
})
}
remove(
id: string | Document,
rev?: string | undefined
): Promise<DocumentDestroyResponse> {
return tracer.trace("db.remove", span => {
span?.addTags({ db_name: this.name, doc_id: id })
return this.db.remove(id, rev)
})
}
put(
document: AnyDocument,
opts?: DatabasePutOpts | undefined
): Promise<DocumentInsertResponse> {
return tracer.trace("db.put", span => {
span?.addTags({ db_name: this.name, doc_id: document._id })
return this.db.put(document, opts)
})
}
bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {
return tracer.trace("db.bulkDocs", span => {
span?.addTags({ db_name: this.name, num_docs: documents.length })
return this.db.bulkDocs(documents)
})
}
allDocs<T extends Document>(
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return tracer.trace("db.allDocs", span => {
span?.addTags({ db_name: this.name })
return this.db.allDocs(params)
})
}
query<T extends Document>(
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return tracer.trace("db.query", span => {
span?.addTags({ db_name: this.name, view_name: viewName })
return this.db.query(viewName, params)
})
}
destroy(): Promise<void | OkResponse> {
return tracer.trace("db.destroy", span => {
span?.addTags({ db_name: this.name })
return this.db.destroy()
})
}
compact(): Promise<void | OkResponse> {
return tracer.trace("db.compact", span => {
span?.addTags({ db_name: this.name })
return this.db.compact()
})
}
dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {
return tracer.trace("db.dump", span => {
span?.addTags({ db_name: this.name })
return this.db.dump(stream, opts)
})
}
load(...args: any[]): Promise<any> {
return tracer.trace("db.load", span => {
span?.addTags({ db_name: this.name })
return this.db.load(...args)
})
}
createIndex(...args: any[]): Promise<any> {
return tracer.trace("db.createIndex", span => {
span?.addTags({ db_name: this.name })
return this.db.createIndex(...args)
})
}
deleteIndex(...args: any[]): Promise<any> {
return tracer.trace("db.deleteIndex", span => {
span?.addTags({ db_name: this.name })
return this.db.deleteIndex(...args)
})
}
getIndexes(...args: any[]): Promise<any> {
return tracer.trace("db.getIndexes", span => {
span?.addTags({ db_name: this.name })
return this.db.getIndexes(...args)
})
}
}
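
DDInstrumentedDatabase is a delegating wrapper (a classic decorator): it implements the same Database interface, opens a Datadog span per call, tags it with the database name and document ids, and forwards to the wrapped implementation, so getDB callers need no changes. The same shape reduced to a one-method sketch:

import tracer from "dd-trace"

// Minimal sketch of the decorator used above: same interface in and out,
// with a traced hop in the middle.
interface Store {
  get(id: string): Promise<string>
}

class TracedStore implements Store {
  constructor(private readonly inner: Store) {}

  get(id: string): Promise<string> {
    return tracer.trace("store.get", span => {
      span?.addTags({ doc_id: id }) // searchable in the APM UI
      return this.inner.get(id)
    })
  }
}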

View File

@@ -33,6 +33,7 @@ export * as docUpdates from "./docUpdates"
 export * from "./utils/Duration"
 export { SearchParams } from "./db"
 export * as docIds from "./docIds"
+export * as security from "./security"

 // Add context to tenancy for backwards compatibility
 // only do this for external usages to prevent internal
 // circular dependencies

View File

@@ -5,6 +5,7 @@ import { IdentityType } from "@budibase/types"
 import env from "../../environment"
 import * as context from "../../context"
 import * as correlation from "../correlation"
+import tracer from "dd-trace"
 import { formats } from "dd-trace/ext"
 import { localFileDestination } from "../system"
@@ -116,6 +117,11 @@ if (!env.DISABLE_PINO_LOGGER) {
       correlationId: correlation.getId(),
     }

+    const span = tracer.scope().active()
+    if (span) {
+      tracer.inject(span.context(), formats.LOG, contextObject)
+    }
+
     const mergingObject: any = {
       err: error,
       pid: process.pid,
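
The tracer.inject call above copies the active trace identifiers into each log record (dd-trace's LOG format writes a dd field carrying the trace and span ids), which is what lets Datadog correlate a log line with its APM trace. The same idea in isolation, assuming dd-trace has been initialised elsewhere:

import tracer from "dd-trace"
import { formats } from "dd-trace/ext"

// Enrich an arbitrary log record with the current trace identifiers.
function withTraceIds(record: Record<string, any>): Record<string, any> {
  const span = tracer.scope().active()
  if (span) {
    tracer.inject(span.context(), formats.LOG, record)
  }
  return record
}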

View File

@@ -47,7 +47,7 @@ export function createQueue<T>(
     cleanupInterval = timers.set(cleanup, CLEANUP_PERIOD_MS)
     // fire off an initial cleanup
     cleanup().catch(err => {
-      console.error(`Unable to cleanup automation queue initially - ${err}`)
+      console.error(`Unable to cleanup ${jobQueue} initially - ${err}`)
     })
   }
   return queue

View File

@@ -18,6 +18,7 @@ import {
   SelectableDatabase,
   getRedisConnectionDetails,
 } from "./utils"
+import { logAlert } from "../logging"
 import * as timers from "../timers"

 const RETRY_PERIOD_MS = 2000
@@ -39,21 +40,16 @@ function pickClient(selectDb: number): any {
   return CLIENTS[selectDb]
 }

-function connectionError(
-  selectDb: number,
-  timeout: NodeJS.Timeout,
-  err: Error | string
-) {
+function connectionError(timeout: NodeJS.Timeout, err: Error | string) {
   // manually shut down, ignore errors
   if (CLOSED) {
     return
   }
-  pickClient(selectDb).disconnect()
   CLOSED = true
   // always clear this on error
   clearTimeout(timeout)
   CONNECTED = false
-  console.error("Redis connection failed - " + err)
+  logAlert("Redis connection failed", err)
   setTimeout(() => {
     init()
   }, RETRY_PERIOD_MS)
@@ -79,11 +75,7 @@ function init(selectDb = DEFAULT_SELECT_DB) {
   // start the timer - only allowed 5 seconds to connect
   timeout = setTimeout(() => {
     if (!CONNECTED) {
-      connectionError(
-        selectDb,
-        timeout,
-        "Did not successfully connect in timeout"
-      )
+      connectionError(timeout, "Did not successfully connect in timeout")
     }
   }, STARTUP_TIMEOUT_MS)
@@ -106,12 +98,13 @@ function init(selectDb = DEFAULT_SELECT_DB) {
       // allow the process to exit
       return
     }
-    connectionError(selectDb, timeout, err)
+    connectionError(timeout, err)
   })
   client.on("error", (err: Error) => {
-    connectionError(selectDb, timeout, err)
+    connectionError(timeout, err)
   })
   client.on("connect", () => {
+    console.log(`Connected to Redis DB: ${selectDb}`)
     clearTimeout(timeout)
     CONNECTED = true
   })

View File

@@ -0,0 +1,24 @@
import { env } from ".."
export const PASSWORD_MIN_LENGTH = +(process.env.PASSWORD_MIN_LENGTH || 8)
export const PASSWORD_MAX_LENGTH = +(process.env.PASSWORD_MAX_LENGTH || 512)
export function validatePassword(
password: string
): { valid: true } | { valid: false; error: string } {
if (!password || password.length < PASSWORD_MIN_LENGTH) {
return {
valid: false,
error: `Password invalid. Minimum ${PASSWORD_MIN_LENGTH} characters.`,
}
}
if (password.length > PASSWORD_MAX_LENGTH) {
return {
valid: false,
error: `Password invalid. Maximum ${PASSWORD_MAX_LENGTH} characters.`,
}
}
return { valid: true }
}
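
validatePassword returns a discriminated union, so error is only accessible once valid has been narrowed to false; the UserDB change later in this commit consumes it the same way. A sketch of the narrowing at a call site:

// The compiler only exposes `error` on the { valid: false } arm.
function assertPasswordOk(candidate: string) {
  const result = validatePassword(candidate)
  if (!result.valid) {
    throw new Error(result.error)
  }
  // here result is { valid: true } and has no error field
}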

View File

@@ -0,0 +1 @@
export * from "./auth"

View File

@@ -0,0 +1,45 @@
import { generator } from "../../../tests"
import { PASSWORD_MAX_LENGTH, validatePassword } from "../auth"
describe("auth", () => {
describe("validatePassword", () => {
it("a valid password returns successful", () => {
expect(validatePassword("password")).toEqual({ valid: true })
})
it.each([
["undefined", undefined],
["null", null],
["empty", ""],
])("%s returns unsuccessful", (_, password) => {
expect(validatePassword(password as string)).toEqual({
valid: false,
error: "Password invalid. Minimum 8 characters.",
})
})
it.each([
generator.word({ length: PASSWORD_MAX_LENGTH }),
generator.paragraph().substring(0, PASSWORD_MAX_LENGTH),
])(`can use passwords up to 512 characters in length`, password => {
expect(validatePassword(password)).toEqual({
valid: true,
})
})
it.each([
generator.word({ length: PASSWORD_MAX_LENGTH + 1 }),
generator
.paragraph({ sentences: 50 })
.substring(0, PASSWORD_MAX_LENGTH + 1),
])(
`passwords cannot have more than ${PASSWORD_MAX_LENGTH} characters`,
password => {
expect(validatePassword(password)).toEqual({
valid: false,
error: "Password invalid. Maximum 512 characters.",
})
}
)
})
})

View File

@@ -39,7 +39,7 @@ const ALL_STRATEGIES = Object.values(TenantResolutionStrategy)
 export const getTenantIDFromCtx = (
   ctx: BBContext,
   opts: GetTenantIdOptions
-): string | null => {
+): string | undefined => {
   // exit early if not multi-tenant
   if (!isMultiTenant()) {
     return DEFAULT_TENANT_ID
@@ -144,5 +144,5 @@ export const getTenantIDFromCtx = (
     ctx.throw(403, "Tenant id not set")
   }
-  return null
+  return undefined
 }

View File

@@ -157,12 +157,12 @@ describe("getTenantIDFromCtx", () => {
         TenantResolutionStrategy.PATH,
       ],
     }
-    expect(getTenantIDFromCtx(ctx, mockOpts)).toBeNull()
+    expect(getTenantIDFromCtx(ctx, mockOpts)).toBeUndefined()
     expect(ctx.throw).toBeCalledTimes(1)
     expect(ctx.throw).toBeCalledWith(403, "Tenant id not set")
   })

-  it("returns null if allowNoTenant is true", () => {
+  it("returns undefined if allowNoTenant is true", () => {
     const ctx = createCtx({})
     mockOpts = {
       allowNoTenant: true,
@@ -172,7 +172,7 @@ describe("getTenantIDFromCtx", () => {
         TenantResolutionStrategy.PATH,
       ],
     }
-    expect(getTenantIDFromCtx(ctx, mockOpts)).toBeNull()
+    expect(getTenantIDFromCtx(ctx, mockOpts)).toBeUndefined()
   })
 })

View File

@@ -2,7 +2,7 @@ import env from "../environment"
 import * as eventHelpers from "./events"
 import * as accountSdk from "../accounts"
 import * as cache from "../cache"
-import { doInTenant, getGlobalDB, getIdentity, getTenantId } from "../context"
+import { getGlobalDB, getIdentity, getTenantId } from "../context"
 import * as dbUtils from "../db"
 import { EmailUnavailableError, HTTPError } from "../errors"
 import * as platform from "../platform"
@@ -27,6 +27,7 @@ import {
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
+import { validatePassword } from "../security"

 type QuotaUpdateFn = (
   change: number,
@@ -110,6 +111,12 @@ export class UserDB {
       if (await UserDB.isPreventPasswordActions(user, account)) {
         throw new HTTPError("Password change is disabled for this user", 400)
       }
+
+      const passwordValidation = validatePassword(password)
+      if (!passwordValidation.valid) {
+        throw new HTTPError(passwordValidation.error, 400)
+      }
+
       hashedPassword = opts.hashPassword ? await hash(password) : password
     } else if (dbUser) {
       hashedPassword = dbUser.password

View File

@@ -31,8 +31,8 @@ export async function resolveAppUrl(ctx: Ctx) {
   const appUrl = ctx.path.split("/")[2]
   let possibleAppUrl = `/${appUrl.toLowerCase()}`

-  let tenantId: string | null = context.getTenantId()
-  if (env.MULTI_TENANCY) {
+  let tenantId: string | undefined = context.getTenantId()
+  if (!env.isDev() && env.MULTI_TENANCY) {
     // always use the tenant id from the subdomain in multi tenancy
     // this ensures the logged-in user tenant id doesn't overwrite
     // e.g. in the case of viewing a public app while already logged-in to another tenant
@@ -41,7 +41,7 @@ export async function resolveAppUrl(ctx: Ctx) {
     })
   }

-  // search prod apps for a url that matches
+  // search prod apps for an url that matches
   const apps: App[] = await context.doInTenant(
     tenantId,
     () => getAllApps({ dev: false }) as Promise<App[]>

View File

@@ -21,7 +21,7 @@ export const user = (userProps?: Partial<Omit<User, "userId">>): User => {
     _id: userId,
     userId,
     email: newEmail(),
-    password: "test",
+    password: "password",
     roles: { app_test: "admin" },
     firstName: generator.first(),
     lastName: generator.last(),

View File

@@ -19,7 +19,7 @@
   // Ensure the value is updated if the value prop changes outside the editor's
   // control
   $: checkValue(value)
-  $: mde?.codemirror.on("change", debouncedUpdate)
+  $: mde?.codemirror.on("blur", update)
   $: if (readonly || disabled) {
     mde?.togglePreview()
   }
@@ -30,21 +30,10 @@
     }
   }

-  const debounce = (fn, interval) => {
-    let timeout
-    return () => {
-      clearTimeout(timeout)
-      timeout = setTimeout(fn, interval)
-    }
-  }
-
   const update = () => {
     latestValue = mde.value()
     dispatch("change", latestValue)
   }
-
-  // Debounce the update function to avoid spamming it constantly
-  const debouncedUpdate = debounce(update, 250)
 </script>

{#key height}

View File

@@ -1,4 +1,4 @@
-import { string, number } from "yup"
+import { string, number, object } from "yup"

 const propertyValidator = type => {
   if (type === "number") {
@@ -9,6 +9,10 @@ const propertyValidator = type => {
     return string().email().nullable()
   }

+  if (type === "object") {
+    return object().nullable()
+  }
+
   return string().nullable()
 }
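
With the object() branch added, schema properties declared as "object" are validated as nullable objects instead of falling through to the string validator. A quick check of the new branch's behaviour using yup's API (isValid resolves rather than throwing):

import { object } from "yup"

async function demo() {
  const validator = object().nullable()
  console.log(await validator.isValid({ nested: 1 })) // true
  console.log(await validator.isValid(null)) // true, nullable
  console.log(await validator.isValid("plain text")) // false, not an object
}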

View File

@@ -38,7 +38,7 @@
       $goto("../portal")
     } catch (error) {
       submitted = false
-      notifications.error("Failed to create admin user")
+      notifications.error(error.message || "Failed to create admin user")
     }
   }
 </script>

View File

@@ -45,7 +45,7 @@
     }
   } catch (err) {
     submitted = false
-    notifications.error("Unable to reset password")
+    notifications.error(err.message || "Unable to reset password")
   }
 }

View File

@@ -5,7 +5,6 @@ build/
 docker-error.log
 envoy.yaml
 *.tar.gz
-prebuilds/
 dist/
 budibase-automation/
 budibase-component/

View File

@@ -9,26 +9,11 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
-    "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r ../../node_modules/leveldown/prebuilds prebuilds",
-    "rename": "renamer --find .node --replace .fake 'prebuilds/**'",
     "tsc": "node ../../scripts/build.js",
-    "pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
-    "build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
+    "build": "yarn tsc",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null",
-    "postbuild": "rm -rf prebuilds 2> /dev/null",
     "start": "ts-node ./src/index.ts"
   },
-  "pkg": {
-    "targets": [
-      "node18-linux",
-      "node18-win",
-      "node18-macos"
-    ],
-    "assets": [
-      "prebuilds/**/*"
-    ],
-    "outputPath": "build"
-  },
   "dependencies": {
     "@budibase/backend-core": "0.0.0",
     "@budibase/string-templates": "0.0.0",
@@ -43,7 +28,6 @@
     "inquirer": "8.0.0",
     "lookpath": "1.1.0",
     "node-fetch": "2.6.7",
-    "pkg": "5.8.0",
     "posthog-node": "1.3.0",
     "pouchdb": "7.3.0",
     "pouchdb-replication-stream": "1.2.9",
@@ -55,7 +39,6 @@
     "@types/jest": "29.5.5",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "^6.4.0",
-    "renamer": "^4.0.0",
     "ts-node": "10.8.1",
     "typescript": "5.2.2"
   }

View File

@@ -1,10 +1,9 @@
 #!/usr/bin/env node
 process.env.DISABLE_PINO_LOGGER = "1"

-import "./prebuilds"
 import "./environment"
 import { getCommands } from "./options"
 import { Command } from "commander"
-import { getHelpDescription } from "./utils"
+import { getHelpDescription, error } from "./utils"
 import { version } from "../package.json"

 // add hosting config
@@ -21,6 +20,23 @@ async function init() {
   await program.parseAsync(process.argv)
 }

+const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
+events.forEach(event => {
+  process.on(event, (evt?: number) => {
+    if (evt && !isNaN(evt)) {
+      return
+    }
+    if (evt) {
+      console.error(
+        error(
+          "Failed to run CLI command - please report with the following message:"
+        )
+      )
+      console.error(error(evt))
+    }
+  })
+})
+
 init().catch(err => {
   console.error(`Unexpected error - `, err)
 })

View File

@@ -1,57 +0,0 @@
import os from "os"
import { join } from "path"
import fs from "fs"
import { error } from "./utils"
const PREBUILDS = "prebuilds"
const ARCH = `${os.platform()}-${os.arch()}`
const PREBUILD_DIR = join(process.execPath, "..", "cli", PREBUILDS, ARCH)
// running as built CLI pkg bundle
if (!process.argv[0].includes("node")) {
checkForBinaries()
}
function localPrebuildPath() {
return join(process.execPath, "..", PREBUILDS)
}
function checkForBinaries() {
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
return
}
const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) {
const writePath = join(localPrebuildPath(), ARCH)
fs.mkdirSync(writePath, { recursive: true })
for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(writePath, filename))
}
}
}
function cleanup(evt?: number) {
// cleanup prebuilds first
const path = localPrebuildPath()
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
if (evt && !isNaN(evt)) {
return
}
if (evt) {
console.error(
error(
"Failed to run CLI command - please report with the following message:"
)
)
console.error(error(evt))
}
}
const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
events.forEach(event => {
process.on(event, cleanup)
})

View File

@@ -48,6 +48,7 @@ async function init() {
     HTTP_MIGRATIONS: "0",
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
+    PASSWORD_MIN_LENGTH: "1",
   }
   config = { ...config, ...existingConfig }

View File

@@ -0,0 +1,196 @@
#!/bin/node
const {
createApp,
getTable,
createRow,
createTable,
getApp,
getRows,
} = require("./utils")
const Chance = require("chance")
const generator = new Chance()
const STUDENT_COUNT = 500
const SUBJECT_COUNT = 10
let { apiKey, appId } = require("yargs")
.demandOption(["apiKey"])
.option("appId").argv
const start = Date.now()
async function batchCreate(apiKey, appId, table, items, batchSize = 100) {
let i = 0
let errors = 0
async function createSingleRow(item) {
try {
const row = await createRow(apiKey, appId, table, item)
console.log(
`${table.name} - ${++i} of ${items.length} created (${
(Date.now() - start) / 1000
}s)`
)
return row
} catch {
errors++
}
}
const rows = []
const maxConcurrency = Math.min(batchSize, items.length)
const inFlight = {}
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const item = items[itemIndex]
const promise = createSingleRow(item)
.then(result => {
rows.push(result)
})
.finally(() => {
delete inFlight[itemIndex]
})
inFlight[itemIndex] = promise
if (Object.keys(inFlight).length >= maxConcurrency) {
await Promise.race(Object.values(inFlight))
}
}
await Promise.all(Object.values(inFlight))
if (errors) {
console.error(
`${table.name} - ${errors} creation errored (${
(Date.now() - start) / 1000
}s)`
)
}
return rows
}
const useExistingApp = !!appId
async function upsertTable(appId, tableName, tableData) {
if (useExistingApp) {
return await getTable(apiKey, appId, tableName)
}
const table = await createTable(apiKey, appId, {
...tableData,
name: tableName,
})
return table
}
async function run() {
if (!appId) {
const app = appId ? await getApp(apiKey, appId) : await createApp(apiKey)
appId = app._id
console.log(`App created. Url: http://localhost:10000/builder/app/${appId}`)
} else {
console.log(
`App retrieved. Url: http://localhost:10000/builder/app/${appId}`
)
}
const studentsTable = await getTable(apiKey, appId, "Students")
let studentNumber = studentsTable.schema["Auto ID"].lastID
const students = await batchCreate(
apiKey,
appId,
studentsTable,
Array.from({ length: STUDENT_COUNT }).map(() => ({
"Student Number": (++studentNumber).toString(),
"First Name": generator.first(),
"Last Name": generator.last(),
Gender: generator.pickone(["M", "F"]),
Grade: generator.pickone(["8", "9", "10", "11"]),
"Tardiness (Days)": generator.integer({ min: 1, max: 100 }),
"Home Number": generator.phone(),
"Attendance_(%)": generator.integer({ min: 0, max: 100 }),
}))
)
const subjectTable = await upsertTable(appId, "Subjects", {
schema: {
Name: {
name: "Name",
type: "string",
},
},
primaryDisplay: "Name",
})
const subjects = useExistingApp
? await getRows(apiKey, appId, subjectTable._id)
: await batchCreate(
apiKey,
appId,
subjectTable,
Array.from({ length: SUBJECT_COUNT }).map(() => ({
Name: generator.profession(),
}))
)
const gradesTable = await upsertTable(appId, "Grades", {
schema: {
Score: {
name: "Score",
type: "number",
},
Student: {
name: "Student",
tableId: studentsTable._id,
constraints: {
presence: true,
type: "array",
},
fieldName: "Grades",
relationshipType: "one-to-many",
type: "link",
},
Subject: {
name: "Subject",
tableId: subjectTable._id,
constraints: {
presence: true,
type: "array",
},
fieldName: "Grades",
relationshipType: "one-to-many",
type: "link",
},
},
})
await batchCreate(
apiKey,
appId,
gradesTable,
students.flatMap(student =>
subjects.map(subject => ({
Score: generator.integer({ min: 0, max: 100 }),
Student: [student],
Subject: [subject],
}))
)
)
console.log(
`Access the app here: http://localhost:10000/builder/app/${appId}`
)
}
run()
.then(() => {
console.log(`Done in ${(Date.now() - start) / 1000} seconds`)
})
.catch(err => {
console.error(err)
})
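
batchCreate above implements bounded concurrency by hand: it keeps at most batchSize row creations in flight, and whenever the pool is full it awaits Promise.race so a slot frees up before the next request starts. The same technique as a generic, self-contained sketch:

// Generic bounded-concurrency map: at most `limit` tasks run at once.
// Results arrive in completion order; rejections propagate to the caller.
async function mapLimited<T, R>(
  items: T[],
  limit: number,
  task: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = []
  const inFlight = new Set<Promise<void>>()
  for (const item of items) {
    const p: Promise<void> = task(item)
      .then(r => {
        results.push(r)
      })
      .finally(() => {
        inFlight.delete(p)
      })
    inFlight.add(p)
    if (inFlight.size >= limit) {
      await Promise.race(inFlight) // wait for one slot to free up
    }
  }
  await Promise.all(inFlight) // drain the stragglers
  return results
}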

View File

@@ -0,0 +1,29 @@
#!/bin/node
const { searchApps, deleteApp } = require("./utils")
if (!process.argv[2]) {
console.error("Please specify an API key as script argument.")
process.exit(-1)
}
async function run() {
const apiKey = process.argv[2]
const apps = await searchApps(apiKey)
console.log(`Deleting ${apps.length} apps`)
let deletedApps = 0
await Promise.all(
apps.map(async app => {
await deleteApp(apiKey, app._id)
console.log(`App ${++deletedApps} of ${apps.length} deleted`)
})
)
}
run()
.then(() => {
console.log("Done!")
})
.catch(err => {
console.error(err)
})

View File

@@ -2,7 +2,8 @@ const fetch = require("node-fetch")
 const uuid = require("uuid/v4")

 const URL_APP = "http://localhost:10000/api/public/v1/applications"
-const URL_TABLE = "http://localhost:10000/api/public/v1/tables/search"
+const URL_TABLE = "http://localhost:10000/api/public/v1/tables"
+const URL_SEARCH_TABLE = "http://localhost:10000/api/public/v1/tables/search"

 async function request(apiKey, url, method, body, appId = undefined) {
   const headers = {
@@ -37,20 +38,41 @@ exports.createApp = async apiKey => {
   return json.data
 }

-exports.getTable = async (apiKey, appId) => {
-  const res = await request(apiKey, URL_TABLE, "POST", {}, appId)
-  const json = await res.json()
-  return json.data[0]
-}
+exports.getApp = async (apiKey, appId) => {
+  const res = await request(apiKey, `${URL_APP}/${appId}`, "GET")
+  const json = await res.json()
+  return json.data
+}
+
+exports.searchApps = async apiKey => {
+  const res = await request(apiKey, `${URL_APP}/search`, "POST", {})
+  const json = await res.json()
+  return json.data
+}
+
+exports.deleteApp = async (apiKey, appId) => {
+  const res = await request(apiKey, `${URL_APP}/${appId}`, "DELETE")
+  return res
+}
+
+exports.getTable = async (apiKey, appId, tableName) => {
+  const res = await request(apiKey, URL_SEARCH_TABLE, "POST", {}, appId)
+  const json = await res.json()
+  const table = json.data.find(t => t.name === tableName)
+  if (!table) {
+    throw `Table '${tableName}' not found`
+  }
+  return table
+}

-exports.createRow = async (apiKey, appId, table) => {
-  const body = {}
+exports.createRow = async (apiKey, appId, table, body) => {
+  if (!body) {
+    body = {}
     for (let [key, schema] of Object.entries(table.schema)) {
       let fake
       switch (schema.type) {
         default:
         case "string":
-          fake = schema.constraints.inclusion
+          fake = schema.constraints?.inclusion
             ? schema.constraints.inclusion[0]
             : "a"
           break
@@ -60,7 +82,20 @@ exports.createRow = async (apiKey, appId, table) => {
       }
       body[key] = fake
     }
+  }
   const url = `http://localhost:10000/api/public/v1/tables/${table._id}/rows`
   const res = await request(apiKey, url, "POST", body, appId)
   return (await res.json()).data
 }
+
+exports.getRows = async (apiKey, appId, tableId) => {
+  const url = `${URL_TABLE}/${tableId}/rows/search`
+  const res = await request(apiKey, url, "POST", {}, appId)
+  return (await res.json()).data
+}
+
+exports.createTable = async (apiKey, appId, config) => {
+  const res = await request(apiKey, URL_TABLE, "POST", config, appId)
+  const json = await res.json()
+  return json.data
+}

View File

@@ -26,7 +26,7 @@ import {
   inputProcessing,
   outputProcessing,
 } from "../../../utilities/rowProcessor"
-import { cloneDeep, isEqual } from "lodash"
+import { cloneDeep } from "lodash"

 export async function handleRequest<T extends Operation>(
   operation: T,
@@ -86,50 +86,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   }
 }

-export async function save(ctx: UserCtx) {
-  const inputs = ctx.request.body
-  const tableId = utils.getTableId(ctx)
-  const table = await sdk.tables.getTable(tableId)
-  const { table: updatedTable, row } = await inputProcessing(
-    ctx.user?._id,
-    cloneDeep(table),
-    inputs
-  )
-  const validateResult = await sdk.rows.utils.validate({
-    row,
-    tableId,
-  })
-  if (!validateResult.valid) {
-    throw { validation: validateResult.errors }
-  }
-  const response = await handleRequest(Operation.CREATE, tableId, {
-    row,
-  })
-  if (!isEqual(table, updatedTable)) {
-    await sdk.tables.saveTable(updatedTable)
-  }
-  const rowId = response.row._id
-  if (rowId) {
-    const row = await sdk.rows.external.getRow(tableId, rowId, {
-      relationships: true,
-    })
-    return {
-      ...response,
-      row: await outputProcessing(table, row, {
-        preserveLinks: true,
-        squash: true,
-      }),
-    }
-  } else {
-    return response
-  }
-}
-
 export async function find(ctx: UserCtx): Promise<Row> {
   const id = ctx.params.rowId
   const tableId = utils.getTableId(ctx)

View File

@@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
 export * as views from "./views"

-function pickApi(tableId: any) {
+function pickApi(tableId: string) {
   if (isExternalTableID(tableId)) {
     return external
   }
@@ -84,9 +84,12 @@ export const save = async (ctx: UserCtx<Row, Row>) => {
     return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>)
   }
   const { row, table, squashed } = await quotas.addRow(() =>
-    quotas.addQuery(() => pickApi(tableId).save(ctx), {
-      datasourceId: tableId,
-    })
+    quotas.addQuery(
+      () => sdk.rows.save(tableId, ctx.request.body, ctx.user?._id),
+      {
+        datasourceId: tableId,
+      }
+    )
   )
   ctx.status = 200
   ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)

View File

@@ -1,5 +1,5 @@
 import * as linkRows from "../../../db/linkedRows"
-import { generateRowID, InternalTables } from "../../../db/utils"
+import { InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import {
   AttachmentCleanup,
@@ -94,45 +94,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   })
 }

-export async function save(ctx: UserCtx) {
-  let inputs = ctx.request.body
-  inputs.tableId = utils.getTableId(ctx)
-
-  if (!inputs._rev && !inputs._id) {
-    inputs._id = generateRowID(inputs.tableId)
-  }
-
-  // this returns the table and row incase they have been updated
-  const dbTable = await sdk.tables.getTable(inputs.tableId)
-
-  // need to copy the table so it can be differenced on way out
-  const tableClone = cloneDeep(dbTable)
-
-  let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)
-
-  const validateResult = await sdk.rows.utils.validate({
-    row,
-    table,
-  })
-
-  if (!validateResult.valid) {
-    throw { validation: validateResult.errors }
-  }
-
-  // make sure link rows are up-to-date
-  row = (await linkRows.updateLinks({
-    eventType: linkRows.EventType.ROW_SAVE,
-    row,
-    tableId: row.tableId,
-    table,
-  })) as Row
-
-  return finaliseRow(table, row, {
-    oldTable: dbTable,
-    updateFormula: true,
-  })
-}
-
 export async function find(ctx: UserCtx): Promise<Row> {
   const tableId = utils.getTableId(ctx),
     rowId = ctx.params.rowId

View File

@@ -5,8 +5,8 @@ import {
   processFormulas,
 } from "../../../utilities/rowProcessor"
 import { FieldTypes, FormulaTypes } from "../../../constants"
-import { context } from "@budibase/backend-core"
-import { Table, Row } from "@budibase/types"
+import { context, locks } from "@budibase/backend-core"
+import { Table, Row, LockType, LockName } from "@budibase/types"
 import * as linkRows from "../../../db/linkedRows"
 import sdk from "../../../sdk"
 import isEqual from "lodash/isEqual"
@@ -149,12 +149,22 @@ export async function finaliseRow(
     await db.put(table)
   } catch (err: any) {
     if (err.status === 409) {
-      const updatedTable = await sdk.tables.getTable(table._id!)
-      let response = processAutoColumn(null, updatedTable, row, {
-        reprocessing: true,
-      })
-      await db.put(response.table)
-      row = response.row
+      // Some conflicts with the autocolumns occurred, we need to refetch the table and recalculate
+      await locks.doWithLock(
+        {
+          type: LockType.AUTO_EXTEND,
+          name: LockName.PROCESS_AUTO_COLUMNS,
+          resource: table._id,
+        },
+        async () => {
+          const latestTable = await sdk.tables.getTable(table._id!)
+          let response = processAutoColumn(null, latestTable, row, {
+            reprocessing: true,
+          })
+          await db.put(response.table)
+          row = response.row
+        }
+      )
     } else {
       throw err
     }
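
Previously the 409 handler refetched the table and retried the write with no coordination, so two workers bumping the same table's auto-column lastID could still clobber each other; the recalculation now runs inside a distributed lock, and the AUTO_EXTEND type keeps the lock alive for as long as the callback runs. The general shape of the call, using the same backend-core API as the diff above:

import { locks } from "@budibase/backend-core"
import { LockType, LockName } from "@budibase/types"

// Serialise a read-modify-write that can race across workers: re-read the
// latest table revision inside the lock, then write.
async function recalcUnderLock(tableId: string) {
  await locks.doWithLock(
    {
      type: LockType.AUTO_EXTEND, // lock TTL renews while the callback runs
      name: LockName.PROCESS_AUTO_COLUMNS,
      resource: tableId, // one lock per table
    },
    async () => {
      // ...refetch the table and reapply auto-column processing here...
    }
  )
}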

View File

@@ -77,7 +77,7 @@ const publicRouter = new Router({
   prefix: PREFIX,
 })

-if (limiter) {
+if (limiter && !env.isDev()) {
   publicRouter.use(limiter)
 }

View File

@@ -16,6 +16,7 @@ import {
 } from "@budibase/types"
 import sdk from "../sdk"
 import { automationsEnabled } from "../features"
+import tracer from "dd-trace"

 const REBOOT_CRON = "@reboot"
 const WH_STEP_ID = definitions.WEBHOOK.stepId
@@ -39,9 +40,37 @@ function loggingArgs(job: AutomationJob) {
 }

 export async function processEvent(job: AutomationJob) {
+  return tracer.trace(
+    "processEvent",
+    { resource: "automation" },
+    async span => {
       const appId = job.data.event.appId!
       const automationId = job.data.automation._id!
+
+      span?.addTags({
+        appId,
+        automationId,
+        job: {
+          id: job.id,
+          name: job.name,
+          attemptsMade: job.attemptsMade,
+          opts: {
+            attempts: job.opts.attempts,
+            priority: job.opts.priority,
+            delay: job.opts.delay,
+            repeat: job.opts.repeat,
+            backoff: job.opts.backoff,
+            lifo: job.opts.lifo,
+            timeout: job.opts.timeout,
+            jobId: job.opts.jobId,
+            removeOnComplete: job.opts.removeOnComplete,
+            removeOnFail: job.opts.removeOnFail,
+            stackTraceLimit: job.opts.stackTraceLimit,
+            preventParsingData: job.opts.preventParsingData,
+          },
+        },
+      })
+
       const task = async () => {
         try {
           // need to actually await these so that an error can be captured properly
@@ -54,12 +83,19 @@ export async function processEvent(job: AutomationJob) {
           console.log("automation completed", ...loggingArgs(job))
           return result
         } catch (err) {
-          console.error(`automation was unable to run`, err, ...loggingArgs(job))
+          span?.addTags({ error: true })
+          console.error(
+            `automation was unable to run`,
+            err,
+            ...loggingArgs(job)
+          )
           return { err }
         }
       }

       return await context.doInAutomationContext({ appId, automationId, task })
+    }
+  )
 }
export async function updateTestHistory( export async function updateTestHistory(
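
processEvent now runs inside tracer.trace, so every automation run becomes a span tagged with its app, automation, and Bull job metadata, and tagging error: true in the catch marks failed runs in APM while still returning { err } to the queue. The wrap-and-tag shape, reduced to a sketch (handleJob and doWork are illustrative names):

import tracer from "dd-trace"

async function doWork(jobId: string): Promise<void> {
  // ...the actual job body would live here...
}

// The traced callback's return value is passed through unchanged, so the
// wrapper is transparent to callers.
async function handleJob(jobId: string) {
  return tracer.trace("handleJob", { resource: "automation" }, async span => {
    span?.addTags({ jobId })
    try {
      return await doWork(jobId)
    } catch (err) {
      span?.addTags({ error: true }) // surface the failure on the span
      throw err
    }
  })
}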

View File

@@ -2,11 +2,13 @@ import vm from "vm"
 import env from "./environment"
 import { setJSRunner } from "@budibase/string-templates"
 import { context, timers } from "@budibase/backend-core"
+import tracer from "dd-trace"

 type TrackerFn = <T>(f: () => T) => T

 export function init() {
   setJSRunner((js: string, ctx: vm.Context) => {
+    return tracer.trace("runJS", {}, span => {
       const perRequestLimit = env.JS_PER_REQUEST_TIME_LIMIT_MS
       let track: TrackerFn = f => f()
       if (perRequestLimit) {
@@ -17,6 +19,12 @@ export function init() {
             timers.ExecutionTimeTracker.withLimit(perRequestLimit)
         }
         track = bbCtx.jsExecutionTracker.track.bind(bbCtx.jsExecutionTracker)
+        span?.addTags({
+          js: {
+            limitMS: bbCtx.jsExecutionTracker.limitMs,
+            elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
+          },
+        })
       }
     }
@@ -33,4 +41,5 @@ export function init() {
       })
     )
+    })
   })
 }

View File

@@ -12,6 +12,7 @@ import { getCachedSelf } from "../utilities/global"
 import env from "../environment"
 import { isWebhookEndpoint } from "./utils"
 import { UserCtx, ContextUser } from "@budibase/types"
+import tracer from "dd-trace"

 export default async (ctx: UserCtx, next: any) => {
   // try to get the appID from the request
@@ -20,6 +21,11 @@ export default async (ctx: UserCtx, next: any) => {
     return next()
   }

+  if (requestAppId) {
+    const span = tracer.scope().active()
+    span?.setTag("appId", requestAppId)
+  }
+
   // deny access to application preview
   if (!env.isTest()) {
     if (
@@ -70,6 +76,14 @@ export default async (ctx: UserCtx, next: any) => {
     return next()
   }

+  if (ctx.user) {
+    const span = tracer.scope().active()
+    if (ctx.user._id) {
+      span?.setTag("userId", ctx.user._id)
+    }
+    span?.setTag("tenantId", ctx.user.tenantId)
+  }
+
   const userId = ctx.user ? generateUserMetadataID(ctx.user._id!) : undefined

   // if the user is not in the right tenant then make sure to wipe their cookie

View File

@@ -1,6 +1,13 @@
-import { IncludeRelationship, Operation } from "@budibase/types"
+import { IncludeRelationship, Operation, Row } from "@budibase/types"
 import { handleRequest } from "../../../api/controllers/row/external"
 import { breakRowIdField } from "../../../integrations/utils"
+import sdk from "../../../sdk"
+import {
+  inputProcessing,
+  outputProcessing,
+} from "../../../utilities/rowProcessor"
+import cloneDeep from "lodash/fp/cloneDeep"
+import isEqual from "lodash/fp/isEqual"

 export async function getRow(
   tableId: string,
@@ -15,3 +22,48 @@ export async function getRow(
   })
   return response ? response[0] : response
 }
+
+export async function save(
+  tableId: string,
+  inputs: Row,
+  userId: string | undefined
+) {
+  const table = await sdk.tables.getTable(tableId)
+  const { table: updatedTable, row } = await inputProcessing(
+    userId,
+    cloneDeep(table),
+    inputs
+  )
+
+  const validateResult = await sdk.rows.utils.validate({
+    row,
+    tableId,
+  })
+  if (!validateResult.valid) {
+    throw { validation: validateResult.errors }
+  }
+
+  const response = await handleRequest(Operation.CREATE, tableId, {
+    row,
+  })
+
+  if (!isEqual(table, updatedTable)) {
+    await sdk.tables.saveTable(updatedTable)
+  }
+
+  const rowId = response.row._id
+  if (rowId) {
+    const row = await sdk.rows.external.getRow(tableId, rowId, {
+      relationships: true,
+    })
+    return {
+      ...response,
+      row: await outputProcessing(table, row, {
+        preserveLinks: true,
+        squash: true,
+      }),
+    }
+  } else {
+    return response
+  }
+}

View File

@@ -0,0 +1,49 @@
import { db } from "@budibase/backend-core"
import { Row } from "@budibase/types"
import sdk from "../../../sdk"
import cloneDeep from "lodash/fp/cloneDeep"
import { finaliseRow } from "../../../api/controllers/row/staticFormula"
import { inputProcessing } from "../../../utilities/rowProcessor"
import * as linkRows from "../../../db/linkedRows"
export async function save(
tableId: string,
inputs: Row,
userId: string | undefined
) {
inputs.tableId = tableId
if (!inputs._rev && !inputs._id) {
inputs._id = db.generateRowID(inputs.tableId)
}
// this returns the table and row incase they have been updated
const dbTable = await sdk.tables.getTable(inputs.tableId)
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
let { table, row } = await inputProcessing(userId, tableClone, inputs)
const validateResult = await sdk.rows.utils.validate({
row,
table,
})
if (!validateResult.valid) {
throw { validation: validateResult.errors }
}
// make sure link rows are up-to-date
row = (await linkRows.updateLinks({
eventType: linkRows.EventType.ROW_SAVE,
row,
tableId: row.tableId,
table,
})) as Row
return finaliseRow(table, row, {
oldTable: dbTable,
updateFormula: true,
})
}

View File

@@ -1,6 +1,9 @@
 import { db as dbCore, context } from "@budibase/backend-core"
 import { Database, Row } from "@budibase/types"
 import { getRowParams } from "../../../db/utils"
+import { isExternalTableID } from "../../../integrations/utils"
+import * as internal from "./internal"
+import * as external from "./external"

 export async function getAllInternalRows(appId?: string) {
   let db: Database
@@ -16,3 +19,18 @@ export async function getAllInternalRows(appId?: string) {
   )
   return response.rows.map(row => row.doc) as Row[]
 }
+
+function pickApi(tableId: any) {
+  if (isExternalTableID(tableId)) {
+    return external
+  }
+  return internal
+}
+
+export async function save(
+  tableId: string,
+  row: Row,
+  userId: string | undefined
+) {
+  return pickApi(tableId).save(tableId, row, userId)
+}

View File

@@ -0,0 +1,220 @@
import tk from "timekeeper"
import * as internalSdk from "../internal"
import { generator } from "@budibase/backend-core/tests"
import {
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
FieldType,
Table,
AutoFieldSubTypes,
} from "@budibase/types"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { cache } from "@budibase/backend-core"
tk.freeze(Date.now())
describe("sdk >> rows >> internal", () => {
const config = new TestConfiguration()
beforeAll(async () => {
await config.init()
})
function makeRow() {
return {
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
}
}
describe("save", () => {
const tableData: Table = {
name: generator.word(),
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
surname: {
name: "surname",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
constraints: {
type: FieldType.NUMBER,
},
},
address: {
name: "address",
type: FieldType.STRING,
constraints: {
type: FieldType.STRING,
},
},
},
}
beforeEach(() => {
jest.clearAllMocks()
})
it("save will persist the row properly", async () => {
const table = await config.createTable(tableData)
const row = makeRow()
await config.doInContext(config.appId, async () => {
const response = await internalSdk.save(
table._id!,
row,
config.user._id
)
expect(response).toEqual({
table,
row: {
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
squashed: {
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
})
const persistedRow = await config.getRow(table._id!, response.row._id!)
expect(persistedRow).toEqual({
...row,
type: "row",
_rev: expect.stringMatching("1-.*"),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
})
})
it("auto ids will update when creating new rows", async () => {
const table = await config.createTable({
...tableData,
schema: {
...tableData.schema,
id: {
name: "id",
type: FieldType.AUTO,
subtype: AutoFieldSubTypes.AUTO_ID,
autocolumn: true,
lastID: 0,
},
},
})
const row = makeRow()
await config.doInContext(config.appId, async () => {
const response = await internalSdk.save(
table._id!,
row,
config.user._id
)
expect(response).toEqual({
table: {
...table,
schema: {
...table.schema,
id: {
...table.schema.id,
lastID: 1,
},
},
},
row: {
...row,
id: 1,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
squashed: {
...row,
id: 1,
type: "row",
_rev: expect.stringMatching("1-.*"),
},
})
const persistedRow = await config.getRow(table._id!, response.row._id!)
expect(persistedRow).toEqual({
...row,
type: "row",
id: 1,
_rev: expect.stringMatching("1-.*"),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
})
})
it("auto ids will update when creating new rows in parallel", async () => {
function makeRows(count: number) {
return Array.from({ length: count }, () => makeRow())
}
const table = await config.createTable({
...tableData,
schema: {
...tableData.schema,
id: {
name: "id",
type: FieldType.AUTO,
subtype: AutoFieldSubTypes.AUTO_ID,
autocolumn: true,
lastID: 0,
},
},
})
await config.doInContext(config.appId, async () => {
for (const row of makeRows(5)) {
await internalSdk.save(table._id!, row, config.user._id)
}
await Promise.all(
makeRows(10).map(row =>
internalSdk.save(table._id!, row, config.user._id)
)
)
for (const row of makeRows(5)) {
await internalSdk.save(table._id!, row, config.user._id)
}
})
const persistedRows = await config.getRows(table._id!)
expect(persistedRows).toHaveLength(20)
expect(persistedRows).toEqual(
expect.arrayContaining(
Array.from({ length: 20 }).map((_, i) =>
expect.objectContaining({ id: i + 1 })
)
)
)
const persistedTable = await config.getTable(table._id)
expect((table as any).schema.id.lastID).toBe(0)
expect(persistedTable.schema.id.lastID).toBe(20)
})
})
})

View File

@@ -34,6 +34,7 @@ import { cloneDeep } from "lodash/fp"
 import { performance } from "perf_hooks"
 import * as sdkUtils from "../sdk/utils"
 import env from "../environment"
+import tracer from "dd-trace"

 threadUtils.threadSetup()
 const FILTER_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.FILTER.stepId
@@ -242,6 +243,15 @@
   }

   async execute(): Promise<any> {
+    return tracer.trace(
+      "Orchestrator.execute",
+      { resource: "automation" },
+      async span => {
+        span?.addTags({
+          appId: this._appId,
+          automationId: this._automation._id,
+        })
     // this will retrieve from context created at start of thread
     this._context.env = await sdkUtils.getEnvironmentVariables()
     let automation = this._automation
@@ -257,19 +267,39 @@
     let timeout = this._job.data.event.timeout
     // check if this is a recurring automation,
     if (isProdAppID(this._appId) && isRecurring(automation)) {
+      span?.addTags({ recurring: true })
       metadata = await this.getMetadata()
       const shouldStop = await this.checkIfShouldStop(metadata)
       if (shouldStop) {
+        span?.addTags({ shouldStop: true })
         return
       }
     }

     const start = performance.now()
     for (let step of automation.definition.steps) {
+      const stepSpan = tracer.startSpan("Orchestrator.execute.step", {
+        childOf: span,
+      })
+      stepSpan.addTags({
+        resource: "automation",
+        step: {
+          stepId: step.stepId,
+          id: step.id,
+          name: step.name,
+          type: step.type,
+          title: step.stepTitle,
+          internal: step.internal,
+          deprecated: step.deprecated,
+        },
+      })
       let input: any,
         iterations = 1,
         iterationCount = 0

+      try {
       if (timeoutFlag) {
+        span?.addTags({ timedOut: true })
         break
       }
@@ -289,6 +319,7 @@
       if (loopStep) {
         input = await processObject(loopStep.inputs, this._context)
         iterations = getLoopIterations(loopStep as LoopStep)
+        stepSpan?.addTags({ step: { iterations } })
       }
       for (let index = 0; index < iterations; index++) {
         let originalStepInput = cloneDeep(step.inputs)
@@ -304,10 +335,15 @@
             loopStep.inputs as LoopInput
           )
         } catch (err) {
-          this.updateContextAndOutput(loopStepNumber, step, tempOutput, {
+          this.updateContextAndOutput(
+            loopStepNumber,
+            step,
+            tempOutput,
+            {
             status: AutomationErrors.INCORRECT_TYPE,
             success: false,
-          })
+            }
+          )
           loopSteps = undefined
           loopStep = undefined
           break
@@ -352,7 +388,8 @@
                 }
               } else {
                 if (typeof value === "string") {
-                  originalStepInput[key] = automationUtils.substituteLoopStep(
+                  originalStepInput[key] =
+                    automationUtils.substituteLoopStep(
                       value,
                       `steps.${loopStepNumber}`
                     )
@@ -364,30 +401,42 @@
                 index === env.AUTOMATION_MAX_ITERATIONS ||
                 index === parseInt(loopStep.inputs.iterations)
               ) {
-                this.updateContextAndOutput(loopStepNumber, step, tempOutput, {
+                this.updateContextAndOutput(
+                  loopStepNumber,
+                  step,
+                  tempOutput,
+                  {
                   status: AutomationErrors.MAX_ITERATIONS,
                   success: true,
-                })
+                  }
+                )
                 loopSteps = undefined
                 loopStep = undefined
                 break
               }

               let isFailure = false
-              const currentItem = this._context.steps[loopStepNumber]?.currentItem
+              const currentItem =
+                this._context.steps[loopStepNumber]?.currentItem
               if (currentItem && typeof currentItem === "object") {
                 isFailure = Object.keys(currentItem).some(value => {
                   return currentItem[value] === loopStep?.inputs.failure
                 })
               } else {
-                isFailure = currentItem && currentItem === loopStep.inputs.failure
+                isFailure =
+                  currentItem && currentItem === loopStep.inputs.failure
               }

               if (isFailure) {
-                this.updateContextAndOutput(loopStepNumber, step, tempOutput, {
+                this.updateContextAndOutput(
+                  loopStepNumber,
+                  step,
+                  tempOutput,
+                  {
                   status: AutomationErrors.FAILURE_CONDITION,
                   success: false,
-                })
+                  }
+                )
                 loopSteps = undefined
                 loopStep = undefined
                 break
@@ -396,14 +445,22 @@
             // execution stopped, record state for that
             if (stopped) {
-              this.updateExecutionOutput(step.id, step.stepId, {}, STOPPED_STATUS)
+              this.updateExecutionOutput(
+                step.id,
+                step.stepId,
+                {},
+                STOPPED_STATUS
+              )
               continue
             }

             // If it's a loop step, we need to manually add the bindings to the context
             let stepFn = await this.getStepFunctionality(step.stepId)
             let inputs = await processObject(originalStepInput, this._context)
-            inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs)
+            inputs = automationUtils.cleanInputValues(
+              inputs,
+              step.schema.inputs
+            )
             try {
               // appId is always passed
@@ -419,10 +476,15 @@
               // so that we can finish iterating through the steps and record that it stopped
               if (step.stepId === FILTER_STEP_ID && !outputs.result) {
                 stopped = true
-                this.updateExecutionOutput(step.id, step.stepId, step.inputs, {
+                this.updateExecutionOutput(
+                  step.id,
+                  step.stepId,
+                  step.inputs,
+                  {
                   ...outputs,
                   ...STOPPED_STATUS,
-                })
+                  }
+                )
                 continue
               }
               if (loopStep && loopSteps) {
@@ -449,6 +511,9 @@
             }
           }
         }
+      } finally {
+        stepSpan?.finish()
+      }

       if (loopStep && iterations === 0) {
         loopStep = undefined
@@ -518,6 +583,8 @@
         }
         return this.executionOutput
       }
+    )
+  }
 }

 export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
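
Side note on the tracing added above: tracer.trace() opens a span and finishes it automatically when the callback's promise settles, while tracer.startSpan() with childOf creates a manual child span that must be finished explicitly, which is why the step body gains a try/finally. A minimal sketch of the same span-per-step shape, using only documented dd-trace calls (runSteps and its Step type are illustrative, not Budibase code):

    import tracer from "dd-trace"

    // Hypothetical step shape, for illustration only.
    interface Step {
      id: string
      run: () => Promise<void>
    }

    async function runSteps(steps: Step[]) {
      return tracer.trace("runSteps", { resource: "automation" }, async span => {
        span?.addTags({ stepCount: steps.length })
        for (const step of steps) {
          // Manual child span: startSpan() is not auto-finished, so the
          // finally block guarantees it closes even if run() throws.
          const stepSpan = tracer.startSpan("runSteps.step", { childOf: span })
          stepSpan.addTags({ step: { id: step.id } })
          try {
            await step.run()
          } finally {
            stepSpan.finish()
          }
        }
      })
    }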

View File

@@ -11,6 +11,7 @@ import {
   Row,
   Table,
 } from "@budibase/types"
+import tracer from "dd-trace"

 interface FormulaOpts {
   dynamic?: boolean
@@ -50,6 +51,9 @@ export function processFormulas<T extends Row | Row[]>(
   inputRows: T,
   { dynamic, contextRows }: FormulaOpts = { dynamic: true }
 ): T {
+  return tracer.trace("processFormulas", {}, span => {
+    const numRows = Array.isArray(inputRows) ? inputRows.length : 1
+    span?.addTags({ table_id: table._id, dynamic, numRows })
   const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
   if (rows) {
     for (let [column, schema] of Object.entries(table.schema)) {
@@ -73,12 +77,16 @@
           let formula = schema.formula
           rows[i] = {
             ...row,
-            [column]: processStringSync(formula, context),
+            [column]: tracer.trace("processStringSync", {}, span => {
+              span?.addTags({ table_id: table._id, column, static: isStatic })
+              return processStringSync(formula, context)
+            }),
           }
         }
       }
     }
   return Array.isArray(inputRows) ? rows : rows[0]
+  })
 }

 /**
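
The same tracer.trace() API also works synchronously: when the callback returns a plain value, the span is finished on return and the value is passed through, which is what lets the traced processStringSync call sit inline in an object literal above. A sketch under that assumption (evaluate is a hypothetical stand-in for processStringSync):

    import tracer from "dd-trace"

    // Hypothetical stand-in for processStringSync.
    function evaluate(template: string): string {
      return template.toUpperCase()
    }

    function processColumn(rows: Record<string, any>[], column: string) {
      return tracer.trace("processColumn", {}, span => {
        span?.addTags({ numRows: rows.length })
        return rows.map(row => ({
          ...row,
          // The traced value flows straight into the object literal.
          [column]: tracer.trace("evaluate", {}, inner => {
            inner?.addTags({ column })
            return evaluate(String(row[column]))
          }),
        }))
      })
    }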

View File

@@ -1,19 +1,26 @@
 const externalHandlebars = require("./external")
 const helperList = require("@budibase/handlebars-helpers")

+let helpers = undefined
+
 module.exports.getHelperList = () => {
+  if (helpers) {
+    return helpers
+  }
+  helpers = {}
   let constructed = []
   for (let collection of externalHandlebars.externalCollections) {
     constructed.push(helperList[collection]())
   }
-  const fullMap = {}
   for (let collection of constructed) {
     for (let [key, func] of Object.entries(collection)) {
-      fullMap[key] = func
+      helpers[key] = func
     }
   }
   for (let key of Object.keys(externalHandlebars.addedHelpers)) {
-    fullMap[key] = externalHandlebars.addedHelpers[key]
+    helpers[key] = externalHandlebars.addedHelpers[key]
   }
-  return fullMap
+  Object.freeze(helpers)
+  return helpers
 }

View File

@@ -21,6 +21,7 @@ export enum LockName {
   PERSIST_WRITETHROUGH = "persist_writethrough",
   QUOTA_USAGE_EVENT = "quota_usage_event",
   APP_MIGRATION = "app_migrations",
+  PROCESS_AUTO_COLUMNS = "process_auto_columns",
 }

 export type LockOptions = {
View File

@@ -30,6 +30,7 @@ async function init() {
     ENABLE_EMAIL_TEST_MODE: "1",
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
+    PASSWORD_MIN_LENGTH: "1",
   }
   config = { ...config, ...existingConfig }

View File

@@ -122,10 +122,10 @@ export const resetUpdate = async (ctx: Ctx<PasswordResetUpdateRequest>) => {
     ctx.body = {
       message: "password reset successfully.",
     }
-  } catch (err) {
+  } catch (err: any) {
     console.warn(err)
     // hide any details of the error for security
-    ctx.throw(400, "Cannot reset password.")
+    ctx.throw(400, err.message || "Cannot reset password.")
   }
 }
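
Worth noting: the comment still says details are hidden for security, but the handler now forwards err.message, so this relies on the service layer throwing only user-safe messages (see the "Password change is disabled for this user" assertion below), with the generic text as a fallback. The fallback shape, sketched outside Koa with a hypothetical helper:

    // Hypothetical helper; the real code uses ctx.throw(400, ...).
    function toClientError(err: unknown): { status: number; message: string } {
      console.warn(err) // full detail stays server-side
      const message =
        err instanceof Error && err.message
          ? err.message
          : "Cannot reset password."
      return { status: 400, message }
    }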

View File

@@ -229,7 +229,7 @@ describe("/api/global/auth", () => {
         )
         expect(res.body).toEqual({
-          message: "Cannot reset password.",
+          message: "Password change is disabled for this user",
           status: 400,
         })
       }
@@ -261,8 +261,12 @@
       )
       // convert to account owner now that password has been requested
-      const account = structures.accounts.ssoAccount() as CloudAccount
-      mocks.accounts.getAccount.mockReturnValueOnce(
+      const account: CloudAccount = {
+        ...structures.accounts.ssoAccount(),
+        budibaseUserId: "budibaseUserId",
+        email: user.email,
+      }
+      mocks.accounts.getAccountByTenantId.mockReturnValueOnce(
         Promise.resolve(account)
       )

View File

@@ -1,6 +1,6 @@
 import tk from "timekeeper"
 import _ from "lodash"
-import { mocks, structures } from "@budibase/backend-core/tests"
+import { generator, mocks, structures } from "@budibase/backend-core/tests"
 import {
   ScimCreateUserRequest,
   ScimGroupResponse,
@@ -14,9 +14,14 @@ import { events } from "@budibase/backend-core"
 jest.retryTimes(2, { logErrorsBeforeRetry: true })
 jest.setTimeout(30000)

-mocks.licenses.useScimIntegration()
-
 describe("scim", () => {
+  beforeAll(async () => {
+    tk.freeze(mocks.date.MOCK_DATE)
+    mocks.licenses.useScimIntegration()
+    await config.setSCIMConfig(true)
+  })
+
   beforeEach(async () => {
     jest.resetAllMocks()
     tk.freeze(mocks.date.MOCK_DATE)
@@ -570,8 +575,15 @@
       beforeAll(async () => {
         groups = []

-        for (let i = 0; i < groupCount; i++) {
-          const body = structures.scim.createGroupRequest()
+        const groupNames = generator.unique(
+          () => generator.word(),
+          groupCount
+        )
+
+        for (const groupName of groupNames) {
+          const body = structures.scim.createGroupRequest({
+            displayName: groupName,
+          })
           groups.push(await config.api.scimGroupsAPI.post({ body }))
         }
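
Assuming generator here wraps a chance.js instance (as the backend-core test utilities do), unique(fn, n) keeps drawing until it has n distinct values, so the pre-seeded display names cannot collide and break sorted-pagination assertions later in the suite:

    // chance-style unique(): n distinct draws from the supplied function.
    const names = generator.unique(() => generator.word(), 25)
    // names.length === 25, with no duplicate display names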

View File

@@ -79,6 +79,9 @@ export const resetUpdate = async (resetCode: string, password: string) => {
   user.password = password
   user = await userSdk.db.save(user)

+  await cache.passwordReset.invalidateCode(resetCode)
+  await sessions.invalidateSessions(userId)
+
   // remove password from the user before sending events
   delete user.password
   await events.user.passwordReset(user)
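
The ordering in this hunk is deliberate, and the new tests below pin it down: the reset code is burned and sessions are dropped only after the new password has been persisted, so a failed save leaves the code usable for a retry. Condensed, with save() standing in for userSdk.db.save():

    import { cache, sessions } from "@budibase/backend-core"

    async function finishReset(
      userId: string,
      resetCode: string,
      save: () => Promise<void>
    ) {
      await save() // persist the new password first
      await cache.passwordReset.invalidateCode(resetCode) // code is single-use
      await sessions.invalidateSessions(userId) // force re-login everywhere
    }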

View File

@@ -0,0 +1,70 @@
import { cache, context, sessions, utils } from "@budibase/backend-core"
import { loginUser, resetUpdate } from "../auth"
import { generator, structures } from "@budibase/backend-core/tests"
import { TestConfiguration } from "../../../tests"

describe("auth", () => {
  const config = new TestConfiguration()

  describe("resetUpdate", () => {
    it("providing a valid code will update the password", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()
        const previousPassword = user.password

        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)

        const persistedUser = await config.getUser(user.email)
        expect(persistedUser.password).not.toBe(previousPassword)
        expect(
          await utils.compare(newPassword, persistedUser.password!)
        ).toBeTruthy()
      })
    })

    it("wrong code will not allow to reset the password", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const code = generator.hash()
        const newPassword = generator.hash()

        await expect(resetUpdate(code, newPassword)).rejects.toThrow(
          "Provided information is not valid, cannot reset password - please try again."
        )
      })
    })

    it("the same code cannot be used twice", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()
        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)

        await expect(resetUpdate(code, newPassword)).rejects.toThrow(
          "Provided information is not valid, cannot reset password - please try again."
        )
      })
    })

    it("updating the password will invalidate all the sessions", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()
        await loginUser(user)
        expect(await sessions.getSessionsForUser(user._id!)).toHaveLength(1)

        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)

        expect(await sessions.getSessionsForUser(user._id!)).toHaveLength(0)
      })
    })
  })
})

View File

@@ -1,6 +1,5 @@
 import { structures, mocks } from "../../../tests"
 import { env, context } from "@budibase/backend-core"
-import * as users from "../users"
 import { db as userDb } from "../"
 import { CloudAccount } from "@budibase/types"

View File

@@ -45,7 +45,7 @@ class TestConfiguration {
   tenantId: string
   user?: User
   apiKey?: string
-  userPassword = "test"
+  userPassword = "password"

   constructor(opts: { openServer: boolean } = { openServer: true }) {
     // default to cloud hosting

View File

@@ -101,7 +101,7 @@ export class UserAPI extends TestAPI {
     if (!request) {
       request = {
         email: structures.email(),
-        password: generator.string(),
+        password: generator.string({ length: 8 }),
         tenantId: structures.tenant.id(),
       }
     }

yarn.lock (1306 lines changed)

File diff suppressed because it is too large