Merge branch 'master' of github.com:Budibase/budibase into feature/sql-query-aliasing
commit 4ddcecfd04

@ -76,6 +76,18 @@ jobs:
          yarn check:types
        fi

  helm-lint:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Use Node.js 18.x
        uses: azure/setup-helm@v3
      - run: cd charts/budibase && helm lint .

  test-libraries:
    runs-on: ubuntu-latest
    steps:
@ -227,6 +227,14 @@ spec:
        resources:
          {{- toYaml . | nindent 10 }}
        {{ end }}
        {{ if .Values.services.apps.command }}
        command:
          {{- toYaml .Values.services.apps.command | nindent 10 }}
        {{ end }}
        {{ if .Values.services.apps.args }}
        args:
          {{- toYaml .Values.services.apps.args | nindent 10 }}
        {{ end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
@ -227,6 +227,13 @@ spec:
        resources:
          {{- toYaml . | nindent 10 }}
        {{ end }}
        {{ if .Values.services.automationWorkers.command }}
        command:
          {{- toYaml .Values.services.automationWorkers.command | nindent 10 }}
        {{ end }}
        {{ if .Values.services.automationWorkers.args }}
        args:
          {{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
        {{ end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}

@ -244,5 +251,6 @@ spec:
      {{ end }}
      restartPolicy: Always
      serviceAccountName: ""
status: {}
{{- end }}
@ -100,5 +100,13 @@ spec:
      {{ end }}
      restartPolicy: Always
      serviceAccountName: ""
      {{ if .Values.services.proxy.command }}
      command:
        {{- toYaml .Values.services.proxy.command | nindent 8 }}
      {{ end }}
      {{ if .Values.services.proxy.args }}
      args:
        {{- toYaml .Values.services.proxy.args | nindent 8 }}
      {{ end }}
      volumes:
status: {}
@ -213,6 +213,14 @@ spec:
        resources:
          {{- toYaml . | nindent 10 }}
        {{ end }}
        {{ if .Values.services.worker.command }}
        command:
          {{- toYaml .Values.services.worker.command | nindent 10 }}
        {{ end }}
        {{ if .Values.services.worker.args }}
        args:
          {{- toYaml .Values.services.worker.args | nindent 10 }}
        {{ end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
@ -7,7 +7,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://127.0.0.1:9000
[[ -z "${MINIO_URL}" ]] && [[ -z "${USE_S3}" ]] && export MINIO_URL=http://127.0.0.1:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
@ -77,7 +77,12 @@ mkdir -p ${DATA_DIR}/minio
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &

# only start Minio if USE_S3 isn't set
if [[ -z "${USE_S3}" ]]; then
  /minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
fi

/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
  # Add monthly cron job to renew certbot certificate
@ -1,5 +1,5 @@
{
  "version": "2.13.46",
  "version": "2.13.51",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@ -1 +1 @@
Subproject commit 09dae295e3ba6149c4e1d7fe567870c3a38bd277
Subproject commit c1a53bb2f4cafcb4c55ad7181146617b449907f2
@ -32,6 +32,7 @@
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "correlation-id": "4.0.0",
    "dd-trace": "3.13.2",
    "dotenv": "16.0.1",
    "ioredis": "5.3.2",
    "joi": "17.6.0",
@ -18,14 +18,15 @@ export enum TTL {
  ONE_DAY = 86400,
}

function performExport(funcName: string) {
  // @ts-ignore
  return (...args: any) => GENERIC[funcName](...args)
}

export const keys = performExport("keys")
export const get = performExport("get")
export const store = performExport("store")
export const destroy = performExport("delete")
export const withCache = performExport("withCache")
export const bustCache = performExport("bustCache")
export const keys = (...args: Parameters<typeof GENERIC.keys>) =>
  GENERIC.keys(...args)
export const get = (...args: Parameters<typeof GENERIC.get>) =>
  GENERIC.get(...args)
export const store = (...args: Parameters<typeof GENERIC.store>) =>
  GENERIC.store(...args)
export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
  GENERIC.delete(...args)
export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
  GENERIC.withCache(...args)
export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
  GENERIC.bustCache(...args)
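The replacement of the any-based performExport with Parameters<typeof ...> wrappers restores compile-time argument checking. A minimal sketch of the difference, assuming a simple signature for GENERIC.get:

// sketch only - GENERIC.get's real signature lives in the generic cache module
declare const GENERIC: { get: (key: string) => Promise<string | null> }

const getAny = (...args: any) => GENERIC.get(...args) // old style: any call compiles
const getTyped = (...args: Parameters<typeof GENERIC.get>) =>
  GENERIC.get(...args) // new style: arguments keep their real types

getAny(123) // compiles, but would misbehave at runtime
// getTyped(123) -> compile-time error: number is not assignable to string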
@ -1,6 +1,6 @@
import * as redis from "../redis/init"
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
import { Duration } from "../utils"

const TTL_SECONDS = Duration.fromHours(1).toSeconds()

@ -32,7 +32,18 @@ export async function getCode(code: string): Promise<PasswordReset> {
  const client = await redis.getPasswordResetClient()
  const value = (await client.get(code)) as PasswordReset | undefined
  if (!value) {
    throw "Provided information is not valid, cannot reset password - please try again."
    throw new Error(
      "Provided information is not valid, cannot reset password - please try again."
    )
  }
  return value
}

/**
 * Given a reset code this will invalidate it.
 * @param code The code provided via the email link.
 */
export async function invalidateCode(code: string): Promise<void> {
  const client = await redis.getPasswordResetClient()
  await client.delete(code)
}
@ -335,3 +335,11 @@ export function isScim(): boolean {
  const scimCall = context?.isScim
  return !!scimCall
}

export function getCurrentContext(): ContextMap | undefined {
  try {
    return Context.get()
  } catch (e) {
    return undefined
  }
}
@ -1,4 +1,5 @@
import { IdentityContext } from "@budibase/types"
import { ExecutionTimeTracker } from "../timers"

// keep this out of Budibase types, don't want to expose context info
export type ContextMap = {

@ -9,4 +10,5 @@ export type ContextMap = {
  isScim?: boolean
  automationId?: string
  isMigrating?: boolean
  jsExecutionTracker?: ExecutionTimeTracker
}
@ -17,6 +17,7 @@ import { directCouchUrlCall } from "./utils"
import { getPouchDB } from "./pouchDB"
import { WriteStream, ReadStream } from "fs"
import { newid } from "../../docIds/newid"
import { DDInstrumentedDatabase } from "../instrumentation"

function buildNano(couchInfo: { url: string; cookie: string }) {
  return Nano({

@ -35,10 +36,8 @@ export function DatabaseWithConnection(
  connection: string,
  opts?: DatabaseOpts
) {
  if (!connection) {
    throw new Error("Must provide connection details")
  }
  return new DatabaseImpl(dbName, opts, connection)
  const db = new DatabaseImpl(dbName, opts, connection)
  return new DDInstrumentedDatabase(db)
}

export class DatabaseImpl implements Database {
@ -1,8 +1,9 @@
import { directCouchQuery, DatabaseImpl } from "./couch"
import { CouchFindOptions, Database, DatabaseOpts } from "@budibase/types"
import { DDInstrumentedDatabase } from "./instrumentation"

export function getDB(dbName: string, opts?: DatabaseOpts): Database {
  return new DatabaseImpl(dbName, opts)
  return new DDInstrumentedDatabase(new DatabaseImpl(dbName, opts))
}

// we have to use a callback for this so that we can close
@ -0,0 +1,156 @@
import {
  DocumentScope,
  DocumentDestroyResponse,
  DocumentInsertResponse,
  DocumentBulkResponse,
  OkResponse,
} from "@budibase/nano"
import {
  AllDocsResponse,
  AnyDocument,
  Database,
  DatabaseDumpOpts,
  DatabasePutOpts,
  DatabaseQueryOpts,
  Document,
} from "@budibase/types"
import tracer from "dd-trace"
import { Writable } from "stream"

export class DDInstrumentedDatabase implements Database {
  constructor(private readonly db: Database) {}

  get name(): string {
    return this.db.name
  }

  exists(): Promise<boolean> {
    return tracer.trace("db.exists", span => {
      span?.addTags({ db_name: this.name })
      return this.db.exists()
    })
  }

  checkSetup(): Promise<DocumentScope<any>> {
    return tracer.trace("db.checkSetup", span => {
      span?.addTags({ db_name: this.name })
      return this.db.checkSetup()
    })
  }

  get<T extends Document>(id?: string | undefined): Promise<T> {
    return tracer.trace("db.get", span => {
      span?.addTags({ db_name: this.name, doc_id: id })
      return this.db.get(id)
    })
  }

  getMultiple<T extends Document>(
    ids: string[],
    opts?: { allowMissing?: boolean | undefined } | undefined
  ): Promise<T[]> {
    return tracer.trace("db.getMultiple", span => {
      span?.addTags({
        db_name: this.name,
        num_docs: ids.length,
        allow_missing: opts?.allowMissing,
      })
      return this.db.getMultiple(ids, opts)
    })
  }

  remove(
    id: string | Document,
    rev?: string | undefined
  ): Promise<DocumentDestroyResponse> {
    return tracer.trace("db.remove", span => {
      span?.addTags({ db_name: this.name, doc_id: id })
      return this.db.remove(id, rev)
    })
  }

  put(
    document: AnyDocument,
    opts?: DatabasePutOpts | undefined
  ): Promise<DocumentInsertResponse> {
    return tracer.trace("db.put", span => {
      span?.addTags({ db_name: this.name, doc_id: document._id })
      return this.db.put(document, opts)
    })
  }

  bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {
    return tracer.trace("db.bulkDocs", span => {
      span?.addTags({ db_name: this.name, num_docs: documents.length })
      return this.db.bulkDocs(documents)
    })
  }

  allDocs<T extends Document>(
    params: DatabaseQueryOpts
  ): Promise<AllDocsResponse<T>> {
    return tracer.trace("db.allDocs", span => {
      span?.addTags({ db_name: this.name })
      return this.db.allDocs(params)
    })
  }

  query<T extends Document>(
    viewName: string,
    params: DatabaseQueryOpts
  ): Promise<AllDocsResponse<T>> {
    return tracer.trace("db.query", span => {
      span?.addTags({ db_name: this.name, view_name: viewName })
      return this.db.query(viewName, params)
    })
  }

  destroy(): Promise<void | OkResponse> {
    return tracer.trace("db.destroy", span => {
      span?.addTags({ db_name: this.name })
      return this.db.destroy()
    })
  }

  compact(): Promise<void | OkResponse> {
    return tracer.trace("db.compact", span => {
      span?.addTags({ db_name: this.name })
      return this.db.compact()
    })
  }

  dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {
    return tracer.trace("db.dump", span => {
      span?.addTags({ db_name: this.name })
      return this.db.dump(stream, opts)
    })
  }

  load(...args: any[]): Promise<any> {
    return tracer.trace("db.load", span => {
      span?.addTags({ db_name: this.name })
      return this.db.load(...args)
    })
  }

  createIndex(...args: any[]): Promise<any> {
    return tracer.trace("db.createIndex", span => {
      span?.addTags({ db_name: this.name })
      return this.db.createIndex(...args)
    })
  }

  deleteIndex(...args: any[]): Promise<any> {
    return tracer.trace("db.deleteIndex", span => {
      span?.addTags({ db_name: this.name })
      return this.db.deleteIndex(...args)
    })
  }

  getIndexes(...args: any[]): Promise<any> {
    return tracer.trace("db.getIndexes", span => {
      span?.addTags({ db_name: this.name })
      return this.db.getIndexes(...args)
    })
  }
}
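For orientation, a sketch of how the wrapper is consumed downstream (illustrative only; fetchDoc is not part of the commit):

import { db as dbCore } from "@budibase/backend-core"

async function fetchDoc(appId: string, docId: string) {
  // getDB now returns a DDInstrumentedDatabase wrapping DatabaseImpl, so this
  // emits a "db.get" span tagged with db_name and doc_id before delegating
  const database = dbCore.getDB(appId)
  return await database.get(docId)
}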
@ -5,6 +5,8 @@ import { IdentityType } from "@budibase/types"
|
|||
import env from "../../environment"
|
||||
import * as context from "../../context"
|
||||
import * as correlation from "../correlation"
|
||||
import tracer from "dd-trace"
|
||||
import { formats } from "dd-trace/ext"
|
||||
|
||||
import { localFileDestination } from "../system"
|
||||
|
||||
|
@ -115,6 +117,11 @@ if (!env.DISABLE_PINO_LOGGER) {
|
|||
correlationId: correlation.getId(),
|
||||
}
|
||||
|
||||
const span = tracer.scope().active()
|
||||
if (span) {
|
||||
tracer.inject(span.context(), formats.LOG, contextObject)
|
||||
}
|
||||
|
||||
const mergingObject: any = {
|
||||
err: error,
|
||||
pid: process.pid,
|
||||
|
|
|
@ -15,6 +15,7 @@ function newJob(queue: string, message: any) {
|
|||
timestamp: Date.now(),
|
||||
queue: queue,
|
||||
data: message,
|
||||
opts: {},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ export function createQueue<T>(
|
|||
cleanupInterval = timers.set(cleanup, CLEANUP_PERIOD_MS)
|
||||
// fire off an initial cleanup
|
||||
cleanup().catch(err => {
|
||||
console.error(`Unable to cleanup automation queue initially - ${err}`)
|
||||
console.error(`Unable to cleanup ${jobQueue} initially - ${err}`)
|
||||
})
|
||||
}
|
||||
return queue
|
||||
|
|
|
@ -18,6 +18,7 @@ import {
|
|||
SelectableDatabase,
|
||||
getRedisConnectionDetails,
|
||||
} from "./utils"
|
||||
import { logAlert } from "../logging"
|
||||
import * as timers from "../timers"
|
||||
|
||||
const RETRY_PERIOD_MS = 2000
|
||||
|
@ -39,21 +40,16 @@ function pickClient(selectDb: number): any {
|
|||
return CLIENTS[selectDb]
|
||||
}
|
||||
|
||||
function connectionError(
|
||||
selectDb: number,
|
||||
timeout: NodeJS.Timeout,
|
||||
err: Error | string
|
||||
) {
|
||||
function connectionError(timeout: NodeJS.Timeout, err: Error | string) {
|
||||
// manually shut down, ignore errors
|
||||
if (CLOSED) {
|
||||
return
|
||||
}
|
||||
pickClient(selectDb).disconnect()
|
||||
CLOSED = true
|
||||
// always clear this on error
|
||||
clearTimeout(timeout)
|
||||
CONNECTED = false
|
||||
console.error("Redis connection failed - " + err)
|
||||
logAlert("Redis connection failed", err)
|
||||
setTimeout(() => {
|
||||
init()
|
||||
}, RETRY_PERIOD_MS)
|
||||
|
@ -79,11 +75,7 @@ function init(selectDb = DEFAULT_SELECT_DB) {
|
|||
// start the timer - only allowed 5 seconds to connect
|
||||
timeout = setTimeout(() => {
|
||||
if (!CONNECTED) {
|
||||
connectionError(
|
||||
selectDb,
|
||||
timeout,
|
||||
"Did not successfully connect in timeout"
|
||||
)
|
||||
connectionError(timeout, "Did not successfully connect in timeout")
|
||||
}
|
||||
}, STARTUP_TIMEOUT_MS)
|
||||
|
||||
|
@ -106,12 +98,13 @@ function init(selectDb = DEFAULT_SELECT_DB) {
|
|||
// allow the process to exit
|
||||
return
|
||||
}
|
||||
connectionError(selectDb, timeout, err)
|
||||
connectionError(timeout, err)
|
||||
})
|
||||
client.on("error", (err: Error) => {
|
||||
connectionError(selectDb, timeout, err)
|
||||
connectionError(timeout, err)
|
||||
})
|
||||
client.on("connect", () => {
|
||||
console.log(`Connected to Redis DB: ${selectDb}`)
|
||||
clearTimeout(timeout)
|
||||
CONNECTED = true
|
||||
})
|
||||
|
|
|
@ -20,3 +20,41 @@ export function cleanup() {
  }
  intervals = []
}

export class ExecutionTimeoutError extends Error {
  public readonly name = "ExecutionTimeoutError"
}

export class ExecutionTimeTracker {
  static withLimit(limitMs: number) {
    return new ExecutionTimeTracker(limitMs)
  }

  constructor(readonly limitMs: number) {}

  private totalTimeMs = 0

  track<T>(f: () => T): T {
    this.checkLimit()
    const start = process.hrtime.bigint()
    try {
      return f()
    } finally {
      const end = process.hrtime.bigint()
      this.totalTimeMs += Number(end - start) / 1e6
      this.checkLimit()
    }
  }

  get elapsedMS() {
    return this.totalTimeMs
  }

  private checkLimit() {
    if (this.totalTimeMs > this.limitMs) {
      throw new ExecutionTimeoutError(
        `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
      )
    }
  }
}
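A usage sketch for the tracker (not from the commit): the budget is shared across every track call on one instance.

// budget of 1000ms shared across every track() call on this instance
const tracker = ExecutionTimeTracker.withLimit(1000)

function runSnippet(js: () => number) {
  try {
    // throws ExecutionTimeoutError once total tracked time exceeds 1000ms
    return tracker.track(js)
  } catch (err) {
    if (err instanceof ExecutionTimeoutError) {
      return undefined // budget spent - stop running further snippets
    }
    throw err
  }
}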
@ -2,7 +2,7 @@ import env from "../environment"
import * as eventHelpers from "./events"
import * as accountSdk from "../accounts"
import * as cache from "../cache"
import { doInTenant, getGlobalDB, getIdentity, getTenantId } from "../context"
import { getGlobalDB, getIdentity, getTenantId } from "../context"
import * as dbUtils from "../db"
import { EmailUnavailableError, HTTPError } from "../errors"
import * as platform from "../platform"
@ -49,4 +49,8 @@ export class Duration {
  static fromDays(duration: number) {
    return Duration.from(DurationType.DAYS, duration)
  }

  static fromMilliseconds(duration: number) {
    return Duration.from(DurationType.MILLISECONDS, duration)
  }
}
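A one-line usage sketch of the new helper (assuming the existing toSeconds accessor used elsewhere in this commit):

// e.g. express a 1500ms TTL without converting by hand
const ttlSeconds = Duration.fromMilliseconds(1500).toSeconds() // 1.5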
@ -69,7 +69,15 @@
|
|||
on:change={e => onChange(e, field)}
|
||||
useLabel={false}
|
||||
/>
|
||||
{:else if schema.type === "string" || schema.type === "number"}
|
||||
{:else if schema.type === "bb_reference"}
|
||||
<LinkedRowSelector
|
||||
linkedRows={value[field]}
|
||||
{schema}
|
||||
linkedTableId={"ta_users"}
|
||||
on:change={e => onChange(e, field)}
|
||||
useLabel={false}
|
||||
/>
|
||||
{:else if ["string", "number", "bigint", "barcodeqr"].includes(schema.type)}
|
||||
<svelte:component
|
||||
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
|
||||
panel={AutomationBindingPanel}
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
export let schema
|
||||
export let linkedRows = []
|
||||
export let useLabel = true
|
||||
export let linkedTableId
|
||||
export let label
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
let rows = []
|
||||
|
@ -16,8 +18,8 @@
|
|||
$: linkedIds = (Array.isArray(linkedRows) ? linkedRows : [])?.map(
|
||||
row => row?._id || row
|
||||
)
|
||||
$: label = capitalise(schema.name)
|
||||
$: linkedTableId = schema.tableId
|
||||
$: label = label || capitalise(schema.name)
|
||||
$: linkedTableId = linkedTableId || schema.tableId
|
||||
$: linkedTable = $tables.list.find(table => table._id === linkedTableId)
|
||||
$: fetchRows(linkedTableId)
|
||||
|
||||
|
@ -57,7 +59,7 @@
|
|||
{:else}
|
||||
<Multiselect
|
||||
value={linkedIds}
|
||||
{label}
|
||||
label={useLabel ? label : null}
|
||||
options={rows}
|
||||
getOptionLabel={getPrettyName}
|
||||
getOptionValue={row => row._id}
|
||||
|
|
|
@ -113,7 +113,7 @@
|
|||
if (type === "json" && !isJSBinding(value)) {
|
||||
return "json-slot-icon"
|
||||
}
|
||||
if (type !== "string" && type !== "number") {
|
||||
if (!["string", "number", "bigint", "barcodeqr"].includes(type)) {
|
||||
return "slot-icon"
|
||||
}
|
||||
return ""
|
||||
|
|
|
@ -164,7 +164,8 @@
|
|||
// Required constraint
|
||||
if (
|
||||
field === dataSourceSchema?.table?.primaryDisplay ||
|
||||
constraints.presence?.allowEmpty === false
|
||||
constraints.presence?.allowEmpty === false ||
|
||||
constraints.presence === true
|
||||
) {
|
||||
rules.push({
|
||||
constraint: "required",
|
||||
|
|
|
@ -23,7 +23,8 @@ export const createValidatorFromConstraints = (
|
|||
// Required constraint
|
||||
if (
|
||||
field === table?.primaryDisplay ||
|
||||
schemaConstraints.presence?.allowEmpty === false
|
||||
schemaConstraints.presence?.allowEmpty === false ||
|
||||
schemaConstraints.presence === true
|
||||
) {
|
||||
rules.push({
|
||||
type: schemaConstraints.type == "array" ? "array" : "string",
|
||||
|
|
|
@ -65,7 +65,7 @@
    "cookies": "0.8.0",
    "csvtojson": "2.0.10",
    "curlconverter": "3.21.0",
    "dd-trace": "4.20.0",
    "dd-trace": "3.13.2",
    "dotenv": "8.2.0",
    "form-data": "4.0.0",
    "global-agent": "3.0.0",
@ -0,0 +1,196 @@
|
|||
#!/bin/node
|
||||
const {
|
||||
createApp,
|
||||
getTable,
|
||||
createRow,
|
||||
createTable,
|
||||
getApp,
|
||||
getRows,
|
||||
} = require("./utils")
|
||||
|
||||
const Chance = require("chance")
|
||||
|
||||
const generator = new Chance()
|
||||
|
||||
const STUDENT_COUNT = 500
|
||||
const SUBJECT_COUNT = 10
|
||||
|
||||
let { apiKey, appId } = require("yargs")
|
||||
.demandOption(["apiKey"])
|
||||
.option("appId").argv
|
||||
|
||||
const start = Date.now()
|
||||
async function batchCreate(apiKey, appId, table, items, batchSize = 100) {
|
||||
let i = 0
|
||||
let errors = 0
|
||||
|
||||
async function createSingleRow(item) {
|
||||
try {
|
||||
const row = await createRow(apiKey, appId, table, item)
|
||||
console.log(
|
||||
`${table.name} - ${++i} of ${items.length} created (${
|
||||
(Date.now() - start) / 1000
|
||||
}s)`
|
||||
)
|
||||
return row
|
||||
} catch {
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
const rows = []
|
||||
const maxConcurrency = Math.min(batchSize, items.length)
|
||||
const inFlight = {}
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
const item = items[itemIndex]
|
||||
const promise = createSingleRow(item)
|
||||
.then(result => {
|
||||
rows.push(result)
|
||||
})
|
||||
.finally(() => {
|
||||
delete inFlight[itemIndex]
|
||||
})
|
||||
|
||||
inFlight[itemIndex] = promise
|
||||
|
||||
if (Object.keys(inFlight).length >= maxConcurrency) {
|
||||
await Promise.race(Object.values(inFlight))
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(Object.values(inFlight))
|
||||
|
||||
if (errors) {
|
||||
console.error(
|
||||
`${table.name} - ${errors} creations errored (${
|
||||
(Date.now() - start) / 1000
|
||||
}s)`
|
||||
)
|
||||
}
|
||||
|
||||
return rows
|
||||
}
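batchCreate above bounds concurrency by racing the in-flight promise map; the same idea in isolation (a sketch, not part of the script):

// generic bounded-concurrency map: never more than `limit` promises pending
async function mapWithConcurrency<T, R>(
  items: T[],
  limit: number,
  fn: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = []
  const inFlight = new Set<Promise<void>>()
  for (const item of items) {
    const p: Promise<void> = fn(item)
      .then(r => {
        results.push(r)
      })
      .finally(() => inFlight.delete(p))
    inFlight.add(p)
    if (inFlight.size >= limit) {
      // wait for any one request to settle before starting another
      await Promise.race(inFlight)
    }
  }
  await Promise.all(inFlight)
  return results
}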
|
||||
|
||||
const useExistingApp = !!appId
|
||||
|
||||
async function upsertTable(appId, tableName, tableData) {
|
||||
if (useExistingApp) {
|
||||
return await getTable(apiKey, appId, tableName)
|
||||
}
|
||||
|
||||
const table = await createTable(apiKey, appId, {
|
||||
...tableData,
|
||||
name: tableName,
|
||||
})
|
||||
return table
|
||||
}
|
||||
|
||||
async function run() {
|
||||
if (!appId) {
|
||||
const app = appId ? await getApp(apiKey, appId) : await createApp(apiKey)
|
||||
appId = app._id
|
||||
|
||||
console.log(`App created. Url: http://localhost:10000/builder/app/${appId}`)
|
||||
} else {
|
||||
console.log(
|
||||
`App retrieved. Url: http://localhost:10000/builder/app/${appId}`
|
||||
)
|
||||
}
|
||||
|
||||
const studentsTable = await getTable(apiKey, appId, "Students")
|
||||
|
||||
let studentNumber = studentsTable.schema["Auto ID"].lastID
|
||||
const students = await batchCreate(
|
||||
apiKey,
|
||||
appId,
|
||||
studentsTable,
|
||||
Array.from({ length: STUDENT_COUNT }).map(() => ({
|
||||
"Student Number": (++studentNumber).toString(),
|
||||
"First Name": generator.first(),
|
||||
"Last Name": generator.last(),
|
||||
Gender: generator.pickone(["M", "F"]),
|
||||
Grade: generator.pickone(["8", "9", "10", "11"]),
|
||||
"Tardiness (Days)": generator.integer({ min: 1, max: 100 }),
|
||||
"Home Number": generator.phone(),
|
||||
"Attendance_(%)": generator.integer({ min: 0, max: 100 }),
|
||||
}))
|
||||
)
|
||||
|
||||
const subjectTable = await upsertTable(appId, "Subjects", {
|
||||
schema: {
|
||||
Name: {
|
||||
name: "Name",
|
||||
type: "string",
|
||||
},
|
||||
},
|
||||
primaryDisplay: "Name",
|
||||
})
|
||||
|
||||
const subjects = useExistingApp
|
||||
? await getRows(apiKey, appId, subjectTable._id)
|
||||
: await batchCreate(
|
||||
apiKey,
|
||||
appId,
|
||||
subjectTable,
|
||||
Array.from({ length: SUBJECT_COUNT }).map(() => ({
|
||||
Name: generator.profession(),
|
||||
}))
|
||||
)
|
||||
|
||||
const gradesTable = await upsertTable(appId, "Grades", {
|
||||
schema: {
|
||||
Score: {
|
||||
name: "Score",
|
||||
type: "number",
|
||||
},
|
||||
Student: {
|
||||
name: "Student",
|
||||
tableId: studentsTable._id,
|
||||
constraints: {
|
||||
presence: true,
|
||||
type: "array",
|
||||
},
|
||||
fieldName: "Grades",
|
||||
relationshipType: "one-to-many",
|
||||
type: "link",
|
||||
},
|
||||
Subject: {
|
||||
name: "Subject",
|
||||
tableId: subjectTable._id,
|
||||
constraints: {
|
||||
presence: true,
|
||||
type: "array",
|
||||
},
|
||||
fieldName: "Grades",
|
||||
relationshipType: "one-to-many",
|
||||
type: "link",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
await batchCreate(
|
||||
apiKey,
|
||||
appId,
|
||||
gradesTable,
|
||||
students.flatMap(student =>
|
||||
subjects.map(subject => ({
|
||||
Score: generator.integer({ min: 0, max: 100 }),
|
||||
Student: [student],
|
||||
Subject: [subject],
|
||||
}))
|
||||
)
|
||||
)
|
||||
|
||||
console.log(
|
||||
`Access the app here: http://localhost:10000/builder/app/${appId}`
|
||||
)
|
||||
}
|
||||
|
||||
run()
|
||||
.then(() => {
|
||||
console.log(`Done in ${(Date.now() - start) / 1000} seconds`)
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err)
|
||||
})
|
|
@ -0,0 +1,29 @@
|
|||
#!/bin/node
|
||||
const { searchApps, deleteApp } = require("./utils")
|
||||
|
||||
if (!process.argv[2]) {
|
||||
console.error("Please specify an API key as script argument.")
|
||||
process.exit(-1)
|
||||
}
|
||||
|
||||
async function run() {
|
||||
const apiKey = process.argv[2]
|
||||
const apps = await searchApps(apiKey)
|
||||
console.log(`Deleting ${apps.length} apps`)
|
||||
|
||||
let deletedApps = 0
|
||||
await Promise.all(
|
||||
apps.map(async app => {
|
||||
await deleteApp(apiKey, app._id)
|
||||
console.log(`App ${++deletedApps} of ${apps.length} deleted`)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
run()
|
||||
.then(() => {
|
||||
console.log("Done!")
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err)
|
||||
})
|
|
@ -2,7 +2,8 @@ const fetch = require("node-fetch")
|
|||
const uuid = require("uuid/v4")
|
||||
|
||||
const URL_APP = "http://localhost:10000/api/public/v1/applications"
|
||||
const URL_TABLE = "http://localhost:10000/api/public/v1/tables/search"
|
||||
const URL_TABLE = "http://localhost:10000/api/public/v1/tables"
|
||||
const URL_SEARCH_TABLE = "http://localhost:10000/api/public/v1/tables/search"
|
||||
|
||||
async function request(apiKey, url, method, body, appId = undefined) {
|
||||
const headers = {
|
||||
|
@ -37,30 +38,64 @@ exports.createApp = async apiKey => {
|
|||
return json.data
|
||||
}
|
||||
|
||||
exports.getTable = async (apiKey, appId) => {
|
||||
const res = await request(apiKey, URL_TABLE, "POST", {}, appId)
|
||||
exports.getApp = async (apiKey, appId) => {
|
||||
const res = await request(apiKey, `${URL_APP}/${appId}`, "GET")
|
||||
const json = await res.json()
|
||||
return json.data[0]
|
||||
return json.data
|
||||
}
|
||||
exports.searchApps = async apiKey => {
|
||||
const res = await request(apiKey, `${URL_APP}/search`, "POST", {})
|
||||
const json = await res.json()
|
||||
return json.data
|
||||
}
|
||||
|
||||
exports.createRow = async (apiKey, appId, table) => {
|
||||
const body = {}
|
||||
for (let [key, schema] of Object.entries(table.schema)) {
|
||||
let fake
|
||||
switch (schema.type) {
|
||||
default:
|
||||
case "string":
|
||||
fake = schema.constraints.inclusion
|
||||
? schema.constraints.inclusion[0]
|
||||
: "a"
|
||||
break
|
||||
case "number":
|
||||
fake = 1
|
||||
break
|
||||
exports.deleteApp = async (apiKey, appId) => {
|
||||
const res = await request(apiKey, `${URL_APP}/${appId}`, "DELETE")
|
||||
return res
|
||||
}
|
||||
|
||||
exports.getTable = async (apiKey, appId, tableName) => {
|
||||
const res = await request(apiKey, URL_SEARCH_TABLE, "POST", {}, appId)
|
||||
const json = await res.json()
|
||||
const table = json.data.find(t => t.name === tableName)
|
||||
if (!table) {
|
||||
throw `Table '${tableName}' not found`
|
||||
}
|
||||
return table
|
||||
}
|
||||
|
||||
exports.createRow = async (apiKey, appId, table, body) => {
|
||||
if (!body) {
|
||||
body = {}
|
||||
for (let [key, schema] of Object.entries(table.schema)) {
|
||||
let fake
|
||||
switch (schema.type) {
|
||||
default:
|
||||
case "string":
|
||||
fake = schema.constraints?.inclusion
|
||||
? schema.constraints.inclusion[0]
|
||||
: "a"
|
||||
break
|
||||
case "number":
|
||||
fake = 1
|
||||
break
|
||||
}
|
||||
body[key] = fake
|
||||
}
|
||||
body[key] = fake
|
||||
}
|
||||
const url = `http://localhost:10000/api/public/v1/tables/${table._id}/rows`
|
||||
const res = await request(apiKey, url, "POST", body, appId)
|
||||
return (await res.json()).data
|
||||
}
|
||||
|
||||
exports.getRows = async (apiKey, appId, tableId) => {
|
||||
const url = `${URL_TABLE}/${tableId}/rows/search`
|
||||
const res = await request(apiKey, url, "POST", {}, appId)
|
||||
return (await res.json()).data
|
||||
}
|
||||
|
||||
exports.createTable = async (apiKey, appId, config) => {
|
||||
const res = await request(apiKey, URL_TABLE, "POST", config, appId)
|
||||
const json = await res.json()
|
||||
return json.data
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ import {
|
|||
inputProcessing,
|
||||
outputProcessing,
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import { cloneDeep, isEqual } from "lodash"
|
||||
import { cloneDeep } from "lodash"
|
||||
|
||||
export async function handleRequest<T extends Operation>(
|
||||
operation: T,
|
||||
|
@ -86,50 +86,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
}
|
||||
}
|
||||
|
||||
export async function save(ctx: UserCtx) {
|
||||
const inputs = ctx.request.body
|
||||
const tableId = utils.getTableId(ctx)
|
||||
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
const { table: updatedTable, row } = await inputProcessing(
|
||||
ctx.user?._id,
|
||||
cloneDeep(table),
|
||||
inputs
|
||||
)
|
||||
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row,
|
||||
tableId,
|
||||
})
|
||||
if (!validateResult.valid) {
|
||||
throw { validation: validateResult.errors }
|
||||
}
|
||||
|
||||
const response = await handleRequest(Operation.CREATE, tableId, {
|
||||
row,
|
||||
})
|
||||
|
||||
if (!isEqual(table, updatedTable)) {
|
||||
await sdk.tables.saveTable(updatedTable)
|
||||
}
|
||||
|
||||
const rowId = response.row._id
|
||||
if (rowId) {
|
||||
const row = await sdk.rows.external.getRow(tableId, rowId, {
|
||||
relationships: true,
|
||||
})
|
||||
return {
|
||||
...response,
|
||||
row: await outputProcessing(table, row, {
|
||||
preserveLinks: true,
|
||||
squash: true,
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
return response
|
||||
}
|
||||
}
|
||||
|
||||
export async function find(ctx: UserCtx): Promise<Row> {
|
||||
const id = ctx.params.rowId
|
||||
const tableId = utils.getTableId(ctx)
|
||||
|
|
|
@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
|
|||
|
||||
export * as views from "./views"
|
||||
|
||||
function pickApi(tableId: any) {
|
||||
function pickApi(tableId: string) {
|
||||
if (isExternalTableID(tableId)) {
|
||||
return external
|
||||
}
|
||||
|
@ -84,9 +84,12 @@ export const save = async (ctx: UserCtx<Row, Row>) => {
|
|||
return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>)
|
||||
}
|
||||
const { row, table, squashed } = await quotas.addRow(() =>
|
||||
quotas.addQuery(() => pickApi(tableId).save(ctx), {
|
||||
datasourceId: tableId,
|
||||
})
|
||||
quotas.addQuery(
|
||||
() => sdk.rows.save(tableId, ctx.request.body, ctx.user?._id),
|
||||
{
|
||||
datasourceId: tableId,
|
||||
}
|
||||
)
|
||||
)
|
||||
ctx.status = 200
|
||||
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import * as linkRows from "../../../db/linkedRows"
|
||||
import { generateRowID, InternalTables } from "../../../db/utils"
|
||||
import { InternalTables } from "../../../db/utils"
|
||||
import * as userController from "../user"
|
||||
import {
|
||||
AttachmentCleanup,
|
||||
|
@ -94,45 +94,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
})
|
||||
}
|
||||
|
||||
export async function save(ctx: UserCtx) {
|
||||
let inputs = ctx.request.body
|
||||
inputs.tableId = utils.getTableId(ctx)
|
||||
|
||||
if (!inputs._rev && !inputs._id) {
|
||||
inputs._id = generateRowID(inputs.tableId)
|
||||
}
|
||||
|
||||
// this returns the table and row in case they have been updated
|
||||
const dbTable = await sdk.tables.getTable(inputs.tableId)
|
||||
|
||||
// need to copy the table so it can be differenced on way out
|
||||
const tableClone = cloneDeep(dbTable)
|
||||
|
||||
let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)
|
||||
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row,
|
||||
table,
|
||||
})
|
||||
|
||||
if (!validateResult.valid) {
|
||||
throw { validation: validateResult.errors }
|
||||
}
|
||||
|
||||
// make sure link rows are up-to-date
|
||||
row = (await linkRows.updateLinks({
|
||||
eventType: linkRows.EventType.ROW_SAVE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})) as Row
|
||||
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
||||
|
||||
export async function find(ctx: UserCtx): Promise<Row> {
|
||||
const tableId = utils.getTableId(ctx),
|
||||
rowId = ctx.params.rowId
|
||||
|
|
|
@ -5,8 +5,8 @@ import {
|
|||
processFormulas,
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import { FieldTypes, FormulaTypes } from "../../../constants"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { Table, Row } from "@budibase/types"
|
||||
import { context, locks } from "@budibase/backend-core"
|
||||
import { Table, Row, LockType, LockName } from "@budibase/types"
|
||||
import * as linkRows from "../../../db/linkedRows"
|
||||
import sdk from "../../../sdk"
|
||||
import isEqual from "lodash/isEqual"
|
||||
|
@ -149,12 +149,22 @@ export async function finaliseRow(
|
|||
await db.put(table)
|
||||
} catch (err: any) {
|
||||
if (err.status === 409) {
|
||||
const updatedTable = await sdk.tables.getTable(table._id!)
|
||||
let response = processAutoColumn(null, updatedTable, row, {
|
||||
reprocessing: true,
|
||||
})
|
||||
await db.put(response.table)
|
||||
row = response.row
|
||||
// Some conflicts with the autocolumns occurred; we need to refetch the table and recalculate
|
||||
await locks.doWithLock(
|
||||
{
|
||||
type: LockType.AUTO_EXTEND,
|
||||
name: LockName.PROCESS_AUTO_COLUMNS,
|
||||
resource: table._id,
|
||||
},
|
||||
async () => {
|
||||
const latestTable = await sdk.tables.getTable(table._id!)
|
||||
let response = processAutoColumn(null, latestTable, row, {
|
||||
reprocessing: true,
|
||||
})
|
||||
await db.put(response.table)
|
||||
row = response.row
|
||||
}
|
||||
)
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
|
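The 409 retry now runs inside an auto-extending lock keyed on the table, so concurrent saves serialise their auto-column updates rather than repeatedly conflicting. Condensed into one function (names from the hunk above; the function wrapper itself is illustrative):

// condensed sketch: retry the auto-column write while holding the lock
async function retryAutoColumnsUnderLock(table: Table, row: Row, db: Database) {
  await locks.doWithLock(
    {
      type: LockType.AUTO_EXTEND,
      name: LockName.PROCESS_AUTO_COLUMNS,
      resource: table._id,
    },
    async () => {
      // re-read the latest table revision and recalculate under the lock
      const latestTable = await sdk.tables.getTable(table._id!)
      const response = processAutoColumn(null, latestTable, row, {
        reprocessing: true,
      })
      await db.put(response.table)
      row = response.row
    }
  )
  return row
}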
|
|
@ -77,7 +77,7 @@ const publicRouter = new Router({
  prefix: PREFIX,
})

if (limiter) {
if (limiter && !env.isDev()) {
  publicRouter.use(limiter)
}

@ -2086,4 +2086,112 @@ describe.each([
|
|||
expect(row.formula).toBe(relatedRow.name)
|
||||
})
|
||||
})
|
||||
|
||||
describe("Formula JS protection", () => {
|
||||
it("should time out JS execution if a single cell takes too long", async () => {
|
||||
await config.withEnv({ JS_PER_EXECUTION_TIME_LIMIT_MS: 20 }, async () => {
|
||||
const js = Buffer.from(
|
||||
`
|
||||
let i = 0;
|
||||
while (true) {
|
||||
i++;
|
||||
}
|
||||
return i;
|
||||
`
|
||||
).toString("base64")
|
||||
|
||||
const table = await config.createTable({
|
||||
name: "table",
|
||||
type: "table",
|
||||
schema: {
|
||||
text: {
|
||||
name: "text",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
formula: {
|
||||
name: "formula",
|
||||
type: FieldType.FORMULA,
|
||||
formula: `{{ js "${js}"}}`,
|
||||
formulaType: FormulaTypes.DYNAMIC,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.row.save(table._id!, { text: "foo" })
|
||||
const { rows } = await config.api.row.search(table._id!)
|
||||
expect(rows).toHaveLength(1)
|
||||
const row = rows[0]
|
||||
expect(row.text).toBe("foo")
|
||||
expect(row.formula).toBe("Timed out while executing JS")
|
||||
})
|
||||
})
|
||||
|
||||
it("should time out JS execution if a multiple cells take too long", async () => {
|
||||
await config.withEnv(
|
||||
{
|
||||
JS_PER_EXECUTION_TIME_LIMIT_MS: 20,
|
||||
JS_PER_REQUEST_TIME_LIMIT_MS: 40,
|
||||
},
|
||||
async () => {
|
||||
const js = Buffer.from(
|
||||
`
|
||||
let i = 0;
|
||||
while (true) {
|
||||
i++;
|
||||
}
|
||||
return i;
|
||||
`
|
||||
).toString("base64")
|
||||
|
||||
const table = await config.createTable({
|
||||
name: "table",
|
||||
type: "table",
|
||||
schema: {
|
||||
text: {
|
||||
name: "text",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
formula: {
|
||||
name: "formula",
|
||||
type: FieldType.FORMULA,
|
||||
formula: `{{ js "${js}"}}`,
|
||||
formulaType: FormulaTypes.DYNAMIC,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await config.api.row.save(table._id!, { text: "foo" })
|
||||
}
|
||||
|
||||
// Run this test 3 times to make sure that there's no cross-request
|
||||
// pollution of the execution time tracking.
|
||||
for (let reqs = 0; reqs < 3; reqs++) {
|
||||
const { rows } = await config.api.row.search(table._id!)
|
||||
expect(rows).toHaveLength(10)
|
||||
|
||||
let i = 0
|
||||
for (; i < 10; i++) {
|
||||
const row = rows[i]
|
||||
if (row.formula !== "Timed out while executing JS") {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Given the execution times are not deterministic, we can't be sure
|
||||
// of the exact number of rows that were executed before the timeout
|
||||
// but it should absolutely be at least 1.
|
||||
expect(i).toBeGreaterThan(0)
|
||||
expect(i).toBeLessThan(5)
|
||||
|
||||
for (; i < 10; i++) {
|
||||
const row = rows[i]
|
||||
expect(row.text).toBe("foo")
|
||||
expect(row.formula).toBe("Request JS execution limit hit")
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -84,9 +84,11 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
|
|||
|
||||
// clear any undefined, null or empty string properties so that they aren't updated
|
||||
for (let propKey of Object.keys(inputs.row)) {
|
||||
const clearRelationships =
|
||||
inputs.meta?.fields?.[propKey]?.clearRelationships
|
||||
if (
|
||||
(inputs.row[propKey] == null || inputs.row[propKey] === "") &&
|
||||
!inputs.meta?.fields?.[propKey]?.clearRelationships
|
||||
(inputs.row[propKey] == null || inputs.row[propKey]?.length === 0) &&
|
||||
!clearRelationships
|
||||
) {
|
||||
delete inputs.row[propKey]
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
} from "@budibase/types"
|
||||
import sdk from "../sdk"
|
||||
import { automationsEnabled } from "../features"
|
||||
import tracer from "dd-trace"
|
||||
|
||||
const REBOOT_CRON = "@reboot"
|
||||
const WH_STEP_ID = definitions.WEBHOOK.stepId
|
||||
|
@ -39,26 +40,62 @@ function loggingArgs(job: AutomationJob) {
|
|||
}
|
||||
|
||||
export async function processEvent(job: AutomationJob) {
|
||||
const appId = job.data.event.appId!
|
||||
const automationId = job.data.automation._id!
|
||||
const task = async () => {
|
||||
try {
|
||||
// need to actually await these so that an error can be captured properly
|
||||
console.log("automation running", ...loggingArgs(job))
|
||||
return tracer.trace(
|
||||
"processEvent",
|
||||
{ resource: "automation" },
|
||||
async span => {
|
||||
const appId = job.data.event.appId!
|
||||
const automationId = job.data.automation._id!
|
||||
|
||||
const runFn = () => Runner.run(job)
|
||||
const result = await quotas.addAutomation(runFn, {
|
||||
span?.addTags({
|
||||
appId,
|
||||
automationId,
|
||||
job: {
|
||||
id: job.id,
|
||||
name: job.name,
|
||||
attemptsMade: job.attemptsMade,
|
||||
opts: {
|
||||
attempts: job.opts.attempts,
|
||||
priority: job.opts.priority,
|
||||
delay: job.opts.delay,
|
||||
repeat: job.opts.repeat,
|
||||
backoff: job.opts.backoff,
|
||||
lifo: job.opts.lifo,
|
||||
timeout: job.opts.timeout,
|
||||
jobId: job.opts.jobId,
|
||||
removeOnComplete: job.opts.removeOnComplete,
|
||||
removeOnFail: job.opts.removeOnFail,
|
||||
stackTraceLimit: job.opts.stackTraceLimit,
|
||||
preventParsingData: job.opts.preventParsingData,
|
||||
},
|
||||
},
|
||||
})
|
||||
console.log("automation completed", ...loggingArgs(job))
|
||||
return result
|
||||
} catch (err) {
|
||||
console.error(`automation was unable to run`, err, ...loggingArgs(job))
|
||||
return { err }
|
||||
}
|
||||
}
|
||||
|
||||
return await context.doInAutomationContext({ appId, automationId, task })
|
||||
const task = async () => {
|
||||
try {
|
||||
// need to actually await these so that an error can be captured properly
|
||||
console.log("automation running", ...loggingArgs(job))
|
||||
|
||||
const runFn = () => Runner.run(job)
|
||||
const result = await quotas.addAutomation(runFn, {
|
||||
automationId,
|
||||
})
|
||||
console.log("automation completed", ...loggingArgs(job))
|
||||
return result
|
||||
} catch (err) {
|
||||
span?.addTags({ error: true })
|
||||
console.error(
|
||||
`automation was unable to run`,
|
||||
err,
|
||||
...loggingArgs(job)
|
||||
)
|
||||
return { err }
|
||||
}
|
||||
}
|
||||
|
||||
return await context.doInAutomationContext({ appId, automationId, task })
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export async function updateTestHistory(
|
||||
|
|
|
@ -70,6 +70,11 @@ const environment = {
|
|||
SELF_HOSTED: process.env.SELF_HOSTED,
|
||||
HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
|
||||
FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
|
||||
JS_PER_EXECUTION_TIME_LIMIT_MS:
|
||||
parseIntSafe(process.env.JS_PER_EXECUTION_TIME_LIMIT_MS) || 1000,
|
||||
JS_PER_REQUEST_TIME_LIMIT_MS: parseIntSafe(
|
||||
process.env.JS_PER_REQUEST_TIME_LIMIT_MS
|
||||
),
|
||||
// old
|
||||
CLIENT_ID: process.env.CLIENT_ID,
|
||||
_set(key: string, value: any) {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import {
|
||||
ConnectionInfo,
|
||||
Database,
|
||||
DatasourceFeature,
|
||||
DatasourceFieldType,
|
||||
Document,
|
||||
|
@ -66,7 +67,7 @@ const SCHEMA: Integration = {
|
|||
}
|
||||
|
||||
class CouchDBIntegration implements IntegrationBase {
|
||||
private readonly client: dbCore.DatabaseImpl
|
||||
private readonly client: Database
|
||||
|
||||
constructor(config: CouchDBConfig) {
|
||||
this.client = dbCore.DatabaseWithConnection(config.database, config.url)
|
||||
|
|
|
@ -131,7 +131,10 @@ class RestIntegration implements IntegrationBase {
|
|||
let data, raw, headers
|
||||
const contentType = response.headers.get("content-type") || ""
|
||||
try {
|
||||
if (contentType.includes("application/json")) {
|
||||
if (response.status === 204) {
|
||||
data = []
|
||||
raw = []
|
||||
} else if (contentType.includes("application/json")) {
|
||||
data = await response.json()
|
||||
raw = JSON.stringify(data)
|
||||
} else if (
|
||||
|
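The new guard exists because a 204 response has no body, and response.json() throws when asked to parse an empty body. A minimal sketch of the rule (standard fetch API):

// sketch: why 204 must be special-cased before parsing
async function parseBody(response: Response) {
  if (response.status === 204) {
    return [] // no content - response.json() would throw on the empty body
  }
  return await response.json()
}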
|
|
@ -186,9 +186,15 @@ describe("REST Integration", () => {
|
|||
})
|
||||
|
||||
describe("response", () => {
|
||||
function buildInput(json: any, text: any, header: any) {
|
||||
const contentTypes = ["application/json", "text/plain", "application/xml"]
|
||||
function buildInput(
|
||||
json: any,
|
||||
text: any,
|
||||
header: any,
|
||||
status: number = 200
|
||||
) {
|
||||
return {
|
||||
status: 200,
|
||||
status,
|
||||
json: json ? async () => json : undefined,
|
||||
text: text ? async () => text : undefined,
|
||||
headers: {
|
||||
|
@ -225,6 +231,18 @@ describe("REST Integration", () => {
|
|||
expect(output.extra.raw).toEqual(text)
|
||||
expect(output.extra.headers["content-type"]).toEqual("application/xml")
|
||||
})
|
||||
|
||||
test.each(contentTypes)(
|
||||
"should not throw an error on 204 no content",
|
||||
async contentType => {
|
||||
const input = buildInput(undefined, null, contentType, 204)
|
||||
const output = await config.integration.parseResponse(input)
|
||||
expect(output.data).toEqual([])
|
||||
expect(output.extra.raw).toEqual([])
|
||||
expect(output.info.code).toEqual(204)
|
||||
expect(output.extra.headers["content-type"]).toEqual(contentType)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe("authentication", () => {
|
||||
|
|
|
@ -0,0 +1,45 @@
import vm from "vm"
import env from "./environment"
import { setJSRunner } from "@budibase/string-templates"
import { context, timers } from "@budibase/backend-core"
import tracer from "dd-trace"

type TrackerFn = <T>(f: () => T) => T

export function init() {
  setJSRunner((js: string, ctx: vm.Context) => {
    return tracer.trace("runJS", {}, span => {
      const perRequestLimit = env.JS_PER_REQUEST_TIME_LIMIT_MS
      let track: TrackerFn = f => f()
      if (perRequestLimit) {
        const bbCtx = context.getCurrentContext()
        if (bbCtx) {
          if (!bbCtx.jsExecutionTracker) {
            bbCtx.jsExecutionTracker =
              timers.ExecutionTimeTracker.withLimit(perRequestLimit)
          }
          track = bbCtx.jsExecutionTracker.track.bind(bbCtx.jsExecutionTracker)
          span?.addTags({
            js: {
              limitMS: bbCtx.jsExecutionTracker.limitMs,
              elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
            },
          })
        }
      }

      ctx = {
        ...ctx,
        alert: undefined,
        setInterval: undefined,
        setTimeout: undefined,
      }
      vm.createContext(ctx)
      return track(() =>
        vm.runInNewContext(js, ctx, {
          timeout: env.JS_PER_EXECUTION_TIME_LIMIT_MS,
        })
      )
    })
  })
}
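The two limits compose: the timeout passed to vm.runInNewContext caps a single snippet, while the context-scoped tracker caps all snippets in one request. A sketch of the request-level budget (values illustrative, not from the commit):

// each vm execution is individually capped, and the shared tracker stops the
// whole request once the snippets' combined runtime exceeds its limit
import { timers } from "@budibase/backend-core"

const requestBudget = timers.ExecutionTimeTracker.withLimit(4000) // 4s, example
const snippets: Array<() => unknown> = [
  /* compiled JS formulas for this request */
]
for (const snippet of snippets) {
  // throws ExecutionTimeoutError once 4s of combined execution is spent
  requestBudget.track(snippet)
}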
@ -23,7 +23,7 @@ export default async (ctx: UserCtx, next: any) => {

  if (requestAppId) {
    const span = tracer.scope().active()
    span?.addTags({ app_id: requestAppId })
    span?.setTag("appId", requestAppId)
  }

  // deny access to application preview

@ -76,6 +76,14 @@ export default async (ctx: UserCtx, next: any) => {
    return next()
  }

  if (ctx.user) {
    const span = tracer.scope().active()
    if (ctx.user._id) {
      span?.setTag("userId", ctx.user._id)
    }
    span?.setTag("tenantId", ctx.user.tenantId)
  }

  const userId = ctx.user ? generateUserMetadataID(ctx.user._id!) : undefined

  // if the user is not in the right tenant then make sure to wipe their cookie
@ -1,6 +1,13 @@
|
|||
import { IncludeRelationship, Operation } from "@budibase/types"
|
||||
import { IncludeRelationship, Operation, Row } from "@budibase/types"
|
||||
import { handleRequest } from "../../../api/controllers/row/external"
|
||||
import { breakRowIdField } from "../../../integrations/utils"
|
||||
import sdk from "../../../sdk"
|
||||
import {
|
||||
inputProcessing,
|
||||
outputProcessing,
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import cloneDeep from "lodash/fp/cloneDeep"
|
||||
import isEqual from "lodash/fp/isEqual"
|
||||
|
||||
export async function getRow(
|
||||
tableId: string,
|
||||
|
@ -15,3 +22,48 @@ export async function getRow(
|
|||
})
|
||||
return response ? response[0] : response
|
||||
}
|
||||
|
||||
export async function save(
|
||||
tableId: string,
|
||||
inputs: Row,
|
||||
userId: string | undefined
|
||||
) {
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
const { table: updatedTable, row } = await inputProcessing(
|
||||
userId,
|
||||
cloneDeep(table),
|
||||
inputs
|
||||
)
|
||||
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row,
|
||||
tableId,
|
||||
})
|
||||
if (!validateResult.valid) {
|
||||
throw { validation: validateResult.errors }
|
||||
}
|
||||
|
||||
const response = await handleRequest(Operation.CREATE, tableId, {
|
||||
row,
|
||||
})
|
||||
|
||||
if (!isEqual(table, updatedTable)) {
|
||||
await sdk.tables.saveTable(updatedTable)
|
||||
}
|
||||
|
||||
const rowId = response.row._id
|
||||
if (rowId) {
|
||||
const row = await sdk.rows.external.getRow(tableId, rowId, {
|
||||
relationships: true,
|
||||
})
|
||||
return {
|
||||
...response,
|
||||
row: await outputProcessing(table, row, {
|
||||
preserveLinks: true,
|
||||
squash: true,
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
return response
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
import { db } from "@budibase/backend-core"
|
||||
import { Row } from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
import cloneDeep from "lodash/fp/cloneDeep"
|
||||
import { finaliseRow } from "../../../api/controllers/row/staticFormula"
|
||||
import { inputProcessing } from "../../../utilities/rowProcessor"
|
||||
import * as linkRows from "../../../db/linkedRows"
|
||||
|
||||
export async function save(
|
||||
tableId: string,
|
||||
inputs: Row,
|
||||
userId: string | undefined
|
||||
) {
|
||||
inputs.tableId = tableId
|
||||
|
||||
if (!inputs._rev && !inputs._id) {
|
||||
inputs._id = db.generateRowID(inputs.tableId)
|
||||
}
|
||||
|
||||
// this returns the table and row in case they have been updated
|
||||
const dbTable = await sdk.tables.getTable(inputs.tableId)
|
||||
|
||||
// need to copy the table so it can be differenced on way out
|
||||
const tableClone = cloneDeep(dbTable)
|
||||
|
||||
let { table, row } = await inputProcessing(userId, tableClone, inputs)
|
||||
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row,
|
||||
table,
|
||||
})
|
||||
|
||||
if (!validateResult.valid) {
|
||||
throw { validation: validateResult.errors }
|
||||
}
|
||||
|
||||
// make sure link rows are up-to-date
|
||||
row = (await linkRows.updateLinks({
|
||||
eventType: linkRows.EventType.ROW_SAVE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})) as Row
|
||||
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
|
@ -1,6 +1,9 @@
|
|||
import { db as dbCore, context } from "@budibase/backend-core"
|
||||
import { Database, Row } from "@budibase/types"
|
||||
import { getRowParams } from "../../../db/utils"
|
||||
import { isExternalTableID } from "../../../integrations/utils"
|
||||
import * as internal from "./internal"
|
||||
import * as external from "./external"
|
||||
|
||||
export async function getAllInternalRows(appId?: string) {
|
||||
let db: Database
|
||||
|
@ -16,3 +19,18 @@ export async function getAllInternalRows(appId?: string) {
|
|||
)
|
||||
return response.rows.map(row => row.doc) as Row[]
|
||||
}
|
||||
|
||||
function pickApi(tableId: any) {
|
||||
if (isExternalTableID(tableId)) {
|
||||
return external
|
||||
}
|
||||
return internal
|
||||
}
|
||||
|
||||
export async function save(
|
||||
tableId: string,
|
||||
row: Row,
|
||||
userId: string | undefined
|
||||
) {
|
||||
return pickApi(tableId).save(tableId, row, userId)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,220 @@
|
|||
import tk from "timekeeper"
|
||||
import * as internalSdk from "../internal"
|
||||
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import {
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
TableSourceType,
|
||||
FieldType,
|
||||
Table,
|
||||
AutoFieldSubTypes,
|
||||
} from "@budibase/types"
|
||||
|
||||
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
|
||||
import { cache } from "@budibase/backend-core"
|
||||
|
||||
tk.freeze(Date.now())
|
||||
|
||||
describe("sdk >> rows >> internal", () => {
|
||||
const config = new TestConfiguration()

beforeAll(async () => {
  await config.init()
})

function makeRow() {
  return {
    name: generator.first(),
    surname: generator.last(),
    age: generator.age(),
    address: generator.address(),
  }
}

describe("save", () => {
  const tableData: Table = {
    name: generator.word(),
    type: "table",
    sourceId: INTERNAL_TABLE_SOURCE_ID,
    sourceType: TableSourceType.INTERNAL,
    schema: {
      name: {
        name: "name",
        type: FieldType.STRING,
        constraints: {
          type: FieldType.STRING,
        },
      },
      surname: {
        name: "surname",
        type: FieldType.STRING,
        constraints: {
          type: FieldType.STRING,
        },
      },
      age: {
        name: "age",
        type: FieldType.NUMBER,
        constraints: {
          type: FieldType.NUMBER,
        },
      },
      address: {
        name: "address",
        type: FieldType.STRING,
        constraints: {
          type: FieldType.STRING,
        },
      },
    },
  }

  beforeEach(() => {
    jest.clearAllMocks()
  })

  it("save will persist the row properly", async () => {
    const table = await config.createTable(tableData)
    const row = makeRow()

    await config.doInContext(config.appId, async () => {
      const response = await internalSdk.save(
        table._id!,
        row,
        config.user._id
      )

      expect(response).toEqual({
        table,
        row: {
          ...row,
          type: "row",
          _rev: expect.stringMatching("1-.*"),
        },
        squashed: {
          ...row,
          type: "row",
          _rev: expect.stringMatching("1-.*"),
        },
      })

      const persistedRow = await config.getRow(table._id!, response.row._id!)
      expect(persistedRow).toEqual({
        ...row,
        type: "row",
        _rev: expect.stringMatching("1-.*"),
        createdAt: expect.any(String),
        updatedAt: expect.any(String),
      })
    })
  })

  it("auto ids will update when creating new rows", async () => {
    const table = await config.createTable({
      ...tableData,
      schema: {
        ...tableData.schema,
        id: {
          name: "id",
          type: FieldType.AUTO,
          subtype: AutoFieldSubTypes.AUTO_ID,
          autocolumn: true,
          lastID: 0,
        },
      },
    })
    const row = makeRow()

    await config.doInContext(config.appId, async () => {
      const response = await internalSdk.save(
        table._id!,
        row,
        config.user._id
      )

      expect(response).toEqual({
        table: {
          ...table,
          schema: {
            ...table.schema,
            id: {
              ...table.schema.id,
              lastID: 1,
            },
          },
        },
        row: {
          ...row,
          id: 1,
          type: "row",
          _rev: expect.stringMatching("1-.*"),
        },
        squashed: {
          ...row,
          id: 1,
          type: "row",
          _rev: expect.stringMatching("1-.*"),
        },
      })

      const persistedRow = await config.getRow(table._id!, response.row._id!)
      expect(persistedRow).toEqual({
        ...row,
        type: "row",
        id: 1,
        _rev: expect.stringMatching("1-.*"),
        createdAt: expect.any(String),
        updatedAt: expect.any(String),
      })
    })
  })

  it("auto ids will update when creating new rows in parallel", async () => {
    function makeRows(count: number) {
      return Array.from({ length: count }, () => makeRow())
    }

    const table = await config.createTable({
      ...tableData,
      schema: {
        ...tableData.schema,
        id: {
          name: "id",
          type: FieldType.AUTO,
          subtype: AutoFieldSubTypes.AUTO_ID,
          autocolumn: true,
          lastID: 0,
        },
      },
    })

    await config.doInContext(config.appId, async () => {
      for (const row of makeRows(5)) {
        await internalSdk.save(table._id!, row, config.user._id)
      }
      await Promise.all(
        makeRows(10).map(row =>
          internalSdk.save(table._id!, row, config.user._id)
        )
      )
      for (const row of makeRows(5)) {
        await internalSdk.save(table._id!, row, config.user._id)
      }
    })

    const persistedRows = await config.getRows(table._id!)
    expect(persistedRows).toHaveLength(20)
    expect(persistedRows).toEqual(
      expect.arrayContaining(
        Array.from({ length: 20 }).map((_, i) =>
          expect.objectContaining({ id: i + 1 })
        )
      )
    )

    const persistedTable = await config.getTable(table._id)
    expect((table as any).schema.id.lastID).toBe(0)
    expect(persistedTable.schema.id.lastID).toBe(20)
  })
})
})
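One note on the `_rev` assertions above: CouchDB revision ids have the form `<generation>-<hash>`, so a freshly created document always matches `1-.*`, while an updated one would start with `2-`. A tiny illustration (the hash value is made up):

const revPattern = expect.stringMatching("1-.*")
expect("1-2c0b8f1a9d").toEqual(revPattern) // any first-generation revision passes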
@@ -23,6 +23,7 @@ import { automationsEnabled, printFeatures } from "./features"
import Koa from "koa"
import { Server } from "http"
import { AddressInfo } from "net"
import * as jsRunner from "./jsRunner"

let STARTUP_RAN = false

@@ -152,4 +153,6 @@ export async function startup(app?: Koa, server?: Server) {
      }
    })
  }

  jsRunner.init()
}
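The `STARTUP_RAN` flag above hints at a run-once guard around startup. A minimal sketch of that pattern, assuming `startup()` simply returns early on re-entry (the body is illustrative; only the imports, the flag, and the `jsRunner.init()` call come from the hunk):

import Koa from "koa"
import { Server } from "http"
import * as jsRunner from "./jsRunner"

let STARTUP_RAN = false

export async function startup(app?: Koa, server?: Server) {
  // guard: one-time initialisation must only run once per process
  if (STARTUP_RAN) {
    return
  }
  STARTUP_RAN = true

  // ...service wiring elided...

  // initialise the isolated JS runner last, as the hunk above does
  jsRunner.init()
}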
@@ -34,6 +34,7 @@ import { cloneDeep } from "lodash/fp"
import { performance } from "perf_hooks"
import * as sdkUtils from "../sdk/utils"
import env from "../environment"
import tracer from "dd-trace"

threadUtils.threadSetup()
const FILTER_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.FILTER.stepId
@@ -242,278 +243,347 @@ class Orchestrator {
  }

  async execute(): Promise<any> {
    return tracer.trace(
      "Orchestrator.execute",
      { resource: "automation" },
      async span => {
        span?.addTags({
          appId: this._appId,
          automationId: this._automation._id,
        })

        // this will retrieve from context created at start of thread
        this._context.env = await sdkUtils.getEnvironmentVariables()
        let automation = this._automation
        let stopped = false
        let loopStep: AutomationStep | undefined = undefined

        let stepCount = 0
        let loopStepNumber: any = undefined
        let loopSteps: LoopStep[] | undefined = []
        let metadata
        let timeoutFlag = false
        let wasLoopStep = false
        let timeout = this._job.data.event.timeout
        // check if this is a recurring automation,
        if (isProdAppID(this._appId) && isRecurring(automation)) {
          span?.addTags({ recurring: true })
          metadata = await this.getMetadata()
          const shouldStop = await this.checkIfShouldStop(metadata)
          if (shouldStop) {
            span?.addTags({ shouldStop: true })
            return
          }
        }
        const start = performance.now()
        for (let step of automation.definition.steps) {
          const stepSpan = tracer.startSpan("Orchestrator.execute.step", {
            childOf: span,
          })
          stepSpan.addTags({
            resource: "automation",
            step: {
              stepId: step.stepId,
              id: step.id,
              name: step.name,
              type: step.type,
              title: step.stepTitle,
              internal: step.internal,
              deprecated: step.deprecated,
            },
          })

          let input: any,
            iterations = 1,
            iterationCount = 0

          try {
            if (timeoutFlag) {
              span?.addTags({ timedOut: true })
              break
            }

            if (timeout) {
              setTimeout(() => {
                timeoutFlag = true
              }, timeout || 12000)
            }

            stepCount++
            if (step.stepId === LOOP_STEP_ID) {
              loopStep = step
              loopStepNumber = stepCount
              continue
            }

            if (loopStep) {
              input = await processObject(loopStep.inputs, this._context)
              iterations = getLoopIterations(loopStep as LoopStep)
              stepSpan?.addTags({ step: { iterations } })
            }
            for (let index = 0; index < iterations; index++) {
              let originalStepInput = cloneDeep(step.inputs)
              // Handle if the user has set a max iteration count or if it reaches the max limit set by us
              if (loopStep && input.binding) {
                let tempOutput = {
                  items: loopSteps,
                  iterations: iterationCount,
                }
                try {
                  loopStep.inputs.binding = automationUtils.typecastForLooping(
                    loopStep as LoopStep,
                    loopStep.inputs as LoopInput
                  )
                } catch (err) {
                  this.updateContextAndOutput(
                    loopStepNumber,
                    step,
                    tempOutput,
                    {
                      status: AutomationErrors.INCORRECT_TYPE,
                      success: false,
                    }
                  )
                  loopSteps = undefined
                  loopStep = undefined
                  break
                }
                let item = []
                if (
                  typeof loopStep.inputs.binding === "string" &&
                  loopStep.inputs.option === "String"
                ) {
                  item = automationUtils.stringSplit(loopStep.inputs.binding)
                } else if (Array.isArray(loopStep.inputs.binding)) {
                  item = loopStep.inputs.binding
                }
                this._context.steps[loopStepNumber] = {
                  currentItem: item[index],
                }

                // The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
                // Pretty hacky because we need to account for the row object
                for (let [key, value] of Object.entries(originalStepInput)) {
                  if (typeof value === "object") {
                    for (let [innerKey, innerValue] of Object.entries(
                      originalStepInput[key]
                    )) {
                      if (typeof innerValue === "string") {
                        originalStepInput[key][innerKey] =
                          automationUtils.substituteLoopStep(
                            innerValue,
                            `steps.${loopStepNumber}`
                          )
                      } else if (typeof value === "object") {
                        for (let [innerObject, innerValue] of Object.entries(
                          originalStepInput[key][innerKey]
                        )) {
                          originalStepInput[key][innerKey][innerObject] =
                            automationUtils.substituteLoopStep(
                              innerValue as string,
                              `steps.${loopStepNumber}`
                            )
                        }
                      }
                    }
                  } else {
                    if (typeof value === "string") {
                      originalStepInput[key] =
                        automationUtils.substituteLoopStep(
                          value,
                          `steps.${loopStepNumber}`
                        )
                    }
                  }
                }

                if (
                  index === env.AUTOMATION_MAX_ITERATIONS ||
                  index === parseInt(loopStep.inputs.iterations)
                ) {
                  this.updateContextAndOutput(
                    loopStepNumber,
                    step,
                    tempOutput,
                    {
                      status: AutomationErrors.MAX_ITERATIONS,
                      success: true,
                    }
                  )
                  loopSteps = undefined
                  loopStep = undefined
                  break
                }

                let isFailure = false
                const currentItem =
                  this._context.steps[loopStepNumber]?.currentItem
                if (currentItem && typeof currentItem === "object") {
                  isFailure = Object.keys(currentItem).some(value => {
                    return currentItem[value] === loopStep?.inputs.failure
                  })
                } else {
                  isFailure =
                    currentItem && currentItem === loopStep.inputs.failure
                }

                if (isFailure) {
                  this.updateContextAndOutput(
                    loopStepNumber,
                    step,
                    tempOutput,
                    {
                      status: AutomationErrors.FAILURE_CONDITION,
                      success: false,
                    }
                  )
                  loopSteps = undefined
                  loopStep = undefined
                  break
                }
              }

              // execution stopped, record state for that
              if (stopped) {
                this.updateExecutionOutput(
                  step.id,
                  step.stepId,
                  {},
                  STOPPED_STATUS
                )
                continue
              }

              // If it's a loop step, we need to manually add the bindings to the context
              let stepFn = await this.getStepFunctionality(step.stepId)
              let inputs = await processObject(originalStepInput, this._context)
              inputs = automationUtils.cleanInputValues(
                inputs,
                step.schema.inputs
              )

              try {
                // appId is always passed
                const outputs = await stepFn({
                  inputs: inputs,
                  appId: this._appId,
                  emitter: this._emitter,
                  context: this._context,
                })

                this._context.steps[stepCount] = outputs
                // if filter causes us to stop execution don't break the loop, set a var
                // so that we can finish iterating through the steps and record that it stopped
                if (step.stepId === FILTER_STEP_ID && !outputs.result) {
                  stopped = true
                  this.updateExecutionOutput(
                    step.id,
                    step.stepId,
                    step.inputs,
                    {
                      ...outputs,
                      ...STOPPED_STATUS,
                    }
                  )
                  continue
                }
                if (loopStep && loopSteps) {
                  loopSteps.push(outputs)
                } else {
                  this.updateExecutionOutput(
                    step.id,
                    step.stepId,
                    step.inputs,
                    outputs
                  )
                }
              } catch (err) {
                console.error(`Automation error - ${step.stepId} - ${err}`)
                return err
              }

              if (loopStep) {
                iterationCount++
                if (index === iterations - 1) {
                  loopStep = undefined
                  this._context.steps.splice(loopStepNumber, 1)
                  break
                }
              }
            }
          } finally {
            stepSpan?.finish()
          }

          if (loopStep && iterations === 0) {
            loopStep = undefined
            this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
              id: step.id,
              stepId: step.stepId,
              outputs: {
                status: AutomationStepStatus.NO_ITERATIONS,
                success: true,
              },
              inputs: {},
            })

            this._context.steps.splice(loopStepNumber, 1)
            iterations = 1
          }

          // Delete the step after the loop step as it's irrelevant, since information is included
          // in the loop step
          if (wasLoopStep && !loopStep) {
            this._context.steps.splice(loopStepNumber + 1, 1)
            wasLoopStep = false
          }
          if (loopSteps && loopSteps.length) {
            let tempOutput = {
              success: true,
              items: loopSteps,
              iterations: iterationCount,
            }
            this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
              id: step.id,
              stepId: step.stepId,
              outputs: tempOutput,
              inputs: step.inputs,
            })
            this._context.steps[loopStepNumber] = tempOutput

            wasLoopStep = true
            loopSteps = []
          }
        }

        const end = performance.now()
        const executionTime = end - start

        console.info(
          `Automation ID: ${automation._id} Execution time: ${executionTime} milliseconds`,
          {
            _logKey: "automation",
            executionTime,
          }
        )

        // store the logs for the automation run
        try {
          await storeLog(this._automation, this.executionOutput)
        } catch (e: any) {
          if (e.status === 413 && e.request?.data) {
            // if content is too large we shouldn't log it
            delete e.request.data
            e.request.data = { message: "removed due to large size" }
          }
          logging.logAlert("Error writing automation log", e)
        }
        if (isProdAppID(this._appId) && isRecurring(automation) && metadata) {
          await this.updateMetadata(metadata)
        }
        return this.executionOutput
      }
    )
  }
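The structural change in this hunk is the tracing wrapper: the whole run now executes inside tracer.trace, and each step gets a child span that is finished in a finally block. A stripped-down sketch of that pattern using only the public dd-trace API (tracer.trace, tracer.startSpan, addTags, finish); doStep and the span names here are hypothetical placeholders, not Budibase code:

import tracer from "dd-trace"

async function doStep(stepId: string) {
  // hypothetical step runner
}

async function runWithTracing(steps: string[]) {
  return tracer.trace("job.execute", { resource: "automation" }, async span => {
    span?.addTags({ stepCount: steps.length })
    for (const stepId of steps) {
      // one child span per step so per-step timings show up in the trace
      const stepSpan = tracer.startSpan("job.execute.step", { childOf: span })
      stepSpan.addTags({ step: { stepId } })
      try {
        await doStep(stepId)
      } finally {
        // finish in finally so the span is closed even on error or early break
        stepSpan.finish()
      }
    }
  })
}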
@@ -56,10 +56,12 @@ module.exports.processJS = (handlebars, context) => {
    const res = { data: runJS(js, sandboxContext) }
    return `{{${LITERAL_MARKER} js_result-${JSON.stringify(res)}}}`
  } catch (error) {
    console.log(`JS error: ${typeof error} ${JSON.stringify(error)}`)
    if (error.code === "ERR_SCRIPT_EXECUTION_TIMEOUT") {
      return "Timed out while executing JS"
    }
    if (error.name === "ExecutionTimeoutError") {
      return "Request JS execution limit hit"
    }
    return "Error while executing JS"
  }
}
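A quick illustration of where ERR_SCRIPT_EXECUTION_TIMEOUT comes from, assuming Node's built-in vm module is the runner (the 50ms timeout is arbitrary):

import vm from "vm"

try {
  // a script that never yields gets killed by the vm timeout
  vm.runInNewContext("while (true) {}", {}, { timeout: 50 })
} catch (error: any) {
  console.log(error.code) // "ERR_SCRIPT_EXECUTION_TIMEOUT"
}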
@@ -1,19 +1,26 @@
const externalHandlebars = require("./external")
const helperList = require("@budibase/handlebars-helpers")

let helpers = undefined

module.exports.getHelperList = () => {
  if (helpers) {
    return helpers
  }

  helpers = {}
  let constructed = []
  for (let collection of externalHandlebars.externalCollections) {
    constructed.push(helperList[collection]())
  }
  const fullMap = {}
  for (let collection of constructed) {
    for (let [key, func] of Object.entries(collection)) {
      fullMap[key] = func
      helpers[key] = func
    }
  }
  for (let key of Object.keys(externalHandlebars.addedHelpers)) {
    fullMap[key] = externalHandlebars.addedHelpers[key]
    helpers[key] = externalHandlebars.addedHelpers[key]
  }
  return fullMap
  Object.freeze(helpers)
  return helpers
}
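A hedged usage sketch of the new caching behaviour: the map is built once, then every call returns the same frozen object, so callers can no longer mutate the shared helper list (the require path is hypothetical):

const { getHelperList } = require("./helpers/list") // hypothetical path

const a = getHelperList()
const b = getHelperList()
console.log(a === b) // true: built on first call, cached after
console.log(Object.isFrozen(a)) // true: mutation is a no-op (or throws in strict mode)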
@@ -18,6 +18,7 @@ module.exports.doesContainString = templates.doesContainString
module.exports.disableEscaping = templates.disableEscaping
module.exports.findHBSBlocks = templates.findHBSBlocks
module.exports.convertToJS = templates.convertToJS
module.exports.setJSRunner = templates.setJSRunner
module.exports.FIND_ANY_HBS_REGEX = templates.FIND_ANY_HBS_REGEX

if (!process.env.NO_JS) {
@@ -9,6 +9,7 @@ const {
  findDoubleHbsInstances,
} = require("./utilities")
const { convertHBSBlock } = require("./conversion")
const javascript = require("./helpers/javascript")

const hbsInstance = handlebars.create()
registerAll(hbsInstance)

@@ -362,6 +363,8 @@ module.exports.doesContainString = (template, string) => {
  return exports.doesContainStrings(template, [string])
}

module.exports.setJSRunner = javascript.setJSRunner

module.exports.convertToJS = hbs => {
  const blocks = exports.findHBSBlocks(hbs)
  let js = "return `",
@@ -1,6 +1,5 @@
import vm from "vm"
import templates from "./index.js"
import { setJSRunner } from "./helpers/javascript"

/**
 * ES6 entrypoint for rollup

@@ -20,6 +19,7 @@ export const doesContainString = templates.doesContainString
export const disableEscaping = templates.disableEscaping
export const findHBSBlocks = templates.findHBSBlocks
export const convertToJS = templates.convertToJS
export const setJSRunner = templates.setJSRunner
export const FIND_ANY_HBS_REGEX = templates.FIND_ANY_HBS_REGEX

if (process && !process.env.NO_JS) {
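Taken together, these hunks make the JS runner pluggable: setJSRunner stores the function that processJS later invokes as runJS. A minimal sketch of wiring in a vm-based runner, modelled on (but not copied from) the entrypoint above; the timeout value is an assumption:

import vm from "vm"
import { setJSRunner } from "@budibase/string-templates"

setJSRunner((js: string, context: Record<string, any>) => {
  // run each JS binding in an isolated context with a hard timeout (assumed 1s)
  return vm.runInNewContext(js, context, { timeout: 1000 })
})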
@@ -21,6 +21,7 @@ export enum LockName {
  PERSIST_WRITETHROUGH = "persist_writethrough",
  QUOTA_USAGE_EVENT = "quota_usage_event",
  APP_MIGRATION = "app_migrations",
  PROCESS_AUTO_COLUMNS = "process_auto_columns",
}

export type LockOptions = {
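The new PROCESS_AUTO_COLUMNS lock name pairs with the parallel auto-ID test earlier in this diff: allocating lastID has to be serialised or concurrent saves would hand out duplicate ids. A hedged sketch of that idea; the doWithLock call shape, LockType value, and table helpers are assumptions, only the lock name comes from the enum above:

import { locks } from "@budibase/backend-core"
import { LockName, LockType } from "@budibase/types"

declare function getTable(tableId: string): Promise<any> // hypothetical
declare function saveTable(table: any): Promise<void> // hypothetical

async function allocateAutoId(tableId: string): Promise<number> {
  let allocated = 0
  await locks.doWithLock(
    {
      type: LockType.AUTO_EXTEND, // assumed lock type
      name: LockName.PROCESS_AUTO_COLUMNS,
      resource: tableId,
    },
    async () => {
      // read-increment-write inside the lock, so parallel saves
      // observe consecutive ids (1..N with no gaps)
      const table = await getTable(tableId)
      table.schema.id.lastID++
      await saveTable(table)
      allocated = table.schema.id.lastID
    }
  )
  return allocated
}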
@@ -48,7 +48,7 @@
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "dd-trace": "4.20.0",
    "dd-trace": "3.13.2",
    "dotenv": "8.6.0",
    "global-agent": "3.0.0",
    "ical-generator": "4.1.0",
@@ -1,6 +1,6 @@
import tk from "timekeeper"
import _ from "lodash"
import { mocks, structures } from "@budibase/backend-core/tests"
import { generator, mocks, structures } from "@budibase/backend-core/tests"
import {
  ScimCreateUserRequest,
  ScimGroupResponse,
@@ -14,9 +14,14 @@ import { events } from "@budibase/backend-core"
jest.retryTimes(2, { logErrorsBeforeRetry: true })
jest.setTimeout(30000)

mocks.licenses.useScimIntegration()

describe("scim", () => {
  beforeAll(async () => {
    tk.freeze(mocks.date.MOCK_DATE)
    mocks.licenses.useScimIntegration()

    await config.setSCIMConfig(true)
  })

  beforeEach(async () => {
    jest.resetAllMocks()
    tk.freeze(mocks.date.MOCK_DATE)
@@ -570,8 +575,15 @@ describe("scim", () => {
      beforeAll(async () => {
        groups = []

        for (let i = 0; i < groupCount; i++) {
          const body = structures.scim.createGroupRequest()
        const groupNames = generator.unique(
          () => generator.word(),
          groupCount
        )

        for (const groupName of groupNames) {
          const body = structures.scim.createGroupRequest({
            displayName: groupName,
          })
          groups.push(await config.api.scimGroupsAPI.post({ body }))
        }
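The switch to generator.unique is worth a note: with independently random words, two groups could collide on displayName and make the group assertions flaky. Assuming a Chance-style generator, unique(fn, n) keeps resampling until it has n distinct values:

// resamples on collision until `groupCount` distinct words have been produced
const names = generator.unique(() => generator.word(), 25)
console.log(new Set(names).size) // 25: duplicates are impossible by construction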
@@ -79,6 +79,9 @@ export const resetUpdate = async (resetCode: string, password: string) => {
  user.password = password
  user = await userSdk.db.save(user)

  await cache.passwordReset.invalidateCode(resetCode)
  await sessions.invalidateSessions(userId)

  // remove password from the user before sending events
  delete user.password
  await events.user.passwordReset(user)
@@ -0,0 +1,70 @@
import { cache, context, sessions, utils } from "@budibase/backend-core"
import { loginUser, resetUpdate } from "../auth"
import { generator, structures } from "@budibase/backend-core/tests"
import { TestConfiguration } from "../../../tests"

describe("auth", () => {
  const config = new TestConfiguration()

  describe("resetUpdate", () => {
    it("providing a valid code will update the password", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()
        const previousPassword = user.password

        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)

        const persistedUser = await config.getUser(user.email)
        expect(persistedUser.password).not.toBe(previousPassword)
        expect(
          await utils.compare(newPassword, persistedUser.password!)
        ).toBeTruthy()
      })
    })

    it("wrong code will not allow to reset the password", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const code = generator.hash()
        const newPassword = generator.hash()

        await expect(resetUpdate(code, newPassword)).rejects.toThrow(
          "Provided information is not valid, cannot reset password - please try again."
        )
      })
    })

    it("the same code cannot be used twice", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()

        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)
        await expect(resetUpdate(code, newPassword)).rejects.toThrow(
          "Provided information is not valid, cannot reset password - please try again."
        )
      })
    })

    it("updating the password will invalidate all the sessions", async () => {
      await context.doInTenant(structures.tenant.id(), async () => {
        const user = await config.createUser()

        await loginUser(user)

        expect(await sessions.getSessionsForUser(user._id!)).toHaveLength(1)

        const code = await cache.passwordReset.createCode(user._id!, {})
        const newPassword = generator.hash()

        await resetUpdate(code, newPassword)

        expect(await sessions.getSessionsForUser(user._id!)).toHaveLength(0)
      })
    })
  })
})
@@ -1,6 +1,5 @@
import { structures, mocks } from "../../../tests"
import { env, context } from "@budibase/backend-core"
import * as users from "../users"
import { db as userDb } from "../"
import { CloudAccount } from "@budibase/types"
qa-core/yarn.lock: 1026 lines changed (file diff suppressed because it is too large)
@@ -59,6 +59,7 @@ function runBuild(entry, outfile) {
      "pouchdb",
      "bcrypt",
      "bcryptjs",
      "graphql/*",
    ],
  }
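For context, a hedged sketch of the kind of esbuild options runBuild appears to assemble; only the external entries are taken from the diff, while the entry point, outfile, and remaining flags are assumptions:

const { build } = require("esbuild")

build({
  entryPoints: ["src/index.ts"], // assumed entry
  bundle: true,
  platform: "node",
  outfile: "dist/index.js", // assumed output
  // native or dynamically-required modules stay external so esbuild
  // does not try to bundle them (bcryptjs added here alongside bcrypt)
  external: ["pouchdb", "bcrypt", "bcryptjs", "graphql/*"],
})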