Merge branch 'master' into new-datepicker

Andrew Kingston 2024-04-12 12:04:09 +01:00 committed by GitHub
commit 770cba6fdd
22 changed files with 529 additions and 51 deletions

View File

@ -150,6 +150,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.apps.autoscaling.maxReplicas | int | `10` | |
| services.apps.autoscaling.minReplicas | int | `1` | |
| services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
| services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
| services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
| services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
| services.apps.livenessProbe | object | HTTP health checks. | Liveness probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.apps.logLevel | string | `"info"` | The log level for the apps service. |
@ -162,6 +164,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.automationWorkers.autoscaling.minReplicas | int | `1` | |
| services.automationWorkers.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the automation worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the automation worker pods. |
| services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
| services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
| services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
| services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
| services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
@ -180,6 +184,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.objectStore.cloudfront.cdn | string | `""` | Set the url of a distribution to enable cloudfront. |
| services.objectStore.cloudfront.privateKey64 | string | `""` | Base64 encoded private key for the above public key. |
| services.objectStore.cloudfront.publicKeyId | string | `""` | ID of public key stored in cloudfront. |
| services.objectStore.extraContainers | list | `[]` | Additional containers to be added to the objectStore pod. |
| services.objectStore.minio | bool | `true` | Set to false if using another object store, such as S3. You will need to set `services.objectStore.url` to point to your bucket if you do this. |
| services.objectStore.region | string | `""` | AWS_REGION if using S3 |
| services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@ -191,12 +196,14 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.proxy.autoscaling.maxReplicas | int | `10` | |
| services.proxy.autoscaling.minReplicas | int | `1` | |
| services.proxy.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the proxy service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the proxy pods. |
| services.proxy.extraContainers | list | `[]` | Additional containers to be added to the proxy pod. |
| services.proxy.livenessProbe | object | HTTP health checks. | Liveness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.proxy.readinessProbe | object | HTTP health checks. | Readiness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.proxy.replicaCount | int | `1` | The number of proxy replicas to run. |
| services.proxy.resources | object | `{}` | The resources to use for proxy pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
| services.proxy.startupProbe | object | HTTP health checks. | Startup probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.redis.enabled | bool | `true` | Whether or not to deploy a Redis pod into your cluster. |
| services.redis.extraContainers | list | `[]` | Additional containers to be added to the redis pod. |
| services.redis.password | string | `"budibase"` | The password to use when connecting to Redis. It's recommended that you change this from the default if you're running Redis in-cluster. |
| services.redis.port | int | `6379` | Port to expose Redis on. |
| services.redis.resources | object | `{}` | The resources to use for Redis pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@ -207,6 +214,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.worker.autoscaling.maxReplicas | int | `10` | |
| services.worker.autoscaling.minReplicas | int | `1` | |
| services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
| services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
| services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
| services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
| services.worker.livenessProbe | object | HTTP health checks. | Liveness probe configuration for worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.worker.logLevel | string | `"info"` | The log level for the worker service. |
@ -225,4 +234,4 @@ $ helm uninstall --namespace budibase budibase
```
----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1)

View File

@ -235,6 +235,9 @@ spec:
args:
{{- toYaml .Values.services.apps.args | nindent 10 }}
{{ end }}
{{- if .Values.services.apps.extraContainers }}
{{- toYaml .Values.services.apps.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -235,6 +235,9 @@ spec:
args:
{{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
{{ end }}
{{- if .Values.services.automationWorkers.extraContainers }}
{{- toYaml .Values.services.automationWorkers.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -54,6 +54,9 @@ spec:
volumeMounts:
- mountPath: /data
name: minio-data
{{- if .Values.services.objectStore.extraContainers }}
{{- toYaml .Values.services.objectStore.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -82,7 +82,9 @@ spec:
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- if .Values.services.proxy.extraContainers }}
{{- toYaml .Values.services.proxy.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -34,6 +34,9 @@ spec:
volumeMounts:
- mountPath: /data
name: redis-data
{{- if .Values.services.redis.extraContainers }}
{{- toYaml .Values.services.redis.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -221,6 +221,9 @@ spec:
args:
{{- toYaml .Values.services.worker.args | nindent 10 }}
{{ end }}
{{- if .Values.services.worker.extraContainers }}
{{- toYaml .Values.services.worker.extraContainers | nindent 6 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -206,6 +206,10 @@ services:
# for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the proxy pods.
targetCPUUtilizationPercentage: 80
# -- Additional containers to be added to the proxy pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
apps:
# @ignore (you shouldn't need to change this)
@ -274,6 +278,10 @@ services:
# autoscaling to work, you will need to have metrics-server configured,
# and resources set for the apps pods.
targetCPUUtilizationPercentage: 80
# -- Additional containers to be added to the apps pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
automationWorkers:
# -- Whether or not to enable the automation worker service. If you disable this,
@ -346,6 +354,10 @@ services:
# Note that for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the automation worker pods.
targetCPUUtilizationPercentage: 80
# -- Additional containers to be added to the automationWorkers pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
worker:
# @ignore (you shouldn't need to change this)
@ -414,6 +426,10 @@ services:
# for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the worker pods.
targetCPUUtilizationPercentage: 80
# -- Additional containers to be added to the worker pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
couchdb:
# -- Whether or not to spin up a CouchDB instance in your cluster. True by
@ -463,6 +479,10 @@ services:
# <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
# for more information on how to set these.
resources: {}
# -- Additional containers to be added to the redis pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
objectStore:
# -- Set to false if using another object store, such as S3. You will need
@ -488,7 +508,7 @@ services:
# do this.
url: "http://minio-service:9000"
# -- How much storage to give Minio in its PersistentVolumeClaim.
storage: 100Mi
storage: 2Gi
# -- If defined, storageClassName: <storageClass> If set to "-",
# storageClassName: "", which disables dynamic provisioning If undefined
# (the default) or set to null, no storageClassName spec is set, choosing
@ -505,6 +525,10 @@ services:
publicKeyId: ""
# -- Base64 encoded private key for the above public key.
privateKey64: ""
# -- Additional containers to be added to the objectStore pod.
extraContainers: []
# - name: my-sidecar
# image: myimage:latest
# Override values in couchDB subchart. We're only specifying the values we're changing.
# If you want to see all of the available values, see:

View File

@ -0,0 +1,110 @@
<script>
import { Select, Label } from "@budibase/bbui"
import { onMount } from "svelte"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import { FieldType } from "@budibase/types"
import { tables, viewsV2 } from "stores/builder"
export let parameters
export let bindings = []
const fileOptions = [
{
label: "Attachment",
value: "attachment",
},
{
label: "URL",
value: "url",
},
]
$: tableOptions = $tables.list.map(table => ({
label: table.name,
resourceId: table._id,
schema: table.schema,
}))
$: viewOptions = $viewsV2.list.map(view => ({
label: view.name,
resourceId: view.id,
schema: view.schema,
}))
$: options = [...(tableOptions || []), ...(viewOptions || [])]
$: selectedTable =
parameters.tableId && options.find(t => t.resourceId === parameters.tableId)
$: attachmentColumns =
selectedTable &&
Object.values(selectedTable.schema).filter(c =>
[FieldType.ATTACHMENTS, FieldType.ATTACHMENT_SINGLE].includes(c.type)
)
onMount(() => {
if (!parameters.type) {
parameters.type = "attachment"
}
})
</script>
<div class="root">
<Label small>File</Label>
<Select
placeholder={null}
bind:value={parameters.type}
options={fileOptions}
/>
{#if parameters.type === "attachment"}
<Label>Table</Label>
<Select
placeholder={null}
bind:value={parameters.tableId}
{options}
getOptionLabel={table => table.label}
getOptionValue={table => table.resourceId}
/>
<Label small>Column</Label>
<Select
disabled={!attachmentColumns?.length}
placeholder={parameters.tableId && !attachmentColumns?.length
? "This table has no attachment columns"
: undefined}
bind:value={parameters.attachmentColumn}
options={attachmentColumns?.map(c => c.name)}
/>
<Label small>Row ID</Label>
<DrawerBindableInput
{bindings}
title="Row ID"
value={parameters.rowId}
on:change={value => (parameters.rowId = value.detail)}
/>
{:else}
<Label small>URL</Label>
<DrawerBindableInput
title="URL"
{bindings}
value={parameters.url}
on:change={value => (parameters.url = value.detail)}
/>
<Label small>File name</Label>
<DrawerBindableInput
title="File name"
{bindings}
value={parameters.fileName}
on:change={value => (parameters.fileName = value.detail)}
/>
{/if}
</div>
<style>
.root {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
grid-template-columns: 60px 1fr;
align-items: center;
max-width: 800px;
margin: 0 auto;
}
</style>
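
For orientation, the `parameters` object this drawer produces takes one of two shapes depending on the selected mode. The field names come from the bindings above; the IDs and values in this sketch are purely illustrative:

```js
// Attachment mode: download a file stored in an attachment column of a row.
const attachmentParams = {
  type: "attachment",
  tableId: "ta_abc123", // resourceId of the selected table or view (hypothetical)
  attachmentColumn: "invoice", // an ATTACHMENTS or ATTACHMENT_SINGLE column
  rowId: "ro_def456",
}

// URL mode: download an arbitrary file from a URL.
const urlParams = {
  type: "url",
  url: "https://example.com/report.pdf",
  fileName: "report.pdf",
}
```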

View File

@ -22,3 +22,4 @@ export { default as PromptUser } from "./PromptUser.svelte"
export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
export { default as CloseSidePanel } from "./CloseSidePanel.svelte"
export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
export { default as DownloadFile } from "./DownloadFile.svelte"

View File

@ -161,6 +161,11 @@
"name": "Clear Row Selection",
"type": "data",
"component": "ClearRowSelection"
},
{
"name": "Download File",
"type": "data",
"component": "DownloadFile"
}
]
}

View File

@ -8,6 +8,7 @@
Input,
notifications,
} from "@budibase/bbui"
import { downloadFile } from "@budibase/frontend-core"
import { createValidationStore } from "helpers/validation/yup"
export let app
@ -55,40 +56,13 @@
const exportApp = async () => {
const id = published ? app.prodId : app.devId
const url = `/api/backups/export?appId=${id}`
await downloadFile(url, {
excludeRows: !includeInternalTablesRows,
encryptPassword: password,
})
}
async function downloadFile(url, body) {
try {
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
const downloaded = await downloadFile(url, {
excludeRows: !includeInternalTablesRows,
encryptPassword: password,
})
if (response.ok) {
const contentDisposition = response.headers.get("Content-Disposition")
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
contentDisposition
)
const filename = matches[1].replace(/['"]/g, "")
const url = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = url
link.download = filename
link.click()
URL.revokeObjectURL(url)
} else {
if (!downloaded) {
notifications.error("Error exporting the app.")
}
} catch (error) {

View File

@ -1,5 +1,6 @@
import { get } from "svelte/store"
import download from "downloadjs"
import { downloadStream } from "@budibase/frontend-core"
import {
routeStore,
builderStore,
@ -400,6 +401,51 @@ const closeSidePanelHandler = () => {
sidePanelStore.actions.close()
}
const downloadFileHandler = async action => {
const { url, fileName } = action.parameters
try {
const { type } = action.parameters
if (type === "attachment") {
const { tableId, rowId, attachmentColumn } = action.parameters
const res = await API.downloadAttachment(
tableId,
rowId,
attachmentColumn,
{ suppressErrors: true }
)
await downloadStream(res)
return
}
const response = await fetch(url)
if (!response.ok) {
notificationStore.actions.error(
`Failed to download from '${url}'. Server returned status code: ${response.status}`
)
return
}
const objectUrl = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = objectUrl
link.download = fileName
link.click()
URL.revokeObjectURL(objectUrl)
} catch (e) {
console.error(e)
if (e.status) {
notificationStore.actions.error(
`Failed to download from '${url}'. Server returned status code: ${e.status}`
)
} else {
notificationStore.actions.error(`Failed to download from '${url}'.`)
}
}
}
const handlerMap = {
["Fetch Row"]: fetchRowHandler,
["Save Row"]: saveRowHandler,
@ -418,6 +464,7 @@ const handlerMap = {
["Prompt User"]: promptUserHandler,
["Open Side Panel"]: openSidePanelHandler,
["Close Side Panel"]: closeSidePanelHandler,
["Download File"]: downloadFileHandler,
}
const confirmTextMap = {

View File

@ -0,0 +1,17 @@
{
"compilerOptions": {
"allowJs": true,
"strict": true,
"outDir": "dist",
"paths": {
"@budibase/*": [
"../*/src/index.ts",
"../*/src/index.js",
"../*",
"../../node_modules/@budibase/*"
],
"*": ["./src/*"]
}
},
"include": ["src/**/*"]
}

View File

@ -88,5 +88,19 @@ export const buildAttachmentEndpoints = API => {
},
})
},
/**
* Download an attachment from a row given its column name.
* @param datasourceId the ID of the datasource to download from
* @param rowId the ID of the row to download from
* @param columnName the column name to download
* @param options optional request options, e.g. suppressErrors to let the caller handle errors itself
*/
downloadAttachment: async (datasourceId, rowId, columnName, options) => {
return await API.get({
url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
parseResponse: response => response,
suppressErrors: options?.suppressErrors,
})
},
}
}
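
A minimal usage sketch of this client method, assuming `API` is the client built by `buildAttachmentEndpoints` and using the `downloadStream` helper from frontend-core; it mirrors the `downloadFileHandler` added in the client runtime above, and the IDs are hypothetical:

```js
import { downloadStream } from "@budibase/frontend-core"

// "API" is assumed to be the client built from buildAttachmentEndpoints above.
export async function downloadInvoiceAttachment(API) {
  const res = await API.downloadAttachment("ta_abc123", "ro_def456", "invoice", {
    suppressErrors: true, // let the caller surface its own error notification
  })
  // downloadStream reads the filename from Content-Disposition and triggers
  // a browser download of the response body.
  await downloadStream(res)
}
```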

View File

@ -1,3 +1,5 @@
const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/
export function downloadText(filename, text) {
if (typeof text === "object") {
text = JSON.stringify(text)
@ -17,9 +19,7 @@ export async function downloadStream(streamResponse) {
const contentDisposition = streamResponse.headers.get("Content-Disposition")
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
contentDisposition
)
const matches = extractFileNameRegex.exec(contentDisposition)
const filename = matches[1].replace(/['"]/g, "")
@ -34,3 +34,33 @@ export async function downloadStream(streamResponse) {
URL.revokeObjectURL(blobUrl)
}
export async function downloadFile(url, body) {
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
})
if (!response.ok) {
return false
} else {
const contentDisposition = response.headers.get("Content-Disposition")
const matches = extractFileNameRegex.exec(contentDisposition)
const filename = matches[1].replace(/['"]/g, "")
const url = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = url
link.download = filename
link.click()
URL.revokeObjectURL(url)
return true
}
}
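
A minimal usage sketch of the new `downloadFile` helper, assuming a same-origin POST endpoint that responds with a file and a `Content-Disposition` header, much as `ExportAppModal` above now uses it; the app ID and body values are hypothetical:

```js
import { downloadFile } from "@budibase/frontend-core"

const ok = await downloadFile("/api/backups/export?appId=app_dev_123", {
  excludeRows: true,
  encryptPassword: "secret",
})
if (!ok) {
  // A non-2xx response makes the helper return false rather than throw,
  // so the caller can show its own notification.
  console.error("Export failed")
}
```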

View File

@ -50,10 +50,10 @@
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "0.0.0",
"@budibase/client": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/pro": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/types": "0.0.0",
"@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2",
@ -63,6 +63,7 @@
"@socket.io/redis-adapter": "^8.2.1",
"airtable": "0.10.1",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
@ -124,6 +125,7 @@
"@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/archiver": "^6.0.2",
"@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.5",

View File

@ -1,4 +1,8 @@
import stream from "stream"
import archiver from "archiver"
import { quotas } from "@budibase/pro"
import { objectStore } from "@budibase/backend-core"
import * as internal from "./internal"
import * as external from "./external"
import { isExternalTableID } from "../../../integrations/utils"
@ -9,10 +13,12 @@ import {
DeleteRows,
ExportRowsRequest,
ExportRowsResponse,
FieldType,
GetRowResponse,
PatchRowRequest,
PatchRowResponse,
Row,
RowAttachment,
RowSearchParams,
SearchRowRequest,
SearchRowResponse,
@ -251,3 +257,59 @@ export const exportRows = async (
ctx.attachment(fileName)
ctx.body = apiFileReturn(content)
}
export async function downloadAttachment(ctx: UserCtx) {
const { columnName } = ctx.params
const tableId = utils.getTableId(ctx)
const row = await pickApi(tableId).find(ctx)
const table = await sdk.tables.getTable(tableId)
const columnSchema = table.schema[columnName]
if (!columnSchema) {
ctx.throw(400, `'${columnName}' is not valid`)
}
const columnType = columnSchema.type
if (
columnType !== FieldType.ATTACHMENTS &&
columnType !== FieldType.ATTACHMENT_SINGLE
) {
ctx.throw(404, `'${columnName}' is not a valid attachment column`)
}
const attachments: RowAttachment[] =
columnType === FieldType.ATTACHMENTS ? row[columnName] : [row[columnName]]
if (!attachments?.length) {
ctx.throw(404)
}
if (attachments.length === 1) {
const attachment = attachments[0]
ctx.attachment(attachment.name)
ctx.body = await objectStore.getReadStream(
objectStore.ObjectStoreBuckets.APPS,
attachment.key
)
} else {
const passThrough = new stream.PassThrough()
const archive = archiver.create("zip")
archive.pipe(passThrough)
for (const attachment of attachments) {
const attachmentStream = await objectStore.getReadStream(
objectStore.ObjectStoreBuckets.APPS,
attachment.key
)
archive.append(attachmentStream, { name: attachment.name })
}
const displayName = row[table.primaryDisplay || "_id"]
ctx.attachment(`${displayName}_${columnName}.zip`)
archive.finalize()
ctx.body = passThrough
ctx.type = "zip"
}
}
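
For illustration, the new attachment route can also be called directly from the browser; a single attachment streams back as the raw file, while multiple attachments are bundled into a zip named after the row's display value. The table, row, and column names below are hypothetical, and same-origin cookies are assumed to provide auth:

```js
const res = await fetch("/api/ta_abc123/rows/ro_def456/attachment/invoice")
if (res.ok) {
  // One attachment: the raw file. Several: a "<displayName>_invoice.zip" archive.
  console.log(res.headers.get("Content-Disposition"))
  const blob = await res.blob()
  console.log(`downloaded ${blob.size} bytes`)
}
```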

View File

@ -77,6 +77,12 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows
)
.get(
"/api/:sourceId/rows/:rowId/attachment/:columnName",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.downloadAttachment
)
router.post(
"/api/v2/views/:viewId/search",

View File

@ -12,11 +12,12 @@ import {
} from "./utilities"
import { convertHBSBlock } from "./conversion"
import { setJSRunner, removeJSRunner } from "./helpers/javascript"
import { helpersToRemoveForJs } from "./helpers/list"
import manifest from "./manifest.json"
import { ProcessOptions } from "./types"
export { helpersToRemoveForJs } from "./helpers/list"
export { FIND_ANY_HBS_REGEX } from "./utilities"
export { setJSRunner, setOnErrorLog } from "./helpers/javascript"
export { iifeWrapper } from "./iife"
@ -412,15 +413,9 @@ export function convertToJS(hbs: string) {
return `${varBlock}${js}`
}
const _FIND_ANY_HBS_REGEX = FIND_ANY_HBS_REGEX
export { _FIND_ANY_HBS_REGEX as FIND_ANY_HBS_REGEX }
export { JsErrorTimeout } from "./errors"
const _helpersToRemoveForJs = helpersToRemoveForJs
export { _helpersToRemoveForJs as helpersToRemoveForJs }
function defaultJSSetup() {
export function defaultJSSetup() {
if (!isBackendService()) {
/**
* Use polyfilled vm to run JS scripts in a browser Env
@ -440,6 +435,3 @@ function defaultJSSetup() {
}
}
defaultJSSetup()
const _defaultJSSetup = defaultJSSetup
export { _defaultJSSetup as defaultJSSetup }

View File

@ -126,6 +126,63 @@ describe("Test that the object processing works correctly", () => {
})
})
describe("check arrays", () => {
describe("index with square brackets", () => {
it.each([
[0, "1"],
[1, "2"],
])("should handle an array of primitive types", async (index, expected) => {
const json = [1, 2, 3]
const output = await processString(`{{ testing.[${index}] }}`, {
testing: json,
})
expect(output).toEqual(expected)
})
it("should handle an array of objects", async () => {
const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
const output = await processString("{{ testing.[1] }}", {
testing: json,
})
expect(output).toEqual('{"value":2}')
})
it("should handle nesting properties in an array of objects", async () => {
const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
const output = await processString("{{ testing.[1].value }}", {
testing: json,
})
expect(output).toEqual("2")
})
})
describe("index without square brackets", () => {
it("should not handle an array of primitive types", async () => {
const json = [1, 2, 3]
const output = await processString(`{{ testing.1 }}`, {
testing: json,
})
expect(output).toEqual("{{ testing.1 }}")
})
it("should not handle an array of objects", async () => {
const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
const output = await processString("{{ testing.1 }}", {
testing: json,
})
expect(output).toEqual("{{ testing.1 }}")
})
it("should handle nesting properties in an array of object types", async () => {
const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
const output = await processString("{{ testing.1.value }}", {
testing: json,
})
expect(output).toEqual("2")
})
})
})
describe("check returning objects", () => {
it("should handle an array of objects", async () => {
const json = [{ a: 1 }, { a: 2 }]

yarn.lock (115 changed lines)
View File

@ -5164,6 +5164,13 @@
dependencies:
"@types/node" "*"
"@types/archiver@^6.0.2":
version "6.0.2"
resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-6.0.2.tgz#0daf8c83359cbde69de1e4b33dcade6a48a929e2"
integrity sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==
dependencies:
"@types/readdir-glob" "*"
"@types/aria-query@^5.0.1":
version "5.0.1"
resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.1.tgz#3286741fb8f1e1580ac28784add4c7a1d49bdfbc"
@ -5913,6 +5920,13 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
"@types/readdir-glob@*":
version "1.1.5"
resolved "https://registry.yarnpkg.com/@types/readdir-glob/-/readdir-glob-1.1.5.tgz#21a4a98898fc606cb568ad815f2a0eedc24d412a"
integrity sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==
dependencies:
"@types/node" "*"
"@types/redis@^2.8.0":
version "2.8.32"
resolved "https://registry.yarnpkg.com/@types/redis/-/redis-2.8.32.tgz#1d3430219afbee10f8cfa389dad2571a05ecfb11"
@ -6983,6 +6997,32 @@ archiver-utils@^3.0.4:
normalize-path "^3.0.0"
readable-stream "^3.6.0"
archiver-utils@^5.0.0, archiver-utils@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-5.0.2.tgz#63bc719d951803efc72cf961a56ef810760dd14d"
integrity sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==
dependencies:
glob "^10.0.0"
graceful-fs "^4.2.0"
is-stream "^2.0.1"
lazystream "^1.0.0"
lodash "^4.17.15"
normalize-path "^3.0.0"
readable-stream "^4.0.0"
archiver@7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-7.0.1.tgz#c9d91c350362040b8927379c7aa69c0655122f61"
integrity sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==
dependencies:
archiver-utils "^5.0.2"
async "^3.2.4"
buffer-crc32 "^1.0.0"
readable-stream "^4.0.0"
readdir-glob "^1.1.2"
tar-stream "^3.0.0"
zip-stream "^6.0.1"
archiver@^5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.2.tgz#99991d5957e53bd0303a392979276ac4ddccf3b0"
@ -7784,6 +7824,11 @@ buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3:
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==
buffer-crc32@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-1.0.0.tgz#a10993b9055081d55304bd9feb4a072de179f405"
integrity sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==
buffer-equal-constant-time@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
@ -8631,6 +8676,17 @@ compress-commons@^4.1.2:
normalize-path "^3.0.0"
readable-stream "^3.6.0"
compress-commons@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-6.0.2.tgz#26d31251a66b9d6ba23a84064ecd3a6a71d2609e"
integrity sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==
dependencies:
crc-32 "^1.2.0"
crc32-stream "^6.0.0"
is-stream "^2.0.1"
normalize-path "^3.0.0"
readable-stream "^4.0.0"
compressible@^2.0.0, compressible@^2.0.12:
version "2.0.18"
resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
@ -8923,6 +8979,14 @@ crc32-stream@^4.0.2:
crc-32 "^1.2.0"
readable-stream "^3.4.0"
crc32-stream@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-6.0.0.tgz#8529a3868f8b27abb915f6c3617c0fadedbf9430"
integrity sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==
dependencies:
crc-32 "^1.2.0"
readable-stream "^4.0.0"
crc@^3.4.4:
version "3.8.0"
resolved "https://registry.yarnpkg.com/crc/-/crc-3.8.0.tgz#ad60269c2c856f8c299e2c4cc0de4556914056c6"
@ -11833,6 +11897,17 @@ glob@7.1.6:
once "^1.3.0"
path-is-absolute "^1.0.0"
glob@^10.0.0:
version "10.3.12"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.12.tgz#3a65c363c2e9998d220338e88a5f6ac97302960b"
integrity sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==
dependencies:
foreground-child "^3.1.0"
jackspeak "^2.3.6"
minimatch "^9.0.1"
minipass "^7.0.4"
path-scurry "^1.10.2"
glob@^10.2.2:
version "10.2.7"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.2.7.tgz#9dd2828cd5bc7bd861e7738d91e7113dda41d7d8"
@ -13226,7 +13301,7 @@ is-stream@^1.1.0:
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==
is-stream@^2.0.0:
is-stream@^2.0.0, is-stream@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
@ -13472,6 +13547,15 @@ jackspeak@^2.0.3:
optionalDependencies:
"@pkgjs/parseargs" "^0.11.0"
jackspeak@^2.3.6:
version "2.3.6"
resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8"
integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==
dependencies:
"@isaacs/cliui" "^8.0.2"
optionalDependencies:
"@pkgjs/parseargs" "^0.11.0"
jake@^10.8.5:
version "10.8.5"
resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46"
@ -15249,6 +15333,11 @@ lowercase-keys@^2.0.0:
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
lru-cache@^10.2.0:
version "10.2.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3"
integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==
lru-cache@^4.1.5:
version "4.1.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
@ -15874,6 +15963,11 @@ minipass@^5.0.0:
resolved "https://registry.yarnpkg.com/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81"
integrity sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==
"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4:
version "7.0.4"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c"
integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==
minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@ -17432,6 +17526,14 @@ path-parser@^6.1.0:
search-params "3.0.0"
tslib "^1.10.0"
path-scurry@^1.10.2:
version "1.10.2"
resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.2.tgz#8f6357eb1239d5fa1da8b9f70e9c080675458ba7"
integrity sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==
dependencies:
lru-cache "^10.2.0"
minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
path-scurry@^1.6.1, path-scurry@^1.7.0:
version "1.9.2"
resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.9.2.tgz#90f9d296ac5e37e608028e28a447b11d385b3f63"
@ -21122,7 +21224,7 @@ tar-stream@^2.0.0, tar-stream@^2.1.4, tar-stream@^2.2.0, tar-stream@~2.2.0:
inherits "^2.0.3"
readable-stream "^3.1.1"
tar-stream@^3.1.5:
tar-stream@^3.0.0, tar-stream@^3.1.5:
version "3.1.7"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b"
integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==
@ -23035,3 +23137,12 @@ zip-stream@^4.1.0:
archiver-utils "^3.0.4"
compress-commons "^4.1.2"
readable-stream "^3.6.0"
zip-stream@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-6.0.1.tgz#e141b930ed60ccaf5d7fa9c8260e0d1748a2bbfb"
integrity sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==
dependencies:
archiver-utils "^5.0.0"
compress-commons "^6.0.2"
readable-stream "^4.0.0"