Merge branch 'develop' into cypress-testing

commit 3f1231ea70

@@ -11,6 +11,7 @@ on:
branches:
- master
- develop
workflow_dispatch:

env:
BRANCH: ${{ github.event.pull_request.head.ref }}

@@ -66,7 +66,7 @@ jobs:
config-files: values.production.yaml
chart-path: charts/budibase
namespace: budibase
values: globals.appVersion=v${{ env.RELEASE_VERSION }}
values: globals.appVersion=v${{ env.RELEASE_VERSION }},services.couchdb.url=${{ secrets.PRODUCTION_COUCHDB_URL }},services.couchdb.password=${{ secrets.PRODUCTION_COUCHDB_PASSWORD }}
name: budibase-prod

- name: Discord Webhook Action

@@ -14,6 +14,7 @@ on:
- 'yarn.lock'
- 'package.json'
- 'yarn.lock'
workflow_dispatch:

env:
POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}

@@ -26,6 +27,11 @@ jobs:
runs-on: ubuntu-latest

steps:
- name: Fail if branch is not develop
if: github.ref != 'refs/heads/develop'
run: |
echo "Ref is not develop, you must run this job from develop."
exit 1
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:

@@ -14,6 +14,7 @@ on:
- 'yarn.lock'
- 'package.json'
- 'yarn.lock'
workflow_dispatch:

env:
POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}

@@ -27,6 +28,11 @@ jobs:
runs-on: ubuntu-latest

steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:

@@ -32,7 +32,9 @@ jobs:
uses: cypress-io/github-action@v2
with:
install: false
command: yarn test:e2e:ci
command: yarn test:e2e:ci:record
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

# TODO: upload recordings to s3
# - name: Configure AWS Credentials

@@ -112,6 +112,8 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: AUTOMATION_MAX_ITERATIONS
value: {{ .Values.globals.automationMaxIterations | quote }}
- name: EXCLUDE_QUOTAS_TENANTS
value: {{ .Values.globals.excludeQuotasTenants | quote }}

image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always

@@ -12,10 +12,8 @@ spec:
resources:
requests:
storage: {{ .Values.services.objectStore.storage }}
{{- if (eq "-" .Values.services.objectStore.storageClass) }}
storageClassName: ""
{{- else }}
storageClassName: "{{ .Values.services.objectStore.storageClass }}"
{{ if .Values.services.objectStore.storageClass }}
storageClassName: {{ .Values.services.objectStore.storageClass }}
{{- end }}
status: {}
{{- end }}

@@ -12,10 +12,8 @@ spec:
resources:
requests:
storage: {{ .Values.services.redis.storage }}
{{- if (eq "-" .Values.services.redis.storageClass) }}
storageClassName: ""
{{- else }}
storageClassName: "{{ .Values.services.redis.storageClass }}"
{{- end }}
{{ if .Values.services.redis.storageClass }}
storageClassName: {{ .Values.services.redis.storageClass }}
{{ end }}
status: {}
{{- end }}

@@ -155,7 +155,7 @@ services:
## If set to "-", storageClassName: "", which disables dynamic provisioning
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: "-"
storageClass: ""

objectStore:
minio: true

@@ -171,7 +171,7 @@ services:
## If set to "-", storageClassName: "", which disables dynamic provisioning
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: "-"
storageClass: ""

# Override values in couchDB subchart
couchdb:

@@ -215,7 +215,7 @@ couchdb:
## The CouchDB image
image:
repository: couchdb
tag: 3.1.0
tag: 3.2.1
pullPolicy: IfNotPresent

## Experimental integration with Lucene-powered fulltext search

@@ -48,7 +48,7 @@ http {
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com";
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.*.amazonaws.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";

@@ -1,5 +1,5 @@
{
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"

@@ -47,6 +47,7 @@
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",
"test:e2e:ci:record": "lerna run cy:ci:record --stream",
"build:specs": "lerna run specs",
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",

@@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",

@@ -41,7 +41,8 @@ exports.closeDB = async db => {
return
}
try {
return db.close()
// specifically await so that if there is an error, it can be ignored
return await db.close()
} catch (err) {
// ignore error, already closed
}

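Note on the closeDB hunk above: returning the promise directly lets a rejection escape the surrounding try/catch, so the promise has to be awaited before returning for the catch block to swallow an "already closed" error. A minimal standalone sketch of the difference, with a hypothetical close() standing in for PouchDB's db.close():

    // Sketch only: `close` is a stand-in for a close call that may reject.
    const close = () => Promise.reject(new Error("already closed"))

    async function closeWithoutAwait() {
      try {
        return close() // the rejection settles after the try/catch has been exited
      } catch (err) {
        // never reached for an asynchronous failure
      }
    }

    async function closeWithAwait() {
      try {
        // awaiting here means the rejection is caught and ignored, as in the diff
        return await close()
      } catch (err) {
        // ignore error, already closed
      }
    }

    closeWithoutAwait().catch(() => console.log("rejection escaped"))
    closeWithAwait().then(() => console.log("rejection swallowed"))
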
@@ -1,29 +1,19 @@
const PouchDB = require("pouchdb")
const env = require("../environment")

exports.getCouchUrl = () => {
if (!env.COUCH_DB_URL) return

// username and password already exist in URL
if (env.COUCH_DB_URL.includes("@")) {
return env.COUCH_DB_URL
}

const [protocol, ...rest] = env.COUCH_DB_URL.split("://")

if (!env.COUCH_DB_USERNAME || !env.COUCH_DB_PASSWORD) {
throw new Error(
"CouchDB configuration invalid. You must provide a fully qualified CouchDB url, or the COUCH_DB_USER and COUCH_DB_PASSWORD environment variables."
)
}

return `${protocol}://${env.COUCH_DB_USERNAME}:${env.COUCH_DB_PASSWORD}@${rest}`
}

exports.splitCouchUrl = url => {
function getUrlInfo() {
let url = env.COUCH_DB_URL
let username, password, host
const [protocol, rest] = url.split("://")
const [auth, host] = rest.split("@")
const [username, password] = auth.split(":")
if (url.includes("@")) {
const hostParts = rest.split("@")
host = hostParts[1]
const authParts = hostParts[0].split(":")
username = authParts[0]
password = authParts[1]
} else {
host = rest
}
return {
url: `${protocol}://${host}`,
auth: {

@@ -33,32 +23,51 @@ exports.splitCouchUrl = url => {
}
}

exports.getCouchInfo = () => {
const urlInfo = getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {
// set from env
username = env.COUCH_DB_USERNAME
} else if (urlInfo.auth.username) {
// set from url
username = urlInfo.auth.username
} else if (!env.isTest()) {
throw new Error("CouchDB username not set")
}
if (env.COUCH_DB_PASSWORD) {
// set from env
password = env.COUCH_DB_PASSWORD
} else if (urlInfo.auth.password) {
// set from url
password = urlInfo.auth.password
} else if (!env.isTest()) {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
return {
url: urlInfo.url,
auth: {
username: username,
password: password,
},
cookie: `Basic ${authCookie}`,
}
}

/**
* Return a constructor for PouchDB.
* This should be rarely used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
exports.getPouch = (opts = {}) => {
let auth = {
username: env.COUCH_DB_USERNAME,
password: env.COUCH_DB_PASSWORD,
}
let url = exports.getCouchUrl() || "http://localhost:4005"
// need to update security settings
if (!auth.username || !auth.password || url.includes("@")) {
const split = exports.splitCouchUrl(url)
url = split.url
auth = split.auth
}

const authCookie = Buffer.from(`${auth.username}:${auth.password}`).toString(
"base64"
)
let { url, cookie } = exports.getCouchInfo()
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url, opts) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", `Basic ${authCookie}`)
opts.headers.set("Authorization", cookie)
return PouchDB.fetch(url, opts)
},
}

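Note: the new getCouchInfo() consolidates the CouchDB URL, credentials and a pre-built Basic auth cookie, and later hunks in this diff (getAllDbs and the Lucene search) attach that cookie as an Authorization header. A sketch of that consumption pattern, using the same import path the server-side hunks use:

    // Sketch, assuming node-fetch and the backend-core pouch module shown above.
    const fetch = require("node-fetch")
    const { getCouchInfo } = require("@budibase/backend-core/db")

    async function listDatabases() {
      const { url, cookie } = getCouchInfo()
      // the cookie is already in the form "Basic <base64 user:pass>"
      const response = await fetch(`${url}/_all_dbs`, {
        method: "GET",
        headers: { Authorization: cookie },
      })
      if (response.status !== 200) {
        throw new Error("Cannot connect to CouchDB instance")
      }
      return response.json()
    }
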
@@ -12,7 +12,7 @@ const {
const { getTenantId, getGlobalDBName } = require("../tenancy")
const fetch = require("node-fetch")
const { doWithDB, allDbs } = require("./index")
const { getCouchUrl } = require("./pouch")
const { getCouchInfo } = require("./pouch")
const { getAppMetadata } = require("../cache/appMetadata")
const { checkSlashesInUrl } = require("../helpers")
const {

@@ -169,8 +169,14 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
return allDbs()
}
let dbs = []
async function addDbs(url) {
const response = await fetch(checkSlashesInUrl(encodeURI(url)))
let { url, cookie } = getCouchInfo()
async function addDbs(couchUrl) {
const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), {
method: "GET",
headers: {
Authorization: cookie,
},
})
if (response.status === 200) {
let json = await response.json()
dbs = dbs.concat(json)

@@ -178,7 +184,7 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
throw "Cannot connect to CouchDB instance"
}
}
let couchUrl = `${getCouchUrl()}/_all_dbs`
let couchUrl = `${url}/_all_dbs`
let tenantId = getTenantId()
if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
// just get all DBs when:

@@ -6,9 +6,13 @@ function isTest() {
)
}

function isDev() {
return process.env.NODE_ENV !== "production"
}

module.exports = {
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -32,6 +36,7 @@ module.exports = {
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
USE_COUCH: process.env.USE_COUCH || true,
isTest,
isDev,
_set(key, value) {
process.env[key] = value
module.exports[key] = value

@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",

@@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.126-alpha.0",
"@budibase/string-templates": "^1.0.142-alpha.0",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

@@ -2,17 +2,22 @@
import dayjs from "dayjs"

export let value
export let schema

// adding the 0- will turn a string like 00:00:00 into a valid ISO
// date, but will make actual ISO dates invalid
$: time = new Date(`0-${value}`)
$: isTime = !isNaN(time)
$: isTimeOnly = !isNaN(time) || schema?.timeOnly
$: isDateOnly = schema?.dateOnly
$: format = isTimeOnly
? "HH:mm:ss"
: isDateOnly
? "MMMM D YYYY"
: "MMMM D YYYY, HH:mm"
</script>

<div>
{dayjs(isTime ? time : value).format(
isTime ? "HH:mm:ss" : "MMMM D YYYY, HH:mm"
)}
{dayjs(isTimeOnly ? time : value).format(format)}
</div>

<style>

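Note: the renderer above relies on the trick described in its own comment, prefixing the value with "0-" so that a bare time such as 00:00:00 parses as a valid Date while a full ISO datetime does not, letting !isNaN(time) flag time-only values. A standalone sketch of that check (the expected results simply restate the component's comment, not a guarantee for every Date parser):

    // Sketch of the time-only detection used in the renderer above.
    const looksLikeTimeOnly = value => {
      // per the component comment: "0-" + a time parses, "0-" + an ISO date does not
      const masked = new Date(`0-${value}`)
      return !isNaN(masked.getTime())
    }

    console.log(looksLikeTimeOnly("10:30:00")) // true, per the comment above
    console.log(looksLikeTimeOnly("2022-01-01T00:00:00Z")) // false, per the comment above
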
@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"license": "GPL-3.0",
"private": true,
"scripts": {

@@ -14,8 +14,10 @@
"cy:open": "cypress open",
"cy:run": "cypress run",
"cy:run:ci": "xvfb-run cypress run --headed --browser chrome",
"cy:run:ci:record": "xvfb-run cypress run --headed --browser chrome --record",
"cy:test": "start-server-and-test cy:setup http://localhost:4100/builder cy:run",
"cy:ci": "start-server-and-test cy:setup:ci http://localhost:4100/builder cy:run:ci",
"cy:ci:record": "start-server-and-test cy:setup:ci http://localhost:4100/builder cy:run:ci:record",
"cy:debug": "start-server-and-test cy:setup http://localhost:4100/builder cy:open",
"cy:debug:ci": "start-server-and-test cy:setup:ci http://localhost:4100/builder cy:open"
},

@@ -65,10 +67,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.126-alpha.0",
"@budibase/client": "^1.0.126-alpha.0",
"@budibase/frontend-core": "^1.0.126-alpha.0",
"@budibase/string-templates": "^1.0.126-alpha.0",
"@budibase/bbui": "^1.0.142-alpha.0",
"@budibase/client": "^1.0.142-alpha.0",
"@budibase/frontend-core": "^1.0.142-alpha.0",
"@budibase/string-templates": "^1.0.142-alpha.0",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

@@ -53,6 +53,18 @@
x => x.blockToLoop === block.id
)

async function removeLooping() {
loopingSelected = false
let loopBlock =
$automationStore.selectedAutomation?.automation.definition.steps.find(
x => x.blockToLoop === block.id
)
automationStore.actions.deleteAutomationBlock(loopBlock)
await automationStore.actions.save(
$automationStore.selectedAutomation?.automation
)
}

async function deleteStep() {
let loopBlock =
$automationStore.selectedAutomation?.automation.definition.steps.find(

@@ -151,9 +163,7 @@
{#if !showLooping}
<div class="blockSection">
<div class="block-options">
<div class="delete-padding" on:click={() => deleteStep()}>
<Icon name="DeleteOutline" />
</div>
<ActionButton on:click={() => removeLooping()} icon="DeleteOutline" />
</div>
<Layout noPadding gap="S">
<AutomationBlockSetup

@@ -182,7 +182,11 @@
<div class="fields">
{#each schemaProperties as [key, value]}
<div class="block-field">
<Label>{value.title || (key === "row" ? "Table" : key)}</Label>
<Label
tooltip={value.title === "Binding / Value"
? "If using the String input type, please use a comma or newline separated string"
: null}>{value.title || (key === "row" ? "Table" : key)}</Label
>
{#if value.type === "string" && value.enum}
<Select
on:change={e => onChange(e, key)}

@@ -265,6 +269,7 @@
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
{isTestModal}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay

@@ -4,14 +4,15 @@
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import { createEventDispatcher } from "svelte"
import { automationStore } from "builderStore"
import RowSelectorTypes from "./RowSelectorTypes.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"

const dispatch = createEventDispatcher()

export let value
export let bindings
export let block
export let isTestModal

let table
let schemaFields

@@ -103,9 +104,9 @@
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn}
{#if schema.type !== "attachment"}
{#if $automationStore.selectedAutomation.automation.testData}
{#if !rowControl}
<RowSelectorTypes
{isTestModal}
{field}
{schema}
{bindings}

@@ -113,25 +114,8 @@
{onChange}
/>
{:else}
<DrawerBindableInput
placeholder={placeholders[schema.type]}
panel={AutomationBindingPanel}
value={Array.isArray(value[field])
? value[field].join(" ")
: value[field]}
on:change={e => onChange(e, field, schema.type)}
label={field}
type="string"
{bindings}
fillWidth={true}
allowJS={true}
updateOnChange={false}
/>
{/if}
{:else if !rowControl}
<RowSelectorTypes {field} {schema} {bindings} {value} {onChange} />
{:else}
<DrawerBindableInput
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
placeholder={placeholders[schema.type]}
panel={AutomationBindingPanel}
value={Array.isArray(value[field])

@@ -8,6 +8,7 @@
} from "@budibase/bbui"
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"

export let onChange

@@ -15,6 +16,7 @@
export let schema
export let value
export let bindings
export let isTestModal

function schemaHasOptions(schema) {
return !!schema.constraints?.inclusion?.length

@@ -51,7 +53,8 @@
{:else if schema.type === "link"}
<LinkedRowSelector bind:linkedRows={value[field]} {schema} />
{:else if schema.type === "string" || schema.type === "number"}
<DrawerBindableInput
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={value[field]}
on:change={e => onChange(e, field)}

@@ -165,7 +165,7 @@
<TableFilterButton
{schema}
on:change={onFilter}
disabled={!hasCols || !hasRows}
disabled={!hasCols}
/>
{/key}
</div>

@@ -20,6 +20,9 @@
export let readonly

const resolveTimeStamp = timestamp => {
if (!timestamp) {
return null
}
let maskedDate = new Date(`0-${timestamp}`)
if (maskedDate instanceof Date && !isNaN(maskedDate.getTime())) {
return maskedDate

@@ -34,7 +37,7 @@
$: label = meta.name ? capitalise(meta.name) : ""

const timeStamp = resolveTimeStamp(value)
const isTimeStamp = !!timeStamp
const isTimeStamp = !!timeStamp || meta?.timeOnly
</script>

{#if type === "options" && meta.constraints.inclusion.length !== 0}

@@ -46,7 +49,12 @@
sort
/>
{:else if type === "datetime"}
<DatePicker {label} timeOnly={isTimeStamp} bind:value />
<DatePicker
{label}
timeOnly={isTimeStamp}
enableTime={!meta?.dateOnly}
bind:value
/>
{:else if type === "attachment"}
<Dropzone {label} bind:value />
{:else if type === "boolean"}

@@ -1,5 +1,4 @@
<script>
import { ActionButton } from "@budibase/bbui"
import GoogleLogo from "assets/google-logo.png"
import { store } from "builderStore"
import { auth } from "stores/portal"

@@ -10,7 +9,7 @@
$: tenantId = $auth.tenantId
</script>

<ActionButton
<button
on:click={async () => {
let ds = datasource
if (!ds) {

@@ -22,26 +21,32 @@
)
}}
>
<div class="inner">
<img src={GoogleLogo} alt="google icon" />
<p>Sign in with Google</p>
</div>
</ActionButton>
</button>

<style>
.inner {
button {
width: 195px;
height: 40px;
font-size: 14px;
display: flex;
flex-direction: row;
align-items: center;
justify-content: center;
padding-top: var(--spacing-xs);
padding-bottom: var(--spacing-xs);
font-weight: 500;
background: #4285f4;
color: #ffffff;
border: none;
cursor: pointer;
padding: 2px;
border-radius: 2px;
}
.inner img {

img {
border-radius: 2px;
width: 18px;
margin: 3px 10px 3px 3px;
}
.inner p {
margin: 0;
margin-right: 11px;
background: #ffffff;
padding: 10px;
}
</style>

@@ -0,0 +1,145 @@
<script>
export let width = 100
export let height = 100
</script>

<svg
{width}
{height}
viewBox="0 0 46 46"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:sketch="http://www.bohemiancoding.com/sketch/ns"
>
<!-- Generator: Sketch 3.3.3 (12081) - http://www.bohemiancoding.com/sketch -->
<title>btn_google_dark_normal_ios</title>
<desc>Created with Sketch.</desc>
<defs>
<filter
x="-50%"
y="-50%"
width="200%"
height="200%"
filterUnits="objectBoundingBox"
id="filter-1"
>
<feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetOuter1" />
<feGaussianBlur
stdDeviation="0.5"
in="shadowOffsetOuter1"
result="shadowBlurOuter1"
/>
<feColorMatrix
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.168 0"
in="shadowBlurOuter1"
type="matrix"
result="shadowMatrixOuter1"
/>
<feOffset dx="0" dy="0" in="SourceAlpha" result="shadowOffsetOuter2" />
<feGaussianBlur
stdDeviation="0.5"
in="shadowOffsetOuter2"
result="shadowBlurOuter2"
/>
<feColorMatrix
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.084 0"
in="shadowBlurOuter2"
type="matrix"
result="shadowMatrixOuter2"
/>
<feMerge>
<feMergeNode in="shadowMatrixOuter1" />
<feMergeNode in="shadowMatrixOuter2" />
<feMergeNode in="SourceGraphic" />
</feMerge>
</filter>
<rect id="path-2" x="0" y="0" width="40" height="40" rx="2" />
<rect id="path-3" x="5" y="5" width="38" height="38" rx="1" />
</defs>
<g
id="Google-Button"
stroke="none"
stroke-width="1"
fill="none"
fill-rule="evenodd"
sketch:type="MSPage"
>
<g
id="9-PATCH"
sketch:type="MSArtboardGroup"
transform="translate(-608.000000, -219.000000)"
/>
<g
id="btn_google_dark_normal"
sketch:type="MSArtboardGroup"
transform="translate(-1.000000, -1.000000)"
>
<g
id="button"
sketch:type="MSLayerGroup"
transform="translate(4.000000, 4.000000)"
filter="url(#filter-1)"
>
<g id="button-bg">
<use
fill="#4285F4"
fill-rule="evenodd"
sketch:type="MSShapeGroup"
xlink:href="#path-2"
/>
<use fill="none" xlink:href="#path-2" />
<use fill="none" xlink:href="#path-2" />
<use fill="none" xlink:href="#path-2" />
</g>
</g>
<g id="button-bg-copy">
<use
fill="#FFFFFF"
fill-rule="evenodd"
sketch:type="MSShapeGroup"
xlink:href="#path-3"
/>
<use fill="none" xlink:href="#path-3" />
<use fill="none" xlink:href="#path-3" />
<use fill="none" xlink:href="#path-3" />
</g>
<g
id="logo_googleg_48dp"
sketch:type="MSLayerGroup"
transform="translate(15.000000, 15.000000)"
>
<path
d="M17.64,9.20454545 C17.64,8.56636364 17.5827273,7.95272727 17.4763636,7.36363636 L9,7.36363636 L9,10.845 L13.8436364,10.845 C13.635,11.97 13.0009091,12.9231818 12.0477273,13.5613636 L12.0477273,15.8195455 L14.9563636,15.8195455 C16.6581818,14.2527273 17.64,11.9454545 17.64,9.20454545 L17.64,9.20454545 Z"
id="Shape"
fill="#4285F4"
sketch:type="MSShapeGroup"
/>
<path
d="M9,18 C11.43,18 13.4672727,17.1940909 14.9563636,15.8195455 L12.0477273,13.5613636 C11.2418182,14.1013636 10.2109091,14.4204545 9,14.4204545 C6.65590909,14.4204545 4.67181818,12.8372727 3.96409091,10.71 L0.957272727,10.71 L0.957272727,13.0418182 C2.43818182,15.9831818 5.48181818,18 9,18 L9,18 Z"
id="Shape"
fill="#34A853"
sketch:type="MSShapeGroup"
/>
<path
d="M3.96409091,10.71 C3.78409091,10.17 3.68181818,9.59318182 3.68181818,9 C3.68181818,8.40681818 3.78409091,7.83 3.96409091,7.29 L3.96409091,4.95818182 L0.957272727,4.95818182 C0.347727273,6.17318182 0,7.54772727 0,9 C0,10.4522727 0.347727273,11.8268182 0.957272727,13.0418182 L3.96409091,10.71 L3.96409091,10.71 Z"
id="Shape"
fill="#FBBC05"
sketch:type="MSShapeGroup"
/>
<path
d="M9,3.57954545 C10.3213636,3.57954545 11.5077273,4.03363636 12.4404545,4.92545455 L15.0218182,2.34409091 C13.4631818,0.891818182 11.4259091,0 9,0 C5.48181818,0 2.43818182,2.01681818 0.957272727,4.95818182 L3.96409091,7.29 C4.67181818,5.16272727 6.65590909,3.57954545 9,3.57954545 L9,3.57954545 Z"
id="Shape"
fill="#EA4335"
sketch:type="MSShapeGroup"
/>
<path
d="M0,0 L18,0 L18,18 L0,18 L0,0 Z"
id="Shape"
sketch:type="MSShapeGroup"
/>
</g>
<g id="handles_square" sketch:type="MSLayerGroup" />
</g>
</g>
</svg>

@@ -49,6 +49,10 @@
filters = [...filters, duplicate]
}

const getSchema = filter => {
return schemaFields.find(field => field.name === filter.field)
}

const onFieldChange = (expression, field) => {
// Update the field type
expression.type = enrichedSchemaFields.find(x => x.name === field)?.type

@@ -150,7 +154,12 @@
bind:value={filter.value}
/>
{:else if filter.type === "datetime"}
<DatePicker disabled={filter.noValue} bind:value={filter.value} />
<DatePicker
disabled={filter.noValue}
enableTime={!getSchema(filter).dateOnly}
timeOnly={getSchema(filter).timeOnly}
bind:value={filter.value}
/>
{:else}
<DrawerBindableInput disabled />
{/if}

@@ -44,6 +44,20 @@
$: readQuery = query.queryVerb === "read" || query.readable
$: queryInvalid = !query.name || (readQuery && data.length === 0)

//Cast field in query preview response to number if specified by schema
$: {
for (let i = 0; i < data.length; i++) {
let row = data[i]
for (let fieldName of Object.keys(fields)) {
if (fields[fieldName] === "number" && !isNaN(Number(row[fieldName]))) {
row[fieldName] = Number(row[fieldName])
} else {
row[fieldName] = row[fieldName]?.toString()
}
}
}
}

// seed the transformer
if (query && !query.transformer) {
query.transformer = "return data"

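Note: the reactive block above coerces preview values so fields flagged as "number" in the query schema render as numbers, while everything else is stringified. An isolated sketch of the same coercion, with an illustrative field map and row:

    // Sketch: coerce fields flagged as "number" in the query schema.
    const fields = { amount: "number", name: "string" } // illustrative schema
    const data = [{ amount: "12.5", name: 42 }]

    for (const row of data) {
      for (const fieldName of Object.keys(fields)) {
        if (fields[fieldName] === "number" && !isNaN(Number(row[fieldName]))) {
          row[fieldName] = Number(row[fieldName])
        } else {
          row[fieldName] = row[fieldName]?.toString()
        }
      }
    }

    console.log(data) // [{ amount: 12.5, name: "42" }]
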
@@ -144,7 +144,11 @@ export const RelationshipTypes = {
MANY_TO_ONE: "many-to-one",
}

export const ALLOWABLE_STRING_OPTIONS = [FIELDS.STRING, FIELDS.OPTIONS]
export const ALLOWABLE_STRING_OPTIONS = [
FIELDS.STRING,
FIELDS.OPTIONS,
FIELDS.LONGFORM,
]
export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map(
opt => opt.type
)

@@ -13,7 +13,7 @@
Table,
Checkbox,
} from "@budibase/bbui"
import { email } from "stores/portal"
import { email, admin } from "stores/portal"
import { API } from "api"
import { cloneDeep } from "lodash/fp"
import analytics, { Events } from "analytics"

@@ -58,6 +58,7 @@
const savedConfig = await API.saveConfig(smtp)
smtpConfig._rev = savedConfig._rev
smtpConfig._id = savedConfig._id
await admin.getChecklist()
notifications.success(`Settings saved`)
analytics.captureEvent(Events.SMTP.SAVED)
} catch (error) {

@@ -24,14 +24,8 @@ export function createAdminStore() {
const admin = writable(DEFAULT_CONFIG)

async function init() {
const tenantId = get(auth).tenantId
const checklist = await API.getChecklist(tenantId)
const totalSteps = Object.keys(checklist).length
const completedSteps = Object.values(checklist).filter(
x => x?.checked
).length
await getChecklist()
await getEnvironment()

// enable system status checks in the cloud
if (get(admin).cloud) {
await getSystemStatus()

@@ -40,8 +34,6 @@ export function createAdminStore() {

admin.update(store => {
store.loaded = true
store.checklist = checklist
store.onboardingProgress = (completedSteps / totalSteps) * 100
return store
})
}

@@ -81,6 +73,20 @@ export function createAdminStore() {
})
}

async function getChecklist() {
const tenantId = get(auth).tenantId
const checklist = await API.getChecklist(tenantId)
const totalSteps = Object.keys(checklist).length
const completedSteps = Object.values(checklist).filter(
x => x?.checked
).length
admin.update(store => {
store.checklist = checklist
store.onboardingProgress = (completedSteps / totalSteps) * 100
return store
})
}

function unload() {
admin.update(store => {
store.loaded = false

@@ -93,6 +99,7 @@ export function createAdminStore() {
init,
checkImportComplete,
unload,
getChecklist,
}
}

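Note: the extracted getChecklist() recomputes onboarding progress as the percentage of checked steps each time it runs, which is why the SMTP settings page above can simply call admin.getChecklist() after saving. A small worked sketch of that calculation (the step names here are illustrative, not the real checklist keys):

    // Sketch: progress calculation as in getChecklist() above.
    const checklist = {
      apps: { checked: true },      // illustrative step names
      smtp: { checked: false },
      adminUser: { checked: true },
      sso: undefined,               // optional chaining keeps missing entries safe
    }

    const totalSteps = Object.keys(checklist).length // 4
    const completedSteps = Object.values(checklist).filter(x => x?.checked).length // 2
    const onboardingProgress = (completedSteps / totalSteps) * 100

    console.log(onboardingProgress) // 50
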
@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",

@@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.126-alpha.0",
"@budibase/frontend-core": "^1.0.126-alpha.0",
"@budibase/string-templates": "^1.0.126-alpha.0",
"@budibase/bbui": "^1.0.142-alpha.0",
"@budibase/frontend-core": "^1.0.142-alpha.0",
"@budibase/string-templates": "^1.0.142-alpha.0",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

@@ -88,6 +88,10 @@
const schema = schemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}

const getSchema = filter => {
return schemaFields.find(field => field.name === filter.field)
}
</script>

<div class="container" class:mobile={$context.device.mobile}>

@@ -134,7 +138,12 @@
bind:value={filter.value}
/>
{:else if filter.type === "datetime"}
<DatePicker disabled={filter.noValue} bind:value={filter.value} />
<DatePicker
disabled={filter.noValue}
enableTime={!getSchema(filter).dateOnly}
timeOnly={getSchema(filter).timeOnly}
bind:value={filter.value}
/>
{:else}
<Input disabled />
{/if}

@@ -44,7 +44,6 @@
fieldApi = value?.fieldApi
fieldSchema = value?.fieldSchema
})
onDestroy(() => unsubscribe?.())

// Determine label class from position
$: labelClass = labelPos === "above" ? "" : `spectrum-FieldLabel--${labelPos}`

@@ -52,6 +51,11 @@
const updateLabel = e => {
builderStore.actions.updateProp("label", e.target.textContent)
}

onDestroy(() => {
fieldApi?.deregister()
unsubscribe?.()
})
</script>

<FieldGroupFallback>

@@ -22,7 +22,7 @@
if (
formContext &&
$builderStore.inBuilder &&
$componentStore.selectedComponentPath?.includes($component.id)
$componentStore?.selectedComponentPath?.includes($component.id)
) {
formContext.formApi.setStep(step)
}

@@ -329,6 +329,17 @@
}
}

// We don't want to actually remove the field state when deregistering, just
// remove any errors and validation
const deregister = () => {
const fieldInfo = getField(field)
fieldInfo.update(state => {
state.fieldState.validator = null
state.fieldState.error = null
return state
})
}

// Updates the disabled state of a certain field
const setDisabled = fieldDisabled => {
const fieldInfo = getField(field)

@@ -348,6 +359,7 @@
reset,
updateValidation,
setDisabled,
deregister,
validate: () => {
// Validate the field by force setting the same value again
const { fieldState } = get(getField(field))

@@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "^1.0.126-alpha.0",
"@budibase/bbui": "^1.0.142-alpha.0",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {

@@ -68,10 +68,10 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "^10.0.3",
"@budibase/backend-core": "^1.0.126-alpha.0",
"@budibase/client": "^1.0.126-alpha.0",
"@budibase/pro": "1.0.126-alpha.0",
"@budibase/string-templates": "^1.0.126-alpha.0",
"@budibase/backend-core": "^1.0.142-alpha.0",
"@budibase/client": "^1.0.142-alpha.0",
"@budibase/pro": "1.0.142-alpha.0",
"@budibase/string-templates": "^1.0.142-alpha.0",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",

@@ -406,11 +406,14 @@ const destroyApp = async (ctx: any) => {
if (!env.isTest() && !isUnpublish) {
await deleteApp(appId)
}
// automations only in production
if (isUnpublish) {
await cleanupAutomations(appId)
}
// make sure the app/role doesn't stick around after the app has been deleted
// remove app role when the dev app is deleted (no trace of app anymore)
else {
await removeAppFromUserRoles(ctx, appId)
}
await appCache.invalidateAppMetadata(appId)
return result
}

@@ -1,6 +1,6 @@
const { SearchIndexes } = require("../../../db/utils")
const fetch = require("node-fetch")
const { getCouchUrl } = require("@budibase/backend-core/db")
const { getCouchInfo } = require("@budibase/backend-core/db")
const { getAppId } = require("@budibase/backend-core/context")

/**

@@ -242,11 +242,10 @@ class QueryBuilder {

async run() {
const appId = getAppId()
const url = `${getCouchUrl()}/${appId}/_design/database/_search/${
SearchIndexes.ROWS
}`
const { url, cookie } = getCouchInfo()
const fullPath = `${url}/${appId}/_design/database/_search/${SearchIndexes.ROWS}`
const body = this.buildSearchBody()
return await runQuery(url, body)
return await runQuery(fullPath, body, cookie)
}
}

@@ -254,12 +253,16 @@
* Executes a lucene search query.
* @param url The query URL
* @param body The request body defining search criteria
* @param cookie The auth cookie for CouchDB
* @returns {Promise<{rows: []}>}
*/
const runQuery = async (url, body) => {
const runQuery = async (url, body, cookie) => {
const response = await fetch(url, {
body: JSON.stringify(body),
method: "POST",
headers: {
Authorization: cookie,
},
})
const json = await response.json()

@@ -86,3 +86,15 @@ exports.substituteLoopStep = (hbsString, substitute) => {

return hbsString
}

exports.stringSplit = value => {
if (value == null) {
return []
}
if (value.split("\n").length > 1) {
value = value.split("\n")
} else {
value = value.split(",")
}
return value
}

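Note: the new stringSplit helper prefers newline separation and falls back to commas, which is what the loop step's "comma or newline separated string" tooltip earlier in this diff refers to. Example calls against a copy of the helper from the hunk above:

    // Sketch: behaviour of exports.stringSplit as defined in the hunk above.
    const stringSplit = value => {
      if (value == null) {
        return []
      }
      if (value.split("\n").length > 1) {
        value = value.split("\n")
      } else {
        value = value.split(",")
      }
      return value
    }

    console.log(stringSplit("a\nb\nc")) // ["a", "b", "c"]
    console.log(stringSplit("a,b,c"))   // ["a", "b", "c"]
    console.log(stringSplit(null))      // []
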
@@ -47,7 +47,11 @@ exports.FieldTypes = {

exports.CanSwitchTypes = [
[exports.FieldTypes.JSON, exports.FieldTypes.ARRAY],
[exports.FieldTypes.STRING, exports.FieldTypes.OPTIONS],
[
exports.FieldTypes.STRING,
exports.FieldTypes.OPTIONS,
exports.FieldTypes.LONGFORM,
],
[exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER],
]

@@ -242,12 +242,10 @@ module MSSQLModule {
if (typeof name !== "string") {
continue
}
const type: string = convertSqlType(def.DATA_TYPE)

schema[name] = {
autocolumn: !!autoColumns.find((col: string) => col === name),
name: name,
type,
...convertSqlType(def.DATA_TYPE),
}
}
tables[tableName] = {

@@ -15,6 +15,7 @@ import {
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import dayjs from "dayjs"
import { FieldTypes } from "../constants"
const { NUMBER_REGEX } = require("../utilities")

module MySQLModule {

@@ -101,7 +102,7 @@ module MySQLModule {
}
// if not a number, see if it is a date - important to do in this order as any
// integer will be considered a valid date
else if (dayjs(binding).isValid()) {
else if (/^\d/.test(binding) && dayjs(binding).isValid()) {
bindings[i] = dayjs(binding).toDate()
}
}

@@ -151,20 +152,24 @@ module MySQLModule {

async internalQuery(
query: SqlQuery,
connect: boolean = true
opts: { connect?: boolean; disableCoercion?: boolean } = {
connect: true,
disableCoercion: false,
}
): Promise<any[] | any> {
try {
if (connect) {
if (opts?.connect) {
await this.connect()
}
const baseBindings = query.bindings || []
const bindings = opts?.disableCoercion
? baseBindings
: bindingTypeCoerce(baseBindings)
// Node MySQL is callback based, so we must wrap our call in a promise
const response = await this.client.query(
query.sql,
bindingTypeCoerce(query.bindings || [])
)
const response = await this.client.query(query.sql, bindings)
return response[0]
} finally {
if (connect) {
if (opts?.connect) {
await this.disconnect()
}
}

@@ -179,7 +184,7 @@ module MySQLModule {
// get the tables first
const tablesResp = await this.internalQuery(
{ sql: "SHOW TABLES;" },
false
{ connect: false }
)
const tableNames = tablesResp.map(
(obj: any) =>

@@ -191,7 +196,7 @@ module MySQLModule {
const schema: TableSchema = {}
const descResp = await this.internalQuery(
{ sql: `DESCRIBE \`${tableName}\`;` },
false
{ connect: false }
)
for (let column of descResp) {
const columnName = column.Field

@@ -211,8 +216,8 @@ module MySQLModule {
schema[columnName] = {
name: columnName,
autocolumn: isAuto,
type: convertSqlType(column.Type),
constraints,
...convertSqlType(column.Type),
}
}
if (!tables[tableName]) {

@@ -254,7 +259,8 @@ module MySQLModule {
async query(json: QueryJson) {
await this.connect()
try {
const queryFn = (query: any) => this.internalQuery(query, false)
const queryFn = (query: any) =>
this.internalQuery(query, { connect: false, disableCoercion: true })
return await this.queryWithReturning(json, queryFn)
} finally {
await this.disconnect()

@@ -279,9 +279,9 @@ module OracleModule {
)
}

private internalConvertType(column: OracleColumn): string {
private internalConvertType(column: OracleColumn): { type: string } {
if (this.isBooleanType(column)) {
return FieldTypes.BOOLEAN
return { type: FieldTypes.BOOLEAN }
}

return convertSqlType(column.type)

@@ -328,7 +328,7 @@ module OracleModule {
fieldSchema = {
autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
name: columnName,
type: this.internalConvertType(oracleColumn),
...this.internalConvertType(oracleColumn),
}
table.schema[columnName] = fieldSchema
}

@@ -227,7 +227,6 @@ module PostgresModule {
}
}

const type: string = convertSqlType(column.data_type)
const identity = !!(
column.identity_generation ||
column.identity_start ||

@@ -242,7 +241,7 @@ module PostgresModule {
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,
type,
...convertSqlType(column.data_type),
}
}

@@ -35,6 +35,9 @@ const SQL_DATE_TYPE_MAP = {
date: FieldTypes.DATETIME,
}

const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"]

const SQL_STRING_TYPE_MAP = {
varchar: FieldTypes.STRING,
char: FieldTypes.STRING,

@@ -42,9 +45,9 @@ const SQL_STRING_TYPE_MAP = {
nvarchar: FieldTypes.STRING,
ntext: FieldTypes.STRING,
enum: FieldTypes.STRING,
blob: FieldTypes.LONGFORM,
long: FieldTypes.LONGFORM,
text: FieldTypes.LONGFORM,
blob: FieldTypes.STRING,
long: FieldTypes.STRING,
text: FieldTypes.STRING,
}

const SQL_BOOLEAN_TYPE_MAP = {

@@ -85,9 +88,9 @@ export function breakExternalTableId(tableId: string | undefined) {
return {}
}
const parts = tableId.split(DOUBLE_SEPARATOR)
let tableName = parts.pop()
let datasourceId = parts.shift()
// if they need joined
let datasourceId = parts.join(DOUBLE_SEPARATOR)
let tableName = parts.join(DOUBLE_SEPARATOR)
return { datasourceId, tableName }
}

@@ -137,12 +140,20 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
}

export function convertSqlType(type: string) {
let foundType = FieldTypes.STRING
const lcType = type.toLowerCase()
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (type.toLowerCase().includes(external)) {
return internal
if (lcType.includes(external)) {
foundType = internal
break
}
}
return FieldTypes.STRING
const schema: any = { type: foundType }
if (foundType === FieldTypes.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lcType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lcType)
}
return schema
}

export function getSqlQuery(query: SqlQuery | string): SqlQuery {

@@ -207,11 +218,20 @@ function shouldCopySpecialColumn(
column: { type: string },
fetchedColumn: { type: string } | undefined
) {
const specialTypes = [
FieldTypes.OPTIONS,
FieldTypes.LONGFORM,
FieldTypes.ARRAY,
FieldTypes.FORMULA,
]
if (column && !fetchedColumn) {
return true
}
const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER
return (
column.type === FieldTypes.OPTIONS ||
column.type === FieldTypes.ARRAY ||
((!fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER) &&
column.type === FieldTypes.BOOLEAN)
specialTypes.indexOf(column.type) !== -1 ||
(fetchedIsNumber && column.type === FieldTypes.BOOLEAN)
)
}

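Note: after this change convertSqlType returns a small schema fragment rather than a bare type string, which is why the MSSQL, MySQL, Oracle and Postgres hunks above now spread its result into each column definition. An illustrative sketch of how that fragment is consumed, with values assumed from the type maps in the hunk (the literal strings "datetime"/"string" are assumptions about the FieldTypes constants):

    // Sketch: convertSqlType now yields e.g.
    //   "date"    -> { type: "datetime", dateOnly: true, timeOnly: false } (assumed values)
    //   "varchar" -> { type: "string" }
    // and a column schema is assembled by spreading that fragment:
    const schema = {}
    const column = { name: "created_at", dataType: "date" } // illustrative column
    schema[column.name] = {
      name: column.name,
      autocolumn: false,
      ...{ type: "datetime", dateOnly: true, timeOnly: false }, // stand-in for convertSqlType(column.dataType)
    }
    console.log(schema.created_at.dateOnly) // true

The dateOnly/timeOnly flags are what the DatePicker hunks earlier in the diff read to decide between date-only, time-only and full date-time pickers.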
@@ -100,10 +100,10 @@ class Orchestrator {
let automation = this._automation
const app = await this.getApp()
let stopped = false
let loopStep
let loopStep = null

let stepCount = 0
let loopStepNumber
let loopStepNumber = null
let loopSteps = []
for (let step of automation.definition.steps) {
stepCount++

@@ -117,15 +117,17 @@ class Orchestrator {
if (loopStep) {
input = await processObject(loopStep.inputs, this._context)
}
let iterations = loopStep ? input.binding.length : 1
let iterations = loopStep
? Array.isArray(input.binding)
? input.binding.length
: automationUtils.stringSplit(input.binding).length
: 1
let iterationCount = 0
for (let index = 0; index < iterations; index++) {
let originalStepInput = cloneDeep(step.inputs)

// Handle if the user has set a max iteration count or if it reaches the max limit set by us
if (loopStep) {
// lets first of all handle the input
// if the input is array then use it, if it is a string then split it on every new line
let newInput = await processObject(
loopStep.inputs,
cloneDeep(this._context)

@@ -134,9 +136,6 @@ class Orchestrator {
newInput,
loopStep.schema.inputs
)
this._context.steps[loopStepNumber] = {
currentItem: newInput.binding[index],
}

let tempOutput = { items: loopSteps, iterations: iterationCount }
if (

@@ -154,6 +153,20 @@ class Orchestrator {
break
}

let item
if (
typeof loopStep.inputs.binding === "string" &&
loopStep.inputs.option === "String"
) {
item = automationUtils.stringSplit(newInput.binding)
} else {
item = loopStep.inputs.binding
}

this._context.steps[loopStepNumber] = {
currentItem: item[index],
}

// The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
// Pretty hacky because we need to account for the row object
for (let [key, value] of Object.entries(originalStepInput)) {

@@ -178,7 +191,6 @@ class Orchestrator {
}
}
}

if (
index === parseInt(env.AUTOMATION_MAX_ITERATIONS) ||
index === loopStep.inputs.iterations

@@ -192,10 +204,25 @@ class Orchestrator {
break
}

let isFailure = false
if (
typeof this._context.steps[loopStepNumber]?.currentItem === "object"
) {
isFailure = Object.keys(
this._context.steps[loopStepNumber].currentItem
).some(value => {
return (
this._context.steps[loopStepNumber].currentItem[value] ===
loopStep.inputs.failure
)
})
} else {
isFailure =
this._context.steps[loopStepNumber]?.currentItem ===
loopStep.inputs.failure
) {
}

if (isFailure) {
this.updateContextAndOutput(loopStepNumber, step, tempOutput, {
status: AutomationErrors.FAILURE_CONDITION,
success: false,

@@ -286,18 +313,16 @@ class Orchestrator {

module.exports = (input, callback) => {
const appId = input.data.event.appId
doInAppContext(appId, () => {
doInAppContext(appId, async () => {
const automationOrchestrator = new Orchestrator(
input.data.automation,
input.data.event
)
automationOrchestrator
.execute()
.then(response => {
try {
const response = await automationOrchestrator.execute()
callback(null, response)
})
.catch(err => {
} catch (err) {
callback(err)
})
}
})
}

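Note: the loop-iteration change above means a string binding no longer breaks the `.length` count, since arrays are counted directly and anything else is split with stringSplit first. A condensed sketch of that decision (the splitter here is simplified but behaves the same for these inputs):

    // Sketch: how the orchestrator now derives the loop iteration count.
    const stringSplit = value =>
      value == null ? [] : value.includes("\n") ? value.split("\n") : value.split(",")

    const iterationsFor = binding =>
      Array.isArray(binding) ? binding.length : stringSplit(binding).length

    console.log(iterationsFor(["a", "b", "c"])) // 3
    console.log(iterationsFor("a\nb\nc"))       // 3
    console.log(iterationsFor("a,b"))           // 2
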
@@ -191,14 +191,13 @@ class QueryRunner {
}

module.exports = (input, callback) => {
doInAppContext(input.appId, () => {
doInAppContext(input.appId, async () => {
const Runner = new QueryRunner(input)
Runner.execute()
.then(response => {
try {
const response = await Runner.execute()
callback(null, response)
})
.catch(err => {
} catch (err) {
callback(err)
})
}
})
}

@@ -2,7 +2,11 @@ const { budibaseTempDir } = require("../budibaseDir")
const fs = require("fs")
const { join } = require("path")
const uuid = require("uuid/v4")
const { doWithDB } = require("@budibase/backend-core/db")
const {
doWithDB,
dangerousGetDB,
closeDB,
} = require("@budibase/backend-core/db")
const { ObjectStoreBuckets } = require("../../constants")
const {
upload,

@@ -151,14 +155,18 @@ exports.streamBackup = async appId => {
* @return {*} either a readable stream or a string
*/
exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
return doWithDB(dbName, async db => {
// Stream the dump if required
// streaming a DB dump is a bit more complicated, can't close DB
if (stream) {
const db = dangerousGetDB(dbName)
const memStream = new MemoryStream()
memStream.on("end", async () => {
await closeDB(db)
})
db.dump(memStream, { filter })
return memStream
}

return doWithDB(dbName, async db => {
// Write the dump to file if required
if (exportName) {
const path = join(budibaseTempDir(), exportName)

File diff suppressed because it is too large

@@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

@@ -70,7 +70,7 @@ function createTemplate(string, opts) {
* @param {object|array} object The input structure which is to be recursed, it is important to note that
* if the structure contains any cycles then this will fail.
* @param {object} context The context that handlebars should fill data from.
* @param {object|undefined} opts optional - specify some options for processing.
* @param {object|undefined} [opts] optional - specify some options for processing.
* @returns {Promise<object|array>} The structure input, as fully updated as possible.
*/
module.exports.processObject = async (object, context, opts) => {

@@ -101,7 +101,7 @@ module.exports.processObject = async (object, context, opts) => {
* then nothing will occur.
* @param {string} string The template string which is the filled from the context object.
* @param {object} context An object of information which will be used to enrich the string.
* @param {object|undefined} opts optional - specify some options for processing.
* @param {object|undefined} [opts] optional - specify some options for processing.
* @returns {Promise<string>} The enriched string, all templates should have been replaced if they can be.
*/
module.exports.processString = async (string, context, opts) => {

@@ -115,7 +115,7 @@ module.exports.processString = async (string, context, opts) => {
* @param {object|array} object The input structure which is to be recursed, it is important to note that
* if the structure contains any cycles then this will fail.
* @param {object} context The context that handlebars should fill data from.
* @param {object|undefined} opts optional - specify some options for processing.
* @param {object|undefined} [opts] optional - specify some options for processing.
* @returns {object|array} The structure input, as fully updated as possible.
*/
module.exports.processObjectSync = (object, context, opts) => {

@@ -136,7 +136,7 @@ module.exports.processObjectSync = (object, context, opts) => {
* then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call.
* @param {string} string The template string which is the filled from the context object.
* @param {object} context An object of information which will be used to enrich the string.
* @param {object|undefined} opts optional - specify some options for processing.
* @param {object|undefined} [opts] optional - specify some options for processing.
* @returns {string} The enriched string, all templates should have been replaced if they can be.
*/
module.exports.processStringSync = (string, context, opts) => {

@@ -194,7 +194,7 @@ module.exports.makePropSafe = property => {
/**
* Checks whether or not a template string contains totally valid syntax (simply tries running it)
* @param string The string to test for valid syntax - this may contain no templates and will be considered valid.
* @param opts optional - specify some options for processing.
* @param [opts] optional - specify some options for processing.
* @returns {boolean} Whether or not the input string is valid.
*/
module.exports.isValid = (string, opts) => {

@@ -205,6 +205,7 @@ module.exports.isValid = (string, opts) => {
"array",
"cannot read property",
"undefined",
"json at position 0",
]
// this is a portion of a specific string always output by handlebars in the case of a syntax error
const invalidCases = [`expecting '`]

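Note: the JSDoc tweaks above only mark `opts` as optional; the call shape is unchanged. A minimal usage sketch of the async API, assuming the package's main entry re-exports processString as shown in this file (the context values and rendered output are illustrative):

    // Sketch: basic string-templates usage with and without opts.
    const { processString } = require("@budibase/string-templates")

    async function run() {
      const withDefaults = await processString("Hello {{ name }}", { name: "Budibase" })
      console.log(withDefaults) // expected: "Hello Budibase"

      // opts is optional - pass it only when processing behaviour needs tweaking
      const withOpts = await processString("{{ count }} items", { count: 3 }, {})
      console.log(withOpts) // expected: "3 items"
    }

    run()
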
@@ -360,6 +360,13 @@ describe("Test the literal helper", () => {
})
})

describe("Test that JSONpase helper", () => {
it("should state that the JSONparse helper is valid", async () => {
const output = isValid(`{{ JSONparse input }}`)
expect(output).toBe(true)
})
})

describe("Cover a few complex use cases", () => {
it("should allow use of three different collection helpers", async () => {
const output = await processString(

@@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "1.0.126-alpha.0",
"version": "1.0.142-alpha.0",
"description": "Budibase background service",
"main": "src/index.ts",
"repository": {

@@ -31,9 +31,9 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "^1.0.126-alpha.0",
"@budibase/pro": "1.0.126-alpha.0",
"@budibase/string-templates": "^1.0.126-alpha.0",
"@budibase/backend-core": "^1.0.142-alpha.0",
"@budibase/pro": "1.0.142-alpha.0",
"@budibase/string-templates": "^1.0.142-alpha.0",
"@koa/router": "^8.0.0",
"@sentry/node": "6.17.7",
"@techpass/passport-openidconnect": "^0.3.0",

@@ -293,10 +293,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

"@budibase/backend-core@1.0.124-alpha.0":
version "1.0.124-alpha.0"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.124-alpha.0.tgz#33a9408206088da49154710910dafc8088d864d2"
integrity sha512-0ZUkDeqaoXS9qyK91SjwokYEA1wUPhi48nFE0+UwBloF8i7zVDFp2kOX7VNUrUer4gLuND9BoihEdpqsdQDvAg==
"@budibase/backend-core@1.0.138":
version "1.0.138"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.138.tgz#5297d6cf5b9ec8c15f0a6df4c7d8273b8ac900f0"
integrity sha512-1qN/5urKX8bBXwEz266Z94rco8dTI7VqIh75m8ZcqrAfoUpjvZJS76gZxfc5U/QWPwrgVFnLtYvnEjaLbGEflg==
dependencies:
"@techpass/passport-openidconnect" "^0.3.0"
aws-sdk "^2.901.0"

@@ -321,12 +321,12 @@
uuid "^8.3.2"
zlib "^1.0.5"

"@budibase/pro@1.0.124-alpha.0":
version "1.0.124-alpha.0"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.0.124-alpha.0.tgz#6287a51fa7c19754e44374c209c4aa3480fc3ac9"
integrity sha512-EgMuh+XSd/9tb3Ej9EZa4Y8hgiS6fHG+tuUwUcTuP6zvHbTijQGPb9075yImUbSc10bS3o41AP2qa2/ZdZKV2w==
"@budibase/pro@1.0.138":
version "1.0.138"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.0.138.tgz#cacbebe5ce93eb533af62a794a638944c2c61544"
integrity sha512-4ABlUZvl2h8sd8awJATf3KJeoFWV/8SoqdbKiH1ICdUcM/6dad7nhbJ15QqJL+Uuh/+mN2yEbr8V6Un2+yF+CA==
dependencies:
"@budibase/backend-core" "1.0.124-alpha.0"
"@budibase/backend-core" "1.0.138"
node-fetch "^2.6.1"

"@cspotcode/source-map-consumer@0.8.0":