Merge branch 'develop' of github.com:Budibase/budibase into lab-day/refactor-app-db

Commit: 9198439f7f

@@ -1,5 +1,5 @@
{
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"npmClient": "yarn",
"packages": [
"packages/*"

@@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",

@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",

@@ -6,11 +6,12 @@
export let label = null
export let labelPosition = "above"
export let error = null
export let tooltip = ""
</script>

<div class="spectrum-Form-item" class:above={labelPosition === "above"}>
{#if label}
<FieldLabel forId={id} {label} position={labelPosition} />
<FieldLabel forId={id} {label} position={labelPosition} {tooltip} />
{/if}
<div class="spectrum-Form-itemField">
<slot />

@@ -1,19 +1,24 @@
<script>
import TooltipWrapper from "../Tooltip/TooltipWrapper.svelte"

import "@spectrum-css/fieldlabel/dist/index-vars.css"

export let forId
export let label
export let position = "above"
export let tooltip = ""

$: className = position === "above" ? "" : `spectrum-FieldLabel--${position}`
</script>

<label
<TooltipWrapper {tooltip} size="S">
<label
for={forId}
class={`spectrum-FieldLabel spectrum-FieldLabel--sizeM spectrum-Form-itemLabel ${className}`}
>
>
{label || ""}
</label>
</label>
</TooltipWrapper>

<style>
label {

@@ -17,6 +17,7 @@
export let quiet = false
export let autoWidth = false
export let sort = false
export let tooltip = ""

const dispatch = createEventDispatcher()
const onChange = e => {

@@ -32,7 +33,7 @@
}
</script>

<Field {label} {labelPosition} {error}>
<Field {label} {labelPosition} {error} {tooltip}>
<Select
{quiet}
{error}

@ -1,73 +1,20 @@
|
|||
<script>
|
||||
import "@spectrum-css/fieldlabel/dist/index-vars.css"
|
||||
import Tooltip from "../Tooltip/Tooltip.svelte"
|
||||
import Icon from "../Icon/Icon.svelte"
|
||||
import TooltipWrapper from "../Tooltip/TooltipWrapper.svelte"
|
||||
|
||||
export let size = "M"
|
||||
export let tooltip = ""
|
||||
export let showTooltip = false
|
||||
</script>
|
||||
|
||||
{#if tooltip}
|
||||
<div class="container">
|
||||
<label
|
||||
for=""
|
||||
class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}
|
||||
>
|
||||
<slot />
|
||||
</label>
|
||||
<div class="icon-container">
|
||||
<div
|
||||
class="icon"
|
||||
class:icon-small={size === "M" || size === "S"}
|
||||
on:mouseover={() => (showTooltip = true)}
|
||||
on:mouseleave={() => (showTooltip = false)}
|
||||
>
|
||||
<Icon name="InfoOutline" size="S" disabled={true} />
|
||||
</div>
|
||||
{#if showTooltip}
|
||||
<div class="tooltip">
|
||||
<Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<TooltipWrapper {tooltip} {size}>
|
||||
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
|
||||
<slot />
|
||||
</label>
|
||||
{/if}
|
||||
</TooltipWrapper>
|
||||
|
||||
<style>
|
||||
label {
|
||||
padding: 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
.icon-container {
|
||||
position: relative;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin-top: 1px;
|
||||
margin-left: 5px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
.tooltip {
|
||||
position: absolute;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
top: 15px;
|
||||
z-index: 1;
|
||||
width: 160px;
|
||||
}
|
||||
.icon {
|
||||
transform: scale(0.75);
|
||||
}
|
||||
.icon-small {
|
||||
margin-top: -2px;
|
||||
margin-bottom: -5px;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -0,0 +1,60 @@
|
|||
<script>
|
||||
import Tooltip from "./Tooltip.svelte"
|
||||
import Icon from "../Icon/Icon.svelte"
|
||||
|
||||
export let tooltip = ""
|
||||
export let size = "M"
|
||||
|
||||
let showTooltip = false
|
||||
</script>
|
||||
|
||||
<div class:container={!!tooltip}>
|
||||
<slot />
|
||||
{#if tooltip}
|
||||
<div class="icon-container">
|
||||
<div
|
||||
class="icon"
|
||||
class:icon-small={size === "M" || size === "S"}
|
||||
on:mouseover={() => (showTooltip = true)}
|
||||
on:mouseleave={() => (showTooltip = false)}
|
||||
>
|
||||
<Icon name="InfoOutline" size="S" disabled={true} />
|
||||
</div>
|
||||
{#if showTooltip}
|
||||
<div class="tooltip">
|
||||
<Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
.icon-container {
|
||||
position: relative;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin-top: 1px;
|
||||
margin-left: 5px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
.tooltip {
|
||||
position: absolute;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
top: 15px;
|
||||
z-index: 1;
|
||||
width: 160px;
|
||||
}
|
||||
.icon {
|
||||
transform: scale(0.75);
|
||||
}
|
||||
.icon-small {
|
||||
margin-top: -2px;
|
||||
margin-bottom: -5px;
|
||||
}
|
||||
</style>
|
|
@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"license": "GPL-3.0",
"private": true,
"scripts": {

@@ -66,10 +66,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.49-alpha.0",
"@budibase/client": "^1.0.49-alpha.0",
"@budibase/bbui": "^1.0.49-alpha.1",
"@budibase/client": "^1.0.49-alpha.1",
"@budibase/colorpicker": "1.1.2",
"@budibase/string-templates": "^1.0.49-alpha.0",
"@budibase/string-templates": "^1.0.49-alpha.1",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

@@ -152,6 +152,7 @@
delete field.subtype
delete field.tableId
delete field.relationshipType
delete field.formulaType

// Add in defaults and initial definition
const definition = fieldDefinitions[event.detail?.toUpperCase()]

@@ -163,6 +164,9 @@
if (field.type === LINK_TYPE) {
field.relationshipType = RelationshipTypes.MANY_TO_MANY
}
if (field.type === FORMULA_TYPE) {
field.formulaType = "dynamic"
}
}

function onChangeRequired(e) {

@@ -438,8 +442,22 @@
error={errors.relatedName}
/>
{:else if field.type === FORMULA_TYPE}
{#if !table.sql}
<Select
label="Formula type"
bind:value={field.formulaType}
options={[
{ label: "Dynamic", value: "dynamic" },
{ label: "Static", value: "static" },
]}
getOptionLabel={option => option.label}
getOptionValue={option => option.value}
tooltip="Dynamic formula are calculated when retrieved, but cannot be filtered,
while static formula are calculated when the row is saved."
/>
{/if}
<ModalBindableInput
title="Handlebars Formula"
title="Formula"
label="Formula"
value={field.formula}
on:change={e => (field.formula = e.detail)}

@@ -448,7 +466,7 @@
/>
{:else if field.type === AUTO_TYPE}
<Select
label="Auto Column Type"
label="Auto column type"
value={field.subtype}
on:change={e => (field.subtype = e.detail)}
options={Object.entries(getAutoColumnInformation())}

@@ -131,7 +131,7 @@
{bindings}
on:change={event => (filter.value = event.detail)}
/>
{:else if ["string", "longform", "number"].includes(filter.type)}
{:else if ["string", "longform", "number", "formula"].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} />
{:else if ["options", "array"].includes(filter.type)}
<Combobox

@@ -59,8 +59,7 @@ export const NoEmptyFilterStrings = [
*/
export const getValidOperatorsForType = type => {
const Op = OperatorOptions
if (type === "string") {
return [
const stringOps = [
Op.Equals,
Op.NotEquals,
Op.StartsWith,

@@ -68,8 +67,7 @@ export const getValidOperatorsForType = type => {
Op.Empty,
Op.NotEmpty,
]
} else if (type === "number") {
return [
const numOps = [
Op.Equals,
Op.NotEquals,
Op.MoreThan,

@@ -77,6 +75,10 @@ export const getValidOperatorsForType = type => {
Op.Empty,
Op.NotEmpty,
]
if (type === "string") {
return stringOps
} else if (type === "number") {
return numOps
} else if (type === "options") {
return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]
} else if (type === "array") {

@@ -84,23 +86,11 @@ export const getValidOperatorsForType = type => {
} else if (type === "boolean") {
return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]
} else if (type === "longform") {
return [
Op.Equals,
Op.NotEquals,
Op.StartsWith,
Op.Like,
Op.Empty,
Op.NotEmpty,
]
return stringOps
} else if (type === "datetime") {
return [
Op.Equals,
Op.NotEquals,
Op.MoreThan,
Op.LessThan,
Op.Empty,
Op.NotEmpty,
]
return numOps
} else if (type === "formula") {
return stringOps.concat([Op.MoreThan, Op.LessThan])
}
return []
}

@@ -27,5 +27,8 @@ export function getFields(fields, { allowLinks } = { allowLinks: true }) {
filteredFields = filteredFields.concat(getTableFields(linkField))
}
}
return filteredFields
const staticFormulaFields = fields.filter(
field => field.type === "formula" && field.formulaType === "static"
)
return filteredFields.concat(staticFormulaFields)
}

@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",

@@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.49-alpha.0",
"@budibase/bbui": "^1.0.49-alpha.1",
"@budibase/standard-components": "^0.9.139",
"@budibase/string-templates": "^1.0.49-alpha.0",
"@budibase/string-templates": "^1.0.49-alpha.1",
"regexparam": "^1.3.0",
"rollup-plugin-polyfill-node": "^0.8.0",
"shortid": "^2.2.15",

@@ -39,6 +39,7 @@
number: "numberfield",
datetime: "datetimefield",
boolean: "booleanfield",
formula: "stringfield",
}

let formId

@@ -35,6 +35,7 @@
number: "numberfield",
datetime: "datetimefield",
boolean: "booleanfield",
formula: "stringfield",
}

let formId

@@ -60,10 +61,11 @@
let enrichedFilter = [...(filter || [])]
columns?.forEach(column => {
const safePath = column.name.split(".").map(safe).join(".")
const stringType = column.type === "string" || column.type === "formula"
enrichedFilter.push({
field: column.name,
operator: column.type === "string" ? "string" : "equal",
type: column.type === "string" ? "string" : "number",
operator: stringType ? "string" : "equal",
type: stringType ? "string" : "number",
valueType: "Binding",
value: `{{ ${safe(formId)}.${safePath} }}`,
})

@@ -19,10 +19,14 @@
export let schemaFields
export let filters = []

const BannedTypes = ["link", "attachment", "formula", "json"]
const BannedTypes = ["link", "attachment", "json"]

$: fieldOptions = (schemaFields ?? [])
.filter(field => !BannedTypes.includes(field.type))
.filter(
field =>
!BannedTypes.includes(field.type) ||
(field.type === "formula" && field.formulaType === "static")
)
.map(field => field.name)

const addFilter = () => {

@@ -114,7 +118,7 @@
on:change={e => onOperatorChange(filter, e.detail)}
placeholder={null}
/>
{#if ["string", "longform", "number"].includes(filter.type)}
{#if ["string", "longform", "number", "formula"].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} />
{:else if ["options", "array"].includes(filter.type)}
<Combobox

@@ -32,6 +32,7 @@
validation,
formStep
)
$: schemaType = fieldSchema?.type !== "formula" ? fieldSchema?.type : "string"

// Focus label when editing
let labelNode

@@ -72,7 +73,7 @@
<Placeholder
text="Add the Field setting to start using your component"
/>
{:else if fieldSchema?.type && fieldSchema?.type !== type && type !== "options"}
{:else if schemaType && schemaType !== type && type !== "options"}
<Placeholder
text="This Field setting is the wrong data type for this component"
/>

@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {

@@ -70,9 +70,9 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "^10.0.3",
"@budibase/backend-core": "^1.0.49-alpha.0",
"@budibase/client": "^1.0.49-alpha.0",
"@budibase/string-templates": "^1.0.49-alpha.0",
"@budibase/backend-core": "^1.0.49-alpha.1",
"@budibase/client": "^1.0.49-alpha.1",
"@budibase/string-templates": "^1.0.49-alpha.1",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",

@@ -28,7 +28,7 @@ exports.fetchSelf = async ctx => {
// make sure there is never a stale csrf token
delete metadata.csrfToken
// specifically needs to make sure is enriched
ctx.body = await outputProcessing(ctx, userTable, {
ctx.body = await outputProcessing(userTable, {
...user,
...metadata,
})

@@ -1,7 +1,7 @@
const linkRows = require("../../../db/linkedRows")
const {
getRowParams,
generateRowID,
getRowParams,
DocumentTypes,
InternalTables,
} = require("../../../db/utils")

@ -9,11 +9,9 @@ const userController = require("../user")
|
|||
const {
|
||||
inputProcessing,
|
||||
outputProcessing,
|
||||
processAutoColumn,
|
||||
cleanupAttachments,
|
||||
} = require("../../../utilities/rowProcessor")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
const { isEqual } = require("lodash")
|
||||
const { validate, findRow } = require("./utils")
|
||||
const { fullSearch, paginatedSearch } = require("./internalSearch")
|
||||
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
|
||||
|
@ -27,6 +25,7 @@ const {
|
|||
} = require("../view/utils")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { finaliseRow, updateRelatedFormula } = require("./staticFormula")
|
||||
|
||||
const CALCULATION_TYPES = {
|
||||
SUM: "sum",
|
||||
|
@ -34,51 +33,6 @@ const CALCULATION_TYPES = {
|
|||
STATS: "stats",
|
||||
}
|
||||
|
||||
async function storeResponse(ctx, db, row, oldTable, table) {
|
||||
row.type = "row"
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
// if another row has been written since processing this will
|
||||
// handle the auto ID clash
|
||||
if (!isEqual(oldTable, table)) {
|
||||
try {
|
||||
await db.put(table)
|
||||
} catch (err) {
|
||||
if (err.status === 409) {
|
||||
const updatedTable = await db.get(table._id)
|
||||
let response = processAutoColumn(null, updatedTable, row, {
|
||||
reprocessing: true,
|
||||
})
|
||||
await db.put(response.table)
|
||||
row = response.row
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
const response = await db.put(row)
|
||||
row._rev = response.rev
|
||||
// process the row before return, to include relationships
|
||||
row = await outputProcessing(ctx, table, row, { squash: false })
|
||||
return { row, table }
|
||||
}
|
||||
|
||||
// doesn't do the outputProcessing
|
||||
async function getRawTableData(ctx, db, tableId) {
|
||||
let rows
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
await userController.fetchMetadata(ctx)
|
||||
rows = ctx.body
|
||||
} else {
|
||||
const response = await db.allDocs(
|
||||
getRowParams(tableId, null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
rows = response.rows.map(row => row.doc)
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
async function getView(db, viewName) {
|
||||
let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc
|
||||
let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc
|
||||
|
@ -105,6 +59,22 @@ async function getView(db, viewName) {
|
|||
return viewInfo
|
||||
}
|
||||
|
||||
async function getRawTableData(ctx, db, tableId) {
|
||||
let rows
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
await userController.fetchMetadata(ctx)
|
||||
rows = ctx.body
|
||||
} else {
|
||||
const response = await db.allDocs(
|
||||
getRowParams(tableId, null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
rows = response.rows.map(row => row.doc)
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
exports.patch = async ctx => {
|
||||
const db = getAppDB()
|
||||
const inputs = ctx.request.body
|
||||
|
@ -160,7 +130,10 @@ exports.patch = async ctx => {
|
|||
return { row: ctx.body, table }
|
||||
}
|
||||
|
||||
return storeResponse(ctx, db, row, dbTable, table)
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
|
@ -192,7 +165,10 @@ exports.save = async function (ctx) {
|
|||
table,
|
||||
})
|
||||
|
||||
return storeResponse(ctx, db, row, dbTable, table)
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
||||
|
||||
exports.fetchView = async ctx => {
|
||||
|
@ -231,7 +207,7 @@ exports.fetchView = async ctx => {
|
|||
schema: {},
|
||||
}
|
||||
}
|
||||
rows = await outputProcessing(ctx, table, response.rows)
|
||||
rows = await outputProcessing(table, response.rows)
|
||||
}
|
||||
|
||||
if (calculation === CALCULATION_TYPES.STATS) {
|
||||
|
@ -263,14 +239,14 @@ exports.fetch = async ctx => {
|
|||
const tableId = ctx.params.tableId
|
||||
let table = await db.get(tableId)
|
||||
let rows = await getRawTableData(ctx, db, tableId)
|
||||
return outputProcessing(ctx, table, rows)
|
||||
return outputProcessing(table, rows)
|
||||
}
|
||||
|
||||
exports.find = async ctx => {
|
||||
const db = getAppDB()
|
||||
const table = await db.get(ctx.params.tableId)
|
||||
let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
|
||||
row = await outputProcessing(ctx, table, row)
|
||||
row = await outputProcessing(table, row)
|
||||
return row
|
||||
}
|
||||
|
||||
|
@ -284,7 +260,7 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
const table = await db.get(row.tableId)
|
||||
// update the row to include full relationships before deleting them
|
||||
row = await outputProcessing(ctx, table, row, { squash: false })
|
||||
row = await outputProcessing(table, row, { squash: false })
|
||||
// now remove the relationships
|
||||
await linkRows.updateLinks({
|
||||
eventType: linkRows.EventType.ROW_DELETE,
|
||||
|
@ -293,6 +269,8 @@ exports.destroy = async function (ctx) {
|
|||
})
|
||||
// remove any attachments that were on the row from object storage
|
||||
await cleanupAttachments(table, { row })
|
||||
// remove any static formula
|
||||
await updateRelatedFormula(table, row)
|
||||
|
||||
let response
|
||||
if (ctx.params.tableId === InternalTables.USER_METADATA) {
|
||||
|
@ -315,7 +293,7 @@ exports.bulkDestroy = async ctx => {
|
|||
|
||||
// before carrying out any updates, make sure the rows are ready to be returned
|
||||
// they need to be the full rows (including previous relationships) for automations
|
||||
rows = await outputProcessing(ctx, table, rows, { squash: false })
|
||||
rows = await outputProcessing(table, rows, { squash: false })
|
||||
|
||||
// remove the relationships first
|
||||
let updates = rows.map(row =>
|
||||
|
@ -339,6 +317,7 @@ exports.bulkDestroy = async ctx => {
|
|||
}
|
||||
// remove any attachments that were on the rows from object storage
|
||||
await cleanupAttachments(table, { rows })
|
||||
await updateRelatedFormula(table, rows)
|
||||
await Promise.all(updates)
|
||||
return { response: { ok: true }, rows }
|
||||
}
|
||||
|
@ -369,7 +348,7 @@ exports.search = async ctx => {
|
|||
response.rows = await getGlobalUsersFromMetadata(response.rows)
|
||||
}
|
||||
const table = await db.get(tableId)
|
||||
response.rows = await outputProcessing(ctx, table, response.rows)
|
||||
response.rows = await outputProcessing(table, response.rows)
|
||||
}
|
||||
|
||||
return response
|
||||
|
@ -419,7 +398,7 @@ exports.fetchEnrichedRow = async ctx => {
|
|||
for (let [tableId, rows] of Object.entries(groups)) {
|
||||
// need to include the IDs in these rows for any links they may have
|
||||
linkedRows = linkedRows.concat(
|
||||
await outputProcessing(ctx, tables[tableId], rows)
|
||||
await outputProcessing(tables[tableId], rows)
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -37,22 +37,30 @@ class QueryBuilder {
|
|||
}
|
||||
|
||||
setLimit(limit) {
|
||||
if (limit != null) {
|
||||
this.limit = limit
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSort(sort) {
|
||||
if (sort != null) {
|
||||
this.sort = sort
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSortOrder(sortOrder) {
|
||||
if (sortOrder != null) {
|
||||
this.sortOrder = sortOrder
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSortType(sortType) {
|
||||
if (sortType != null) {
|
||||
this.sortType = sortType
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,156 @@
|
|||
const { getRowParams } = require("../../../db/utils")
|
||||
const {
|
||||
outputProcessing,
|
||||
processAutoColumn,
|
||||
processFormulas,
|
||||
} = require("../../../utilities/rowProcessor")
|
||||
const { FieldTypes, FormulaTypes } = require("../../../constants")
|
||||
const { isEqual } = require("lodash")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* This function runs through a list of enriched rows, looks at the rows which
|
||||
* are related and then checks if they need the state of their formulas
|
||||
* updated.
|
||||
* NOTE: this will only for affect static formulas.
|
||||
*/
|
||||
exports.updateRelatedFormula = async (table, enrichedRows) => {
|
||||
const db = getAppDB()
|
||||
// no formula to update, we're done
|
||||
if (!table.relatedFormula) {
|
||||
return
|
||||
}
|
||||
let promises = []
|
||||
for (let enrichedRow of Array.isArray(enrichedRows)
|
||||
? enrichedRows
|
||||
: [enrichedRows]) {
|
||||
// the related rows by tableId
|
||||
let relatedRows = {}
|
||||
for (let [key, field] of Object.entries(enrichedRow)) {
|
||||
const columnDefinition = table.schema[key]
|
||||
if (columnDefinition && columnDefinition.type === FieldTypes.LINK) {
|
||||
const relatedTableId = columnDefinition.tableId
|
||||
if (!relatedRows[relatedTableId]) {
|
||||
relatedRows[relatedTableId] = []
|
||||
}
|
||||
relatedRows[relatedTableId] = relatedRows[relatedTableId].concat(field)
|
||||
}
|
||||
}
|
||||
for (let tableId of table.relatedFormula) {
|
||||
let relatedTable
|
||||
try {
|
||||
// no rows to update, skip
|
||||
if (!relatedRows[tableId] || relatedRows[tableId].length === 0) {
|
||||
continue
|
||||
}
|
||||
relatedTable = await db.get(tableId)
|
||||
} catch (err) {
|
||||
// no error scenario, table doesn't seem to exist anymore, ignore
|
||||
}
|
||||
for (let column of Object.values(relatedTable.schema)) {
|
||||
// needs updated in related rows
|
||||
if (
|
||||
column.type === FieldTypes.FORMULA &&
|
||||
column.formulaType === FormulaTypes.STATIC
|
||||
) {
|
||||
// re-enrich rows for all the related, don't update the related formula for them
|
||||
promises = promises.concat(
|
||||
relatedRows[tableId].map(related =>
|
||||
exports.finaliseRow(relatedTable, related, {
|
||||
updateFormula: false,
|
||||
})
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
exports.updateAllFormulasInTable = async table => {
|
||||
const db = getAppDB()
|
||||
// start by getting the raw rows (which will be written back to DB after update)
|
||||
let rows = (
|
||||
await db.allDocs(
|
||||
getRowParams(table._id, null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
).rows.map(row => row.doc)
|
||||
// now enrich the rows, note the clone so that we have the base state of the
|
||||
// rows so that we don't write any of the enriched information back
|
||||
let enrichedRows = await outputProcessing(table, cloneDeep(rows), {
|
||||
squash: false,
|
||||
})
|
||||
const updatedRows = []
|
||||
for (let row of rows) {
|
||||
// find the enriched row, if found process the formulas
|
||||
const enrichedRow = enrichedRows.find(enriched => enriched._id === row._id)
|
||||
if (enrichedRow) {
|
||||
const processed = processFormulas(table, cloneDeep(row), {
|
||||
dynamic: false,
|
||||
contextRows: enrichedRow,
|
||||
})
|
||||
// values have changed, need to add to bulk docs to update
|
||||
if (!isEqual(processed, row)) {
|
||||
updatedRows.push(processed)
|
||||
}
|
||||
}
|
||||
}
|
||||
await db.bulkDocs(updatedRows)
|
||||
}
|
||||
|
||||
/**
|
||||
* This function runs at the end of the save/patch functions of the row controller, all this
|
||||
* really does is enrich the row, handle any static formula processing, then return the enriched
|
||||
* row. The reason we need to return the enriched row is that the automation row created trigger
|
||||
* expects the row to be totally enriched/contain all relationships.
|
||||
*/
|
||||
exports.finaliseRow = async (
|
||||
table,
|
||||
row,
|
||||
{ oldTable, updateFormula } = { updateFormula: true }
|
||||
) => {
|
||||
const db = getAppDB()
|
||||
row.type = "row"
|
||||
// process the row before return, to include relationships
|
||||
let enrichedRow = await outputProcessing(table, cloneDeep(row), {
|
||||
squash: false,
|
||||
})
|
||||
// use enriched row to generate formulas for saving, specifically only use as context
|
||||
row = processFormulas(table, row, {
|
||||
dynamic: false,
|
||||
contextRows: enrichedRow,
|
||||
})
|
||||
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
// if another row has been written since processing this will
|
||||
// handle the auto ID clash
|
||||
if (oldTable && !isEqual(oldTable, table)) {
|
||||
try {
|
||||
await db.put(table)
|
||||
} catch (err) {
|
||||
if (err.status === 409) {
|
||||
const updatedTable = await db.get(table._id)
|
||||
let response = processAutoColumn(null, updatedTable, row, {
|
||||
reprocessing: true,
|
||||
})
|
||||
await db.put(response.table)
|
||||
row = response.row
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
const response = await db.put(row)
|
||||
// for response, calculate the formulas for the enriched row
|
||||
enrichedRow._rev = response.rev
|
||||
enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false })
|
||||
// this updates the related formulas in other rows based on the relations to this row
|
||||
if (updateFormula) {
|
||||
await exports.updateRelatedFormula(table, enrichedRow)
|
||||
}
|
||||
return { row: enrichedRow, table }
|
||||
}
|
|
@ -0,0 +1,173 @@
|
|||
const { FieldTypes, FormulaTypes } = require("../../../constants")
|
||||
const { getAllInternalTables, clearColumns } = require("./utils")
|
||||
const { doesContainStrings } = require("@budibase/string-templates")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { isEqual, uniq } = require("lodash")
|
||||
const { updateAllFormulasInTable } = require("../row/staticFormula")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
function isStaticFormula(column) {
|
||||
return (
|
||||
column.type === FieldTypes.FORMULA &&
|
||||
column.formulaType === FormulaTypes.STATIC
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* This retrieves the formula columns from a table schema that use a specified column name
|
||||
* in the formula.
|
||||
*/
|
||||
function getFormulaThatUseColumn(table, columnNames) {
|
||||
let formula = []
|
||||
columnNames = Array.isArray(columnNames) ? columnNames : [columnNames]
|
||||
for (let column of Object.values(table.schema)) {
|
||||
// not a static formula, or doesn't contain a relationship
|
||||
if (!isStaticFormula(column)) {
|
||||
continue
|
||||
}
|
||||
if (!doesContainStrings(column.formula, columnNames)) {
|
||||
continue
|
||||
}
|
||||
formula.push(column.name)
|
||||
}
|
||||
return formula
|
||||
}
|
||||
|
||||
/**
|
||||
* This functions checks for when a related table, column or related column is deleted, if any
|
||||
* tables need to have the formula column removed.
|
||||
*/
|
||||
async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
|
||||
// start by retrieving all tables, remove the current table from the list
|
||||
const tables = (await getAllInternalTables()).filter(
|
||||
tbl => tbl._id !== table._id
|
||||
)
|
||||
const schemaToUse = oldTable ? oldTable.schema : table.schema
|
||||
let removedColumns = Object.values(schemaToUse).filter(
|
||||
column => deletion || !table.schema[column.name]
|
||||
)
|
||||
// remove any formula columns that used related columns
|
||||
for (let removed of removedColumns) {
|
||||
let tableToUse = table
|
||||
// if relationship, get the related table
|
||||
if (removed.type === FieldTypes.LINK) {
|
||||
tableToUse = tables.find(table => table._id === removed.tableId)
|
||||
}
|
||||
const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name)
|
||||
if (columnsToDelete.length > 0) {
|
||||
await clearColumns(table, columnsToDelete)
|
||||
}
|
||||
// need a special case, where a column has been removed from this table, but was used
|
||||
// in a different, related tables formula
|
||||
if (!table.relatedFormula) {
|
||||
continue
|
||||
}
|
||||
for (let relatedTableId of table.relatedFormula) {
|
||||
const relatedColumns = Object.values(table.schema).filter(
|
||||
column => column.tableId === relatedTableId
|
||||
)
|
||||
const relatedTable = tables.find(table => table._id === relatedTableId)
|
||||
// look to see if the column was used in a relationship formula,
|
||||
// relationships won't be used for this
|
||||
if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
|
||||
let relatedFormulaToRemove = []
|
||||
for (let column of relatedColumns) {
|
||||
relatedFormulaToRemove = relatedFormulaToRemove.concat(
|
||||
getFormulaThatUseColumn(relatedTable, [
|
||||
column.fieldName,
|
||||
removed.name,
|
||||
])
|
||||
)
|
||||
}
|
||||
if (relatedFormulaToRemove.length > 0) {
|
||||
await clearColumns(relatedTable, uniq(relatedFormulaToRemove))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function adds a note to related tables that they are
|
||||
* used in a static formula - so that the link controller
|
||||
* can manage hydrating related rows formula fields. This is
|
||||
* specifically only for static formula.
|
||||
*/
|
||||
async function updateRelatedFormulaLinksOnTables(
|
||||
table,
|
||||
{ deletion } = { deletion: false }
|
||||
) {
|
||||
const db = getAppDB()
|
||||
// start by retrieving all tables, remove the current table from the list
|
||||
const tables = (await getAllInternalTables()).filter(
|
||||
tbl => tbl._id !== table._id
|
||||
)
|
||||
// clone the tables, so we can compare at end
|
||||
const initialTables = cloneDeep(tables)
|
||||
// first find the related column names
|
||||
const relatedColumns = Object.values(table.schema).filter(
|
||||
col => col.type === FieldTypes.LINK
|
||||
)
|
||||
// we start by removing the formula field from all tables
|
||||
for (let otherTable of tables) {
|
||||
if (!otherTable.relatedFormula) {
|
||||
continue
|
||||
}
|
||||
const index = otherTable.relatedFormula.indexOf(table._id)
|
||||
if (index !== -1) {
|
||||
otherTable.relatedFormula.splice(index, 1)
|
||||
}
|
||||
}
|
||||
// if deleting, just remove the table IDs, don't try add
|
||||
if (!deletion) {
|
||||
for (let relatedCol of relatedColumns) {
|
||||
let columns = getFormulaThatUseColumn(table, relatedCol.name)
|
||||
if (!columns || columns.length === 0) {
|
||||
continue
|
||||
}
|
||||
const relatedTable = tables.find(
|
||||
related => related._id === relatedCol.tableId
|
||||
)
|
||||
// check if the table is already in the list of related formula, if it isn't, then add it
|
||||
if (
|
||||
relatedTable &&
|
||||
(!relatedTable.relatedFormula ||
|
||||
!relatedTable.relatedFormula.includes(table._id))
|
||||
) {
|
||||
relatedTable.relatedFormula = relatedTable.relatedFormula
|
||||
? [...relatedTable.relatedFormula, table._id]
|
||||
: [table._id]
|
||||
}
|
||||
}
|
||||
}
|
||||
// now we just need to compare all the tables and see if any need saved
|
||||
for (let initial of initialTables) {
|
||||
const found = tables.find(tbl => initial._id === tbl._id)
|
||||
if (found && !isEqual(initial, found)) {
|
||||
await db.put(found)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function checkIfFormulaUpdated(table, { oldTable }) {
|
||||
// look to see if any formula values have changed
|
||||
const shouldUpdate = Object.values(table.schema).find(
|
||||
column =>
|
||||
isStaticFormula(column) &&
|
||||
(!oldTable ||
|
||||
!oldTable.schema[column.name] ||
|
||||
!isEqual(oldTable.schema[column.name], column))
|
||||
)
|
||||
// if a static formula column has updated, then need to run the update
|
||||
if (shouldUpdate != null) {
|
||||
await updateAllFormulasInTable(table)
|
||||
}
|
||||
}
|
||||
|
||||
exports.runStaticFormulaChecks = async (table, { oldTable, deletion }) => {
|
||||
await updateRelatedFormulaLinksOnTables(table, { deletion })
|
||||
await checkIfFormulaNeedsCleared(table, { oldTable, deletion })
|
||||
if (!deletion) {
|
||||
await checkIfFormulaUpdated(table, { oldTable })
|
||||
}
|
||||
}
|
|
@ -2,13 +2,9 @@ const internal = require("./internal")
|
|||
const external = require("./external")
|
||||
const csvParser = require("../../../utilities/csvParser")
|
||||
const { isExternalTable, isSQL } = require("../../../integrations/utils")
|
||||
const {
|
||||
getTableParams,
|
||||
getDatasourceParams,
|
||||
BudibaseInternalDB,
|
||||
} = require("../../../db/utils")
|
||||
const { getTable } = require("./utils")
|
||||
const { getDatasourceParams } = require("../../../db/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { getTable, getAllInternalTables } = require("./utils")
|
||||
|
||||
function pickApi({ tableId, table }) {
|
||||
if (table && !tableId) {
|
||||
|
@ -26,17 +22,7 @@ function pickApi({ tableId, table }) {
|
|||
exports.fetch = async function (ctx) {
|
||||
const db = getAppDB()
|
||||
|
||||
const internalTables = await db.allDocs(
|
||||
getTableParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
|
||||
const internal = internalTables.rows.map(tableDoc => ({
|
||||
...tableDoc.doc,
|
||||
type: "internal",
|
||||
sourceId: BudibaseInternalDB._id,
|
||||
}))
|
||||
const internal = await getAllInternalTables()
|
||||
|
||||
const externalTables = await db.allDocs(
|
||||
getDatasourceParams("plus", {
|
||||
|
|
|
@ -10,6 +10,8 @@ const {
|
|||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const env = require("../../../environment")
|
||||
const { cleanupAttachments } = require("../../../utilities/rowProcessor")
|
||||
const { runStaticFormulaChecks } = require("./bulkFormula")
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const db = getAppDB()
|
||||
|
@ -102,7 +104,8 @@ exports.save = async function (ctx) {
|
|||
tableToSave._rev = result.rev
|
||||
|
||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
||||
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(tableToSave, { oldTable })
|
||||
return tableToSave
|
||||
}
|
||||
|
||||
|
@ -139,6 +142,9 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(tableToDelete, { deletion: true })
|
||||
await cleanupAttachments(tableToDelete, { rows })
|
||||
return tableToDelete
|
||||
}
|
||||
|
||||
|
|
|
@ -3,10 +3,15 @@ const {
|
|||
getRowParams,
|
||||
generateRowID,
|
||||
InternalTables,
|
||||
getTableParams,
|
||||
BudibaseInternalDB,
|
||||
} = require("../../../db/utils")
|
||||
const { isEqual } = require("lodash/fp")
|
||||
const { isEqual } = require("lodash")
|
||||
const { AutoFieldSubTypes, FieldTypes } = require("../../../constants")
|
||||
const { inputProcessing } = require("../../../utilities/rowProcessor")
|
||||
const {
|
||||
inputProcessing,
|
||||
cleanupAttachments,
|
||||
} = require("../../../utilities/rowProcessor")
|
||||
const {
|
||||
USERS_TABLE_SCHEMA,
|
||||
SwitchableTypes,
|
||||
|
@ -21,6 +26,22 @@ const { getViews, saveView } = require("../view/utils")
|
|||
const viewTemplate = require("../view/viewBuilder")
|
||||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
|
||||
exports.clearColumns = async (table, columnNames) => {
|
||||
const db = getAppDB()
|
||||
const rows = await db.allDocs(
|
||||
getRowParams(table._id, null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
return db.bulkDocs(
|
||||
rows.rows.map(({ doc }) => {
|
||||
columnNames.forEach(colName => delete doc[colName])
|
||||
return doc
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
||||
const db = getAppDB()
|
||||
|
@ -40,16 +61,20 @@ exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
updatedRows = rows.rows.map(({ doc }) => {
|
||||
const rawRows = rows.rows.map(({ doc }) => doc)
|
||||
updatedRows = rawRows.map(row => {
|
||||
row = cloneDeep(row)
|
||||
if (rename) {
|
||||
doc[rename.updated] = doc[rename.old]
|
||||
delete doc[rename.old]
|
||||
row[rename.updated] = row[rename.old]
|
||||
delete row[rename.old]
|
||||
} else if (deletedColumns.length !== 0) {
|
||||
deletedColumns.forEach(colName => delete doc[colName])
|
||||
deletedColumns.forEach(colName => delete row[colName])
|
||||
}
|
||||
return doc
|
||||
return row
|
||||
})
|
||||
|
||||
// cleanup any attachments from object storage for deleted attachment columns
|
||||
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
|
||||
// Update views
|
||||
await exports.checkForViewUpdates(updatedTable, rename, deletedColumns)
|
||||
delete updatedTable._rename
|
||||
|
@ -223,6 +248,20 @@ class TableSaveFunctions {
|
|||
}
|
||||
}
|
||||
|
||||
exports.getAllInternalTables = async () => {
|
||||
const db = getAppDB()
|
||||
const internalTables = await db.allDocs(
|
||||
getTableParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
return internalTables.rows.map(tableDoc => ({
|
||||
...tableDoc.doc,
|
||||
type: "internal",
|
||||
sourceId: BudibaseInternalDB._id,
|
||||
}))
|
||||
}
|
||||
|
||||
exports.getAllExternalTables = async datasourceId => {
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
const { outputProcessing } = require("../../../utilities/rowProcessor")
|
||||
const setup = require("./utilities")
|
||||
const { basicRow } = setup.structures
|
||||
const { doInAppContext } = require("@budibase/backend-core/context")
|
||||
|
||||
// mock the fetch for the search system
|
||||
jest.mock("node-fetch")
|
||||
|
@ -387,11 +388,13 @@ describe("/rows", () => {
|
|||
})
|
||||
// the environment needs configured for this
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
const enriched = await outputProcessing({ appId: config.getAppId() }, table, [row])
|
||||
doInAppContext(config.getAppId(), async () => {
|
||||
const enriched = await outputProcessing(table, [row])
|
||||
expect(enriched[0].attachment[0].url).toBe(
|
||||
`/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv`
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -61,6 +61,11 @@ exports.RelationshipTypes = {
MANY_TO_MANY: "many-to-many",
}

exports.FormulaTypes = {
STATIC: "static",
DYNAMIC: "dynamic",
}

exports.AuthTypes = {
APP: "app",
BUILDER: "builder",

@@ -17,6 +17,8 @@ export interface FieldSchema {
autocolumn?: boolean
throughFrom?: string
throughTo?: string
formula?: string
formulaType?: string
main?: boolean
meta?: {
toTable: string

@@ -46,6 +48,7 @@ export interface Table extends Base {
schema: TableSchema
primaryDisplay?: string
sourceId?: string
relatedFormula?: string[]
constrained?: string[]
}

@ -180,6 +180,8 @@ function processAutoColumn(
|
|||
}
|
||||
exports.processAutoColumn = processAutoColumn
|
||||
|
||||
exports.processFormulas = processFormulas
|
||||
|
||||
/**
|
||||
* This will coerce a value to the correct types based on the type transform map
|
||||
* @param {object} row The value to coerce
|
||||
|
@ -229,11 +231,12 @@ exports.inputProcessing = (
|
|||
}
|
||||
continue
|
||||
}
|
||||
// specific case to delete formula values if they get saved
|
||||
// type coercion cannot completely remove the field, so have to do it here
|
||||
// remove any formula values, they are to be generated
|
||||
if (field.type === FieldTypes.FORMULA) {
|
||||
delete clonedRow[key]
|
||||
} else {
|
||||
}
|
||||
// otherwise coerce what is there to correct types
|
||||
else {
|
||||
clonedRow[key] = exports.coerce(value, field.type)
|
||||
}
|
||||
}
|
||||
|
@ -250,19 +253,13 @@ exports.inputProcessing = (
|
|||
/**
|
||||
* This function enriches the input rows with anything they are supposed to contain, for example
|
||||
* link records or attachment links.
|
||||
* @param {object} ctx the request which is looking for enriched rows.
|
||||
* @param {object} table the table from which these rows came from originally, this is used to determine
|
||||
* the schema of the rows and then enrich.
|
||||
* @param {object[]|object} rows the rows which are to be enriched.
|
||||
* @param {object} opts used to set some options for the output, such as disabling relationship squashing.
|
||||
* @returns {object[]|object} the enriched rows will be returned.
|
||||
*/
|
||||
exports.outputProcessing = async (
|
||||
ctx,
|
||||
table,
|
||||
rows,
|
||||
opts = { squash: true }
|
||||
) => {
|
||||
exports.outputProcessing = async (table, rows, opts = { squash: true }) => {
|
||||
let wasArray = true
|
||||
if (!(rows instanceof Array)) {
|
||||
rows = [rows]
|
||||
|
@ -272,7 +269,7 @@ exports.outputProcessing = async (
|
|||
let enriched = await linkRows.attachFullLinkedDocs(table, rows)
|
||||
|
||||
// process formulas
|
||||
enriched = processFormulas(table, enriched)
|
||||
enriched = processFormulas(table, enriched, { dynamic: true })
|
||||
|
||||
// update the attachments URL depending on hosting
|
||||
for (let [property, column] of Object.entries(table.schema)) {
|
||||
|
@ -299,9 +296,11 @@ exports.outputProcessing = async (
|
|||
* @param {any} row optional - the row being removed.
|
||||
* @param {any} rows optional - if multiple rows being deleted can do this in bulk.
|
||||
* @param {any} oldRow optional - if updating a row this will determine the difference.
|
||||
* @param {any} oldTable optional - if updating a table, can supply the old table to look for
|
||||
* deleted attachment columns.
|
||||
* @return {Promise<void>} When all attachments have been removed this will return.
|
||||
*/
|
||||
exports.cleanupAttachments = async (table, { row, rows, oldRow }) => {
|
||||
exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => {
|
||||
const appId = getAppId()
|
||||
if (!isProdAppID(appId)) {
|
||||
const prodAppId = getDeployedAppID(appId)
|
||||
|
@ -317,12 +316,16 @@ exports.cleanupAttachments = async (table, { row, rows, oldRow }) => {
|
|||
files = files.concat(row[key].map(attachment => attachment.key))
|
||||
}
|
||||
}
|
||||
for (let [key, schema] of Object.entries(table.schema)) {
|
||||
const schemaToUse = oldTable ? oldTable.schema : table.schema
|
||||
for (let [key, schema] of Object.entries(schemaToUse)) {
|
||||
if (schema.type !== FieldTypes.ATTACHMENT) {
|
||||
continue
|
||||
}
|
||||
// old table had this column, new table doesn't - delete it
|
||||
if (oldTable && !table.schema[key]) {
|
||||
rows.forEach(row => addFiles(row, key))
|
||||
} else if (oldRow && row) {
|
||||
// if updating, need to manage the differences
|
||||
if (oldRow && row) {
|
||||
files = files.concat(getRemovedAttachmentKeys(oldRow, row, key))
|
||||
} else if (row) {
|
||||
addFiles(row, key)
|
||||
|
|
|
@ -1,23 +1,39 @@
|
|||
const { FieldTypes } = require("../../constants")
|
||||
const { FieldTypes, FormulaTypes } = require("../../constants")
|
||||
const { processStringSync } = require("@budibase/string-templates")
|
||||
|
||||
/**
|
||||
* Looks through the rows provided and finds formulas - which it then processes.
|
||||
*/
|
||||
exports.processFormulas = (table, rows) => {
|
||||
exports.processFormulas = (
|
||||
table,
|
||||
rows,
|
||||
{ dynamic, contextRows } = { dynamic: true }
|
||||
) => {
|
||||
const single = !Array.isArray(rows)
|
||||
if (single) {
|
||||
rows = [rows]
|
||||
contextRows = contextRows ? [contextRows] : contextRows
|
||||
}
|
||||
for (let [column, schema] of Object.entries(table.schema)) {
|
||||
if (schema.type !== FieldTypes.FORMULA) {
|
||||
const isStatic = schema.formulaType === FormulaTypes.STATIC
|
||||
if (
|
||||
schema.type !== FieldTypes.FORMULA ||
|
||||
(dynamic && isStatic) ||
|
||||
(!dynamic && !isStatic)
|
||||
) {
|
||||
continue
|
||||
}
|
||||
// iterate through rows and process formula
|
||||
rows = rows.map(row => ({
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
if (schema.formula) {
|
||||
let row = rows[i]
|
||||
let context = contextRows ? contextRows[i] : row
|
||||
rows[i] = {
|
||||
...row,
|
||||
[column]: processStringSync(schema.formula, row),
|
||||
}))
|
||||
[column]: processStringSync(schema.formula, context),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return single ? rows[0] : rows
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

@@ -15,6 +15,8 @@ module.exports.processStringSync = templates.processStringSync
module.exports.processObjectSync = templates.processObjectSync
module.exports.processString = templates.processString
module.exports.processObject = templates.processObject
module.exports.doesContainStrings = templates.doesContainStrings
module.exports.doesContainString = templates.doesContainString

/**
* Use vm2 to run JS scripts in a node env

@ -3,6 +3,7 @@ const { registerAll } = require("./helpers/index")
|
|||
const processors = require("./processors")
|
||||
const { atob, btoa } = require("./utilities")
|
||||
const manifest = require("../manifest.json")
|
||||
const { FIND_HBS_REGEX } = require("./utilities")
|
||||
|
||||
const hbsInstance = handlebars.create()
|
||||
registerAll(hbsInstance)
|
||||
|
@ -26,7 +27,7 @@ function testObject(object) {
|
|||
* @param {object|array} object The input structure which is to be recursed, it is important to note that
|
||||
* if the structure contains any cycles then this will fail.
|
||||
* @param {object} context The context that handlebars should fill data from.
|
||||
* @param {object} opts optional - specify some options for processing.
|
||||
* @param {object|null} opts optional - specify some options for processing.
|
||||
* @returns {Promise<object|array>} The structure input, as fully updated as possible.
|
||||
*/
|
||||
module.exports.processObject = async (object, context, opts) => {
|
||||
|
@ -57,7 +58,7 @@ module.exports.processObject = async (object, context, opts) => {
|
|||
* then nothing will occur.
|
||||
* @param {string} string The template string which is the filled from the context object.
|
||||
* @param {object} context An object of information which will be used to enrich the string.
|
||||
* @param {object} opts optional - specify some options for processing.
|
||||
* @param {object|null} opts optional - specify some options for processing.
|
||||
* @returns {Promise<string>} The enriched string, all templates should have been replaced if they can be.
|
||||
*/
|
||||
module.exports.processString = async (string, context, opts) => {
|
||||
|
@ -71,7 +72,7 @@ module.exports.processString = async (string, context, opts) => {
|
|||
* @param {object|array} object The input structure which is to be recursed, it is important to note that
|
||||
* if the structure contains any cycles then this will fail.
|
||||
* @param {object} context The context that handlebars should fill data from.
|
||||
* @param {object} opts optional - specify some options for processing.
|
||||
* @param {object|null} opts optional - specify some options for processing.
|
||||
* @returns {object|array} The structure input, as fully updated as possible.
|
||||
*/
|
||||
module.exports.processObjectSync = (object, context, opts) => {
|
||||
|
@ -92,7 +93,7 @@ module.exports.processObjectSync = (object, context, opts) => {
|
|||
* then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call.
|
||||
* @param {string} string The template string which is the filled from the context object.
|
||||
* @param {object} context An object of information which will be used to enrich the string.
|
||||
* @param {object} opts optional - specify some options for processing.
|
||||
* @param {object|null} opts optional - specify some options for processing.
|
||||
* @returns {string} The enriched string, all templates should have been replaced if they can be.
|
||||
*/
|
||||
module.exports.processStringSync = (string, context, opts) => {
|
||||
|
@ -222,3 +223,47 @@ module.exports.decodeJSBinding = handlebars => {
|
|||
}
|
||||
return atob(match[1])
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as the doesContainString function, but will check for all the strings
|
||||
* before confirming it contains.
|
||||
* @param {string} template The template string to search.
|
||||
* @param {string[]} strings The strings to look for.
|
||||
* @returns {boolean} Will return true if all strings found in HBS statement.
|
||||
*/
|
||||
module.exports.doesContainStrings = (template, strings) => {
|
||||
let regexp = new RegExp(FIND_HBS_REGEX)
|
||||
let matches = template.match(regexp)
|
||||
if (matches == null) {
|
||||
return false
|
||||
}
|
||||
for (let match of matches) {
|
||||
let hbs = match
|
||||
if (exports.isJSBinding(match)) {
|
||||
hbs = exports.decodeJSBinding(match)
|
||||
}
|
||||
let allFound = true
|
||||
for (let string of strings) {
|
||||
if (!hbs.includes(string)) {
|
||||
allFound = false
|
||||
}
|
||||
}
|
||||
if (allFound) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* This function looks in the supplied template for handlebars instances, if they contain
|
||||
* JS the JS will be decoded and then the supplied string will be looked for. For example
|
||||
* if the template "Hello, your name is {{ related }}" this function would return that true
|
||||
* for the string "related" but not for "name" as it is not within the handlebars statement.
|
||||
* @param {string} template A template string to search for handlebars instances.
|
||||
* @param {string} string The word or sentence to search for.
|
||||
* @returns {boolean} The this return true if the string is found, false if not.
|
||||
*/
|
||||
module.exports.doesContainString = (template, string) => {
|
||||
return exports.doesContainStrings(template, [string])
|
||||
}
|
||||
|
|
|
@@ -15,6 +15,8 @@ export const processStringSync = templates.processStringSync
export const processObjectSync = templates.processObjectSync
export const processString = templates.processString
export const processObject = templates.processObject
export const doesContainStrings = templates.doesContainStrings
export const doesContainString = templates.doesContainString

/**
* Use polyfilled vm to run JS scripts in a browser Env

@@ -4,6 +4,8 @@ const {
isValid,
makePropSafe,
getManifest,
encodeJSBinding,
doesContainString,
} = require("../src/index.cjs")

describe("Test that the string processing works correctly", () => {

@@ -157,3 +159,20 @@ describe("check full stops that are safe", () => {
expect(output).toEqual("1")
})
})

describe("check does contain string function", () => {
it("should work for a simple case", () => {
const hbs = "hello {{ name }}"
expect(doesContainString(hbs, "name")).toEqual(true)
})

it("should reject a case where its in the string, but not the handlebars", () => {
const hbs = "hello {{ name }}"
expect(doesContainString(hbs, "hello")).toEqual(false)
})

it("should handle if its in javascript", () => {
const js = encodeJSBinding(`return $("foo")`)
expect(doesContainString(js, "foo")).toEqual(true)
})
})

@@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "1.0.49-alpha.0",
"version": "1.0.49-alpha.1",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {

@@ -29,8 +29,8 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "^1.0.49-alpha.0",
"@budibase/string-templates": "^1.0.49-alpha.0",
"@budibase/backend-core": "^1.0.49-alpha.1",
"@budibase/string-templates": "^1.0.49-alpha.1",
"@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0",