Merge branch 'develop' of github.com:Budibase/budibase into custom-theming
Commit: d607f7b25b
@@ -0,0 +1,15 @@
+# Security Policy
+
+## Versions
+
+As an open source product, we will only patch the latest major version for security vulnerabilities. Previous versions of budibase will not be retroactively patched.
+
+## Disclosing
+
+You can get in touch with us regarding a vulnerability via email at community@budibase.com.
+
+You can also disclose via huntr.dev. If you believe you have found a vulnerability, please disclose it on huntr and let us know.
+
+https://huntr.dev/bounties/disclose
+
+This will enable us to review the vulnerability and potentially reward you for your work.
@@ -33,11 +33,19 @@ static_resources:
        route:
          cluster: app-service

-      # special case for worker admin API
+      # special cases for worker admin (deprecated), global and system API
+      - match: { prefix: "/api/global/" }
+        route:
+          cluster: worker-service
+
      - match: { prefix: "/api/admin/" }
        route:
          cluster: worker-service

+      - match: { prefix: "/api/system/" }
+        route:
+          cluster: worker-service
+
      - match: { path: "/" }
        route:
          cluster: app-service
@@ -1,5 +1,5 @@
 {
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -3,6 +3,7 @@
   import "@spectrum-css/table/dist/index-vars.css"
   import CellRenderer from "./CellRenderer.svelte"
   import SelectEditRenderer from "./SelectEditRenderer.svelte"
+  import { cloneDeep } from "lodash"

   /**
    * The expected schema is our normal couch schemas for our tables.
@@ -197,7 +198,7 @@

   const editRow = (e, row) => {
     e.stopPropagation()
-    dispatch("editrow", row)
+    dispatch("editrow", cloneDeep(row))
   }

   const toggleSelectRow = row => {
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.117-alpha.2",
-    "@budibase/client": "^0.9.117-alpha.2",
+    "@budibase/bbui": "^0.9.120-alpha.4",
+    "@budibase/client": "^0.9.120-alpha.4",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^0.9.117-alpha.2",
+    "@budibase/string-templates": "^0.9.120-alpha.4",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
@@ -18,12 +18,10 @@
   let exportFormat = FORMATS[0].key

   async function exportView() {
-    const filename = `export.${exportFormat}`
     download(
       `/api/views/export?view=${encodeURIComponent(
         view
-      )}&format=${exportFormat}`,
-      filename
+      )}&format=${exportFormat}`
     )
   }
 </script>
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,14 +19,14 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.117-alpha.2",
-    "@budibase/string-templates": "^0.9.117-alpha.2",
+    "@budibase/bbui": "^0.9.120-alpha.4",
+    "@budibase/string-templates": "^0.9.120-alpha.4",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"
   },
   "devDependencies": {
-    "@budibase/bbui": "^0.9.117-alpha.2",
+    "@budibase/bbui": "^0.9.120-alpha.4",
     "@rollup/plugin-alias": "^3.1.5",
     "@rollup/plugin-commonjs": "^18.0.0",
     "@rollup/plugin-node-resolve": "^11.2.1",
@@ -112,16 +112,24 @@ export const enrichRows = async (rows, tableId) => {
   if (!Array.isArray(rows)) {
     return []
   }
-  if (rows.length && tableId) {
-    // Fetch table schema so we can check column types
-    const tableDefinition = await fetchTableDefinition(tableId)
-    const schema = tableDefinition && tableDefinition.schema
-    if (schema) {
-      const keys = Object.keys(schema)
-      rows.forEach(row => {
+  if (rows.length) {
+    // map of tables, incase a row being loaded is not from the same table
+    const tables = {}
+    for (let row of rows) {
+      // fallback to passed in tableId if row doesn't have it specified
+      let rowTableId = row.tableId || tableId
+      let table = tables[rowTableId]
+      if (!table) {
+        // Fetch table schema so we can check column types
+        table = await fetchTableDefinition(rowTableId)
+        tables[rowTableId] = table
+      }
+      const schema = table?.schema
+      if (schema) {
+        const keys = Object.keys(schema)
         for (let key of keys) {
           const type = schema[key].type
-          if (type === "link") {
+          if (type === "link" && Array.isArray(row[key])) {
             // Enrich row a string join of relationship fields
             row[`${key}_text`] =
               row[key]
@@ -137,7 +145,7 @@ export const enrichRows = async (rows, tableId) => {
               row[`${key}_first`] = url
           }
         }
-      })
+      }
     }
   }
   return rows
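The enrichRows change above stops assuming every row comes from the single passed-in table: it keeps a small map of table definitions keyed by table ID, so each schema is fetched at most once even when rows from several tables are mixed together. A minimal sketch of that caching pattern follows; `fetchTableDefinition` stands in for the API helper used in the diff, and the function name is illustrative only.

```js
// Fetch each table definition at most once, keyed by table ID.
async function getTableSchemas(rows, fallbackTableId, fetchTableDefinition) {
  const tables = {}
  for (let row of rows) {
    // fall back to the passed-in table ID if the row doesn't specify one
    const rowTableId = row.tableId || fallbackTableId
    if (!tables[rowTableId]) {
      tables[rowTableId] = await fetchTableDefinition(rowTableId)
    }
  }
  return tables
}
```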
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "description": "Budibase Web Server",
   "main": "src/index.js",
   "repository": {
@@ -62,9 +62,9 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.117-alpha.2",
-    "@budibase/client": "^0.9.117-alpha.2",
-    "@budibase/string-templates": "^0.9.117-alpha.2",
+    "@budibase/auth": "^0.9.120-alpha.4",
+    "@budibase/client": "^0.9.120-alpha.4",
+    "@budibase/string-templates": "^0.9.120-alpha.4",
     "@elastic/elasticsearch": "7.10.0",
     "@koa/router": "8.0.0",
     "@sendgrid/mail": "7.1.1",
@@ -117,7 +117,7 @@
   "devDependencies": {
     "@babel/core": "^7.14.3",
     "@babel/preset-env": "^7.14.4",
-    "@budibase/client": "^0.9.117-alpha.2",
+    "@budibase/client": "^0.9.120-alpha.4",
     "@jest/test-sequencer": "^24.8.0",
     "@types/bull": "^3.15.1",
     "@types/jest": "^26.0.23",
@@ -138,7 +138,7 @@
     "supertest": "^4.0.2",
     "ts-jest": "^27.0.3",
     "ts-node": "^10.0.0",
-    "typescript": "^4.3.4",
+    "typescript": "^4.3.5",
     "update-dotenv": "^1.1.1"
   },
   "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
@@ -106,6 +106,12 @@ async function createInstance(template) {
     views: {},
   })

+  // NOTE: indexes need to be created before any tables/templates
+  // add view for linked rows
+  await createLinkView(appId)
+  await createRoutingView(appId)
+  await createAllSearchIndex(appId)
+
   // replicate the template data to the instance DB
   // this is currently very hard to test, downloading and importing template files
   /* istanbul ignore next */
@@ -119,11 +125,6 @@ async function createInstance(template) {
     await db.put(USERS_TABLE_SCHEMA)
   }

-  // add view for linked rows
-  await createLinkView(appId)
-  await createRoutingView(appId)
-  await createAllSearchIndex(appId)
-
   return { _id: appId }
 }

@@ -1,34 +1,29 @@
 import {
+  IncludeRelationships,
   Operation,
-  SearchFilters,
-  SortJson,
   PaginationJson,
   RelationshipsJson,
+  SearchFilters,
+  SortJson,
 } from "../../../definitions/datasource"
-import {
-  Row,
-  Table,
-  FieldSchema,
-  Datasource,
-} from "../../../definitions/common"
-import {
-  breakRowIdField,
-  generateRowIdField,
-} from "../../../integrations/utils"
+import {Datasource, FieldSchema, Row, Table} from "../../../definitions/common"
+import {breakRowIdField, generateRowIdField} from "../../../integrations/utils"
+import { RelationshipTypes } from "../../../constants"

 interface ManyRelationship {
   tableId?: string
   id?: string
   isUpdate?: boolean
+  key: string
   [key: string]: any
 }

 interface RunConfig {
-  id: string
-  filters: SearchFilters
-  sort: SortJson
-  paginate: PaginationJson
-  row: Row
+  id?: string
+  filters?: SearchFilters
+  sort?: SortJson
+  paginate?: PaginationJson
+  row?: Row
 }

 module External {
@@ -37,7 +32,6 @@ module External {
   const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
   const { processObjectSync } = require("@budibase/string-templates")
   const { cloneDeep } = require("lodash/fp")
-  const { isEqual } = require("lodash")
   const CouchDB = require("../../../db")

   function buildFilters(
@@ -81,7 +75,7 @@ module External {
     }
   }

-  function generateIdForRow(row: Row, table: Table): string {
+  function generateIdForRow(row: Row | undefined, table: Table): string {
     const primary = table.primary
     if (!row || !primary) {
       return ""
@@ -89,7 +83,8 @@ module External {
     // build id array
     let idParts = []
     for (let field of primary) {
-      const fieldValue = row[`${table.name}.${field}`]
+      // need to handle table name + field or just field, depending on if relationships used
+      const fieldValue = row[`${table.name}.${field}`] || row[field]
      if (fieldValue) {
        idParts.push(fieldValue)
      }
@@ -116,7 +111,7 @@ module External {
     const thisRow: { [key: string]: any } = {}
     // filter the row down to what is actually the row (not joined)
     for (let fieldName of Object.keys(table.schema)) {
-      const value = row[`${table.name}.${fieldName}`]
+      const value = row[`${table.name}.${fieldName}`] || row[fieldName]
       // all responses include "select col as table.col" so that overlaps are handled
       if (value) {
         thisRow[fieldName] = value
@@ -156,7 +151,15 @@ module External {
      }
    }

-    inputProcessing(row: Row, table: Table) {
+    getTable(tableId: string | undefined): Table {
+      if (!tableId) {
+        throw "Table ID is unknown, cannot find table"
+      }
+      const { tableName } = breakExternalTableId(tableId)
+      return this.tables[tableName]
+    }
+
+    inputProcessing(row: Row | undefined, table: Table) {
      if (!row) {
        return { row, manyRelationships: [] }
      }
@@ -202,6 +205,7 @@ module External {
      manyRelationships.push({
        tableId: field.through || field.tableId,
        isUpdate,
+        key: otherKey,
        [thisKey]: breakRowIdField(relationship)[0],
        // leave the ID for enrichment later
        [otherKey]: `{{ literal ${tablePrimary} }}`,
@@ -343,41 +347,34 @@ module External {
     * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
     * information.
     */
-    async lookup(
-      row: Row,
-      relationship: ManyRelationship,
-      cache: { [key: string]: Row[] } = {}
-    ) {
-      const { tableId, isUpdate, id, ...rest } = relationship
+    async lookupRelations(tableId: string, row: Row) {
+      const related: {[key: string]: any} = {}
      const { tableName } = breakExternalTableId(tableId)
      const table = this.tables[tableName]
-      if (isUpdate) {
-        return { rows: [], table }
-      }
-      // if not updating need to make sure we have a list of all possible options
-      let fullKey: string = tableId + "/",
-        rowKey: string = ""
-      for (let key of Object.keys(rest)) {
-        if (row[key]) {
-          fullKey += key
-          rowKey = key
+      // @ts-ignore
+      const primaryKey = table.primary[0]
+      // make a new request to get the row with all its relationships
+      // we need this to work out if any relationships need removed
+      for (let field of Object.values(table.schema)) {
+        if (field.type !== FieldTypes.LINK || !field.fieldName) {
+          continue
        }
-      }
-      if (cache[fullKey] == null) {
+        const isMany = field.relationshipType === RelationshipTypes.MANY_TO_MANY
+        const tableId = isMany ? field.through : field.tableId
+        const fieldName = isMany ? primaryKey : field.fieldName
        const response = await makeExternalQuery(this.appId, {
          endpoint: getEndpoint(tableId, DataSourceOperation.READ),
          filters: {
            equal: {
-              [rowKey]: row[rowKey],
+              [fieldName]: row[primaryKey],
            },
          },
        })
        // this is the response from knex if no rows found
-        if (!response[0].read) {
-          cache[fullKey] = response
-        }
+        const rows = !response[0].read ? response : []
+        related[fieldName] = { rows, isMany, tableId }
      }
-      return { rows: cache[fullKey] || [], table }
+      return related
    }

    /**
@@ -390,19 +387,21 @@ module External {
     * isn't supposed to exist anymore and delete those. This is better than the usual method of delete them
     * all and then re-create, as theres no chance of losing data (e.g. delete succeed, but write fail).
     */
-    async handleManyRelationships(row: Row, relationships: ManyRelationship[]) {
+    async handleManyRelationships(mainTableId: string, row: Row, relationships: ManyRelationship[]) {
      const { appId } = this
-      if (relationships.length === 0) {
-        return
-      }
      // if we're creating (in a through table) need to wipe the existing ones first
      const promises = []
-      const cache: { [key: string]: Row[] } = {}
+      const related = await this.lookupRelations(mainTableId, row)
      for (let relationship of relationships) {
-        const { tableId, isUpdate, id, ...rest } = relationship
+        const { key, tableId, isUpdate, id, ...rest } = relationship
        const body = processObjectSync(rest, row)
-        const { table, rows } = await this.lookup(row, relationship, cache)
-        const found = rows.find(row => isEqual(body, row))
+        const linkTable = this.getTable(tableId)
+        // @ts-ignore
+        const linkPrimary = linkTable.primary[0]
+        const rows = related[key].rows || []
+        const found = rows.find((row: { [key: string]: any }) =>
+          row[linkPrimary] === relationship.id || row[linkPrimary] === body[linkPrimary]
+        )
        const operation = isUpdate
          ? DataSourceOperation.UPDATE
          : DataSourceOperation.CREATE
@@ -412,27 +411,27 @@ module External {
            endpoint: getEndpoint(tableId, operation),
            // if we're doing many relationships then we're writing, only one response
            body,
-            filters: buildFilters(id, {}, table),
+            filters: buildFilters(id, {}, linkTable),
          })
        )
      } else {
-        // remove the relationship from the rows
+        // remove the relationship from cache so it isn't adjusted again
        rows.splice(rows.indexOf(found), 1)
      }
    }
-    // finally if creating, cleanup any rows that aren't supposed to be here
-    for (let [key, rows] of Object.entries(cache)) {
-      // @ts-ignore
-      const tableId: string = key.split("/").shift()
-      const { tableName } = breakExternalTableId(tableId)
-      const table = this.tables[tableName]
+    // finally cleanup anything that needs to be removed
+    for (let [colName, {isMany, rows, tableId}] of Object.entries(related)) {
+      const table = this.getTable(tableId)
      for (let row of rows) {
        const filters = buildFilters(generateIdForRow(row, table), {}, table)
        // safety check, if there are no filters on deletion bad things happen
        if (Object.keys(filters).length !== 0) {
+          const op = isMany ? DataSourceOperation.DELETE : DataSourceOperation.UPDATE
+          const body = isMany ? null : { [colName]: null }
          promises.push(
            makeExternalQuery(this.appId, {
-              endpoint: getEndpoint(tableId, DataSourceOperation.DELETE),
+              endpoint: getEndpoint(tableId, op),
+              body,
              filters,
            })
          )
@@ -449,7 +448,7 @@
     * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
     * is more performant and has the added benefit of protecting against this scenario.
     */
-    buildFields(table: Table) {
+    buildFields(table: Table, includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE) {
      function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
        return Object.entries(table.schema)
          .filter(
@@ -461,7 +460,7 @@
      }
      let fields = extractNonLinkFieldNames(table)
      for (let field of Object.values(table.schema)) {
-        if (field.type !== FieldTypes.LINK) {
+        if (field.type !== FieldTypes.LINK || !includeRelations) {
          continue
        }
        const { tableName: linkTableName } = breakExternalTableId(field.tableId)
@@ -491,7 +490,7 @@
        throw `Unable to process query, table "${tableName}" not defined.`
      }
      // clean up row on ingress using schema
-      filters = buildFilters(id, filters, table)
+      filters = buildFilters(id, filters || {}, table)
      const relationships = this.buildRelationships(table)
      const processed = this.inputProcessing(row, table)
      row = processed.row
@@ -524,8 +523,9 @@
      // can't really use response right now
      const response = await makeExternalQuery(appId, json)
      // handle many to many relationships now if we know the ID (could be auto increment)
-      if (processed.manyRelationships) {
+      if (operation !== DataSourceOperation.READ && processed.manyRelationships) {
        await this.handleManyRelationships(
+          table._id || "",
          response[0],
          processed.manyRelationships
        )
@@ -15,6 +15,7 @@ const { FieldTypes } = require("../../../constants")
 const { isEqual } = require("lodash")
 const { validate, findRow } = require("./utils")
 const { fullSearch, paginatedSearch } = require("./internalSearch")
+const { getGlobalUsersFromMetadata } = require("../../../utilities/global")

 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -132,7 +133,7 @@ exports.fetchView = async ctx => {
   const viewName = ctx.params.viewName

   // if this is a table view being looked for just transfer to that
-  if (viewName.includes(DocumentTypes.TABLE)) {
+  if (viewName.startsWith(DocumentTypes.TABLE)) {
     ctx.params.tableId = viewName
     return exports.fetch(ctx)
   }
@@ -290,6 +291,10 @@ exports.search = async ctx => {

   // Enrich search results with relationships
   if (response.rows && response.rows.length) {
+    // enrich with global users if from users table
+    if (tableId === InternalTables.USER_METADATA) {
+      response.rows = await getGlobalUsersFromMetadata(appId, response.rows)
+    }
     const table = await db.get(tableId)
     response.rows = await outputProcessing(ctx, table, response.rows)
   }
@@ -17,7 +17,7 @@ function removeGlobalProps(user) {

 exports.fetchMetadata = async function (ctx) {
   const database = new CouchDB(ctx.appId)
-  const global = await getGlobalUsers(ctx, ctx.appId)
+  const global = await getGlobalUsers(ctx.appId)
   const metadata = (
     await database.allDocs(
       getUserMetadataParams(null, {
@@ -13,8 +13,8 @@ const CouchDB = require("../../db")
 const { FieldTypes } = require("../../constants")
 const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils")
 const { partition } = require("lodash")
-const { getGlobalUsers } = require("../../utilities/global")
-const processor = require("../../utilities/rowProcessor")
+const { getGlobalUsersFromMetadata } = require("../../utilities/global")
+const { processFormulas } = require("../../utilities/rowProcessor/utils")

 /**
  * This functionality makes sure that when rows with links are created, updated or deleted they are processed
|
@ -71,17 +71,7 @@ async function getFullLinkedDocs(ctx, appId, links) {
|
||||||
let [users, other] = partition(linked, linkRow =>
|
let [users, other] = partition(linked, linkRow =>
|
||||||
linkRow._id.startsWith(USER_METDATA_PREFIX)
|
linkRow._id.startsWith(USER_METDATA_PREFIX)
|
||||||
)
|
)
|
||||||
const globalUsers = await getGlobalUsers(ctx, appId, users)
|
users = await getGlobalUsersFromMetadata(appId, users)
|
||||||
users = users.map(user => {
|
|
||||||
const globalUser = globalUsers.find(
|
|
||||||
globalUser => globalUser && user._id.includes(globalUser._id)
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
...globalUser,
|
|
||||||
// doing user second overwrites the id and rev (always metadata)
|
|
||||||
...user,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return [...other, ...users]
|
return [...other, ...users]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -197,9 +187,7 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => {
|
||||||
if (!linkedRow || !linkedTable) {
|
if (!linkedRow || !linkedTable) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
row[link.fieldName].push(
|
row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
|
||||||
processor.processFormulas(linkedTable, linkedRow)
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return rows
|
return rows
|
||||||
|
@@ -215,19 +203,17 @@
 exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => {
   const db = new CouchDB(appId)
   // will populate this as we find them
-  const linkedTables = []
-  for (let [column, schema] of Object.entries(table.schema)) {
-    if (schema.type !== FieldTypes.LINK) {
-      continue
-    }
-    for (let row of enriched) {
-      if (!row[column] || !row[column].length) {
+  const linkedTables = [table]
+  for (let row of enriched) {
+    // this only fetches the table if its not already in array
+    const rowTable = await getLinkedTable(db, row.tableId, linkedTables)
+    for (let [column, schema] of Object.entries(rowTable.schema)) {
+      if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) {
        continue
      }
      const newLinks = []
      for (let link of row[column]) {
        const linkTblId = link.tableId || getRelatedTableForField(table, column)
-        // this only fetches the table if its not already in array
        const linkedTable = await getLinkedTable(db, linkTblId, linkedTables)
        const obj = { _id: link._id }
        if (link[linkedTable.primaryDisplay]) {
@@ -100,11 +100,11 @@ exports.createAllSearchIndex = async appId => {
      let idxKey = prev != null ? `${prev}.${key}` : key
      idxKey = idxKey.replace(/ /, "_")
      if (Array.isArray(input[key])) {
-        for (let val in input[key]) {
-          // eslint-disable-next-line no-undef
-          index(idxKey, input[key][val], {
-            store: true,
-          })
+        for (let val of input[key]) {
+          if (typeof val !== "object") {
+            // eslint-disable-next-line no-undef
+            index(idxKey, val, { store: true })
+          }
        }
      } else if (key === "_id" || key === "_rev" || input[key] == null) {
        continue
@@ -40,6 +40,11 @@ export enum SourceNames {
   REST = "REST",
 }

+export enum IncludeRelationships {
+  INCLUDE = 1,
+  EXCLUDE = 0
+}
+
 export interface QueryDefinition {
   type: QueryTypes
   displayName?: string
@@ -76,6 +76,7 @@ module ArangoModule {
      const result = await this.client.query(query.sql)
      return result.all()
    } catch (err) {
+      // @ts-ignore
      console.error("Error querying arangodb", err.message)
      throw err
    } finally {
@@ -91,6 +92,7 @@ module ArangoModule {
      )
      return result.all()
    } catch (err) {
+      // @ts-ignore
      console.error("Error querying arangodb", err.message)
      throw err
    } finally {
@@ -1,5 +1,7 @@
 import { Knex, knex } from "knex"
 const BASE_LIMIT = 5000
+// if requesting a single row then need to up the limit for the sake of joins
+const SINGLE_ROW_LIMIT = 100
 import {
   QueryJson,
   SearchFilters,
@@ -168,17 +170,21 @@ function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
      query = query.orderBy(key, direction)
    }
  }
+  let foundLimit = limit || BASE_LIMIT
  // handle pagination
  if (paginate && paginate.page && paginate.limit) {
    // @ts-ignore
    const page = paginate.page <= 1 ? 0 : paginate.page - 1
    const offset = page * paginate.limit
-    query = query.offset(offset).limit(paginate.limit)
+    foundLimit = paginate.limit
+    query = query.offset(offset)
  } else if (paginate && paginate.limit) {
-    query = query.limit(paginate.limit)
-  } else {
-    query.limit(limit)
+    foundLimit = paginate.limit
  }
+  if (foundLimit === 1) {
+    foundLimit = SINGLE_ROW_LIMIT
+  }
+  query = query.limit(foundLimit)
  return query
 }

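The buildRead change above funnels every branch through a single foundLimit value: pagination overrides the base limit, and a requested limit of exactly 1 is bumped so that joined relationship rows are not truncated. A rough sketch of that selection logic, assuming the constants added in the diff (this is an illustration, not the repository's exact helper):

```js
const BASE_LIMIT = 5000
// if requesting a single row then need to up the limit for the sake of joins
const SINGLE_ROW_LIMIT = 100

function resolveLimit(limit, paginate) {
  let foundLimit = limit || BASE_LIMIT
  if (paginate && paginate.limit) {
    foundLimit = paginate.limit
  }
  // a single-row read still needs headroom for its joined relationship rows
  return foundLimit === 1 ? SINGLE_ROW_LIMIT : foundLimit
}
```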
@@ -73,6 +73,7 @@ module MSSQLModule {
    try {
      return await client.query(query.sql, query.bindings || {})
    } catch (err) {
+      // @ts-ignore
      throw new Error(err)
    }
  }
@@ -101,6 +102,7 @@ module MSSQLModule {
      const client = await this.pool.connect()
      this.client = client.request()
    } catch (err) {
+      // @ts-ignore
      throw new Error(err)
    }
  }
@@ -94,6 +94,7 @@ module PostgresModule {
    try {
      return await client.query(query.sql, query.bindings || [])
    } catch (err) {
+      // @ts-ignore
      throw new Error(err)
    }
  }
@@ -33,13 +33,14 @@ export function generateRowIdField(keyProps: any[] = []) {
 }

 // should always return an array
-export function breakRowIdField(_id: string): any[] {
+export function breakRowIdField(_id: string | { _id: string }): any[] {
   if (!_id) {
     return []
   }
   // have to replace on the way back as we swapped out the double quotes
   // when encoding, but JSON can't handle the single quotes
-  const decoded: string = decodeURIComponent(_id).replace(/'/g, '"')
+  const id = typeof _id === "string" ? _id : _id._id
+  const decoded: string = decodeURIComponent(id).replace(/'/g, '"')
   try {
     const parsed = JSON.parse(decoded)
     return Array.isArray(parsed) ? parsed : [parsed]
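breakRowIdField above now accepts either the encoded _id string or a row-like object that carries one, decoding both to the same key array. A small usage sketch (the encoded value is illustrative):

```js
// "%5B1%5D" decodes to "[1]", which parses to a one-element key array
breakRowIdField("%5B1%5D")          // => [1]
breakRowIdField({ _id: "%5B1%5D" }) // => [1]
```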
@@ -39,13 +39,13 @@ exports.getCachedSelf = async (ctx, appId) => {
   return processUser(appId, user)
 }

-exports.getGlobalUser = async (ctx, appId, userId) => {
+exports.getGlobalUser = async (appId, userId) => {
   const db = getGlobalDB()
   let user = await db.get(getGlobalIDFromUserMetadataID(userId))
   return processUser(appId, user)
 }

-exports.getGlobalUsers = async (ctx, appId = null, users = null) => {
+exports.getGlobalUsers = async (appId = null, users = null) => {
   const db = getGlobalDB()
   let globalUsers
   if (users) {
@@ -73,3 +73,17 @@ exports.getGlobalUsers = async (ctx, appId = null, users = null) => {
   }
   return globalUsers.map(user => exports.updateAppRole(appId, user))
 }
+
+exports.getGlobalUsersFromMetadata = async (appId, users) => {
+  const globalUsers = await exports.getGlobalUsers(appId, users)
+  return users.map(user => {
+    const globalUser = globalUsers.find(
+      globalUser => globalUser && user._id.includes(globalUser._id)
+    )
+    return {
+      ...globalUser,
+      // doing user second overwrites the id and rev (always metadata)
+      ...user,
+    }
+  })
+}
@@ -1,8 +1,8 @@
-const linkRows = require("../db/linkedRows")
+const linkRows = require("../../db/linkedRows")
 const { cloneDeep } = require("lodash/fp")
-const { FieldTypes, AutoFieldSubTypes } = require("../constants")
-const { processStringSync } = require("@budibase/string-templates")
-const { attachmentsRelativeURL } = require("./index")
+const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
+const { attachmentsRelativeURL } = require("../index")
+const { processFormulas } = require("./utils")

 const BASE_AUTO_ID = 1

@@ -128,28 +128,6 @@ function processAutoColumn(user, table, row) {
   return { table, row }
 }

-/**
- * Looks through the rows provided and finds formulas - which it then processes.
- */
-function processFormulas(table, rows) {
-  const single = !Array.isArray(rows)
-  if (single) {
-    rows = [rows]
-  }
-  for (let [column, schema] of Object.entries(table.schema)) {
-    if (schema.type !== FieldTypes.FORMULA) {
-      continue
-    }
-    // iterate through rows and process formula
-    rows = rows.map(row => ({
-      ...row,
-      [column]: processStringSync(schema.formula, row),
-    }))
-  }
-  return single ? rows[0] : rows
-}
-exports.processFormulas = processFormulas
-
 /**
  * This will coerce a value to the correct types based on the type transform map
  * @param {object} row The value to coerce
@@ -0,0 +1,23 @@
+const { FieldTypes } = require("../../constants")
+const { processStringSync } = require("@budibase/string-templates")
+
+/**
+ * Looks through the rows provided and finds formulas - which it then processes.
+ */
+exports.processFormulas = (table, rows) => {
+  const single = !Array.isArray(rows)
+  if (single) {
+    rows = [rows]
+  }
+  for (let [column, schema] of Object.entries(table.schema)) {
+    if (schema.type !== FieldTypes.FORMULA) {
+      continue
+    }
+    // iterate through rows and process formula
+    rows = rows.map(row => ({
+      ...row,
+      [column]: processStringSync(schema.formula, row),
+    }))
+  }
+  return single ? rows[0] : rows
+}
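The new rowProcessor utils module above evaluates formula columns against each row's own values and preserves the input shape (single row in, single row out; array in, array out). A hedged usage example follows; the table and row shapes and the formula string are illustrative, and it assumes FieldTypes.FORMULA resolves to the string "formula":

```js
const { processFormulas } = require("./utils")

// hypothetical table with one formula column
const table = {
  schema: {
    total: { type: "formula", formula: "{{ add price tax }}" },
  },
}

const row = { price: 10, tax: 2 }
processFormulas(table, row)   // single row in, single (enriched) row out
processFormulas(table, [row]) // array in, array out
```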
@@ -3,7 +3,7 @@ const { InternalTables } = require("../db/utils")
 const { getGlobalUser } = require("../utilities/global")

 exports.getFullUser = async (ctx, userId) => {
-  const global = await getGlobalUser(ctx, ctx.appId, userId)
+  const global = await getGlobalUser(ctx.appId, userId)
   let metadata
   try {
     // this will throw an error if the db doesn't exist, or there is no appId
@@ -104,7 +104,7 @@ exports.addAppRoleToUser = async (ctx, appId, roleId, userId = null) => {
     user = await exports.getGlobalSelf(ctx)
     endpoint = `/api/global/users/self`
   } else {
-    user = await getGlobalUser(ctx, appId, userId)
+    user = await getGlobalUser(appId, userId)
     body._id = userId
     endpoint = `/api/global/users`
   }
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "0.9.117-alpha.2",
+  "version": "0.9.120-alpha.4",
   "description": "Budibase background service",
   "main": "src/index.js",
   "repository": {
@@ -23,8 +23,8 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.117-alpha.2",
-    "@budibase/string-templates": "^0.9.117-alpha.2",
+    "@budibase/auth": "^0.9.120-alpha.4",
+    "@budibase/string-templates": "^0.9.120-alpha.4",
     "@koa/router": "^8.0.0",
     "@techpass/passport-openidconnect": "^0.3.0",
     "aws-sdk": "^2.811.0",