Merge pull request #2531 from Budibase/fix/sql-relationship-issues
Relationship bug fixes (SQL and internal) and a few other bits

commit 8ce56c8492

@@ -138,7 +138,7 @@
     "supertest": "^4.0.2",
     "ts-jest": "^27.0.3",
     "ts-node": "^10.0.0",
-    "typescript": "^4.3.4",
+    "typescript": "^4.3.5",
     "update-dotenv": "^1.1.1"
   },
   "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"

@@ -106,6 +106,12 @@ async function createInstance(template) {
     views: {},
   })

+  // NOTE: indexes need to be created before any tables/templates
+  // add view for linked rows
+  await createLinkView(appId)
+  await createRoutingView(appId)
+  await createAllSearchIndex(appId)
+
   // replicate the template data to the instance DB
   // this is currently very hard to test, downloading and importing template files
   /* istanbul ignore next */
@@ -119,11 +125,6 @@ async function createInstance(template) {
     await db.put(USERS_TABLE_SCHEMA)
   }

-  // add view for linked rows
-  await createLinkView(appId)
-  await createRoutingView(appId)
-  await createAllSearchIndex(appId)
-
   return { _id: appId }
 }

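The two hunks above only move code, but the ordering is the fix: the link view, routing view and search index must exist before any tables or template rows are written, otherwise work done during template import can query a design doc that is not there yet. A minimal TypeScript sketch of the constraint (helper names below are stand-ins, not Budibase APIs):

  async function createViewsAndIndexes(appId: string): Promise<void> {
    // stand-in for createLinkView / createRoutingView / createAllSearchIndex
  }

  async function replicateTemplate(appId: string): Promise<void> {
    // template import may search/link rows, which needs the indexes above
  }

  async function createInstanceSketch(appId: string): Promise<void> {
    await createViewsAndIndexes(appId) // must run first
    await replicateTemplate(appId)
  }
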
@@ -1,34 +1,29 @@
 import {
+  IncludeRelationships,
   Operation,
-  SearchFilters,
-  SortJson,
   PaginationJson,
   RelationshipsJson,
+  SearchFilters,
+  SortJson,
 } from "../../../definitions/datasource"
-import {
-  Row,
-  Table,
-  FieldSchema,
-  Datasource,
-} from "../../../definitions/common"
-import {
-  breakRowIdField,
-  generateRowIdField,
-} from "../../../integrations/utils"
+import {Datasource, FieldSchema, Row, Table} from "../../../definitions/common"
+import {breakRowIdField, generateRowIdField} from "../../../integrations/utils"
+import { RelationshipTypes } from "../../../constants"

 interface ManyRelationship {
   tableId?: string
   id?: string
   isUpdate?: boolean
+  key: string
   [key: string]: any
 }

 interface RunConfig {
-  id: string
-  filters: SearchFilters
-  sort: SortJson
-  paginate: PaginationJson
-  row: Row
+  id?: string
+  filters?: SearchFilters
+  sort?: SortJson
+  paginate?: PaginationJson
+  row?: Row
 }

 module External {

@@ -37,7 +32,6 @@ module External {
   const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
   const { processObjectSync } = require("@budibase/string-templates")
   const { cloneDeep } = require("lodash/fp")
-  const { isEqual } = require("lodash")
   const CouchDB = require("../../../db")

   function buildFilters(
@@ -81,7 +75,7 @@
     }
   }

-  function generateIdForRow(row: Row, table: Table): string {
+  function generateIdForRow(row: Row | undefined, table: Table): string {
     const primary = table.primary
     if (!row || !primary) {
       return ""
@@ -89,7 +83,8 @@
     // build id array
     let idParts = []
     for (let field of primary) {
-      const fieldValue = row[`${table.name}.${field}`]
+      // need to handle table name + field or just field, depending on if relationships used
+      const fieldValue = row[`${table.name}.${field}`] || row[field]
      if (fieldValue) {
        idParts.push(fieldValue)
      }
@@ -116,7 +111,7 @@
     const thisRow: { [key: string]: any } = {}
     // filter the row down to what is actually the row (not joined)
     for (let fieldName of Object.keys(table.schema)) {
-      const value = row[`${table.name}.${fieldName}`]
+      const value = row[`${table.name}.${fieldName}`] || row[fieldName]
       // all responses include "select col as table.col" so that overlaps are handled
       if (value) {
         thisRow[fieldName] = value

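Both hunks above apply the same fallback: when a query joins relationships, columns come back namespaced as "table.column" (the SQL layer selects "col as table.col" so overlapping names don't clobber each other), but a plain single-table query returns bare column names. A small sketch of the lookup, with illustrative row shapes:

  function readField(row: { [key: string]: any }, tableName: string, field: string) {
    // joined responses namespace the column, plain responses do not
    return row[`${tableName}.${field}`] || row[field]
  }

  readField({ "people.id": 1 }, "people", "id") // -> 1 (joined response)
  readField({ id: 1 }, "people", "id")          // -> 1 (plain response)

One caveat of the `||` form: a legitimate falsy value such as 0 or "" in the namespaced slot also falls through to the bare lookup, which is harmless here since both forms refer to the same column.
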
@@ -156,7 +151,15 @@
       }
     }

-    inputProcessing(row: Row, table: Table) {
+    getTable(tableId: string | undefined): Table {
+      if (!tableId) {
+        throw "Table ID is unknown, cannot find table"
+      }
+      const { tableName } = breakExternalTableId(tableId)
+      return this.tables[tableName]
+    }
+
+    inputProcessing(row: Row | undefined, table: Table) {
       if (!row) {
         return { row, manyRelationships: [] }
       }

@@ -202,6 +205,7 @@
         manyRelationships.push({
           tableId: field.through || field.tableId,
           isUpdate,
+          key: otherKey,
           [thisKey]: breakRowIdField(relationship)[0],
           // leave the ID for enrichment later
           [otherKey]: `{{ literal ${tablePrimary} }}`,
@@ -343,41 +347,34 @@
     * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
     * information.
     */
-    async lookup(
-      row: Row,
-      relationship: ManyRelationship,
-      cache: { [key: string]: Row[] } = {}
-    ) {
-      const { tableId, isUpdate, id, ...rest } = relationship
-      const { tableName } = breakExternalTableId(tableId)
-      const table = this.tables[tableName]
-      if (isUpdate) {
-        return { rows: [], table }
-      }
-      // if not updating need to make sure we have a list of all possible options
-      let fullKey: string = tableId + "/",
-        rowKey: string = ""
-      for (let key of Object.keys(rest)) {
-        if (row[key]) {
-          fullKey += key
-          rowKey = key
-        }
-      }
-      if (cache[fullKey] == null) {
-        const response = await makeExternalQuery(this.appId, {
-          endpoint: getEndpoint(tableId, DataSourceOperation.READ),
-          filters: {
-            equal: {
-              [rowKey]: row[rowKey],
-            },
-          },
-        })
-        // this is the response from knex if no rows found
-        if (!response[0].read) {
-          cache[fullKey] = response
-        }
-      }
-      return { rows: cache[fullKey] || [], table }
+    async lookupRelations(tableId: string, row: Row) {
+      const related: {[key: string]: any} = {}
+      const { tableName } = breakExternalTableId(tableId)
+      const table = this.tables[tableName]
+      // @ts-ignore
+      const primaryKey = table.primary[0]
+      // make a new request to get the row with all its relationships
+      // we need this to work out if any relationships need removed
+      for (let field of Object.values(table.schema)) {
+        if (field.type !== FieldTypes.LINK || !field.fieldName) {
+          continue
+        }
+        const isMany = field.relationshipType === RelationshipTypes.MANY_TO_MANY
+        const tableId = isMany ? field.through : field.tableId
+        const fieldName = isMany ? primaryKey : field.fieldName
+        const response = await makeExternalQuery(this.appId, {
+          endpoint: getEndpoint(tableId, DataSourceOperation.READ),
+          filters: {
+            equal: {
+              [fieldName]: row[primaryKey],
+            },
+          },
+        })
+        // this is the response from knex if no rows found
+        const rows = !response[0].read ? response : []
+        related[fieldName] = { rows, isMany, tableId }
+      }
+      return related
     }

     /**

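For orientation, the map lookupRelations now returns appears to have this shape (reconstructed from the hunk above; the type is not declared in the diff):

  interface RelatedEntry {
    rows: { [key: string]: any }[] // rows currently linked to the given row
    isMany: boolean                // true when the link is many-to-many (through table)
    tableId: string | undefined    // through table for many-to-many, else the linked table
  }

  // keyed by the field name used to filter the link query
  type RelatedMap = { [fieldName: string]: RelatedEntry }

One lookup per link column replaces the old per-relationship cache, so the later cleanup pass can see every row that is currently related, not just the ones mentioned in the incoming update.
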
@@ -390,19 +387,21 @@
     * isn't supposed to exist anymore and delete those. This is better than the usual method of delete them
     * all and then re-create, as theres no chance of losing data (e.g. delete succeed, but write fail).
     */
-    async handleManyRelationships(row: Row, relationships: ManyRelationship[]) {
+    async handleManyRelationships(mainTableId: string, row: Row, relationships: ManyRelationship[]) {
       const { appId } = this
       if (relationships.length === 0) {
         return
       }
       // if we're creating (in a through table) need to wipe the existing ones first
       const promises = []
-      const cache: { [key: string]: Row[] } = {}
+      const related = await this.lookupRelations(mainTableId, row)
       for (let relationship of relationships) {
-        const { tableId, isUpdate, id, ...rest } = relationship
+        const { key, tableId, isUpdate, id, ...rest } = relationship
         const body = processObjectSync(rest, row)
-        const { table, rows } = await this.lookup(row, relationship, cache)
-        const found = rows.find(row => isEqual(body, row))
+        const linkTable = this.getTable(tableId)
+        // @ts-ignore
+        const linkPrimary = linkTable.primary[0]
+        const rows = related[key].rows || []
+        const found = rows.find((row: { [key: string]: any }) =>
+          row[linkPrimary] === relationship.id || row[linkPrimary] === body[linkPrimary]
+        )
         const operation = isUpdate
           ? DataSourceOperation.UPDATE
           : DataSourceOperation.CREATE
@@ -412,27 +411,27 @@
               endpoint: getEndpoint(tableId, operation),
               // if we're doing many relationships then we're writing, only one response
               body,
-              filters: buildFilters(id, {}, table),
+              filters: buildFilters(id, {}, linkTable),
             })
           )
         } else {
-          // remove the relationship from the rows
+          // remove the relationship from cache so it isn't adjusted again
           rows.splice(rows.indexOf(found), 1)
         }
       }
-      // finally if creating, cleanup any rows that aren't supposed to be here
-      for (let [key, rows] of Object.entries(cache)) {
-        // @ts-ignore
-        const tableId: string = key.split("/").shift()
-        const { tableName } = breakExternalTableId(tableId)
-        const table = this.tables[tableName]
+      // finally cleanup anything that needs to be removed
+      for (let [colName, {isMany, rows, tableId}] of Object.entries(related)) {
+        const table = this.getTable(tableId)
         for (let row of rows) {
           const filters = buildFilters(generateIdForRow(row, table), {}, table)
           // safety check, if there are no filters on deletion bad things happen
           if (Object.keys(filters).length !== 0) {
+            const op = isMany ? DataSourceOperation.DELETE : DataSourceOperation.UPDATE
+            const body = isMany ? null : { [colName]: null }
             promises.push(
               makeExternalQuery(this.appId, {
-                endpoint: getEndpoint(tableId, DataSourceOperation.DELETE),
+                endpoint: getEndpoint(tableId, op),
+                body,
                 filters,
               })
             )

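The cleanup rule introduced here is worth spelling out: any row still sitting in `related` after the main loop was not re-asserted by the incoming update. For many-to-many links the junction row itself is deleted; for one-to-many links the row must survive, so only its foreign-key column is nulled. A self-contained sketch (the operation names mirror DataSourceOperation, the rest is illustrative):

  enum Op {
    DELETE = "DELETE",
    UPDATE = "UPDATE",
  }

  function cleanupPlan(isMany: boolean, colName: string) {
    const op = isMany ? Op.DELETE : Op.UPDATE
    const body = isMany ? null : { [colName]: null }
    return { op, body }
  }

  cleanupPlan(true, "person_id")  // { op: "DELETE", body: null }
  cleanupPlan(false, "person_id") // { op: "UPDATE", body: { person_id: null } }
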
@@ -449,7 +448,7 @@
     * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
     * is more performant and has the added benefit of protecting against this scenario.
     */
-    buildFields(table: Table) {
+    buildFields(table: Table, includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE) {
       function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
         return Object.entries(table.schema)
           .filter(
@@ -461,7 +460,7 @@
       }
       let fields = extractNonLinkFieldNames(table)
       for (let field of Object.values(table.schema)) {
-        if (field.type !== FieldTypes.LINK) {
+        if (field.type !== FieldTypes.LINK || !includeRelations) {
           continue
         }
         const { tableName: linkTableName } = breakExternalTableId(field.tableId)
@@ -491,7 +490,7 @@
       throw `Unable to process query, table "${tableName}" not defined.`
     }
     // clean up row on ingress using schema
-    filters = buildFilters(id, filters, table)
+    filters = buildFilters(id, filters || {}, table)
     const relationships = this.buildRelationships(table)
     const processed = this.inputProcessing(row, table)
     row = processed.row
@@ -524,8 +523,9 @@
     // can't really use response right now
     const response = await makeExternalQuery(appId, json)
     // handle many to many relationships now if we know the ID (could be auto increment)
-    if (processed.manyRelationships) {
+    if (operation !== DataSourceOperation.READ && processed.manyRelationships) {
       await this.handleManyRelationships(
+        table._id || "",
         response[0],
         processed.manyRelationships
       )

@@ -15,6 +15,7 @@ const { FieldTypes } = require("../../../constants")
 const { isEqual } = require("lodash")
 const { validate, findRow } = require("./utils")
 const { fullSearch, paginatedSearch } = require("./internalSearch")
+const { getGlobalUsersFromMetadata } = require("../../../utilities/global")

 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -132,7 +133,7 @@ exports.fetchView = async ctx => {
   const viewName = ctx.params.viewName

   // if this is a table view being looked for just transfer to that
-  if (viewName.includes(DocumentTypes.TABLE)) {
+  if (viewName.startsWith(DocumentTypes.TABLE)) {
     ctx.params.tableId = viewName
     return exports.fetch(ctx)
   }

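The switch from includes() to startsWith() matters because table-backed views are detected by a document ID prefix. Under includes(), any view whose name merely contained the prefix somewhere in the middle would be misrouted to the table fetch. A sketch (the actual DocumentTypes.TABLE value is assumed to be a prefix such as "ta_" purely for the demo):

  const TABLE_PREFIX = "ta_" // assumed value, for illustration only

  "ta_abc123".startsWith(TABLE_PREFIX)     // true  - genuine table id
  "my_delta_view".includes(TABLE_PREFIX)   // true  - false positive under the old check
  "my_delta_view".startsWith(TABLE_PREFIX) // false - correctly ignored now
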
@@ -290,6 +291,10 @@ exports.search = async ctx => {

   // Enrich search results with relationships
   if (response.rows && response.rows.length) {
+    // enrich with global users if from users table
+    if (tableId === InternalTables.USER_METADATA) {
+      response.rows = await getGlobalUsersFromMetadata(appId, response.rows)
+    }
     const table = await db.get(tableId)
     response.rows = await outputProcessing(ctx, table, response.rows)
   }

|
@ -17,7 +17,7 @@ function removeGlobalProps(user) {
|
|||
|
||||
exports.fetchMetadata = async function (ctx) {
|
||||
const database = new CouchDB(ctx.appId)
|
||||
const global = await getGlobalUsers(ctx, ctx.appId)
|
||||
const global = await getGlobalUsers(ctx.appId)
|
||||
const metadata = (
|
||||
await database.allDocs(
|
||||
getUserMetadataParams(null, {
|
||||
|
|
|
@@ -13,8 +13,8 @@ const CouchDB = require("../../db")
 const { FieldTypes } = require("../../constants")
 const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils")
 const { partition } = require("lodash")
-const { getGlobalUsers } = require("../../utilities/global")
-const processor = require("../../utilities/rowProcessor")
+const { getGlobalUsersFromMetadata } = require("../../utilities/global")
+const { processFormulas } = require("../../utilities/rowProcessor/utils")

 /**
  * This functionality makes sure that when rows with links are created, updated or deleted they are processed
@@ -71,17 +71,7 @@ async function getFullLinkedDocs(ctx, appId, links) {
   let [users, other] = partition(linked, linkRow =>
     linkRow._id.startsWith(USER_METDATA_PREFIX)
   )
-  const globalUsers = await getGlobalUsers(ctx, appId, users)
-  users = users.map(user => {
-    const globalUser = globalUsers.find(
-      globalUser => globalUser && user._id.includes(globalUser._id)
-    )
-    return {
-      ...globalUser,
-      // doing user second overwrites the id and rev (always metadata)
-      ...user,
-    }
-  })
+  users = await getGlobalUsersFromMetadata(appId, users)
   return [...other, ...users]
 }

@@ -197,9 +187,7 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => {
       if (!linkedRow || !linkedTable) {
         continue
       }
-      row[link.fieldName].push(
-        processor.processFormulas(linkedTable, linkedRow)
-      )
+      row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
     }
   }
   return rows

@@ -40,6 +40,11 @@ export enum SourceNames {
   REST = "REST",
 }

+export enum IncludeRelationships {
+  INCLUDE = 1,
+  EXCLUDE = 0
+}
+
 export interface QueryDefinition {
   type: QueryTypes
   displayName?: string

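Note that IncludeRelationships is a numeric enum with EXCLUDE = 0, which appears deliberate: the buildFields guard shown earlier tests `!includeRelations`, and only a falsy EXCLUDE value makes that check work. A sketch:

  enum IncludeRelationships {
    INCLUDE = 1,
    EXCLUDE = 0,
  }

  function addsLinkColumns(include: IncludeRelationships): boolean {
    return !!include // INCLUDE -> true, EXCLUDE -> false
  }

  addsLinkColumns(IncludeRelationships.EXCLUDE) // false
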
@@ -76,6 +76,7 @@ module ArangoModule {
         const result = await this.client.query(query.sql)
         return result.all()
       } catch (err) {
+        // @ts-ignore
         console.error("Error querying arangodb", err.message)
         throw err
       } finally {

@@ -91,6 +92,7 @@
         )
         return result.all()
       } catch (err) {
+        // @ts-ignore
         console.error("Error querying arangodb", err.message)
         throw err
       } finally {

@@ -1,5 +1,7 @@
 import { Knex, knex } from "knex"
 const BASE_LIMIT = 5000
+// if requesting a single row then need to up the limit for the sake of joins
+const SINGLE_ROW_LIMIT = 100
 import {
   QueryJson,
   SearchFilters,
@@ -168,17 +170,21 @@ function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
       query = query.orderBy(key, direction)
     }
   }
+  let foundLimit = limit || BASE_LIMIT
   // handle pagination
   if (paginate && paginate.page && paginate.limit) {
     // @ts-ignore
     const page = paginate.page <= 1 ? 0 : paginate.page - 1
     const offset = page * paginate.limit
-    query = query.offset(offset).limit(paginate.limit)
+    foundLimit = paginate.limit
+    query = query.offset(offset)
   } else if (paginate && paginate.limit) {
-    query = query.limit(paginate.limit)
-  } else {
-    query.limit(limit)
+    foundLimit = paginate.limit
   }
+  if (foundLimit === 1) {
+    foundLimit = SINGLE_ROW_LIMIT
+  }
+  query = query.limit(foundLimit)
   return query
 }

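The reworked buildRead folds all three branches into one effective limit: the paginated limit when supplied, otherwise the caller's limit, otherwise BASE_LIMIT. A requested limit of exactly 1 (a single-row fetch) is widened to SINGLE_ROW_LIMIT, because a SQL join returns one result row per related row and LIMIT 1 would truncate the relationships. A standalone sketch of the selection:

  const BASE_LIMIT = 5000
  const SINGLE_ROW_LIMIT = 100

  function effectiveLimit(limit?: number, paginateLimit?: number): number {
    const found = paginateLimit || limit || BASE_LIMIT
    return found === 1 ? SINGLE_ROW_LIMIT : found
  }

  effectiveLimit(1)             // 100  - one logical row, many joined rows
  effectiveLimit(undefined, 25) // 25   - pagination wins
  effectiveLimit()              // 5000 - fallback
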
@@ -73,6 +73,7 @@ module MSSQLModule {
     try {
       return await client.query(query.sql, query.bindings || {})
     } catch (err) {
+      // @ts-ignore
       throw new Error(err)
     }
   }

@@ -101,6 +102,7 @@
       const client = await this.pool.connect()
       this.client = client.request()
     } catch (err) {
+      // @ts-ignore
       throw new Error(err)
     }
   }

@@ -94,6 +94,7 @@ module PostgresModule {
     try {
       return await client.query(query.sql, query.bindings || [])
     } catch (err) {
+      // @ts-ignore
       throw new Error(err)
     }
   }

@@ -33,13 +33,14 @@ export function generateRowIdField(keyProps: any[] = []) {
 }

 // should always return an array
-export function breakRowIdField(_id: string): any[] {
+export function breakRowIdField(_id: string | { _id: string }): any[] {
   if (!_id) {
     return []
   }
   // have to replace on the way back as we swapped out the double quotes
   // when encoding, but JSON can't handle the single quotes
-  const decoded: string = decodeURIComponent(_id).replace(/'/g, '"')
+  const id = typeof _id === "string" ? _id : _id._id
+  const decoded: string = decodeURIComponent(id).replace(/'/g, '"')
   try {
     const parsed = JSON.parse(decoded)
     return Array.isArray(parsed) ? parsed : [parsed]

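Usage sketch for the widened signature: callers can now pass either the encoded _id string or a row-like object that carries one. The encoded value below is encodeURIComponent("[1]"):

  breakRowIdField("%5B1%5D")          // -> [1]
  breakRowIdField({ _id: "%5B1%5D" }) // -> [1]
  breakRowIdField("")                 // -> [] (falsy guard)
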
@@ -39,13 +39,13 @@ exports.getCachedSelf = async (ctx, appId) => {
   return processUser(appId, user)
 }

-exports.getGlobalUser = async (ctx, appId, userId) => {
+exports.getGlobalUser = async (appId, userId) => {
   const db = getGlobalDB()
   let user = await db.get(getGlobalIDFromUserMetadataID(userId))
   return processUser(appId, user)
 }

-exports.getGlobalUsers = async (ctx, appId = null, users = null) => {
+exports.getGlobalUsers = async (appId = null, users = null) => {
   const db = getGlobalDB()
   let globalUsers
   if (users) {
|
|||
}
|
||||
return globalUsers.map(user => exports.updateAppRole(appId, user))
|
||||
}
|
||||
|
||||
exports.getGlobalUsersFromMetadata = async (appId, users) => {
|
||||
const globalUsers = await exports.getGlobalUsers(appId, users)
|
||||
return users.map(user => {
|
||||
const globalUser = globalUsers.find(
|
||||
globalUser => globalUser && user._id.includes(globalUser._id)
|
||||
)
|
||||
return {
|
||||
...globalUser,
|
||||
// doing user second overwrites the id and rev (always metadata)
|
||||
...user,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
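The new helper centralises the merge that links.js previously inlined: fetch the global users, match each metadata row by _id substring, and spread the metadata row last so its _id/_rev win. A hedged usage sketch (getGlobalUsersFromMetadata is assumed in scope from utilities/global; shapes are illustrative):

  async function enrichUserRows(appId: string, userMetadataRows: { _id: string }[]) {
    // each returned row is the matching global user overlaid with its app metadata
    return getGlobalUsersFromMetadata(appId, userMetadataRows)
  }
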
@@ -1,8 +1,8 @@
-const linkRows = require("../db/linkedRows")
+const linkRows = require("../../db/linkedRows")
 const { cloneDeep } = require("lodash/fp")
-const { FieldTypes, AutoFieldSubTypes } = require("../constants")
 const { processStringSync } = require("@budibase/string-templates")
-const { attachmentsRelativeURL } = require("./index")
+const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
+const { attachmentsRelativeURL } = require("../index")
+const { processFormulas } = require("./utils")

 const BASE_AUTO_ID = 1

@@ -128,28 +128,6 @@ function processAutoColumn(user, table, row) {
   return { table, row }
 }

-/**
- * Looks through the rows provided and finds formulas - which it then processes.
- */
-function processFormulas(table, rows) {
-  const single = !Array.isArray(rows)
-  if (single) {
-    rows = [rows]
-  }
-  for (let [column, schema] of Object.entries(table.schema)) {
-    if (schema.type !== FieldTypes.FORMULA) {
-      continue
-    }
-    // iterate through rows and process formula
-    rows = rows.map(row => ({
-      ...row,
-      [column]: processStringSync(schema.formula, row),
-    }))
-  }
-  return single ? rows[0] : rows
-}
-exports.processFormulas = processFormulas
-
 /**
  * This will coerce a value to the correct types based on the type transform map
  * @param {object} row The value to coerce
@@ -0,0 +1,23 @@
+const { FieldTypes } = require("../../constants")
+const { processStringSync } = require("@budibase/string-templates")
+
+/**
+ * Looks through the rows provided and finds formulas - which it then processes.
+ */
+exports.processFormulas = (table, rows) => {
+  const single = !Array.isArray(rows)
+  if (single) {
+    rows = [rows]
+  }
+  for (let [column, schema] of Object.entries(table.schema)) {
+    if (schema.type !== FieldTypes.FORMULA) {
+      continue
+    }
+    // iterate through rows and process formula
+    rows = rows.map(row => ({
+      ...row,
+      [column]: processStringSync(schema.formula, row),
+    }))
+  }
+  return single ? rows[0] : rows
+}

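processFormulas mirrors the shape of its input: pass one row, get one row back; pass an array, get an array. A usage sketch (the schema literal assumes FieldTypes.FORMULA is the string "formula"; the formula syntax is the usual string-templates handlebars):

  const table = {
    schema: {
      price: { type: "number" },
      label: { type: "formula", formula: "Costs {{ price }}" },
    },
  }

  processFormulas(table, { price: 10 })   // -> { price: 10, label: "Costs 10" }
  processFormulas(table, [{ price: 10 }]) // -> [{ price: 10, label: "Costs 10" }]
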
@@ -3,7 +3,7 @@ const { InternalTables } = require("../db/utils")
 const { getGlobalUser } = require("../utilities/global")

 exports.getFullUser = async (ctx, userId) => {
-  const global = await getGlobalUser(ctx, ctx.appId, userId)
+  const global = await getGlobalUser(ctx.appId, userId)
   let metadata
   try {
     // this will throw an error if the db doesn't exist, or there is no appId

@@ -104,7 +104,7 @@ exports.addAppRoleToUser = async (ctx, appId, roleId, userId = null) => {
     user = await exports.getGlobalSelf(ctx)
     endpoint = `/api/global/users/self`
   } else {
-    user = await getGlobalUser(ctx, appId, userId)
+    user = await getGlobalUser(appId, userId)
     body._id = userId
     endpoint = `/api/global/users`
   }

File diff suppressed because it is too large