Fixing issues after upgrading to Node 14, and resolving circular dependencies that were causing problems.

mike12345567 2021-09-02 18:33:41 +01:00
parent b17e9aa5e4
commit 8c3eb30ab1
8 changed files with 2466 additions and 1583 deletions
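The circular dependency can be read straight off the changed require paths below: db/linkedRows/index.js required utilities/rowProcessor, while rowProcessor required db/linkedRows back. A minimal sketch of the cycle and of the fix (extracting processFormulas into the leaf module rowProcessor/utils.js) — the file paths follow the diff, the function bodies are illustrative only:

// Before: a CommonJS require cycle. Whichever module finishes loading second
// sees a partially initialised exports object from the other, so a property
// like processor.processFormulas can be undefined at call time.

// db/linkedRows/index.js
const processor = require("../../utilities/rowProcessor") // cycle starts here
exports.attachFullLinkedDocs = (table, row) =>
  processor.processFormulas(table, row)

// utilities/rowProcessor.js
const linkRows = require("../db/linkedRows") // ...and closes here
exports.processFormulas = (table, row) => row // illustrative body

// After: the shared helper moves into utilities/rowProcessor/utils.js, which
// requires nothing from db/linkedRows, so no cycle remains.
// db/linkedRows/index.js
const { processFormulas } = require("../../utilities/rowProcessor/utils")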

View File

@@ -138,7 +138,7 @@
     "supertest": "^4.0.2",
     "ts-jest": "^27.0.3",
     "ts-node": "^10.0.0",
-    "typescript": "^4.3.4",
+    "typescript": "^4.3.5",
     "update-dotenv": "^1.1.1"
   },
   "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"

View File

@@ -14,7 +14,7 @@ const { FieldTypes } = require("../../constants")
 const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils")
 const { partition } = require("lodash")
 const { getGlobalUsersFromMetadata } = require("../../utilities/global")
-const processor = require("../../utilities/rowProcessor")
+const { processFormulas } = require("../../utilities/rowProcessor/utils")

 /**
  * This functionality makes sure that when rows with links are created, updated or deleted they are processed
@@ -187,9 +187,7 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => {
       if (!linkedRow || !linkedTable) {
         continue
       }
-      row[link.fieldName].push(
-        processor.processFormulas(linkedTable, linkedRow)
-      )
+      row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
     }
   }
   return rows

View File

@@ -76,6 +76,7 @@ module ArangoModule {
       const result = await this.client.query(query.sql)
       return result.all()
     } catch (err) {
+      // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {
@@ -91,6 +92,7 @@
       )
       return result.all()
     } catch (err) {
+      // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {

View File

@@ -73,6 +73,7 @@ module MSSQLModule {
   try {
     return await client.query(query.sql, query.bindings || {})
   } catch (err) {
+    // @ts-ignore
     throw new Error(err)
   }
 }
@@ -101,6 +102,7 @@ module MSSQLModule {
       const client = await this.pool.connect()
       this.client = client.request()
     } catch (err) {
+      // @ts-ignore
       throw new Error(err)
     }
   }

View File

@@ -94,6 +94,7 @@ module PostgresModule {
   try {
     return await client.query(query.sql, query.bindings || [])
   } catch (err) {
+    // @ts-ignore
     throw new Error(err)
   }
 }
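The repeated // @ts-ignore lines in these integration files suppress a compiler complaint about the caught err value; the diff does not show the exact error TypeScript raised after the toolchain bump. As a point of comparison only (not what this commit does), a hypothetical way to keep the same logging without suppression is to narrow the error first — if the complaint is about the type of the caught value, a check like this would typically avoid it:

// Hypothetical alternative to the // @ts-ignore suppressions: only read
// err.message when err is actually an Error instance.
function errorMessage(err) {
  return err instanceof Error ? err.message : String(err)
}

// Wrapper mirroring the shape of the query methods above, for illustration.
async function runQuery(run) {
  try {
    return await run()
  } catch (err) {
    console.error("Error querying arangodb", errorMessage(err))
    throw err
  }
}

module.exports = { errorMessage, runQuery }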

View File

@@ -1,8 +1,8 @@
-const linkRows = require("../db/linkedRows")
+const linkRows = require("../../db/linkedRows")
 const { cloneDeep } = require("lodash/fp")
-const { FieldTypes, AutoFieldSubTypes } = require("../constants")
-const { processStringSync } = require("@budibase/string-templates")
-const { attachmentsRelativeURL } = require("./index")
+const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
+const { attachmentsRelativeURL } = require("../index")
+const { processFormulas } = require("./utils")

 const BASE_AUTO_ID = 1
@@ -128,28 +128,6 @@ function processAutoColumn(user, table, row) {
   return { table, row }
 }

-/**
- * Looks through the rows provided and finds formulas - which it then processes.
- */
-function processFormulas(table, rows) {
-  const single = !Array.isArray(rows)
-  if (single) {
-    rows = [rows]
-  }
-  for (let [column, schema] of Object.entries(table.schema)) {
-    if (schema.type !== FieldTypes.FORMULA) {
-      continue
-    }
-    // iterate through rows and process formula
-    rows = rows.map(row => ({
-      ...row,
-      [column]: processStringSync(schema.formula, row),
-    }))
-  }
-  return single ? rows[0] : rows
-}
-exports.processFormulas = processFormulas
-
 /**
  * This will coerce a value to the correct types based on the type transform map
  * @param {object} row The value to coerce

View File

@@ -0,0 +1,23 @@
+const { FieldTypes } = require("../../constants")
+const { processStringSync } = require("@budibase/string-templates")
+
+/**
+ * Looks through the rows provided and finds formulas - which it then processes.
+ */
+exports.processFormulas = (table, rows) => {
+  const single = !Array.isArray(rows)
+  if (single) {
+    rows = [rows]
+  }
+  for (let [column, schema] of Object.entries(table.schema)) {
+    if (schema.type !== FieldTypes.FORMULA) {
+      continue
+    }
+    // iterate through rows and process formula
+    rows = rows.map(row => ({
+      ...row,
+      [column]: processStringSync(schema.formula, row),
+    }))
+  }
+  return single ? rows[0] : rows
+}
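A quick usage sketch of the extracted helper, showing the single-row versus array behaviour. The table and rows below are made up for illustration, the schema assumes FieldTypes.FORMULA resolves to the string "formula", and the formula text is whatever processStringSync from @budibase/string-templates accepts:

// Run from inside the rowProcessor directory for the relative require to resolve.
const { processFormulas } = require("./utils")

// Hypothetical table definition: one formula column alongside plain columns.
const table = {
  schema: {
    price: { type: "number" },
    qty: { type: "number" },
    summary: { type: "formula", formula: "{{ price }} x {{ qty }}" },
  },
}

// A single row in returns a single row out...
const one = processFormulas(table, { price: 2, qty: 3 })

// ...while an array in returns an array out, each row gaining the formula column.
const many = processFormulas(table, [
  { price: 2, qty: 3 },
  { price: 5, qty: 1 },
])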

File diff suppressed because it is too large