Fixing issues with automation steps found during testing.
This commit is contained in:
parent ac944e532b
commit 0651b874ac
@@ -11,6 +11,7 @@ const userController = require("../user")
 const {
   inputProcessing,
   outputProcessing,
+  processAutoColumn,
 } = require("../../../utilities/rowProcessor")
 const { FieldTypes } = require("../../../constants")
 const { isEqual } = require("lodash")
@@ -28,11 +29,26 @@ const CALCULATION_TYPES = {
 
 async function storeResponse(ctx, db, row, oldTable, table) {
   row.type = "row"
-  const response = await db.put(row)
   // don't worry about rev, tables handle rev/lastID updates
+  // if another row has been written since processing this will
+  // handle the auto ID clash
   if (!isEqual(oldTable, table)) {
-    await db.put(table)
+    try {
+      await db.put(table)
+    } catch (err) {
+      if (err.status === 409) {
+        const updatedTable = await db.get(table._id)
+        let response = processAutoColumn(null, updatedTable, row, {
+          reprocessing: true,
+        })
+        await db.put(response.table)
+        row = response.row
+      } else {
+        throw err
+      }
+    }
   }
+  const response = await db.put(row)
   row._rev = response.rev
   // process the row before return, to include relationships
   row = await outputProcessing(ctx, table, row, { squash: false })
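For reference, the new catch block follows the usual CouchDB/PouchDB conflict-retry pattern: a 409 means another writer updated the table (and its auto ID counters) first, so the latest copy is fetched and the auto-column bookkeeping is redone before writing again. A minimal generic sketch of that pattern, assuming a PouchDB-style client and a hypothetical applyAutoColumns callback (neither name is a Budibase API):

// Generic sketch of the retry-on-conflict pattern used in storeResponse.
// Assumes a PouchDB/CouchDB-style client that rejects a stale write with
// err.status === 409; applyAutoColumns is a hypothetical callback that
// redoes the auto-column bookkeeping against the latest document.
async function putWithConflictRetry(db, doc, applyAutoColumns) {
  try {
    return await db.put(doc)
  } catch (err) {
    if (err.status !== 409) {
      throw err
    }
    // another writer got there first: fetch the latest revision,
    // re-apply the bookkeeping and write once more
    const latest = await db.get(doc._id)
    return db.put(applyAutoColumns(latest))
  }
}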

@@ -182,7 +198,7 @@ exports.fetchView = async ctx => {
   }
   let response
   // TODO: make sure not self hosted in Cloud
-  if (!env.SELF_HOSTED) {
+  if (env.SELF_HOSTED) {
     response = await db.query(`database/${viewName}`, {
       include_docs: !calculation,
       group: !!group,

@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
 const automationUtils = require("../automationUtils")
 const env = require("../../environment")
 const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   name: "Create Row",
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
     }
   }
   // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: inputs.row,
     params: {
       tableId: inputs.row.tableId,
     },
-    request: {
-      body: inputs.row,
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     inputs.row = await automationUtils.cleanUpRow(
@@ -86,7 +83,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
       inputs.row.tableId,
       inputs.row
     )
-    if (env.isProd()) {
+    if (env.USE_QUOTAS) {
       await usage.update(apiKey, usage.Properties.ROW, 1)
     }
     await rowController.save(ctx)
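The buildCtx helper that the automation steps now import from ./utils is not included in this view. A plausible sketch of what it centralizes, based on the hand-built ctx objects it replaces above (the exact shape is an assumption, not the real implementation):

// Hypothetical sketch of buildCtx: assembles the minimal Koa-like ctx that
// the row/table controllers expect when called directly from a step.
function buildCtx(appId, emitter, { body, params } = {}) {
  return {
    appId,
    params: params || {},
    request: { body: body || {} },
    eventEmitter: emitter,
  }
}

// usage, mirroring the Create Row step above:
// const ctx = buildCtx(appId, emitter, {
//   body: inputs.row,
//   params: { tableId: inputs.row.tableId },
// })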

@@ -1,6 +1,7 @@
 const rowController = require("../../api/controllers/row")
 const env = require("../../environment")
 const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   description: "Delete a row from your database",
@@ -60,19 +61,16 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
       },
     }
   }
-  let ctx = {
+  let ctx = buildCtx(appId, emitter, {
+    body: {
+      _id: inputs.id,
+      _rev: inputs.revision,
+    },
     params: {
       tableId: inputs.tableId,
     },
-    request: {
-      body: {
-        _id: inputs.id,
-        _rev: inputs.revision,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     if (env.isProd()) {

@@ -1,6 +1,7 @@
 const rowController = require("../../api/controllers/row")
 const tableController = require("../../api/controllers/table")
 const { FieldTypes } = require("../../constants")
+const { buildCtx } = require("./utils")
 
 const SortOrders = {
   ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
 }
 
 async function getTable(appId, tableId) {
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
     params: {
       id: tableId,
     },
-    appId,
-  }
+  })
   await tableController.find(ctx)
   return ctx.body
 }
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
     sortType =
       fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
   }
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
     params: {
       tableId,
     },
-    request: {
-      body: {
-        sortOrder,
-        sortType,
-        sort: sortColumn,
-        query: filters || {},
-        limit,
-      },
-    },
-    appId,
-  }
+    body: {
+      sortOrder,
+      sortType,
+      sort: sortColumn,
+      query: filters || {},
+      limit,
+    },
+  })
   try {
     await rowController.search(ctx)
     return {

@@ -1,5 +1,6 @@
 const rowController = require("../../api/controllers/row")
 const automationUtils = require("../automationUtils")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
   }
 
   // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: {
+      ...inputs.row,
+      _id: inputs.rowId,
+    },
     params: {
       rowId: inputs.rowId,
     },
-    request: {
-      body: {
-        ...inputs.row,
-        _id: inputs.rowId,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     inputs.row = await automationUtils.cleanUpRowById(

@@ -17,9 +17,14 @@ exports.runView = async (appId, view, calculation, group, data) => {
       runner: view,
     },
   })
-  // write all the docs to the in memory Pouch
-  await db.bulkDocs(data)
-  const response = await db.query(`database/runner`, {
+  // write all the docs to the in memory Pouch (remove revs)
+  await db.bulkDocs(
+    data.map(row => ({
+      ...row,
+      _rev: undefined,
+    }))
+  )
+  const response = await db.query("database/runner", {
     include_docs: !calculation,
     group: !!group,
   })
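For context on the rev stripping above: the rows passed into runView come from the main app database and still carry _rev values, while a freshly created in-memory database has no matching revision history, so bulkDocs would report conflicts and skip those writes. A standalone sketch, assuming a PouchDB instance with the memory adapter (the adapter choice is an assumption about how the in-memory Pouch is built):

const PouchDB = require("pouchdb")
PouchDB.plugin(require("pouchdb-adapter-memory"))

// load externally sourced rows into a throwaway in-memory database,
// dropping _rev so every write is treated as a brand new document
async function loadRows(rows) {
  const db = new PouchDB("inMemoryView", { adapter: "memory" })
  await db.bulkDocs(rows.map(row => ({ ...row, _rev: undefined })))
  return db
}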

@@ -89,10 +89,11 @@ const TYPE_TRANSFORM_MAP = {
  * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
  * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
  * @param {Object} row The row which is to be updated with information for the auto columns.
+ * @param {Object} opts specific options for function to carry out optional features.
  * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
  * for automatic ID purposes.
  */
-function processAutoColumn(user, table, row) {
+function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
   let now = new Date().toISOString()
   // if a row doesn't have a revision then it doesn't exist yet
   const creating = !row._rev
@@ -102,7 +103,7 @@ function processAutoColumn(user, table, row) {
     }
     switch (schema.subtype) {
       case AutoFieldSubTypes.CREATED_BY:
-        if (creating) {
+        if (creating && !opts.reprocessing) {
           row[key] = [user.userId]
         }
         break
@@ -112,7 +113,9 @@ function processAutoColumn(user, table, row) {
         }
         break
       case AutoFieldSubTypes.UPDATED_BY:
-        row[key] = [user.userId]
+        if (!opts.reprocessing) {
+          row[key] = [user.userId]
+        }
         break
       case AutoFieldSubTypes.UPDATED_AT:
         row[key] = now
@@ -127,6 +130,7 @@ function processAutoColumn(user, table, row) {
   }
   return { table, row }
 }
+exports.processAutoColumn = processAutoColumn
 
 /**
  * This will coerce a value to the correct types based on the type transform map
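Taken together with the storeResponse change, the reprocessing flag lets the auto-column bookkeeping be recomputed against a freshly fetched table when no user is in scope, without re-stamping the CREATED_BY/UPDATED_BY columns. A short usage sketch (the wrapper function and variable names are illustrative):

const { processAutoColumn } = require("../../../utilities/rowProcessor")

// re-run the auto-column bookkeeping after a table write conflict;
// updatedTable is the latest copy fetched from the database and row is the
// row being saved - reprocessing: true skips the user stamps because there
// is no user context inside the retry
function reprocessAutoColumns(updatedTable, row) {
  return processAutoColumn(null, updatedTable, row, { reprocessing: true })
}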