2021-06-14 20:05:39 +02:00
|
|
|
const { makeExternalQuery } = require("./utils")
|
2021-06-25 19:34:21 +02:00
|
|
|
const {
|
|
|
|
DataSourceOperation,
|
|
|
|
SortDirection,
|
|
|
|
FieldTypes,
|
|
|
|
} = require("../../../constants")
|
2021-06-23 20:05:32 +02:00
|
|
|
const { getAllExternalTables } = require("../table/utils")
|
2021-06-16 19:38:00 +02:00
|
|
|
const {
|
|
|
|
breakExternalTableId,
|
|
|
|
generateRowIdField,
|
2021-06-16 19:39:59 +02:00
|
|
|
breakRowIdField,
|
2021-06-16 19:38:00 +02:00
|
|
|
} = require("../../../integrations/utils")
|
2021-06-18 14:14:45 +02:00
|
|
|
const { cloneDeep } = require("lodash/fp")
|
2021-06-15 14:03:55 +02:00
|
|
|
|
2021-06-28 19:19:34 +02:00
|
|
|
/**
 * Prepare an incoming row for writing to an external datasource.
 * Strips falsy fields, converts plain link columns to foreign key values and
 * collects many-to-many relationships for later insertion into the through table.
 *
 * @param {object} row - the raw row from the request body
 * @param {object} table - the table definition (schema keyed by field name)
 * @param {object|Array} allTables - all tables of the datasource; callers pass
 *   a map keyed by table name (see getAllExternalTables usage), arrays are
 *   also accepted for safety
 * @returns {{row: object, manyRelationships: Array}} the cleaned row and any
 *   pending many-to-many link rows
 */
function inputProcessing(row, table, allTables) {
  if (!row) {
    return row
  }
  // bug fix: allTables is a map keyed by table name elsewhere in this file,
  // but was searched with Array.prototype.find - normalise to a list first
  const tableList = Array.isArray(allTables)
    ? allTables
    : Object.values(allTables || {})
  const newRow = {}
  const manyRelationships = []
  for (let [key, field] of Object.entries(table.schema)) {
    // currently excludes empty strings
    if (!row[key]) {
      continue
    }
    const isLink = field.type === FieldTypes.LINK
    if (isLink && !field.through) {
      // we don't really support composite keys for relationships, this is why [0] is used
      newRow[key] = breakRowIdField(row[key][0])[0]
    } else if (isLink && field.through) {
      const linkTable = tableList.find(tbl => tbl._id === field.tableId)
      // table has to exist for many to many
      if (!linkTable) {
        continue
      }
      // for...of instead of .map - the callback was used only for side effects
      for (let relationship of row[key]) {
        // we don't really support composite keys for relationships, this is why [0] is used
        manyRelationships.push({
          tableId: field.through,
          // NOTE(review): primary is an array elsewhere (see generateIdForRow);
          // using it as a computed key coerces it to a string - confirm intended
          [linkTable.primary]: breakRowIdField(relationship)[0],
          // leave the ID for enrichment later
          [table.primary]: `{{ id }}`,
        })
      }
    } else {
      newRow[key] = row[key]
    }
  }
  return { row: newRow, manyRelationships }
}
|
|
|
|
|
2021-06-18 14:24:29 +02:00
|
|
|
/**
 * Build the compound Budibase _id for a row from the table's primary key
 * columns. Returns undefined when no row is provided.
 *
 * @param {object} row - a raw row from the external datasource
 * @param {object} table - table definition with a `primary` array of column names
 * @returns {string|undefined} the encoded row ID field
 */
function generateIdForRow(row, table) {
  if (!row) {
    return
  }
  // collect the value of every primary key column, in declaration order
  const idParts = table.primary.map(keyColumn => row[keyColumn])
  return generateRowIdField(idParts)
}
|
|
|
|
|
2021-06-23 20:05:32 +02:00
|
|
|
/**
 * Fold the joined (relationship) values of a flat SQL result row into the
 * already-collected rows map, appending one related record per link column.
 *
 * @param {object} rows - map of row _id -> processed row (mutated and returned)
 * @param {object} row - the flat result row currently being processed
 * @param {Array} relationships - join definitions from buildRelationships
 * @param {object} allTables - tables keyed by table name
 * @returns {object} the same rows map, for chaining
 */
function updateRelationshipColumns(rows, row, relationships, allTables) {
  const relatedByColumn = {}
  for (let relationship of relationships) {
    const linkedTable = allTables[relationship.tableName]
    if (!linkedTable) {
      continue
    }
    const displayField = linkedTable.primaryDisplay
    const related = {}
    if (displayField && row[displayField]) {
      related.primaryDisplay = row[displayField]
    }
    related._id = row[relationship.to]
    relatedByColumn[relationship.from] = related
  }
  // append each related record onto the owning row's relationship column,
  // creating the array on first use
  for (let [column, related] of Object.entries(relatedByColumn)) {
    const targetRow = rows[row._id]
    if (!Array.isArray(targetRow[column])) {
      targetRow[column] = []
    }
    targetRow[column].push(related)
  }
  return rows
}
|
|
|
|
|
2021-06-28 19:19:34 +02:00
|
|
|
/**
 * Insert the pending many-to-many link rows (collected by inputProcessing)
 * into their "through" tables, running all inserts in parallel.
 *
 * @param {string} appId - app the datasource belongs to
 * @param {object} json - the query json of the main operation; its endpoint
 *   (datasource info) is reused for the link inserts
 * @param {Array} relationships - link rows, each carrying a tableId of the through table
 */
async function insertManyRelationships(appId, json, relationships) {
  const promises = []
  for (let relationship of relationships) {
    const { tableName } = breakExternalTableId(relationship.tableId)
    // tableId is routing info, not a column of the through table
    delete relationship.tableId
    const newJson = {
      // bug fix: copy the endpoint rather than aliasing it - mutating the
      // shared reference clobbered the caller's json and made every queued
      // query point at the last through table processed in this loop
      endpoint: {
        ...json.endpoint,
        entityId: tableName,
      },
      body: relationship,
    }
    promises.push(makeExternalQuery(appId, newJson))
  }
  await Promise.all(promises)
}
|
|
|
|
|
2021-06-23 20:05:32 +02:00
|
|
|
/**
 * Convert the flat rows returned by an external query into Budibase row
 * format: generate _id from the primary key, strip joined columns down to the
 * table schema, and fold duplicate rows (relationship joins) into arrays.
 *
 * @param {Array} rows - raw result rows from the external query
 * @param {object} table - the table definition being queried
 * @param {Array} relationships - join definitions from buildRelationships
 * @param {object} allTables - tables keyed by table name
 * @returns {Array} processed rows, de-duplicated by _id
 */
function outputProcessing(rows, table, relationships, allTables) {
  // bug fix: guard empty/non-array responses - rows[0] previously threw here.
  // if no rows this is what is returned? Might be PG only
  if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
    return []
  }
  let finalRows = {}
  for (let row of rows) {
    row._id = generateIdForRow(row, table)
    // a repeated _id means this flat row is a relationship join of a row
    // we've already collected - only merge its relationship columns
    if (finalRows[row._id]) {
      finalRows = updateRelationshipColumns(
        finalRows,
        row,
        relationships,
        allTables
      )
      continue
    }
    const thisRow = {}
    // filter the row down to what is actually the row (not joined)
    for (let fieldName of Object.keys(table.schema)) {
      thisRow[fieldName] = row[fieldName]
    }
    thisRow._id = row._id
    thisRow.tableId = table._id
    // external rows have no CouchDB revision - placeholder for API shape
    thisRow._rev = "rev"
    finalRows[thisRow._id] = thisRow
    // do this at end once its been added to the final rows
    finalRows = updateRelationshipColumns(
      finalRows,
      row,
      relationships,
      allTables
    )
  }
  return Object.values(finalRows)
}
|
|
|
|
|
2021-06-16 19:38:00 +02:00
|
|
|
/**
 * Build query filters for an external table, converting a Budibase row ID
 * (string or pre-split array) into equality filters on the primary key
 * columns. Any _id inside the user-supplied filters is expanded the same way.
 *
 * @param {string|Array|undefined} id - row ID, if the request targets one row
 * @param {object|undefined} filters - user supplied filters (mutated: _id expanded/removed)
 * @param {object} table - table definition with a `primary` column array
 * @returns {object|undefined} filters suitable for the query builder
 */
function buildFilters(id, filters, table) {
  // bug fix: table.primary was read before the !table guard below, so a
  // missing table threw instead of falling back to the raw filters
  const primary = table ? table.primary : []
  // if passed in array need to copy for shifting etc
  let idCopy = cloneDeep(id)
  if (filters) {
    // need to map over the filters and make sure the _id field isn't present
    for (let filter of Object.values(filters)) {
      if (filter._id) {
        const parts = breakRowIdField(filter._id)
        for (let field of primary) {
          filter[field] = parts.shift()
        }
      }
      // make sure this field doesn't exist on any filter
      delete filter._id
    }
  }
  // there is no id, just use the user provided filters
  if (!idCopy || !table) {
    return filters
  }
  // if used as URL parameter it will have been joined
  if (typeof idCopy === "string") {
    idCopy = breakRowIdField(idCopy)
  }
  const equal = {}
  for (let field of primary) {
    // work through the ID and get the parts
    equal[field] = idCopy.shift()
  }
  return {
    equal,
  }
}
|
|
|
|
|
2021-06-28 18:42:39 +02:00
|
|
|
/**
 * Derive join definitions for every LINK column of a table, used by the
 * query layer to join related tables (directly or via a through table).
 *
 * @param {object} table - the table whose schema is inspected
 * @param {object|Array} allTables - all tables of the datasource; callers pass
 *   a map keyed by table name, arrays also accepted
 * @returns {Array<{from: string, to: string, tableName: string, through: string|undefined}>}
 */
function buildRelationships(table, allTables) {
  const relationships = []
  // bug fix: allTables is a map keyed by table name elsewhere in this file,
  // but was searched with Array.prototype.find - normalise to a list first
  const tableList = Array.isArray(allTables)
    ? allTables
    : Object.values(allTables || {})
  for (let [fieldName, field] of Object.entries(table.schema)) {
    if (field.type !== FieldTypes.LINK) {
      continue
    }
    const { tableName: linkTableName } = breakExternalTableId(field.tableId)
    const linkTable = tableList.find(tbl => tbl._id === field.tableId)
    // no table to link to, this is not a valid relationship
    if (!linkTable) {
      continue
    }
    const definition = {
      from: fieldName || table.primary,
      to: field.fieldName || linkTable.primary,
      tableName: linkTableName,
      through: undefined,
    }
    if (field.through) {
      const { tableName: throughTableName } = breakExternalTableId(
        field.through
      )
      definition.through = throughTableName
    }
    relationships.push(definition)
  }
  return relationships
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
/**
 * Central dispatcher for all CRUD against an external table: resolves the
 * table, builds filters/relationships, cleans the row, runs the external
 * query and post-processes the response.
 *
 * @param {string} appId - app the datasource belongs to
 * @param {string} operation - a DataSourceOperation (CREATE/READ/UPDATE/DELETE)
 * @param {string} tableId - composite external table ID (datasource + table name)
 * @param {object} [opts] - optional id, row, filters, sort, paginate
 * @returns {Promise<Array|{row: object, table: object}>} rows for READ,
 *   otherwise the affected row and its table
 * @throws a string when the table is unknown or a DELETE has no filters
 *   (string throws kept for caller compatibility)
 */
async function handleRequest(
  appId,
  operation,
  tableId,
  { id, row, filters, sort, paginate } = {}
) {
  let { datasourceId, tableName } = breakExternalTableId(tableId)
  const tables = await getAllExternalTables(appId, datasourceId)
  const table = tables[tableName]
  if (!table) {
    throw `Unable to process query, table "${tableName}" not defined.`
  }
  // clean up row on ingress using schema
  filters = buildFilters(id, filters, table)
  const relationships = buildRelationships(table, tables)
  // bug fix: tables was not being passed through, so inputProcessing could
  // never resolve the linked table for many-to-many relationships
  const processed = inputProcessing(row, table, tables)
  row = processed.row
  // refuse an unfiltered DELETE - it would wipe the whole table
  if (
    operation === DataSourceOperation.DELETE &&
    (filters == null || Object.keys(filters).length === 0)
  ) {
    throw "Deletion must be filtered"
  }
  let json = {
    endpoint: {
      datasourceId,
      entityId: tableName,
      operation,
    },
    resource: {
      // not specifying any fields means "*"
      fields: [],
    },
    filters,
    sort,
    paginate,
    relationships,
    body: row,
    // pass an id filter into extra, purely for mysql/returning
    extra: {
      idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
    },
  }
  // can't really use response right now
  const response = await makeExternalQuery(appId, json)
  // handle many to many relationships now if we know the ID (could be auto increment)
  await insertManyRelationships(appId, json, processed.manyRelationships)
  // we searched for rows in someway
  if (operation === DataSourceOperation.READ && Array.isArray(response)) {
    return outputProcessing(response, table, relationships, tables)
  } else {
    row = outputProcessing(response, table, relationships, tables)[0]
    return { row, table }
  }
}
|
2021-06-11 19:56:30 +02:00
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.patch = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const inputs = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-16 19:38:00 +02:00
|
|
|
const id = breakRowIdField(inputs._id)
|
2021-06-14 20:05:39 +02:00
|
|
|
// don't save the ID to db
|
|
|
|
delete inputs._id
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
id,
|
|
|
|
row: inputs,
|
|
|
|
})
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.save = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const inputs = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.CREATE, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
row: inputs,
|
|
|
|
})
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.fetchView = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
// there are no views in external data sources, shouldn't ever be called
|
2021-06-15 14:20:25 +02:00
|
|
|
// for now just fetch
|
|
|
|
ctx.params.tableId = ctx.params.viewName.split("all_")[1]
|
|
|
|
return exports.fetch(ctx)
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-15 14:03:55 +02:00
|
|
|
exports.fetch = async ctx => {
|
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.READ, tableId)
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.find = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const id = ctx.params.rowId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.READ, tableId, {
|
2021-06-15 14:03:55 +02:00
|
|
|
id,
|
|
|
|
})
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.destroy = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-17 15:42:30 +02:00
|
|
|
const id = ctx.request.body._id
|
2021-06-18 14:14:45 +02:00
|
|
|
const { row } = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.DELETE,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
id,
|
|
|
|
}
|
|
|
|
)
|
2021-06-17 15:42:30 +02:00
|
|
|
return { response: { ok: true }, row }
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.bulkDestroy = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const { rows } = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
let promises = []
|
|
|
|
for (let row of rows) {
|
2021-06-15 14:47:08 +02:00
|
|
|
promises.push(
|
|
|
|
handleRequest(appId, DataSourceOperation.DELETE, tableId, {
|
2021-06-16 19:38:00 +02:00
|
|
|
id: breakRowIdField(row._id),
|
2021-06-15 14:47:08 +02:00
|
|
|
})
|
|
|
|
)
|
2021-06-15 14:03:55 +02:00
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
const responses = await Promise.all(promises)
|
|
|
|
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.search = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
const { paginate, query, ...params } = ctx.request.body
|
2021-06-17 16:56:41 +02:00
|
|
|
let { bookmark, limit } = params
|
|
|
|
if (!bookmark && paginate) {
|
|
|
|
bookmark = 1
|
|
|
|
}
|
2021-06-14 20:05:39 +02:00
|
|
|
let paginateObj = {}
|
2021-06-17 16:56:41 +02:00
|
|
|
|
2021-06-14 20:05:39 +02:00
|
|
|
if (paginate) {
|
|
|
|
paginateObj = {
|
2021-06-17 16:56:41 +02:00
|
|
|
// add one so we can track if there is another page
|
|
|
|
limit: limit,
|
|
|
|
page: bookmark,
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
2021-06-17 16:56:41 +02:00
|
|
|
} else if (params && limit) {
|
2021-06-16 19:38:00 +02:00
|
|
|
paginateObj = {
|
2021-06-17 16:56:41 +02:00
|
|
|
limit: limit,
|
2021-06-16 19:38:00 +02:00
|
|
|
}
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
let sort
|
|
|
|
if (params.sort) {
|
2021-06-14 20:07:13 +02:00
|
|
|
const direction =
|
|
|
|
params.sortOrder === "descending"
|
|
|
|
? SortDirection.DESCENDING
|
|
|
|
: SortDirection.ASCENDING
|
2021-06-14 20:05:39 +02:00
|
|
|
sort = {
|
2021-06-14 20:07:13 +02:00
|
|
|
[params.sort]: direction,
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
}
|
2021-06-16 17:27:33 +02:00
|
|
|
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
filters: query,
|
|
|
|
sort,
|
|
|
|
paginate: paginateObj,
|
|
|
|
})
|
2021-06-17 16:56:41 +02:00
|
|
|
let hasNextPage = false
|
|
|
|
if (paginate && rows.length === limit) {
|
2021-06-18 14:14:45 +02:00
|
|
|
const nextRows = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.READ,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
filters: query,
|
|
|
|
sort,
|
|
|
|
paginate: {
|
|
|
|
limit: 1,
|
|
|
|
page: bookmark * limit + 1,
|
|
|
|
},
|
2021-06-17 16:56:41 +02:00
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
)
|
2021-06-17 16:56:41 +02:00
|
|
|
hasNextPage = nextRows.length > 0
|
|
|
|
}
|
2021-06-16 17:27:33 +02:00
|
|
|
// need wrapper object for bookmarks etc when paginating
|
2021-06-17 16:56:41 +02:00
|
|
|
return { rows, hasNextPage, bookmark: bookmark + 1 }
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-16 17:27:33 +02:00
|
|
|
exports.validate = async () => {
|
2021-06-14 20:05:39 +02:00
|
|
|
// can't validate external right now - maybe in future
|
2021-06-15 14:50:41 +02:00
|
|
|
return { valid: true }
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-23 20:05:32 +02:00
|
|
|
// enriched rows are not supported for external datasources - intentional no-op
exports.fetchEnrichedRow = async () => {}
|