2021-06-14 20:05:39 +02:00
|
|
|
const { makeExternalQuery } = require("./utils")
|
2021-06-29 19:38:27 +02:00
|
|
|
const { DataSourceOperation, SortDirection } = require("../../../constants")
|
2021-06-23 20:05:32 +02:00
|
|
|
const { getAllExternalTables } = require("../table/utils")
|
2021-06-16 19:38:00 +02:00
|
|
|
const {
|
|
|
|
breakExternalTableId,
|
2021-06-16 19:39:59 +02:00
|
|
|
breakRowIdField,
|
2021-06-16 19:38:00 +02:00
|
|
|
} = require("../../../integrations/utils")
|
2021-06-29 18:42:46 +02:00
|
|
|
const {
|
|
|
|
buildRelationships,
|
|
|
|
buildFilters,
|
|
|
|
inputProcessing,
|
|
|
|
outputProcessing,
|
|
|
|
generateIdForRow,
|
2021-06-29 19:38:27 +02:00
|
|
|
buildFields,
|
2021-06-29 18:42:46 +02:00
|
|
|
} = require("./externalUtils")
|
|
|
|
const { processObjectSync } = require("@budibase/string-templates")
|
2021-06-23 20:05:32 +02:00
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
/**
 * Core dispatcher for every external datasource row operation. Resolves the
 * table definition, builds a query JSON payload for the given operation and
 * executes it via makeExternalQuery, then post-processes the returned rows.
 *
 * @param {string} appId the app the datasource belongs to
 * @param {string} operation a DataSourceOperation value (CREATE/READ/UPDATE/DELETE)
 * @param {string} tableId combined datasource + table identifier
 * @param {object} [opts] optional { id, row, filters, sort, paginate, fullDocs }
 * @returns the processed rows array for READ operations, otherwise { row, table }
 */
async function handleRequest(
  appId,
  operation,
  tableId,
  { id, row, filters, sort, paginate, fullDocs } = {}
) {
  let { datasourceId, tableName } = breakExternalTableId(tableId)
  const tables = await getAllExternalTables(appId, datasourceId)
  const table = tables[tableName]
  if (!table) {
    throw `Unable to process query, table "${tableName}" not defined.`
  }
  // normalise filters/relationships and clean up the row using the schema
  filters = buildFilters(id, filters, table)
  const relationships = buildRelationships(table, tables)
  const processed = inputProcessing(row, table, tables)
  row = processed.row
  const unfiltered = filters == null || Object.keys(filters).length === 0
  if (operation === DataSourceOperation.DELETE && unfiltered) {
    // never allow an unfiltered delete to wipe an entire table
    throw "Deletion must be filtered"
  }
  let json = {
    endpoint: {
      datasourceId,
      entityId: tableName,
      operation,
    },
    resource: {
      // have to specify the fields to avoid column overlap
      fields: buildFields(table, tables),
    },
    filters,
    sort,
    paginate,
    relationships,
    body: row,
    // pass an id filter into extra, purely for mysql/returning
    extra: {
      idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
    },
  }
  // can't really use response right now
  const response = await makeExternalQuery(appId, json)
  // handle many to many relationships now if we know the ID (could be auto increment)
  if (processed.manyRelationships) {
    const linkWrites = processed.manyRelationships.map(toInsert => {
      const { tableName: linkTableName } = breakExternalTableId(
        toInsert.tableId
      )
      delete toInsert.tableId
      return makeExternalQuery(appId, {
        endpoint: {
          ...json.endpoint,
          entityId: linkTableName,
        },
        // if we're doing many relationships then we're writing, only one response
        body: processObjectSync(toInsert, response[0]),
      })
    })
    await Promise.all(linkWrites)
  }
  const output = outputProcessing(
    response,
    table,
    relationships,
    tables,
    fullDocs
  )
  // if reading it'll just be an array of rows, return whole thing
  if (operation === DataSourceOperation.READ && Array.isArray(response)) {
    return output
  }
  return { row: output[0], table }
}
|
2021-06-11 19:56:30 +02:00
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.patch = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const inputs = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-16 19:38:00 +02:00
|
|
|
const id = breakRowIdField(inputs._id)
|
2021-06-14 20:05:39 +02:00
|
|
|
// don't save the ID to db
|
|
|
|
delete inputs._id
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
id,
|
|
|
|
row: inputs,
|
|
|
|
})
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.save = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const inputs = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.CREATE, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
row: inputs,
|
|
|
|
})
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.fetchView = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
// there are no views in external data sources, shouldn't ever be called
|
2021-06-15 14:20:25 +02:00
|
|
|
// for now just fetch
|
|
|
|
ctx.params.tableId = ctx.params.viewName.split("all_")[1]
|
|
|
|
return exports.fetch(ctx)
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-15 14:03:55 +02:00
|
|
|
exports.fetch = async ctx => {
|
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-15 14:50:41 +02:00
|
|
|
return handleRequest(appId, DataSourceOperation.READ, tableId)
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.find = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const id = ctx.params.rowId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-30 19:33:55 +02:00
|
|
|
const response = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.READ,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
id,
|
|
|
|
}
|
|
|
|
)
|
2021-06-30 19:31:16 +02:00
|
|
|
return response ? response[0] : response
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.destroy = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
2021-06-17 15:42:30 +02:00
|
|
|
const id = ctx.request.body._id
|
2021-06-18 14:14:45 +02:00
|
|
|
const { row } = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.DELETE,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
id,
|
|
|
|
}
|
|
|
|
)
|
2021-06-17 15:42:30 +02:00
|
|
|
return { response: { ok: true }, row }
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.bulkDestroy = async ctx => {
|
2021-06-15 14:03:55 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const { rows } = ctx.request.body
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
let promises = []
|
|
|
|
for (let row of rows) {
|
2021-06-15 14:47:08 +02:00
|
|
|
promises.push(
|
|
|
|
handleRequest(appId, DataSourceOperation.DELETE, tableId, {
|
2021-06-16 19:38:00 +02:00
|
|
|
id: breakRowIdField(row._id),
|
2021-06-15 14:47:08 +02:00
|
|
|
})
|
|
|
|
)
|
2021-06-15 14:03:55 +02:00
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
const responses = await Promise.all(promises)
|
|
|
|
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
|
2021-06-14 20:07:13 +02:00
|
|
|
exports.search = async ctx => {
|
2021-06-14 20:05:39 +02:00
|
|
|
const appId = ctx.appId
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
const { paginate, query, ...params } = ctx.request.body
|
2021-06-17 16:56:41 +02:00
|
|
|
let { bookmark, limit } = params
|
|
|
|
if (!bookmark && paginate) {
|
|
|
|
bookmark = 1
|
|
|
|
}
|
2021-06-14 20:05:39 +02:00
|
|
|
let paginateObj = {}
|
2021-06-17 16:56:41 +02:00
|
|
|
|
2021-06-14 20:05:39 +02:00
|
|
|
if (paginate) {
|
|
|
|
paginateObj = {
|
2021-06-17 16:56:41 +02:00
|
|
|
// add one so we can track if there is another page
|
|
|
|
limit: limit,
|
|
|
|
page: bookmark,
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
2021-06-17 16:56:41 +02:00
|
|
|
} else if (params && limit) {
|
2021-06-16 19:38:00 +02:00
|
|
|
paginateObj = {
|
2021-06-17 16:56:41 +02:00
|
|
|
limit: limit,
|
2021-06-16 19:38:00 +02:00
|
|
|
}
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
let sort
|
|
|
|
if (params.sort) {
|
2021-06-14 20:07:13 +02:00
|
|
|
const direction =
|
|
|
|
params.sortOrder === "descending"
|
|
|
|
? SortDirection.DESCENDING
|
|
|
|
: SortDirection.ASCENDING
|
2021-06-14 20:05:39 +02:00
|
|
|
sort = {
|
2021-06-14 20:07:13 +02:00
|
|
|
[params.sort]: direction,
|
2021-06-14 20:05:39 +02:00
|
|
|
}
|
|
|
|
}
|
2021-06-16 17:27:33 +02:00
|
|
|
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
|
2021-06-14 20:07:13 +02:00
|
|
|
filters: query,
|
|
|
|
sort,
|
|
|
|
paginate: paginateObj,
|
|
|
|
})
|
2021-06-17 16:56:41 +02:00
|
|
|
let hasNextPage = false
|
|
|
|
if (paginate && rows.length === limit) {
|
2021-06-18 14:14:45 +02:00
|
|
|
const nextRows = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.READ,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
filters: query,
|
|
|
|
sort,
|
|
|
|
paginate: {
|
|
|
|
limit: 1,
|
|
|
|
page: bookmark * limit + 1,
|
|
|
|
},
|
2021-06-17 16:56:41 +02:00
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
)
|
2021-06-17 16:56:41 +02:00
|
|
|
hasNextPage = nextRows.length > 0
|
|
|
|
}
|
2021-06-16 17:27:33 +02:00
|
|
|
// need wrapper object for bookmarks etc when paginating
|
2021-06-17 16:56:41 +02:00
|
|
|
return { rows, hasNextPage, bookmark: bookmark + 1 }
|
2021-06-11 19:56:30 +02:00
|
|
|
}
|
|
|
|
|
2021-06-16 17:27:33 +02:00
|
|
|
// No validation is possible for external tables yet - always report success.
exports.validate = async () => ({ valid: true })
|
|
|
|
|
2021-06-30 19:31:16 +02:00
|
|
|
exports.fetchEnrichedRow = async ctx => {
|
|
|
|
const appId = ctx.appId
|
|
|
|
const id = ctx.params.rowId
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
// TODO: this only enriches the full docs 1 layer deep, need to join those as well
|
2021-06-30 19:33:55 +02:00
|
|
|
const response = await handleRequest(
|
|
|
|
appId,
|
|
|
|
DataSourceOperation.READ,
|
|
|
|
tableId,
|
|
|
|
{
|
|
|
|
id,
|
|
|
|
fullDocs: true,
|
|
|
|
}
|
|
|
|
)
|
2021-06-30 19:31:16 +02:00
|
|
|
return response ? response[0] : response
|
|
|
|
}
|