Fixing issue with sql API after refactor.

This commit is contained in:
Michael Drury 2022-02-01 20:59:26 +00:00
parent e1a4136f9c
commit b6e3483481
1 changed file with 23 additions and 52 deletions

View File

@ -31,23 +31,21 @@ async function handleRequest(operation, tableId, opts = {}) {
exports.handleRequest = handleRequest exports.handleRequest = handleRequest
exports.patch = async ctx => { exports.patch = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body const inputs = ctx.request.body
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const id = breakRowIdField(inputs._id) const id = breakRowIdField(inputs._id)
// don't save the ID to db // don't save the ID to db
delete inputs._id delete inputs._id
return handleRequest(appId, DataSourceOperation.UPDATE, tableId, { return handleRequest(DataSourceOperation.UPDATE, tableId, {
id, id,
row: inputs, row: inputs,
}) })
} }
exports.save = async ctx => { exports.save = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body const inputs = ctx.request.body
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
return handleRequest(appId, DataSourceOperation.CREATE, tableId, { return handleRequest(DataSourceOperation.CREATE, tableId, {
row: inputs, row: inputs,
}) })
} }
@ -61,49 +59,35 @@ exports.fetchView = async ctx => {
} }
exports.fetch = async ctx => { exports.fetch = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
return handleRequest(appId, DataSourceOperation.READ, tableId) return handleRequest(DataSourceOperation.READ, tableId)
} }
exports.find = async ctx => { exports.find = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId const id = ctx.params.rowId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const response = await handleRequest( const response = await handleRequest(DataSourceOperation.READ, tableId, {
appId, id,
DataSourceOperation.READ, })
tableId,
{
id,
}
)
return response ? response[0] : response return response ? response[0] : response
} }
exports.destroy = async ctx => { exports.destroy = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const id = ctx.request.body._id const id = ctx.request.body._id
const { row } = await handleRequest( const { row } = await handleRequest(DataSourceOperation.DELETE, tableId, {
appId, id,
DataSourceOperation.DELETE, })
tableId,
{
id,
}
)
return { response: { ok: true }, row } return { response: { ok: true }, row }
} }
exports.bulkDestroy = async ctx => { exports.bulkDestroy = async ctx => {
const appId = ctx.appId
const { rows } = ctx.request.body const { rows } = ctx.request.body
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
let promises = [] let promises = []
for (let row of rows) { for (let row of rows) {
promises.push( promises.push(
handleRequest(appId, DataSourceOperation.DELETE, tableId, { handleRequest(DataSourceOperation.DELETE, tableId, {
id: breakRowIdField(row._id), id: breakRowIdField(row._id),
}) })
) )
@ -113,7 +97,6 @@ exports.bulkDestroy = async ctx => {
} }
exports.search = async ctx => { exports.search = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const { paginate, query, ...params } = ctx.request.body const { paginate, query, ...params } = ctx.request.body
let { bookmark, limit } = params let { bookmark, limit } = params
@ -143,26 +126,21 @@ exports.search = async ctx => {
[params.sort]: direction, [params.sort]: direction,
} }
} }
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, { const rows = await handleRequest(DataSourceOperation.READ, tableId, {
filters: query, filters: query,
sort, sort,
paginate: paginateObj, paginate: paginateObj,
}) })
let hasNextPage = false let hasNextPage = false
if (paginate && rows.length === limit) { if (paginate && rows.length === limit) {
const nextRows = await handleRequest( const nextRows = await handleRequest(DataSourceOperation.READ, tableId, {
appId, filters: query,
DataSourceOperation.READ, sort,
tableId, paginate: {
{ limit: 1,
filters: query, page: bookmark * limit + 1,
sort, },
paginate: { })
limit: 1,
page: bookmark * limit + 1,
},
}
)
hasNextPage = nextRows.length > 0 hasNextPage = nextRows.length > 0
} }
// need wrapper object for bookmarks etc when paginating // need wrapper object for bookmarks etc when paginating
@ -175,7 +153,6 @@ exports.validate = async () => {
} }
exports.fetchEnrichedRow = async ctx => { exports.fetchEnrichedRow = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId const id = ctx.params.rowId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
@ -185,15 +162,10 @@ exports.fetchEnrichedRow = async ctx => {
ctx.throw(400, "Datasource has not been configured for plus API.") ctx.throw(400, "Datasource has not been configured for plus API.")
} }
const tables = datasource.entities const tables = datasource.entities
const response = await handleRequest( const response = await handleRequest(DataSourceOperation.READ, tableId, {
appId, id,
DataSourceOperation.READ, datasource,
tableId, })
{
id,
datasource,
}
)
const table = tables[tableName] const table = tables[tableName]
const row = response[0] const row = response[0]
// this seems like a lot of work, but basically we need to dig deeper for the enrich // this seems like a lot of work, but basically we need to dig deeper for the enrich
@ -212,7 +184,6 @@ exports.fetchEnrichedRow = async ctx => {
// don't support composite keys right now // don't support composite keys right now
const linkedIds = links.map(link => breakRowIdField(link._id)[0]) const linkedIds = links.map(link => breakRowIdField(link._id)[0])
row[fieldName] = await handleRequest( row[fieldName] = await handleRequest(
appId,
DataSourceOperation.READ, DataSourceOperation.READ,
linkedTableId, linkedTableId,
{ {