Fixing issue with sql API after refactor.
parent e1a4136f9c
commit b6e3483481
@@ -31,23 +31,21 @@ async function handleRequest(operation, tableId, opts = {}) {
 exports.handleRequest = handleRequest
 
 exports.patch = async ctx => {
-  const appId = ctx.appId
   const inputs = ctx.request.body
   const tableId = ctx.params.tableId
   const id = breakRowIdField(inputs._id)
   // don't save the ID to db
   delete inputs._id
-  return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
+  return handleRequest(DataSourceOperation.UPDATE, tableId, {
     id,
     row: inputs,
   })
 }
 
 exports.save = async ctx => {
-  const appId = ctx.appId
   const inputs = ctx.request.body
   const tableId = ctx.params.tableId
-  return handleRequest(appId, DataSourceOperation.CREATE, tableId, {
+  return handleRequest(DataSourceOperation.CREATE, tableId, {
     row: inputs,
   })
 }
@@ -61,49 +59,35 @@ exports.fetchView = async ctx => {
 }
 
 exports.fetch = async ctx => {
-  const appId = ctx.appId
   const tableId = ctx.params.tableId
-  return handleRequest(appId, DataSourceOperation.READ, tableId)
+  return handleRequest(DataSourceOperation.READ, tableId)
 }
 
 exports.find = async ctx => {
-  const appId = ctx.appId
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
-  const response = await handleRequest(
-    appId,
-    DataSourceOperation.READ,
-    tableId,
-    {
-      id,
-    }
-  )
+  const response = await handleRequest(DataSourceOperation.READ, tableId, {
+    id,
+  })
   return response ? response[0] : response
 }
 
 exports.destroy = async ctx => {
-  const appId = ctx.appId
   const tableId = ctx.params.tableId
   const id = ctx.request.body._id
-  const { row } = await handleRequest(
-    appId,
-    DataSourceOperation.DELETE,
-    tableId,
-    {
-      id,
-    }
-  )
+  const { row } = await handleRequest(DataSourceOperation.DELETE, tableId, {
+    id,
+  })
   return { response: { ok: true }, row }
 }
 
 exports.bulkDestroy = async ctx => {
-  const appId = ctx.appId
   const { rows } = ctx.request.body
   const tableId = ctx.params.tableId
   let promises = []
   for (let row of rows) {
     promises.push(
-      handleRequest(appId, DataSourceOperation.DELETE, tableId, {
+      handleRequest(DataSourceOperation.DELETE, tableId, {
         id: breakRowIdField(row._id),
       })
     )
@@ -113,7 +97,6 @@ exports.bulkDestroy = async ctx => {
 }
 
 exports.search = async ctx => {
-  const appId = ctx.appId
   const tableId = ctx.params.tableId
   const { paginate, query, ...params } = ctx.request.body
   let { bookmark, limit } = params
@@ -143,26 +126,21 @@ exports.search = async ctx => {
       [params.sort]: direction,
     }
   }
-  const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
+  const rows = await handleRequest(DataSourceOperation.READ, tableId, {
     filters: query,
     sort,
     paginate: paginateObj,
   })
   let hasNextPage = false
   if (paginate && rows.length === limit) {
-    const nextRows = await handleRequest(
-      appId,
-      DataSourceOperation.READ,
-      tableId,
-      {
-        filters: query,
-        sort,
-        paginate: {
-          limit: 1,
-          page: bookmark * limit + 1,
-        },
-      }
-    )
+    const nextRows = await handleRequest(DataSourceOperation.READ, tableId, {
+      filters: query,
+      sort,
+      paginate: {
+        limit: 1,
+        page: bookmark * limit + 1,
+      },
+    })
     hasNextPage = nextRows.length > 0
   }
   // need wrapper object for bookmarks etc when paginating
@@ -175,7 +153,6 @@ exports.validate = async () => {
 }
 
 exports.fetchEnrichedRow = async ctx => {
-  const appId = ctx.appId
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
   const { datasourceId, tableName } = breakExternalTableId(tableId)
@@ -185,15 +162,10 @@ exports.fetchEnrichedRow = async ctx => {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
   const tables = datasource.entities
-  const response = await handleRequest(
-    appId,
-    DataSourceOperation.READ,
-    tableId,
-    {
-      id,
-      datasource,
-    }
-  )
+  const response = await handleRequest(DataSourceOperation.READ, tableId, {
+    id,
+    datasource,
+  })
  const table = tables[tableName]
  const row = response[0]
  // this seems like a lot of work, but basically we need to dig deeper for the enrich
@@ -212,7 +184,6 @@ exports.fetchEnrichedRow = async ctx => {
       // don't support composite keys right now
       const linkedIds = links.map(link => breakRowIdField(link._id)[0])
-      row[fieldName] = await handleRequest(
-        appId,
-        DataSourceOperation.READ,
-        linkedTableId,
-        {
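
Every hunk applies the same change: callers stop threading appId through, and handleRequest now takes just the operation, the table ID and an options object (see the signature in the first hunk header). Below is a minimal sketch of the new call shape, assuming the refactored handleRequest resolves the app ID internally from request context; the findExample handler is illustrative only and not part of the commit.

// Illustrative sketch (not part of the commit): the refactored API is assumed
// to look up the app ID itself, so controllers pass only (operation, tableId, opts).
exports.findExample = async ctx => {
  const id = ctx.params.rowId
  const tableId = ctx.params.tableId
  // before: handleRequest(appId, DataSourceOperation.READ, tableId, { id })
  const response = await handleRequest(DataSourceOperation.READ, tableId, { id })
  return response ? response[0] : response
}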