commit 3eaaff9901

@@ -34,8 +34,8 @@ exports.save = async function (ctx) {
   if (ctx.query.refresh) {
     const PlusConnector = plusIntegrations[datasource.source].integration

-    const connector = new PlusConnector(ctx.request.body.config, datasource)
-    await connector.init()
+    const connector = new PlusConnector(ctx.request.body.config)
+    await connector.init(datasource._id)

     datasource.entities = connector.tables
   }

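For context, a compressed sketch of the refresh path after this change, using a placeholder connector class and made-up config/ids rather than a real plus integration: the connector now only takes the connection config, and the datasource _id is handed to init() so the generated table _ids can reference it.

// Placeholder standing in for a real plus integration connector.
class FakePlusConnector {
  constructor(config) {
    this.config = config
    this.tables = {}
  }
  async init(datasourceId) {
    // a real connector would introspect the database here
    this.tables = {
      customers: { _id: `${datasourceId}_customers`, name: "customers" },
    }
  }
}

async function refresh(datasource, config) {
  const connector = new FakePlusConnector(config)
  await connector.init(datasource._id)
  datasource.entities = connector.tables
  return datasource
}

refresh({ _id: "datasource_04b1c4f8" }, { host: "localhost" }).then(ds =>
  console.log(Object.keys(ds.entities)) // [ 'customers' ]
)
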
@@ -2,15 +2,33 @@ const CouchDB = require("../../../db")
 const { makeExternalQuery } = require("./utils")
 const { DataSourceOperation, SortDirection } = require("../../../constants")

-async function buildIDFilter(id) {
-  if (!id) {
-    return {}
+async function getTable(appId, datasourceId, tableName) {
+  const db = new CouchDB(appId)
+  const datasource = await db.get(datasourceId)
+  if (!datasource || !datasource.entities) {
+    throw "Datasource is not configured fully."
+  }
+  return Object.values(datasource.entities).find(
+    entity => entity.name === tableName
+  )
+}
+
+function buildIDFilter(id, table) {
+  if (!id || !table) {
+    return null
+  }
+  // if used as URL parameter it will have been joined
+  if (typeof id === "string") {
+    id = id.split(",")
+  }
+  const primary = table.primary
+  const equal = {}
+  for (let field of primary) {
+    // work through the ID and get the parts
+    equal[field] = id.shift()
   }
-  // TODO: work out how to use the schema to get filter
   return {
-    equal: {
-      id: id,
-    },
+    equal,
   }
 }

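A minimal sketch of how the new buildIDFilter resolves a joined URL id against a composite primary key; the table definition and values below are invented for illustration.

// Hypothetical table with a composite primary key.
const table = { primary: ["customer_id", "order_date"] }

function buildIDFilter(id, table) {
  if (!id || !table) {
    return null
  }
  // if used as URL parameter it will have been joined
  if (typeof id === "string") {
    id = id.split(",")
  }
  const equal = {}
  for (let field of table.primary) {
    // work through the ID and get the parts
    equal[field] = id.shift()
  }
  return { equal }
}

console.log(buildIDFilter("1,2020-01-01", table))
// -> { equal: { customer_id: '1', order_date: '2020-01-01' } }
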
@@ -18,20 +36,24 @@ async function handleRequest(
   appId,
   operation,
   tableId,
-  { id, row, filters, sort, paginate }
+  { id, row, filters, sort, paginate } = {}
 ) {
-  let [datasourceId, tableName] = tableId.split("/")
-  let idFilter = buildIDFilter(id)
+  const parts = tableId.split("_")
+  let tableName = parts.pop()
+  let datasourceId = parts.join("_")
+  const table = await getTable(appId, datasourceId, tableName)
+  if (!table) {
+    throw `Unable to process query, table "${tableName}" not defined.`
+  }
+  // try and build an id filter if required
+  let idFilters = buildIDFilter(id)
   let json = {
     endpoint: {
       datasourceId,
       entityId: tableName,
       operation,
     },
-    filters: {
-      ...filters,
-      ...idFilter,
-    },
+    filters: idFilters != null ? idFilters : filters,
     sort,
     paginate,
     body: row,

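A rough illustration of why the table id is now split on "_" and re-joined rather than split on "/": the datasource document id itself contains underscores, so only the last segment is the table name. The id format shown is an assumption.

// Assumed id format: "<datasource document id>_<table name>".
const tableId = "datasource_04b1c4f8_customers"
const parts = tableId.split("_")
const tableName = parts.pop() // "customers"
const datasourceId = parts.join("_") // "datasource_04b1c4f8"
console.log({ datasourceId, tableName })
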
@@ -65,15 +87,25 @@ exports.save = async ctx => {
 }

 exports.fetchView = async ctx => {
-  // TODO: don't know what this does for external
+  // there are no views in external data sources, shouldn't ever be called
+  // for now just fetch
+  ctx.params.tableId = ctx.params.viewName.split("all_")[1]
+  return exports.fetch(ctx)
 }

-exports.fetchTableRows = async ctx => {
-  // TODO: this is a basic read?
+exports.fetch = async ctx => {
+  const appId = ctx.appId
+  const tableId = ctx.params.tableId
+  ctx.body = await handleRequest(appId, DataSourceOperation.READ, tableId)
 }

 exports.find = async ctx => {
-  // TODO: single find
+  const appId = ctx.appId
+  const id = ctx.params.rowId
+  const tableId = ctx.params.tableId
+  ctx.body = await handleRequest(appId, DataSourceOperation.READ, tableId, {
+    id,
+  })
 }

 exports.destroy = async ctx => {

@@ -85,7 +117,18 @@ exports.destroy = async ctx => {
 }

 exports.bulkDestroy = async ctx => {
-  // TODO: iterate through rows, build a large OR filter?
+  const appId = ctx.appId
+  const { rows } = ctx.request.body
+  const tableId = ctx.params.tableId
+  // TODO: this can probably be optimised to a single SQL statement in the future
+  let promises = []
+  for (let row of rows) {
+    promises.push(handleRequest(appId, DataSourceOperation.DELETE, tableId, {
+      id: row._id,
+    }))
+  }
+  await Promise.all(promises)
+  ctx.body = { response: { ok: true }, rows }
 }

 exports.search = async ctx => {

@@ -123,7 +166,6 @@ exports.validate = async ctx => {
 }

 exports.fetchEnrichedRow = async ctx => {
-  // TODO: should this join?
-  const appId = ctx.appId
-  ctx.body = {}
+  // TODO: How does this work
+  ctx.throw(501, "Not implemented")
 }

@@ -1,8 +1,11 @@
 const internal = require("./internal")
 const external = require("./external")
+const { DocumentTypes } = require("../../../db/utils")

 function pickApi(tableId) {
-  // TODO: go to external
+  if (tableId.includes(DocumentTypes.DATASOURCE)) {
+    return external
+  }
   return internal
 }

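As a sketch, pickApi now routes any row request whose table id embeds a datasource document id to the external controller; the DocumentTypes value and the table ids below are assumptions, not taken from this commit.

// Assumed constant; in the codebase it comes from db/utils.
const DocumentTypes = { DATASOURCE: "datasource" }
const internal = { label: "internal" }
const external = { label: "external" }

function pickApi(tableId) {
  if (tableId.includes(DocumentTypes.DATASOURCE)) {
    return external
  }
  return internal
}

console.log(pickApi("ta_users").label) // internal
console.log(pickApi("datasource_04b1c4f8_customers").label) // external
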
@@ -33,7 +36,6 @@ exports.patch = async ctx => {
 }

 exports.save = async function (ctx) {
-  // TODO: this used to handle bulk delete, need to update builder/client
   const appId = ctx.appId
   const tableId = getTableId(ctx)
   try {

@@ -55,10 +57,10 @@ exports.fetchView = async function (ctx) {
   }
 }

-exports.fetchTableRows = async function (ctx) {
+exports.fetch = async function (ctx) {
   const tableId = getTableId(ctx)
   try {
-    ctx.body = await pickApi(tableId).fetchTableRows(ctx)
+    ctx.body = await pickApi(tableId).fetch(ctx)
   } catch (err) {
     ctx.throw(400, err)
   }

@@ -142,7 +142,7 @@ exports.fetchView = async ctx => {
   // if this is a table view being looked for just transfer to that
   if (viewName.startsWith(TABLE_VIEW_BEGINS_WITH)) {
     ctx.params.tableId = viewName.substring(4)
-    return exports.fetchTableRows(ctx)
+    return exports.fetch(ctx)
   }

   const db = new CouchDB(appId)

@@ -195,7 +195,7 @@ exports.fetchView = async ctx => {
   return rows
 }

-exports.fetchTableRows = async ctx => {
+exports.fetch = async ctx => {
   const appId = ctx.appId
   const db = new CouchDB(appId)

@@ -24,7 +24,7 @@ router
     "/api/:tableId/rows",
     paramResource("tableId"),
     authorized(PermissionTypes.TABLE, PermissionLevels.READ),
-    rowController.fetchTableRows
+    rowController.fetch
   )
   .get(
     "/api/:tableId/rows/:rowId",

@@ -38,7 +38,6 @@ router
     authorized(PermissionTypes.TABLE, PermissionLevels.READ),
     rowController.search
   )
-
   .post(
     "/api/:tableId/rows",
     paramResource("tableId"),

@@ -3,6 +3,8 @@ const { DataSourceOperation, SortDirection } = require("../../constants")
 const BASE_LIMIT = 5000

 function addFilters(query, filters) {
+  // if all or specified in filters, then everything is an or
+  const allOr = !!filters.allOr
   function iterate(structure, fn) {
     for (let [key, value] of Object.entries(structure)) {
       fn(key, value)

@@ -13,7 +15,8 @@ function addFilters(query, filters) {
   }
   if (filters.string) {
     iterate(filters.string, (key, value) => {
-      query = query.where(key, "like", `${value}%`)
+      const fnc = allOr ? "orWhere" : "where"
+      query = query[fnc](key, "like", `${value}%`)
     })
   }
   if (filters.range) {

@@ -21,27 +24,32 @@ function addFilters(query, filters) {
       if (!value.high || !value.low) {
         return
       }
-      query = query.whereBetween(key, [value.low, value.high])
+      const fnc = allOr ? "orWhereBetween" : "whereBetween"
+      query = query[fnc](key, [value.low, value.high])
     })
   }
   if (filters.equal) {
     iterate(filters.equal, (key, value) => {
-      query = query.where({ [key]: value })
+      const fnc = allOr ? "orWhere" : "where"
+      query = query[fnc]({ [key]: value })
     })
   }
   if (filters.notEqual) {
     iterate(filters.notEqual, (key, value) => {
-      query = query.whereNot({ [key]: value })
+      const fnc = allOr ? "orWhereNot" : "whereNot"
+      query = query[fnc]({ [key]: value })
     })
   }
   if (filters.empty) {
     iterate(filters.empty, key => {
-      query = query.whereNull(key)
+      const fnc = allOr ? "orWhereNull" : "whereNull"
+      query = query[fnc](key)
     })
   }
   if (filters.notEmpty) {
     iterate(filters.notEmpty, key => {
-      query = query.whereNotNull(key)
+      const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
+      query = query[fnc](key)
     })
   }
   return query

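A knex-only sketch (not part of the commit; table and column names are invented) of what the allOr switch changes: the same equality filters come out joined with OR instead of AND.

// Requires the "knex" package; the pg client is used only to build SQL text.
const knex = require("knex")({ client: "pg" })

function addEqualFilters(query, equal, allOr) {
  for (let [key, value] of Object.entries(equal)) {
    const fnc = allOr ? "orWhere" : "where"
    query = query[fnc]({ [key]: value })
  }
  return query
}

const filters = { age: 10, name: "John" }
console.log(addEqualFilters(knex("people"), filters, false).toString())
// roughly: select * from "people" where "age" = 10 and "name" = 'John'
console.log(addEqualFilters(knex("people"), filters, true).toString())
// roughly: select * from "people" where ("age" = 10) or ("name" = 'John')
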
@@ -82,12 +82,16 @@ class PostgresPlus extends Sql {
     this.client = this.pool
   }

-  async init() {
-    const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
-    const primaryKeys = {}
-    for (let table of primaryKeysResponse.rows) {
-      primaryKeys[table.column_name] = table.primary_key
-    }
+  async init(datasourceId) {
+    let keys = []
+    try {
+      const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
+      for (let table of primaryKeysResponse.rows) {
+        keys.push(table.column_name)
+      }
+    } catch (err) {
+      // TODO: this try catch method isn't right
+      keys = ["id"]
+    }

     const columnsResponse = await this.client.query(this.COLUMNS_SQL)

@@ -100,9 +104,9 @@ class PostgresPlus extends Sql {
       // table key doesn't exist yet
       if (!tables[tableName]) {
         tables[tableName] = {
-          _id: `${this.datasource._id}${SEPARATOR}${tableName}`,
+          _id: `${datasourceId}${SEPARATOR}${tableName}`,
           // TODO: this needs to accommodate composite keys
-          primary: primaryKeys[tableName],
+          primary: keys,
           name: tableName,
           schema: {},
         }

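For illustration, how the table documents produced by init now derive their _id from the datasourceId argument rather than a datasource object held on the connector; the SEPARATOR value and ids here are assumptions.

// Assumption: SEPARATOR is an underscore, matching the tableId parsing above.
const SEPARATOR = "_"
const datasourceId = "datasource_04b1c4f8"
const keys = ["id"]

const tables = {}
const tableName = "customers"
tables[tableName] = {
  _id: `${datasourceId}${SEPARATOR}${tableName}`,
  primary: keys,
  name: tableName,
  schema: {},
}
console.log(tables.customers._id) // datasource_04b1c4f8_customers
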
@@ -102,6 +102,22 @@ describe("SQL query builder", () => {
     })
   })

+  it("should test for multiple IDs with OR", () => {
+    const query = sql._query(generateReadJson({
+      filters: {
+        equal: {
+          age: 10,
+          name: "John",
+        },
+        allOr: true,
+      }
+    }))
+    expect(query).toEqual({
+      bindings: [10, "John", limit],
+      sql: `select * from "${TABLE_NAME}" where ("age" = $1) or ("name" = $2) limit $3`
+    })
+  })
+
   it("should test an create statement", () => {
     const query = sql._query(generateCreateJson(TABLE_NAME, {
       name: "Michael",