Merge branch 'feature/sql-relationships' of github.com:Budibase/budibase into feature/opinionated-relationships-ui
commit 08393044c9
@@ -19,7 +19,7 @@ async function handleRequest(
   appId,
   operation,
   tableId,
-  { id, row, filters, sort, paginate } = {}
+  { id, row, filters, sort, paginate, fullDocs } = {}
 ) {
   let { datasourceId, tableName } = breakExternalTableId(tableId)
   const tables = await getAllExternalTables(appId, datasourceId)
@@ -79,13 +79,17 @@ async function handleRequest(
     }
     await Promise.all(promises)
   }
-  // we searched for rows in someway
-  if (operation === DataSourceOperation.READ && Array.isArray(response)) {
-    return outputProcessing(response, table, relationships, tables)
-  } else {
-    row = outputProcessing(response, table, relationships, tables)[0]
-    return { row, table }
-  }
+  const output = outputProcessing(
+    response,
+    table,
+    relationships,
+    tables,
+    fullDocs
+  )
+  // if reading it'll just be an array of rows, return whole thing
+  return operation === DataSourceOperation.READ && Array.isArray(response)
+    ? output
+    : { row: output[0], table }
 }

 exports.patch = async ctx => {
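With the fullDocs flag threaded through, handleRequest now always runs outputProcessing once and only then decides the return shape. A minimal sketch of that decision, with outputProcessing stubbed and the enum/table values invented for illustration:

```js
// Sketch only: mirrors the READ-vs-single-row return shape in the hunk above.
const DataSourceOperation = { READ: "READ", UPDATE: "UPDATE" }

// stand-in for the real outputProcessing in the row utilities module
function outputProcessing(response) {
  return Array.isArray(response) ? response : [response]
}

function pickReturnShape(operation, response, table) {
  const output = outputProcessing(response)
  // if reading it'll just be an array of rows, return the whole thing
  return operation === DataSourceOperation.READ && Array.isArray(response)
    ? output
    : { row: output[0], table }
}

console.log(pickReturnShape(DataSourceOperation.READ, [{ id: 1 }, { id: 2 }], { _id: "tbl" }))
// -> [ { id: 1 }, { id: 2 } ]
console.log(pickReturnShape(DataSourceOperation.UPDATE, [{ id: 1 }], { _id: "tbl" }))
// -> { row: { id: 1 }, table: { _id: 'tbl' } }
```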
@@ -127,9 +131,15 @@ exports.find = async ctx => {
   const appId = ctx.appId
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
-  return handleRequest(appId, DataSourceOperation.READ, tableId, {
-    id,
-  })
+  const response = await handleRequest(
+    appId,
+    DataSourceOperation.READ,
+    tableId,
+    {
+      id,
+    }
+  )
+  return response ? response[0] : response
 }

 exports.destroy = async ctx => {
@@ -225,4 +235,19 @@ exports.validate = async () => {
   return { valid: true }
 }

-exports.fetchEnrichedRow = async () => {}
+exports.fetchEnrichedRow = async ctx => {
+  const appId = ctx.appId
+  const id = ctx.params.rowId
+  const tableId = ctx.params.tableId
+  // TODO: this only enriches the full docs 1 layer deep, need to join those as well
+  const response = await handleRequest(
+    appId,
+    DataSourceOperation.READ,
+    tableId,
+    {
+      id,
+      fullDocs: true,
+    }
+  )
+  return response ? response[0] : response
+}
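The new fetchEnrichedRow handler reuses the same READ path but asks for full related documents. A rough usage sketch, with handleRequest stubbed and the ctx/tableId values made up (they are not from this commit):

```js
const DataSourceOperation = { READ: "READ" }

// hypothetical stub — the real handleRequest queries the external datasource
async function handleRequest(appId, operation, tableId, { id, fullDocs } = {}) {
  const orders = fullDocs ? [{ _id: "o1", total: 30 }] : [{ _id: "o1" }]
  return [{ _id: id, name: "Ada", orders }]
}

const fetchEnrichedRow = async ctx => {
  const appId = ctx.appId
  const id = ctx.params.rowId
  const tableId = ctx.params.tableId
  const response = await handleRequest(appId, DataSourceOperation.READ, tableId, {
    id,
    fullDocs: true,
  })
  return response ? response[0] : response
}

// invented ctx shape, purely for the sketch
fetchEnrichedRow({ appId: "app_1", params: { rowId: "p1", tableId: "tbl_external" } }).then(
  row => console.log(row.orders) // full related docs, not just { _id }
)
```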
@@ -6,6 +6,18 @@ const {
 const { FieldTypes } = require("../../../constants")
 const { cloneDeep } = require("lodash/fp")

+function basicProcessing(row, table) {
+  const thisRow = {}
+  // filter the row down to what is actually the row (not joined)
+  for (let fieldName of Object.keys(table.schema)) {
+    thisRow[fieldName] = row[fieldName]
+  }
+  thisRow._id = exports.generateIdForRow(row, table)
+  thisRow.tableId = table._id
+  thisRow._rev = "rev"
+  return thisRow
+}
+
 exports.inputProcessing = (row, table, allTables) => {
   if (!row) {
     return { row, manyRelationships: [] }
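basicProcessing strips a joined SQL result row back down to the table's own schema and stamps on the synthetic _id/tableId/_rev fields. A standalone illustration with an invented table and a stubbed id generator (the real helper builds the id from the table's primary keys via generateRowIdField):

```js
// stub — stands in for exports.generateIdForRow / generateRowIdField
function generateIdForRow(row, table) {
  return JSON.stringify(table.primary.map(field => row[field]))
}

function basicProcessing(row, table) {
  const thisRow = {}
  // filter the row down to what is actually the row (not joined)
  for (let fieldName of Object.keys(table.schema)) {
    thisRow[fieldName] = row[fieldName]
  }
  thisRow._id = generateIdForRow(row, table)
  thisRow.tableId = table._id
  thisRow._rev = "rev"
  return thisRow
}

// invented table plus a row still carrying joined columns from the SQL query
const table = { _id: "tbl_people", primary: ["id"], schema: { id: {}, name: {} } }
const joined = { id: 1, name: "Ada", "orders.id": 7, "orders.total": 30 }
console.log(basicProcessing(joined, table))
// -> { id: 1, name: 'Ada', _id: '[1]', tableId: 'tbl_people', _rev: 'rev' }
```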
@@ -64,20 +76,29 @@ exports.generateIdForRow = (row, table) => {
   return generateRowIdField(idParts)
 }

-exports.updateRelationshipColumns = (rows, row, relationships, allTables) => {
+exports.updateRelationshipColumns = (
+  row,
+  rows,
+  relationships,
+  allTables,
+  fullDocs
+) => {
   const columns = {}
   for (let relationship of relationships) {
     const linkedTable = allTables[relationship.tableName]
     if (!linkedTable) {
       continue
     }
-    const display = linkedTable.primaryDisplay
-    const related = {}
-    if (display && row[display]) {
-      related.primaryDisplay = row[display]
+    let linked = basicProcessing(row, linkedTable)
+    // if not returning full docs then get the minimal links out
+    if (!fullDocs) {
+      const display = linkedTable.primaryDisplay
+      linked = {
+        primaryDisplay: display ? linked[display] : undefined,
+        _id: linked._id,
+      }
     }
-    related._id = row[relationship.to]
-    columns[relationship.column] = related
+    columns[relationship.column] = linked
   }
   for (let [column, related] of Object.entries(columns)) {
     if (!Array.isArray(rows[row._id][column])) {
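The fullDocs switch decides whether a relationship column carries the whole related row or just a minimal link. A small sketch of that branch in isolation, with basicProcessing reduced to a pass-through and the table/row values invented:

```js
// stand-in: assume the row has already been reduced to the linked table's schema
function basicProcessing(row, linkedTable) {
  return { ...row, _id: row._id, tableId: linkedTable._id, _rev: "rev" }
}

function relatedValue(row, linkedTable, fullDocs) {
  let linked = basicProcessing(row, linkedTable)
  // if not returning full docs then get the minimal links out
  if (!fullDocs) {
    const display = linkedTable.primaryDisplay
    linked = {
      primaryDisplay: display ? linked[display] : undefined,
      _id: linked._id,
    }
  }
  return linked
}

const linkedTable = { _id: "tbl_orders", primaryDisplay: "name" }
const row = { _id: "o1", name: "Order #1", total: 30 }
console.log(relatedValue(row, linkedTable, false))
// -> { primaryDisplay: 'Order #1', _id: 'o1' }
console.log(relatedValue(row, linkedTable, true))
// -> the full processed doc, total included
```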
@@ -91,7 +112,13 @@ exports.updateRelationshipColumns = (rows, row, relationships, allTables) => {
   return rows
 }

-exports.outputProcessing = (rows, table, relationships, allTables) => {
+exports.outputProcessing = (
+  rows,
+  table,
+  relationships,
+  allTables,
+  fullDocs
+) => {
   // if no rows this is what is returned? Might be PG only
   if (rows[0].read === true) {
     return []
@@ -102,28 +129,23 @@ exports.outputProcessing = (rows, table, relationships, allTables) => {
     // this is a relationship of some sort
     if (finalRows[row._id]) {
       finalRows = exports.updateRelationshipColumns(
-        finalRows,
         row,
+        finalRows,
         relationships,
-        allTables
+        allTables,
+        fullDocs
       )
       continue
     }
-    const thisRow = {}
-    // filter the row down to what is actually the row (not joined)
-    for (let fieldName of Object.keys(table.schema)) {
-      thisRow[fieldName] = row[fieldName]
-    }
-    thisRow._id = row._id
-    thisRow.tableId = table._id
-    thisRow._rev = "rev"
+    const thisRow = basicProcessing(row, table)
     finalRows[thisRow._id] = thisRow
     // do this at end once its been added to the final rows
     finalRows = exports.updateRelationshipColumns(
-      finalRows,
       row,
+      finalRows,
       relationships,
-      allTables
+      allTables,
+      fullDocs
     )
   }
   return Object.values(finalRows)
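Because the SQL layer returns one flat row per join match, outputProcessing keys results by _id and folds repeated matches into their relationship columns. A much-simplified sketch of that grouping idea — the person/order shapes are invented, and the real code routes the merging through updateRelationshipColumns:

```js
// Sketch only: rows sharing an _id are merged; later matches only contribute
// to their relationship column, mirroring the finalRows bookkeeping above.
function groupRows(rows) {
  const finalRows = {}
  for (let row of rows) {
    if (finalRows[row._id]) {
      // this is a relationship of some sort — append to the existing row's column
      finalRows[row._id].orders.push(row.order)
      continue
    }
    finalRows[row._id] = { _id: row._id, name: row.name, orders: [row.order] }
  }
  return Object.values(finalRows)
}

console.log(
  groupRows([
    { _id: "p1", name: "Ada", order: { _id: "o1" } },
    { _id: "p1", name: "Ada", order: { _id: "o2" } },
    { _id: "p2", name: "Grace", order: { _id: "o3" } },
  ])
)
// -> two people, with Ada carrying both orders
```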
@@ -11,7 +11,9 @@ import {

 type KnexQuery = Knex.QueryBuilder | Knex

+// right now we only do filters on the specific table being queried
 function addFilters(
+  tableName: string,
   query: KnexQuery,
   filters: SearchFilters | undefined
 ): KnexQuery {
@@ -20,7 +22,7 @@ function addFilters(
     fn: (key: string, value: any) => void
   ) {
     for (let [key, value] of Object.entries(structure)) {
-      fn(key, value)
+      fn(`${tableName}.${key}`, value)
     }
   }
   if (!filters) {
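Once relationships add joins to the query, an unqualified column in the WHERE clause can be ambiguous, which is why every filter key is now prefixed with its table name. A hedged knex sketch of the effect — table and column names are invented, knex (plus a SQL driver) must be installed, and no connection is opened:

```js
// Sketch only: demonstrates the `${tableName}.${key}` qualification, not the real addFilters.
const knex = require("knex")({ client: "pg" })

function addEqualFilters(tableName, query, equal = {}) {
  for (let [key, value] of Object.entries(equal)) {
    // the change in the hunk above: qualify the column with its table
    query = query.where(`${tableName}.${key}`, value)
  }
  return query
}

let query = knex("people")
  .select("*")
  .leftJoin("orders", "orders.person_id", "people.id")
query = addEqualFilters("people", query, { name: "Ada" })
console.log(query.toString())
// -> ... where "people"."name" = 'Ada'  (unambiguous even with orders joined)
```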
@@ -134,7 +136,7 @@ function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
     query = query.select("*")
   }
   // handle where
-  query = addFilters(query, filters)
+  query = addFilters(tableName, query, filters)
   // handle join
   query = addRelationships(query, tableName, relationships)
   // handle sorting
@@ -165,7 +167,7 @@ function buildUpdate(
 ): KnexQuery {
   const { endpoint, body, filters } = json
   let query: KnexQuery = knex(endpoint.entityId)
-  query = addFilters(query, filters)
+  query = addFilters(endpoint.entityId, query, filters)
   // mysql can't use returning
   if (opts.disableReturning) {
     return query.update(body)
@@ -181,7 +183,7 @@ function buildDelete(
 ): KnexQuery {
   const { endpoint, filters } = json
   let query: KnexQuery = knex(endpoint.entityId)
-  query = addFilters(query, filters)
+  query = addFilters(endpoint.entityId, query, filters)
   // mysql can't use returning
   if (opts.disableReturning) {
     return query.delete()
@@ -8,7 +8,8 @@
     "strict": true,
     "noImplicitAny": true,
     "esModuleInterop": true,
-    "resolveJsonModule": true
+    "resolveJsonModule": true,
+    "incremental": true
   },
   "include": [
     "./src/**/*"
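`"incremental": true` makes tsc persist build information to a .tsbuildinfo file so subsequent compiles only re-check what changed. Pinning the cache location is an optional companion setting and not part of this commit:

```jsonc
{
  "compilerOptions": {
    "incremental": true,
    // optional, assumed path — not added by this commit
    "tsBuildInfoFile": "./dist/.tsbuildinfo"
  }
}
```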