Final TypeScript conversions for server.
commit 06d8d19aaa
parent 12d6e60525
File diff suppressed because it is too large
@@ -1,104 +1,117 @@
-const {
-  DataSourceOperation,
+import {
   SortDirection,
   FieldTypes,
   NoEmptyFilterStrings,
-} = require("../../../constants")
-const {
+} from "../../../constants"
+import {
   breakExternalTableId,
   breakRowIdField,
-} = require("../../../integrations/utils")
-const ExternalRequest = require("./ExternalRequest")
-const { context } = require("@budibase/backend-core")
-const exporters = require("../view/exporters")
-const { apiFileReturn } = require("../../../utilities/fileSystem")
+} from "../../../integrations/utils"
+import { ExternalRequest, RunConfig } from "./ExternalRequest"
+import { context } from "@budibase/backend-core"
+import * as exporters from "../view/exporters"
+import { apiFileReturn } from "../../../utilities/fileSystem"
+import {
+  Operation,
+  BBContext,
+  Row,
+  PaginationJson,
+  Table,
+  Datasource,
+} from "@budibase/types"
 
-async function handleRequest(operation, tableId, opts = {}) {
+export async function handleRequest(
+  operation: Operation,
+  tableId: string,
+  opts?: RunConfig
+) {
   // make sure the filters are cleaned up, no empty strings for equals, fuzzy or string
   if (opts && opts.filters) {
     for (let filterField of NoEmptyFilterStrings) {
       if (!opts.filters[filterField]) {
         continue
       }
+      // @ts-ignore
       for (let [key, value] of Object.entries(opts.filters[filterField])) {
         if (!value || value === "") {
+          // @ts-ignore
           delete opts.filters[filterField][key]
         }
       }
     }
   }
-  return new ExternalRequest(operation, tableId, opts.datasource).run(opts)
+  return new ExternalRequest(operation, tableId, opts?.datasource).run(
+    opts || {}
+  )
 }
 
-exports.handleRequest = handleRequest
-
-exports.patch = async ctx => {
+export async function patch(ctx: BBContext) {
   const inputs = ctx.request.body
   const tableId = ctx.params.tableId
   const id = inputs._id
   // don't save the ID to db
   delete inputs._id
-  return handleRequest(DataSourceOperation.UPDATE, tableId, {
+  return handleRequest(Operation.UPDATE, tableId, {
     id: breakRowIdField(id),
     row: inputs,
   })
 }
 
-exports.save = async ctx => {
+export async function save(ctx: BBContext) {
   const inputs = ctx.request.body
   const tableId = ctx.params.tableId
-  return handleRequest(DataSourceOperation.CREATE, tableId, {
+  return handleRequest(Operation.CREATE, tableId, {
     row: inputs,
   })
 }
 
-exports.fetchView = async ctx => {
+export async function fetchView(ctx: BBContext) {
   // there are no views in external datasources, shouldn't ever be called
   // for now just fetch
   const split = ctx.params.viewName.split("all_")
   ctx.params.tableId = split[1] ? split[1] : split[0]
-  return exports.fetch(ctx)
+  return fetch(ctx)
 }
 
-exports.fetch = async ctx => {
+export async function fetch(ctx: BBContext) {
   const tableId = ctx.params.tableId
-  return handleRequest(DataSourceOperation.READ, tableId)
+  return handleRequest(Operation.READ, tableId)
 }
 
-exports.find = async ctx => {
+export async function find(ctx: BBContext) {
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
-  const response = await handleRequest(DataSourceOperation.READ, tableId, {
+  const response = (await handleRequest(Operation.READ, tableId, {
     id: breakRowIdField(id),
-  })
+  })) as Row[]
   return response ? response[0] : response
 }
 
-exports.destroy = async ctx => {
+export async function destroy(ctx: BBContext) {
   const tableId = ctx.params.tableId
   const id = ctx.request.body._id
-  const { row } = await handleRequest(DataSourceOperation.DELETE, tableId, {
+  const { row } = (await handleRequest(Operation.DELETE, tableId, {
     id: breakRowIdField(id),
-  })
+  })) as { row: Row }
   return { response: { ok: true }, row }
 }
 
-exports.bulkDestroy = async ctx => {
+export async function bulkDestroy(ctx: BBContext) {
   const { rows } = ctx.request.body
   const tableId = ctx.params.tableId
   let promises = []
   for (let row of rows) {
     promises.push(
-      handleRequest(DataSourceOperation.DELETE, tableId, {
+      handleRequest(Operation.DELETE, tableId, {
        id: breakRowIdField(row._id),
      })
    )
  }
-  const responses = await Promise.all(promises)
+  const responses = (await Promise.all(promises)) as { row: Row }[]
   return { response: { ok: true }, rows: responses.map(resp => resp.row) }
 }
 
-exports.search = async ctx => {
+export async function search(ctx: BBContext) {
   const tableId = ctx.params.tableId
   const { paginate, query, ...params } = ctx.request.body
   let { bookmark, limit } = params
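Note on the new signature: handleRequest is now a named export typed against the shared Operation enum and an optional RunConfig, replacing the old DataSourceOperation constant and untyped options object. A minimal caller sketch follows, using simplified stand-in types rather than the real definitions from @budibase/types and ./ExternalRequest:

// Sketch only: Operation and RunConfig below are simplified stand-ins,
// not the real @budibase/types / ExternalRequest definitions.
enum Operation {
  CREATE = "CREATE",
  READ = "READ",
  UPDATE = "UPDATE",
  DELETE = "DELETE",
}

interface RunConfig {
  id?: string[]
  row?: Record<string, any>
}

async function handleRequest(
  operation: Operation,
  tableId: string,
  opts?: RunConfig
): Promise<unknown> {
  // stands in for: new ExternalRequest(operation, tableId, opts?.datasource).run(opts || {})
  return { operation, tableId, opts }
}

// A caller now passes an enum member and a typed, optional config object:
async function updateExample(
  tableId: string,
  rowId: string[],
  row: Record<string, any>
) {
  return handleRequest(Operation.UPDATE, tableId, { id: rowId, row })
}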
@@ -129,26 +142,26 @@ exports.search = async ctx => {
     }
   }
   try {
-    const rows = await handleRequest(DataSourceOperation.READ, tableId, {
+    const rows = (await handleRequest(Operation.READ, tableId, {
       filters: query,
       sort,
-      paginate: paginateObj,
-    })
+      paginate: paginateObj as PaginationJson,
+    })) as Row[]
     let hasNextPage = false
     if (paginate && rows.length === limit) {
-      const nextRows = await handleRequest(DataSourceOperation.READ, tableId, {
+      const nextRows = (await handleRequest(Operation.READ, tableId, {
        filters: query,
        sort,
        paginate: {
          limit: 1,
          page: bookmark * limit + 1,
        },
-      })
+      })) as Row[]
      hasNextPage = nextRows.length > 0
    }
    // need wrapper object for bookmarks etc when paginating
    return { rows, hasNextPage, bookmark: bookmark + 1 }
-  } catch (err) {
+  } catch (err: any) {
    if (err.message && err.message.includes("does not exist")) {
      throw new Error(
        `Table updated externally, please re-fetch - ${err.message}`
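The (await handleRequest(...)) as Row[] casts added above exist because handleRequest has one broad return type shared by every operation, so each call site narrows the result to the shape that operation actually produces. A generic illustration of the pattern, with placeholder names rather than Budibase APIs:

interface Row {
  _id?: string
  [key: string]: any
}

// Placeholder for a function whose declared return type is intentionally broad.
async function runOperation(): Promise<unknown> {
  return [{ _id: "1", name: "example" }]
}

async function readRows(): Promise<Row[]> {
  // The assertion narrows the type for this call site only;
  // it is a compile-time claim and is not checked at runtime.
  const rows = (await runOperation()) as Row[]
  return rows
}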
@@ -159,12 +172,12 @@ exports.search = async ctx => {
   }
 }
 
-exports.validate = async () => {
+export async function validate(ctx: BBContext) {
   // can't validate external right now - maybe in future
   return { valid: true }
 }
 
-exports.exportRows = async ctx => {
+export async function exportRows(ctx: BBContext) {
   const { datasourceId } = breakExternalTableId(ctx.params.tableId)
   const db = context.getAppDB()
   const format = ctx.query.format
@@ -176,13 +189,15 @@ exports.exportRows = async ctx => {
   ctx.request.body = {
     query: {
       oneOf: {
-        _id: ctx.request.body.rows.map(row => JSON.parse(decodeURI(row))[0]),
+        _id: ctx.request.body.rows.map(
+          (row: string) => JSON.parse(decodeURI(row))[0]
+        ),
       },
     },
   }
 
-  let result = await exports.search(ctx)
-  let rows = []
+  let result = await search(ctx)
+  let rows: Row[] = []
 
   // Filter data to only specified columns if required
   if (columns && columns.length) {
@@ -197,6 +212,7 @@ exports.exportRows = async ctx => {
   }
 
   let headers = Object.keys(rows[0])
+  // @ts-ignore
   const exporter = exporters[format]
   const filename = `export.${format}`
 
@@ -205,21 +221,24 @@ exports.exportRows = async ctx => {
   return apiFileReturn(exporter(headers, rows))
 }
 
-exports.fetchEnrichedRow = async ctx => {
+export async function fetchEnrichedRow(ctx: BBContext) {
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
   const { datasourceId, tableName } = breakExternalTableId(tableId)
   const db = context.getAppDB()
-  const datasource = await db.get(datasourceId)
+  const datasource: Datasource = await db.get(datasourceId)
+  if (!tableName) {
+    ctx.throw(400, "Unable to find table.")
+  }
   if (!datasource || !datasource.entities) {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
   const tables = datasource.entities
-  const response = await handleRequest(DataSourceOperation.READ, tableId, {
+  const response = (await handleRequest(Operation.READ, tableId, {
     id,
     datasource,
-  })
-  const table = tables[tableName]
+  })) as Row[]
+  const table: Table = tables[tableName]
   const row = response[0]
   // this seems like a lot of work, but basically we need to dig deeper for the enrich
   // for a single row, there is probably a better way to do this with some smart multi-layer joins
@@ -233,21 +252,19 @@ exports.fetchEnrichedRow = async ctx => {
     }
     const links = row[fieldName]
     const linkedTableId = field.tableId
-    const linkedTable = tables[breakExternalTableId(linkedTableId).tableName]
+    const linkedTableName = breakExternalTableId(linkedTableId).tableName!
+    const linkedTable = tables[linkedTableName]
     // don't support composite keys right now
-    const linkedIds = links.map(link => breakRowIdField(link._id)[0])
-    row[fieldName] = await handleRequest(
-      DataSourceOperation.READ,
-      linkedTableId,
-      {
-        tables,
-        filters: {
-          oneOf: {
-            [linkedTable.primary]: linkedIds,
-          },
-        },
-      }
-    )
+    const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
+    const primaryLink = linkedTable.primary?.[0] as string
+    row[fieldName] = await handleRequest(Operation.READ, linkedTableId!, {
+      tables,
+      filters: {
+        oneOf: {
+          [primaryLink]: linkedIds,
+        },
+      },
+    })
   }
   return row
 }
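Because the module now uses ES module named exports instead of assigning to exports, sibling handlers reference each other directly, which is why exports.fetch(ctx) and exports.search(ctx) become fetch(ctx) and search(ctx) throughout. A small sketch of the difference, with illustrative names only:

// CommonJS style (before): handlers hang off the exports object
// and call each other through it.
//   exports.fetch = async ctx => { /* ... */ }
//   exports.fetchView = async ctx => exports.fetch(ctx)

// ES module style (after): named exports are ordinary bindings,
// so one handler calls another directly.
export async function fetchRows(ctx: { params: Record<string, any> }) {
  return [] as any[] // stub body for the sketch
}

export async function fetchViewRows(ctx: { params: Record<string, any> }) {
  return fetchRows(ctx) // direct call instead of exports.fetchRows(ctx)
}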
@@ -191,7 +191,7 @@ export async function fetchView(ctx: BBContext) {
   // if this is a table view being looked for just transfer to that
   if (viewName.startsWith(DocumentType.TABLE)) {
     ctx.params.tableId = viewName
-    return exports.fetch(ctx)
+    return fetch(ctx)
   }
 
   const db = context.getAppDB()
@@ -347,7 +347,7 @@ export async function bulkDestroy(ctx: BBContext) {
 export async function search(ctx: BBContext) {
   // Fetch the whole table when running in cypress, as search doesn't work
   if (!env.COUCH_DB_URL && env.isCypress()) {
-    return { rows: await exports.fetch(ctx) }
+    return { rows: await fetch(ctx) }
   }
 
   const { tableId } = ctx.params
@@ -8,11 +8,7 @@ import {
   foreignKeyStructure,
   hasTypeChanged,
 } from "./utils"
-import {
-  DataSourceOperation,
-  FieldTypes,
-  RelationshipTypes,
-} from "../../../constants"
+import { FieldTypes, RelationshipTypes } from "../../../constants"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import * as csvParser from "../../../utilities/csvParser"
 import { handleRequest } from "../row/external"
@@ -347,7 +343,7 @@ export async function bulkImport(ctx: BBContext) {
     ...dataImport,
     existingTable: table,
   })
-  await handleRequest(DataSourceOperation.BULK_CREATE, table._id, {
+  await handleRequest(Operation.BULK_CREATE, table._id!, {
     rows,
   })
   await events.rows.imported(table, "csv", rows.length)
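The trailing ! in table._id! above (and in linkedTableId! and link._id! earlier) is TypeScript's non-null assertion: with strict checking enabled, optional fields such as _id?: string cannot be passed where a plain string is expected unless they are asserted or guarded first. A small illustration with placeholder types:

interface TableLike {
  _id?: string
}

function useId(id: string): string {
  return id
}

function example(table: TableLike) {
  // Option 1: assert non-null when the surrounding code guarantees the id exists.
  const asserted = useId(table._id!)

  // Option 2: guard explicitly so the compiler narrows the type itself.
  if (!table._id) {
    throw new Error("table is missing an _id")
  }
  const guarded = useId(table._id)

  return [asserted, guarded]
}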
@@ -3,7 +3,6 @@
     "target": "es6",
     "module": "commonjs",
     "lib": ["es2020"],
-    "allowJs": true,
     "strict": true,
     "noImplicitAny": true,
     "esModuleInterop": true,
@@ -23,4 +22,4 @@
     "**/*.spec.ts",
     "**/*.spec.js"
   ]
 }
 }