Merge pull request #11250 from Budibase/BUDI-7189/clean_ctx_dependencies_from_sdk
Clean ctx dependencies from rows SDK
Commit 37c1c1c9b0
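The per-file headers of the diff were lost in extraction; the hunks below cover the row controllers, the rows SDK search modules (search.ts, search/external.ts, search/internal.ts, utils.ts) and their tests. The shape of the refactor is the same everywhere: SDK functions stop receiving the Koa ctx and instead take plain typed parameter objects, while HTTP concerns (status, attachment, response body) stay in the controllers. A minimal, self-contained sketch of that pattern follows; the names in it (searchRows, searchController, RowRecord) are illustrative stand-ins, not part of the PR, and this SearchParams is a trimmed-down stand-in for the real interface shown further down.

interface SearchParams {
  tableId: string
  query: Record<string, unknown>
  limit?: number
}

interface RowRecord {
  _id: string
  [key: string]: unknown
}

// SDK layer: plain data in, plain data out - no knowledge of Koa's ctx.
async function searchRows(options: SearchParams): Promise<RowRecord[]> {
  const allRows: RowRecord[] = [{ _id: "row_1", tableId: options.tableId }]
  return allRows.slice(0, options.limit ?? allRows.length)
}

// Controller layer: translates the HTTP request into typed options,
// delegates to the SDK, and writes the HTTP response itself.
async function searchController(ctx: {
  params: { tableId: string }
  request: { body: Record<string, unknown> }
  status?: number
  body?: unknown
}) {
  const searchParams: SearchParams = {
    tableId: ctx.params.tableId,
    query: ctx.request.body,
  }
  ctx.status = 200
  ctx.body = await searchRows(searchParams)
}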
@@ -49,6 +49,7 @@ export async function handleRequest(
       }
     }
   }
+
   return new ExternalRequest(operation, tableId, opts?.datasource).run(
     opts || {}
   )
@@ -6,6 +6,8 @@ import { Ctx } from "@budibase/types"
 import * as utils from "./utils"
 import { gridSocket } from "../../../websockets"
 import sdk from "../../../sdk"
+import * as exporters from "../view/exporters"
+import { apiFileReturn } from "../../../utilities/fileSystem"
 
 function pickApi(tableId: any) {
   if (isExternalTable(tableId)) {

@@ -65,14 +67,26 @@ export const save = async (ctx: any) => {
 }
 export async function fetchView(ctx: any) {
   const tableId = utils.getTableId(ctx)
-  ctx.body = await quotas.addQuery(() => sdk.rows.fetchView(tableId, ctx), {
-    datasourceId: tableId,
-  })
+  const viewName = decodeURIComponent(ctx.params.viewName)
+
+  const { calculation, group, field } = ctx.query
+
+  ctx.body = await quotas.addQuery(
+    () =>
+      sdk.rows.fetchView(tableId, viewName, {
+        calculation,
+        group,
+        field,
+      }),
+    {
+      datasourceId: tableId,
+    }
+  )
 }
 
 export async function fetch(ctx: any) {
   const tableId = utils.getTableId(ctx)
-  ctx.body = await quotas.addQuery(() => sdk.rows.fetch(tableId, ctx), {
+  ctx.body = await quotas.addQuery(() => sdk.rows.fetch(tableId), {
     datasourceId: tableId,
   })
 }

@@ -120,9 +134,14 @@ export async function destroy(ctx: any) {
 
 export async function search(ctx: any) {
   const tableId = utils.getTableId(ctx)
-  ctx.status = 200
 
-  ctx.body = await quotas.addQuery(() => sdk.rows.search(tableId, ctx), {
+  const searchParams = {
+    ...ctx.request.body,
+    tableId,
+  }
+
+  ctx.status = 200
+  ctx.body = await quotas.addQuery(() => sdk.rows.search(searchParams), {
     datasourceId: tableId,
   })
 }

@@ -152,7 +171,33 @@ export async function fetchEnrichedRow(ctx: any) {
 
 export const exportRows = async (ctx: any) => {
   const tableId = utils.getTableId(ctx)
-  ctx.body = await quotas.addQuery(() => sdk.rows.exportRows(tableId, ctx), {
-    datasourceId: tableId,
-  })
+
+  const format = ctx.query.format
+
+  const { rows, columns, query } = ctx.request.body
+  if (typeof format !== "string" || !exporters.isFormat(format)) {
+    ctx.throw(
+      400,
+      `Format ${format} not valid. Valid values: ${Object.values(
+        exporters.Format
+      )}`
+    )
+  }
+
+  ctx.body = await quotas.addQuery(
+    async () => {
+      const { fileName, content } = await sdk.rows.exportRows({
+        tableId,
+        format,
+        rowIds: rows,
+        columns,
+        query,
+      })
+      ctx.attachment(fileName)
+      return apiFileReturn(content)
+    },
+    {
+      datasourceId: tableId,
+    }
+  )
 }
@@ -162,7 +162,10 @@ export async function exportView(ctx: Ctx) {
   let rows = ctx.body as Row[]
 
   let schema: TableSchema = view && view.meta && view.meta.schema
-  const tableId = ctx.params.tableId || view.meta.tableId
+  const tableId =
+    ctx.params.tableId ||
+    view?.meta?.tableId ||
+    (viewName.startsWith(DocumentType.TABLE) && viewName)
   const table: Table = await sdk.tables.getTable(tableId)
   if (!schema) {
     schema = table.schema
@@ -1,17 +1,26 @@
-import { Ctx, SearchFilters } from "@budibase/types"
+import { SearchFilters } from "@budibase/types"
 import { isExternalTable } from "../../../integrations/utils"
 import * as internal from "./search/internal"
 import * as external from "./search/external"
+import { Format } from "../../../api/controllers/view/exporters"
 
 export interface SearchParams {
   tableId: string
-  paginate: boolean
-  query?: SearchFilters
-  bookmark?: number
-  limit: number
+  paginate?: boolean
+  query: SearchFilters
+  bookmark?: string
+  limit?: number
   sort?: string
   sortOrder?: string
   sortType?: string
+  version?: string
+  disableEscaping?: boolean
+}
+
+export interface ViewParams {
+  calculation: string
+  group: string
+  field: string
 }
 
 function pickApi(tableId: any) {

@@ -21,18 +30,37 @@ function pickApi(tableId: any) {
   return internal
 }
 
-export async function search(tableId: string, ctx: Ctx) {
-  return pickApi(tableId).search(ctx)
+export async function search(options: SearchParams) {
+  return pickApi(options.tableId).search(options)
 }
 
-export async function exportRows(tableId: string, ctx: Ctx) {
-  return pickApi(tableId).exportRows(ctx)
+export interface ExportRowsParams {
+  tableId: string
+  format: Format
+  rowIds?: string[]
+  columns?: string[]
+  query: SearchFilters
 }
 
-export async function fetch(tableId: string, ctx: Ctx) {
-  return pickApi(tableId).fetch(ctx)
+export interface ExportRowsResult {
+  fileName: string
+  content: string
 }
 
-export async function fetchView(tableId: string, ctx: Ctx) {
-  return pickApi(tableId).fetchView(ctx)
+export async function exportRows(
+  options: ExportRowsParams
+): Promise<ExportRowsResult> {
+  return pickApi(options.tableId).exportRows(options)
+}
+
+export async function fetch(tableId: string) {
+  return pickApi(tableId).fetch(tableId)
+}
+
+export async function fetchView(
+  tableId: string,
+  viewName: string,
+  params: ViewParams
+) {
+  return pickApi(tableId).fetchView(viewName, params)
 }
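With this typed surface in place, server-side callers can use the rows SDK without a Koa context. A hedged usage sketch, assuming the relative import paths used by the controller hunks above; only the option and result shapes are taken from the diff:

import sdk from "../../../sdk"
import { Format } from "../../../api/controllers/view/exporters"

// Export a whole table as CSV through the reworked SDK entry point.
async function exportTableAsCsv(tableId: string) {
  const { fileName, content } = await sdk.rows.exportRows({
    tableId,
    format: Format.CSV,
    query: {}, // no filter - export everything
  })
  return { fileName, content }
}

// Search with explicit, typed parameters instead of a ctx object.
async function firstPage(tableId: string) {
  return sdk.rows.search({ tableId, query: {}, paginate: true, limit: 10 })
}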
@@ -5,21 +5,23 @@ import {
   PaginationJson,
   IncludeRelationship,
   Row,
-  Ctx,
+  SearchFilters,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import sdk from "../../../../sdk"
 import { handleRequest } from "../../../../api/controllers/row/external"
 import { breakExternalTableId } from "../../../../integrations/utils"
 import { cleanExportRows } from "../utils"
-import { apiFileReturn } from "../../../../utilities/fileSystem"
 import { utils } from "@budibase/shared-core"
+import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
+import { HTTPError } from "@budibase/backend-core"
 
-export async function search(ctx: Ctx) {
-  const tableId = ctx.params.tableId
-  const { paginate, query, ...params } = ctx.request.body
-  let { bookmark, limit } = params
-  if (!bookmark && paginate) {
+export async function search(options: SearchParams) {
+  const { tableId } = options
+  const { paginate, query, ...params } = options
+  const { limit } = params
+  let bookmark = (params.bookmark && parseInt(params.bookmark)) || null
+  if (paginate && !bookmark) {
     bookmark = 1
   }
   let paginateObj = {}

@@ -59,14 +61,14 @@ export async function search(ctx: Ctx) {
         sort,
         paginate: {
           limit: 1,
-          page: bookmark * limit + 1,
+          page: bookmark! * limit + 1,
         },
         includeSqlRelationships: IncludeRelationship.INCLUDE,
       })) as Row[]
       hasNextPage = nextRows.length > 0
     }
     // need wrapper object for bookmarks etc when paginating
-    return { rows, hasNextPage, bookmark: bookmark + 1 }
+    return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }
   } catch (err: any) {
     if (err.message && err.message.includes("does not exist")) {
       throw new Error(

@@ -78,43 +80,38 @@ export async function search(ctx: Ctx) {
   }
 }
 
-export async function exportRows(ctx: Ctx) {
-  const { datasourceId, tableName } = breakExternalTableId(ctx.params.tableId)
-  const format = ctx.query.format as string
-  const { columns } = ctx.request.body
-  const datasource = await sdk.datasources.get(datasourceId!)
-  if (!datasource || !datasource.entities) {
-    ctx.throw(400, "Datasource has not been configured for plus API.")
-  }
-
-  if (!exporters.isFormat(format)) {
-    ctx.throw(
-      400,
-      `Format ${format} not valid. Valid values: ${Object.values(
-        exporters.Format
-      )}`
-    )
-  }
-
-  if (ctx.request.body.rows) {
-    ctx.request.body = {
-      query: {
-        oneOf: {
-          _id: ctx.request.body.rows.map((row: string) => {
-            const ids = JSON.parse(
-              decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
-            )
-            if (ids.length > 1) {
-              ctx.throw(400, "Export data does not support composite keys.")
-            }
-            return ids[0]
-          }),
-        },
-      },
-    }
-  }
-
-  let result = await search(ctx)
+export async function exportRows(
+  options: ExportRowsParams
+): Promise<ExportRowsResult> {
+  const { tableId, format, columns, rowIds } = options
+  const { datasourceId, tableName } = breakExternalTableId(tableId)
+
+  let query: SearchFilters = {}
+  if (rowIds?.length) {
+    query = {
+      oneOf: {
+        _id: rowIds.map((row: string) => {
+          const ids = JSON.parse(
+            decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
+          )
+          if (ids.length > 1) {
+            throw new HTTPError(
+              "Export data does not support composite keys.",
+              400
+            )
+          }
+          return ids[0]
+        }),
+      },
+    }
+  }
+
+  const datasource = await sdk.datasources.get(datasourceId!)
+  if (!datasource || !datasource.entities) {
+    throw new HTTPError("Datasource has not been configured for plus API.", 400)
+  }
+
+  let result = await search({ tableId, query })
   let rows: Row[] = []
 
   // Filter data to only specified columns if required

@@ -131,14 +128,14 @@ export async function exportRows(ctx: Ctx) {
   }
 
   if (!tableName) {
-    ctx.throw(400, "Could not find table name.")
+    throw new HTTPError("Could not find table name.", 400)
   }
-  let schema = datasource.entities[tableName].schema
+  const schema = datasource.entities[tableName].schema
   let exportRows = cleanExportRows(rows, schema, format, columns)
 
   let headers = Object.keys(schema)
 
-  let content
+  let content: string
   switch (format) {
     case exporters.Format.CSV:
       content = exporters.csv(headers, exportRows)

@@ -150,28 +147,26 @@ export async function exportRows(ctx: Ctx) {
       content = exporters.jsonWithSchema(schema, exportRows)
       break
     default:
-      utils.unreachable(format)
-      break
+      throw utils.unreachable(format)
   }
 
-  const filename = `export.${format}`
-
-  // send down the file
-  ctx.attachment(filename)
-  return apiFileReturn(content)
+  const fileName = `export.${format}`
+  return {
+    fileName,
+    content,
+  }
 }
 
-export async function fetch(ctx: Ctx) {
-  const tableId = ctx.params.tableId
+export async function fetch(tableId: string) {
   return handleRequest(Operation.READ, tableId, {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
 }
 
-export async function fetchView(ctx: Ctx) {
+export async function fetchView(viewName: string) {
   // there are no views in external datasources, shouldn't ever be called
   // for now just fetch
-  const split = ctx.params.viewName.split("all_")
-  ctx.params.tableId = split[1] ? split[1] : split[0]
-  return fetch(ctx)
+  const split = viewName.split("all_")
+  const tableId = split[1] ? split[1] : split[0]
+  return fetch(tableId)
 }
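Note the error-handling shift in the external search module above: instead of ctx.throw(400, ...), the SDK now throws HTTPError from @budibase/backend-core, so callers and tests can assert on the error without a Koa context. A minimal sketch of that pattern (requireTableName is an invented helper, not part of the PR):

import { HTTPError } from "@budibase/backend-core"

function requireTableName(tableName?: string): string {
  if (!tableName) {
    // same message/status pairing as the SDK code above
    throw new HTTPError("Could not find table name.", 400)
  }
  return tableName
}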
@@ -1,4 +1,7 @@
-import { context } from "@budibase/backend-core"
+import {
+  context,
+  SearchParams as InternalSearchParams,
+} from "@budibase/backend-core"
 import env from "../../../../environment"
 import { fullSearch, paginatedSearch } from "./internalSearch"
 import {

@@ -8,7 +11,7 @@ import {
 } from "../../../../db/utils"
 import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
-import { Ctx, Database, Row } from "@budibase/types"
+import { Database, Row } from "@budibase/types"
 import { cleanExportRows } from "../utils"
 import {
   Format,

@@ -16,7 +19,6 @@ import {
   json,
   jsonWithSchema,
 } from "../../../../api/controllers/view/exporters"
-import { apiFileReturn } from "../../../../utilities/fileSystem"
 import * as inMemoryViews from "../../../../db/inMemoryView"
 import {
   migrateToInMemoryView,

@@ -25,25 +27,36 @@
   getFromMemoryDoc,
 } from "../../../../api/controllers/view/utils"
 import sdk from "../../../../sdk"
+import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
 
-export async function search(ctx: Ctx) {
+export async function search(options: SearchParams) {
+  const { tableId } = options
+
   // Fetch the whole table when running in cypress, as search doesn't work
   if (!env.COUCH_DB_URL && env.isCypress()) {
-    return { rows: await fetch(ctx) }
+    return { rows: await fetch(tableId) }
   }
 
-  const { tableId } = ctx.params
   const db = context.getAppDB()
-  const { paginate, query, ...params } = ctx.request.body
-  params.version = ctx.version
-  params.tableId = tableId
+  const { paginate, query } = options
+  const params: InternalSearchParams<any> = {
+    tableId: options.tableId,
+    sort: options.sort,
+    sortOrder: options.sortOrder,
+    sortType: options.sortType,
+    limit: options.limit,
+    bookmark: options.bookmark,
+    version: options.version,
+    disableEscaping: options.disableEscaping,
+  }
 
   let table
   if (params.sort && !params.sortType) {
     table = await db.get(tableId)
     const schema = table.schema
     const sortField = schema[params.sort]
-    params.sortType = sortField.type == "number" ? "number" : "string"
+    params.sortType = sortField.type === "number" ? "number" : "string"
   }
 
   let response

@@ -66,15 +79,12 @@ export async function search(ctx: Ctx) {
   return response
 }
 
-export async function exportRows(ctx: Ctx) {
+export async function exportRows(
+  options: ExportRowsParams
+): Promise<ExportRowsResult> {
+  const { tableId, format, rowIds, columns, query } = options
   const db = context.getAppDB()
-  const table = await db.get(ctx.params.tableId)
-  const rowIds = ctx.request.body.rows
-  let format = ctx.query.format
-  if (typeof format !== "string") {
-    ctx.throw(400, "Format parameter is not valid")
-  }
-  const { columns, query } = ctx.request.body
+  const table = await db.get(tableId)
 
   let result
   if (rowIds) {

@@ -87,7 +97,7 @@ export async function exportRows(ctx: Ctx) {
 
     result = await outputProcessing(table, response)
   } else if (query) {
-    let searchResponse = await search(ctx)
+    let searchResponse = await search({ tableId, query })
     result = searchResponse.rows
   }
 

@@ -108,26 +118,32 @@ export async function exportRows(ctx: Ctx) {
 
   let exportRows = cleanExportRows(rows, schema, format, columns)
   if (format === Format.CSV) {
-    ctx.attachment("export.csv")
-    return apiFileReturn(csv(Object.keys(rows[0]), exportRows))
+    return {
+      fileName: "export.csv",
+      content: csv(Object.keys(rows[0]), exportRows),
+    }
  } else if (format === Format.JSON) {
-    ctx.attachment("export.json")
-    return apiFileReturn(json(exportRows))
+    return {
+      fileName: "export.json",
+      content: json(exportRows),
+    }
   } else if (format === Format.JSON_WITH_SCHEMA) {
-    ctx.attachment("export.json")
-    return apiFileReturn(jsonWithSchema(schema, exportRows))
+    return {
+      fileName: "export.json",
+      content: jsonWithSchema(schema, exportRows),
+    }
   } else {
     throw "Format not recognised"
   }
 }
 
-export async function fetch(ctx: Ctx) {
+export async function fetch(tableId: string) {
   const db = context.getAppDB()
 
-  const tableId = ctx.params.tableId
   let table = await db.get(tableId)
   let rows = await getRawTableData(db, tableId)
-  return outputProcessing(table, rows)
+  const result = await outputProcessing(table, rows)
+  return result
 }
 
 async function getRawTableData(db: Database, tableId: string) {

@@ -145,17 +161,17 @@ async function getRawTableData(db: Database, tableId: string) {
   return rows as Row[]
 }
 
-export async function fetchView(ctx: Ctx) {
-  const viewName = decodeURIComponent(ctx.params.viewName)
+export async function fetchView(
+  viewName: string,
+  options: { calculation: string; group: string; field: string }
+) {
   // if this is a table view being looked for just transfer to that
   if (viewName.startsWith(DocumentType.TABLE)) {
-    ctx.params.tableId = viewName
-    return fetch(ctx)
+    return fetch(viewName)
   }
 
   const db = context.getAppDB()
-  const { calculation, group, field } = ctx.query
+  const { calculation, group, field } = options
   const viewInfo = await getView(db, viewName)
   let response
   if (env.SELF_HOSTED) {
@@ -1,3 +1,4 @@
+import { TableSchema } from "@budibase/types"
 import { FieldTypes } from "../../../constants"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { Format } from "../../../api/controllers/view/exporters"

@@ -11,9 +12,9 @@ export async function getDatasourceAndQuery(json: any) {
 
 export function cleanExportRows(
   rows: any[],
-  schema: any,
+  schema: TableSchema,
   format: string,
-  columns: string[]
+  columns?: string[]
 ) {
   let cleanRows = [...rows]
 
@@ -1,6 +1,10 @@
 import { exportRows } from "../../app/rows/search/external"
 import sdk from "../.."
 import { ExternalRequest } from "../../../api/controllers/row/ExternalRequest"
+import { ExportRowsParams } from "../../app/rows/search"
+import { Format } from "../../../api/controllers/view/exporters"
+import { HTTPError } from "@budibase/backend-core"
+import { Operation } from "@budibase/types"
 
 const mockDatasourcesGet = jest.fn()
 sdk.datasources.get = mockDatasourcesGet

@@ -16,30 +20,21 @@ jest.mock("../../../api/controllers/view/exporters", () => ({
 }))
 jest.mock("../../../utilities/fileSystem")
 
-function getUserCtx() {
-  return {
-    params: {
-      tableId: "datasource__tablename",
-    },
-    query: {
-      format: "csv",
-    },
-    request: {
-      body: {},
-    },
-    throw: jest.fn(() => {
-      throw "Err"
-    }),
-    attachment: jest.fn(),
-  } as any
-}
-
-describe("external row controller", () => {
+describe("external row sdk", () => {
   describe("exportRows", () => {
+    function getExportOptions(): ExportRowsParams {
+      return {
+        tableId: "datasource__tablename",
+        format: Format.CSV,
+        query: {},
+      }
+    }
+
+    const externalRequestCall = jest.fn()
     beforeAll(() => {
       jest
         .spyOn(ExternalRequest.prototype, "run")
-        .mockImplementation(() => Promise.resolve([]))
+        .mockImplementation(externalRequestCall.mockResolvedValue([]))
     })
 
     afterEach(() => {

@@ -47,15 +42,10 @@ describe("external row controller", () => {
     })
 
     it("should throw a 400 if no datasource entities are present", async () => {
-      let userCtx = getUserCtx()
-      try {
-        await exportRows(userCtx)
-      } catch (e) {
-        expect(userCtx.throw).toHaveBeenCalledWith(
-          400,
-          "Datasource has not been configured for plus API."
-        )
-      }
+      const exportOptions = getExportOptions()
+      await expect(exportRows(exportOptions)).rejects.toThrowError(
+        new HTTPError("Datasource has not been configured for plus API.", 400)
+      )
     })
 
     it("should handle single quotes from a row ID", async () => {

@@ -66,51 +56,46 @@ describe("external row controller", () => {
           },
         },
       }))
-      let userCtx = getUserCtx()
-      userCtx.request.body = {
-        rows: ["['d001']"],
-      }
+      const exportOptions = getExportOptions()
+      exportOptions.rowIds = ["['d001']"]
 
-      await exportRows(userCtx)
+      await exportRows(exportOptions)
 
-      expect(userCtx.request.body).toEqual({
-        query: {
-          oneOf: {
-            _id: ["d001"],
-          },
-        },
-      })
+      expect(ExternalRequest).toBeCalledTimes(1)
+      expect(ExternalRequest).toBeCalledWith(
+        Operation.READ,
+        exportOptions.tableId,
+        undefined
+      )
+
+      expect(externalRequestCall).toBeCalledTimes(1)
+      expect(externalRequestCall).toBeCalledWith(
+        expect.objectContaining({
+          filters: {
+            oneOf: {
+              _id: ["d001"],
+            },
+          },
+        })
+      )
     })
 
     it("should throw a 400 if any composite keys are present", async () => {
-      let userCtx = getUserCtx()
-      userCtx.request.body = {
-        rows: ["[123]", "['d001'%2C'10111']"],
-      }
-      try {
-        await exportRows(userCtx)
-      } catch (e) {
-        expect(userCtx.throw).toHaveBeenCalledWith(
-          400,
-          "Export data does not support composite keys."
-        )
-      }
+      const exportOptions = getExportOptions()
+      exportOptions.rowIds = ["[123]", "['d001'%2C'10111']"]
+      await expect(exportRows(exportOptions)).rejects.toThrowError(
+        new HTTPError("Export data does not support composite keys.", 400)
+      )
     })
 
     it("should throw a 400 if no table name was found", async () => {
-      let userCtx = getUserCtx()
-      userCtx.params.tableId = "datasource__"
-      userCtx.request.body = {
-        rows: ["[123]"],
-      }
-      try {
-        await exportRows(userCtx)
-      } catch (e) {
-        expect(userCtx.throw).toHaveBeenCalledWith(
-          400,
-          "Could not find table name."
-        )
-      }
+      const exportOptions = getExportOptions()
+      exportOptions.tableId = "datasource__"
+      exportOptions.rowIds = ["[123]"]
+
+      await expect(exportRows(exportOptions)).rejects.toThrowError(
+        new HTTPError("Could not find table name.", 400)
+      )
     })
   })
 })
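The reworked tests assert directly on the rejected HTTPError rather than on a mocked ctx.throw. A small, hedged illustration of that Jest pattern in isolation (failingExport is an invented stand-in for the SDK call under test):

import { HTTPError } from "@budibase/backend-core"

async function failingExport(): Promise<never> {
  throw new HTTPError("Could not find table name.", 400)
}

test("rejects with an HTTPError", async () => {
  await expect(failingExport()).rejects.toThrowError(
    new HTTPError("Could not find table name.", 400)
  )
})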