Merge pull request #9025 from Budibase/feature/configurable-data-export

Filtering and sort on data row export
deanhannigan, 2023-01-31 15:35:12 +00:00, committed by GitHub
commit fb472b0f42
9 changed files with 285 additions and 56 deletions

View File

@@ -39,6 +39,23 @@
   $: showError($fetch.error)
   $: id, (filters = null)

+  let appliedFilter
+  let rawFilter
+  let appliedSort
+  let selectedRows = []
+
+  $: enrichedSchema,
+    () => {
+      appliedFilter = null
+      rawFilter = null
+      appliedSort = null
+      selectedRows = []
+    }
+
+  $: if (Number.isInteger($fetch.pageNumber)) {
+    selectedRows = []
+  }
+
   const showError = error => {
     if (error) {
       notifications.error(error?.message || "Unable to fetch data.")
@@ -95,11 +112,15 @@
   }

   // Fetch data whenever sorting option changes
-  const onSort = e => {
-    fetch.update({
+  const onSort = async e => {
+    const sort = {
       sortColumn: e.detail.column,
       sortOrder: e.detail.order,
-    })
+    }
+    await fetch.update(sort)
+    appliedSort = { ...sort }
+    appliedSort.sortOrder = appliedSort.sortOrder.toLowerCase()
+    selectedRows = []
   }

   // Fetch data whenever filters change
@@ -108,16 +129,19 @@
     fetch.update({
       filter: filters,
     })
+    appliedFilter = e.detail
   }

   // Fetch data whenever schema changes
   const onUpdateColumns = () => {
+    selectedRows = []
     fetch.refresh()
   }

   // Fetch data whenever rows are modified. Unfortunately we have to lose
   // our pagination place, as our bookmarks will have shifted.
   const onUpdateRows = () => {
+    selectedRows = []
     fetch.refresh()
   }
@@ -142,6 +166,9 @@
   disableSorting
   on:updatecolumns={onUpdateColumns}
   on:updaterows={onUpdateRows}
+  on:selectionUpdated={e => {
+    selectedRows = e.detail
+  }}
   customPlaceholder
 >
   <div class="buttons">
@@ -183,6 +210,9 @@
       <ExportButton
         disabled={!hasRows || !hasCols}
         view={$tables.selected?._id}
+        filters={appliedFilter}
+        sorting={appliedSort}
+        {selectedRows}
       />
       {#key id}
         <TableFilterButton
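As a rough sketch (values invented for illustration, shapes taken from the onSort/onFilter handlers above), the state this component now passes down to ExportButton after a sort, a filter and a row selection might look like:

  // appliedSort mirrors the last sort sent to fetch.update(), with the order lowercased
  appliedSort = { sortColumn: "name", sortOrder: "descending" }
  // appliedFilter is whatever the filter button emitted (e.detail)
  appliedFilter = [{ field: "name", operator: "equal", value: "foo" }]
  // selectedRows holds the row objects reported by the Table's selectionUpdated event
  selectedRows = [{ _id: "ro_abc123", name: "foo" }]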

View File

@@ -16,6 +16,7 @@
     UNSORTABLE_TYPES,
   } from "constants"
   import RoleCell from "./cells/RoleCell.svelte"
+  import { createEventDispatcher } from "svelte"

   export let schema = {}
   export let data = []
@@ -28,6 +29,8 @@
   export let disableSorting = false
   export let customPlaceholder = false

+  const dispatch = createEventDispatcher()
+
   let selectedRows = []
   let editableColumn
   let editableRow
@@ -36,6 +39,7 @@
   let customRenderers = []
   let confirmDelete

+  $: selectedRows, dispatch("selectionUpdated", selectedRows)
   $: isUsersTable = tableId === TableNames.USERS
   $: data && resetSelectedRows()
   $: editRowComponent = isUsersTable ? CreateEditUser : CreateEditRow

View File

@@ -3,7 +3,10 @@
   import ExportModal from "../modals/ExportModal.svelte"

   export let view
+  export let filters
+  export let sorting
   export let disabled = false
+  export let selectedRows

   let modal
 </script>
@@ -18,5 +21,5 @@
   Export
 </ActionButton>
 <Modal bind:this={modal}>
-  <ExportModal {view} />
+  <ExportModal {view} {filters} {sorting} {selectedRows} />
 </Modal>

View File

@@ -1,7 +1,14 @@
 <script>
-  import { Select, ModalContent, notifications } from "@budibase/bbui"
+  import {
+    Select,
+    ModalContent,
+    notifications,
+    Body,
+    Table,
+  } from "@budibase/bbui"
   import download from "downloadjs"
   import { API } from "api"
+  import { Constants, LuceneUtils } from "@budibase/frontend-core"

   const FORMATS = [
     {
@@ -19,8 +26,71 @@
   ]

   export let view
+  export let filters
+  export let sorting
+  export let selectedRows = []

   let exportFormat = FORMATS[0].key
+  let filterLookup
+
+  $: luceneFilter = LuceneUtils.buildLuceneQuery(filters)
+  $: exportOpDisplay = buildExportOpDisplay(sorting, filterDisplay, filters)
+
+  const buildFilterLookup = () => {
+    return Object.keys(Constants.OperatorOptions).reduce((acc, key) => {
+      const op = Constants.OperatorOptions[key]
+      acc[op.value] = op.label
+      return acc
+    }, {})
+  }
+  filterLookup = buildFilterLookup()
+
+  const filterDisplay = () => {
+    if (!filters) {
+      return []
+    }
+    return filters.map(filter => {
+      let newFieldName = filter.field + ""
+      const parts = newFieldName.split(":")
+      parts.shift()
+      newFieldName = parts.join(":")
+      return {
+        Field: newFieldName,
+        Operation: filterLookup[filter.operator],
+        "Field Value": filter.value || "",
+      }
+    })
+  }
+
+  const buildExportOpDisplay = (sorting, filterDisplay) => {
+    let filterDisplayConfig = filterDisplay()
+    if (sorting) {
+      filterDisplayConfig = [
+        ...filterDisplayConfig,
+        {
+          Field: sorting.sortColumn,
+          Operation: "Order By",
+          "Field Value": sorting.sortOrder,
+        },
+      ]
+    }
+    return filterDisplayConfig
+  }
+
+  const displaySchema = {
+    Field: {
+      type: "string",
+      fieldName: "Field",
+    },
+    Operation: {
+      type: "string",
+      fieldName: "Operation",
+    },
+    "Field Value": {
+      type: "string",
+      fieldName: "Value",
+    },
+  }

   async function exportView() {
     try {
@@ -33,9 +103,74 @@
       notifications.error(`Unable to export ${exportFormat.toUpperCase()} data`)
     }
   }
+
+  async function exportRows() {
+    if (selectedRows?.length) {
+      const data = await API.exportRows({
+        tableId: view,
+        rows: selectedRows.map(row => row._id),
+        format: exportFormat,
+      })
+      download(data, `export.${exportFormat}`)
+    } else if (filters || sorting) {
+      const data = await API.exportRows({
+        tableId: view,
+        format: exportFormat,
+        search: {
+          query: luceneFilter,
+          sort: sorting?.sortColumn,
+          sortOrder: sorting?.sortOrder,
+          paginate: false,
+        },
+      })
+      download(data, `export.${exportFormat}`)
+    } else {
+      await exportView()
+    }
+  }
 </script>

-<ModalContent title="Export Data" confirmText="Export" onConfirm={exportView}>
+<ModalContent
+  title="Export Data"
+  confirmText="Export"
+  onConfirm={exportRows}
+  size={filters?.length || sorting ? "M" : "S"}
+>
+  {#if selectedRows?.length}
+    <Body size="S">
+      <strong>{selectedRows?.length}</strong>
+      {`row${selectedRows?.length > 1 ? "s" : ""} will be exported`}
+    </Body>
+  {:else if filters || (sorting?.sortOrder && sorting?.sortColumn)}
+    <Body size="S">
+      {#if !filters}
+        Exporting <strong>all</strong> rows
+      {:else}
+        Filters applied
+      {/if}
+    </Body>
+    <div class="table-wrap">
+      <Table
+        schema={displaySchema}
+        data={exportOpDisplay}
+        {filters}
+        loading={false}
+        rowCount={filters?.length + 1}
+        disableSorting={true}
+        allowSelectRows={false}
+        allowEditRows={false}
+        allowEditColumns={false}
+        quiet={true}
+        compact={true}
+      />
+    </div>
+  {:else}
+    <Body size="S">
+      Exporting <strong>all</strong> rows
+    </Body>
+  {/if}
   <Select
     label="Format"
     bind:value={exportFormat}
@@ -45,3 +180,9 @@
     getOptionValue={x => x.key}
   />
 </ModalContent>
+
+<style>
+  .table-wrap :global(.wrapper) {
+    max-width: 400px;
+  }
+</style>

View File

@@ -67,12 +67,13 @@ export const buildRowEndpoints = API => ({
    * @param format the format to export (csv or json)
    * @param columns which columns to export (all if undefined)
    */
-  exportRows: async ({ tableId, rows, format, columns }) => {
+  exportRows: async ({ tableId, rows, format, columns, search }) => {
     return await API.post({
       url: `/api/${tableId}/rows/exportRows?format=${format}`,
       body: {
         rows,
         columns,
+        ...search,
       },
       parseResponse: async response => {
         return await response.text()
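When the modal passes a search object (filters and/or sorting), the body that ends up being POSTed to /api/:tableId/rows/exportRows is roughly the following — a sketch with an invented filter; the real query object is whatever LuceneUtils.buildLuceneQuery produced on the client:

  {
    "query": { "equal": { "name": "foo" } },
    "sort": "name",
    "sortOrder": "descending",
    "paginate": false
  }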

View File

@@ -21,6 +21,8 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"

+const { cleanExportRows } = require("./utils")
+
 export async function handleRequest(
   operation: Operation,
   tableId: string,
@@ -100,7 +102,7 @@ export async function destroy(ctx: BBContext) {
 export async function bulkDestroy(ctx: BBContext) {
   const { rows } = ctx.request.body
   const tableId = ctx.params.tableId
-  let promises = []
+  let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
   for (let row of rows) {
     promises.push(
       handleRequest(Operation.DELETE, tableId, {
@@ -186,6 +188,8 @@ export async function exportRows(ctx: BBContext) {
   if (!datasource || !datasource.entities) {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
+  if (ctx.request.body.rows) {
     ctx.request.body = {
       query: {
         oneOf: {
@@ -195,11 +199,13 @@
         },
       },
     }
+  }

   let result = await search(ctx)
   let rows: Row[] = []
+
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.rows.length; i++) {
       rows[i] = {}
@@ -211,14 +217,19 @@
     rows = result.rows
   }

-  let headers = Object.keys(rows[0])
+  // @ts-ignore
+  let schema = datasource.entities[tableName].schema
+  let exportRows = cleanExportRows(rows, schema, format, columns)
+
+  let headers = Object.keys(schema)
   // @ts-ignore
   const exporter = exporters[format]
   const filename = `export.${format}`

   // send down the file
   ctx.attachment(filename)
-  return apiFileReturn(exporter(headers, rows))
+  return apiFileReturn(exporter(headers, exportRows))
 }

 export async function fetchEnrichedRow(ctx: BBContext) {

View File

@@ -27,7 +27,7 @@ import {
 import { cloneDeep } from "lodash/fp"
 import { context, db as dbCore } from "@budibase/backend-core"
 import { finaliseRow, updateRelatedFormula } from "./staticFormula"
-import { csv, json, jsonWithSchema, Format, isFormat } from "../view/exporters"
+import { csv, json, jsonWithSchema, Format } from "../view/exporters"
 import { apiFileReturn } from "../../../utilities/fileSystem"
 import {
   Ctx,
@@ -38,6 +38,8 @@ import {
   Table,
 } from "@budibase/types"

+const { cleanExportRows } = require("./utils")
+
 const CALCULATION_TYPES = {
   SUM: "sum",
   COUNT: "count",
@@ -357,6 +359,14 @@ export async function search(ctx: Ctx) {
   params.version = ctx.version
   params.tableId = tableId

+  let table
+  if (params.sort && !params.sortType) {
+    table = await db.get(tableId)
+    const schema = table.schema
+    const sortField = schema[params.sort]
+    params.sortType = sortField.type == "number" ? "number" : "string"
+  }
+
   let response
   if (paginate) {
     response = await paginatedSearch(query, params)
@@ -370,7 +380,7 @@
     if (tableId === InternalTables.USER_METADATA) {
       response.rows = await getGlobalUsersFromMetadata(response.rows)
     }
-    const table = await db.get(tableId)
+    table = table || (await db.get(tableId))
     response.rows = await outputProcessing(table, response.rows)
   }
@@ -389,7 +399,10 @@ export async function exportRows(ctx: Ctx) {
   const table = await db.get(ctx.params.tableId)
   const rowIds = ctx.request.body.rows
   let format = ctx.query.format
-  const { columns } = ctx.request.body
+  const { columns, query } = ctx.request.body
+
+  let result
+  if (rowIds) {
     let response = (
       await db.allDocs({
         include_docs: true,
@@ -397,8 +410,14 @@
       })
     ).rows.map(row => row.doc)

-  let result = (await outputProcessing(table, response)) as Row[]
+    result = await outputProcessing(table, response)
+  } else if (query) {
+    let searchResponse = await exports.search(ctx)
+    result = searchResponse.rows
+  }

   let rows: Row[] = []
+  let schema = table.schema

   // Filter data to only specified columns if required
   if (columns && columns.length) {
@@ -412,12 +431,16 @@
     rows = result
   }

+  let exportRows = cleanExportRows(rows, schema, format, columns)
+
   if (format === Format.CSV) {
     ctx.attachment("export.csv")
-    return apiFileReturn(csv(Object.keys(rows[0]), rows))
+    return apiFileReturn(csv(Object.keys(rows[0]), exportRows))
   } else if (format === Format.JSON) {
     ctx.attachment("export.json")
-    return apiFileReturn(json(rows))
+    return apiFileReturn(json(exportRows))
+  } else if (format === Format.JSON_WITH_SCHEMA) {
+    ctx.attachment("export.json")
+    return apiFileReturn(jsonWithSchema(schema, exportRows))
   } else {
     throw "Format not recognised"
   }
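For reference, a sketch of the request shapes the internal exportRows handler above now accepts (values invented; the export format still arrives on the query string):

  // 1. explicit row ids, sent when rows are selected in the grid
  { rows: ["ro_abc123", "ro_def456"] }
  // 2. a search query built from the applied filters and sort
  { query: { equal: { name: "foo" } }, sort: "name", sortOrder: "descending", paginate: false }
  // 3. neither: the frontend modal falls back to the view export endpoint instead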

View File

@@ -7,6 +7,7 @@ import { BBContext, Row, Table } from "@budibase/types"
 export { removeKeyNumbering } from "../../../integrations/base/utils"
 const validateJs = require("validate.js")
 const { cloneDeep } = require("lodash/fp")
+import { Format } from "../view/exporters"
 import { Ctx } from "@budibase/types"
 import sdk from "../../../sdk"
@@ -117,3 +118,40 @@ export async function validate({
   }
   return { valid: Object.keys(errors).length === 0, errors }
 }
+
+export function cleanExportRows(
+  rows: any[],
+  schema: any,
+  format: string,
+  columns: string[]
+) {
+  let cleanRows = [...rows]
+
+  const relationships = Object.entries(schema)
+    .filter((entry: any[]) => entry[1].type === FieldTypes.LINK)
+    .map(entry => entry[0])
+
+  relationships.forEach(column => {
+    cleanRows.forEach(row => {
+      delete row[column]
+    })
+    delete schema[column]
+  })
+
+  // Intended to avoid 'undefined' in export
+  if (format === Format.CSV) {
+    const schemaKeys = Object.keys(schema)
+    for (let key of schemaKeys) {
+      if (columns?.length && columns.indexOf(key) > 0) {
+        continue
+      }
+      for (let row of cleanRows) {
+        if (row[key] == null) {
+          row[key] = ""
+        }
+      }
+    }
+  }
+  return cleanRows
+}
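A small usage sketch of cleanExportRows with invented data: the relationship column is stripped from both the rows and the schema, and for CSV exports any missing value becomes an empty string so "undefined" never appears in the file:

  const schema = {
    name: { type: "string" },
    orders: { type: "link" }, // FieldTypes.LINK
  }
  const rows = [{ name: "Ann", orders: [] }, { orders: [] }]
  cleanExportRows(rows, schema, "csv", [])
  // => [{ name: "Ann" }, { name: "" }]  (orders removed from rows and schema)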

View File

@@ -3,7 +3,6 @@ import { apiFileReturn } from "../../../utilities/fileSystem"
 import { csv, json, jsonWithSchema, Format, isFormat } from "./exporters"
 import { deleteView, getView, getViews, saveView } from "./utils"
 import { fetchView } from "../row"
-import { FieldTypes } from "../../../constants"
 import { context, events } from "@budibase/backend-core"
 import { DocumentType } from "../../../db/utils"
 import sdk from "../../../sdk"
@@ -15,6 +14,7 @@ import {
   TableSchema,
   View,
 } from "@budibase/types"
+import { cleanExportRows } from "../row/utils"

 const { cloneDeep, isEqual } = require("lodash")
@@ -162,39 +162,17 @@ export async function exportView(ctx: BBContext) {
     schema = table.schema
   }

-  // remove any relationships
-  const relationships = Object.entries(schema)
-    .filter(entry => entry[1].type === FieldTypes.LINK)
-    .map(entry => entry[0])
-  // iterate relationship columns and remove from and row and schema
-  relationships.forEach(column => {
-    rows.forEach(row => {
-      delete row[column]
-    })
-    delete schema[column]
-  })
-
-  // make sure no "undefined" entries appear in the CSV
-  if (format === Format.CSV) {
-    const schemaKeys = Object.keys(schema)
-    for (let key of schemaKeys) {
-      for (let row of rows) {
-        if (row[key] == null) {
-          row[key] = ""
-        }
-      }
-    }
-  }
+  let exportRows = cleanExportRows(rows, schema, format, [])

   if (format === Format.CSV) {
     ctx.attachment(`${viewName}.csv`)
-    ctx.body = apiFileReturn(csv(Object.keys(schema), rows))
+    ctx.body = apiFileReturn(csv(Object.keys(schema), exportRows))
   } else if (format === Format.JSON) {
     ctx.attachment(`${viewName}.json`)
-    ctx.body = apiFileReturn(json(rows))
+    ctx.body = apiFileReturn(json(exportRows))
   } else if (format === Format.JSON_WITH_SCHEMA) {
     ctx.attachment(`${viewName}.json`)
-    ctx.body = apiFileReturn(jsonWithSchema(schema, rows))
+    ctx.body = apiFileReturn(jsonWithSchema(schema, exportRows))
   } else {
     throw "Format not recognised"
   }