Filtering and sorting on data row export
parent d1bea44c95 · commit cdcefa16bb
@@ -39,6 +39,23 @@
   $: showError($fetch.error)
   $: id, (filters = null)
 
+  let appliedFilter
+  let rawFilter
+  let appliedSort
+  let selectedRows = []
+
+  $: enrichedSchema,
+    () => {
+      appliedFilter = null
+      rawFilter = null
+      appliedSort = null
+      selectedRows = []
+    }
+
+  $: if ($fetch.pageNumber) {
+    selectedRows = []
+  }
+
   const showError = error => {
     if (error) {
       notifications.error(error?.message || "Unable to fetch data.")
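Editor's note on the `$: dep, expression` form used above: naming a value before the comma registers it as a dependency, so the whole statement re-runs whenever that value changes, as with `$: id, (filters = null)` clearing the filters when the table id changes. A minimal standalone sketch with hypothetical names:

    // Svelte <script> block; `tableId` is a hypothetical prop
    export let tableId

    let filters = null

    // Re-runs whenever `tableId` changes, even though the expression
    // after the comma never reads `tableId` itself.
    $: tableId, (filters = null)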
@@ -95,11 +112,15 @@
   }
 
   // Fetch data whenever sorting option changes
-  const onSort = e => {
-    fetch.update({
+  const onSort = async e => {
+    const sort = {
       sortColumn: e.detail.column,
       sortOrder: e.detail.order,
-    })
-  }
+    }
+    await fetch.update(sort)
+    appliedSort = { ...sort }
+    appliedSort.sortOrder = appliedSort.sortOrder.toLowerCase()
+    selectedRows = []
+  }
 
   // Fetch data whenever filters change
@@ -108,16 +129,19 @@
     fetch.update({
       filter: filters,
     })
+    appliedFilter = e.detail
   }
 
   // Fetch data whenever schema changes
   const onUpdateColumns = () => {
+    selectedRows = []
     fetch.refresh()
   }
 
   // Fetch data whenever rows are modified. Unfortunately we have to lose
   // our pagination place, as our bookmarks will have shifted.
   const onUpdateRows = () => {
+    selectedRows = []
     fetch.refresh()
   }
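`appliedFilter` keeps the raw filter array exactly as emitted by the filter button. Judging from how the export modal consumes it further down (`filter.field`, `filter.operator`, `filter.value`, with a prefix split on `":"`), each entry plausibly looks like the sketch below — an inference from that code, not a confirmed schema:

    // Inferred shape of one entry in appliedFilter / filters
    const exampleFilter = {
      field: "1:name",    // numeric prefix, stripped for display in the modal
      operator: "equal",  // matches a Constants.OperatorOptions value
      value: "Alice",
    }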
@@ -142,6 +166,9 @@
     disableSorting
     on:updatecolumns={onUpdateColumns}
     on:updaterows={onUpdateRows}
+    on:selectionUpdated={e => {
+      selectedRows = e.detail
+    }}
     customPlaceholder
   >
     <div class="buttons">
@@ -183,6 +210,9 @@
     <ExportButton
       disabled={!hasRows || !hasCols}
       view={$tables.selected?._id}
+      filters={appliedFilter}
+      sorting={appliedSort}
+      {selectedRows}
     />
     {#key id}
       <TableFilterButton
@@ -16,6 +16,7 @@
   UNSORTABLE_TYPES,
 } from "constants"
 import RoleCell from "./cells/RoleCell.svelte"
+import { createEventDispatcher } from "svelte"
 
 export let schema = {}
 export let data = []
@@ -25,10 +26,11 @@
   export let loading = false
   export let hideAutocolumns
   export let rowCount
   export let type
   export let disableSorting = false
   export let customPlaceholder = false
 
+  const dispatch = createEventDispatcher()
+
   let selectedRows = []
   let editableColumn
   let editableRow
@@ -37,6 +39,7 @@
   let customRenderers = []
   let confirmDelete
 
+  $: selectedRows, dispatch("selectionUpdated", selectedRows)
   $: isUsersTable = tableId === TableNames.USERS
   $: data && resetSelectedRows()
   $: editRowComponent = isUsersTable ? CreateEditUser : CreateEditRow
@@ -3,7 +3,10 @@
   import ExportModal from "../modals/ExportModal.svelte"
 
   export let view
+  export let filters
+  export let sorting
   export let disabled = false
+  export let selectedRows
 
   let modal
 </script>
@@ -18,5 +21,5 @@
     Export
   </ActionButton>
   <Modal bind:this={modal}>
-    <ExportModal {view} />
+    <ExportModal {view} {filters} {sorting} {selectedRows} />
   </Modal>
@@ -1,7 +1,14 @@
 <script>
-  import { Select, ModalContent, notifications } from "@budibase/bbui"
+  import {
+    Select,
+    ModalContent,
+    notifications,
+    Body,
+    Table,
+  } from "@budibase/bbui"
   import download from "downloadjs"
   import { API } from "api"
+  import { Constants, LuceneUtils } from "@budibase/frontend-core"
 
   const FORMATS = [
     {
@@ -15,8 +22,65 @@
   ]
 
   export let view
+  export let filters
+  export let sorting
+  export let selectedRows = []
 
   let exportFormat = FORMATS[0].key
+  let filterLookup
+  let exportOpDisplay = []
+
+  $: luceneFilter = LuceneUtils.buildLuceneQuery(filters)
+
+  $: if (filters) {
+    exportOpDisplay = filterDisplay()
+  }
+
+  $: if (sorting) {
+    exportOpDisplay.push({
+      Field: sorting.sortColumn,
+      Operation: "Order By",
+      "Field Value": sorting.sortOrder,
+    })
+  }
+
+  const buildFilterLookup = () => {
+    return Object.keys(Constants.OperatorOptions).reduce((acc, key) => {
+      const op = Constants.OperatorOptions[key]
+      acc[op.value] = op.label
+      return acc
+    }, {})
+  }
+  filterLookup = buildFilterLookup()
+
+  const filterDisplay = () => {
+    return filters.map(filter => {
+      let newFieldName = filter.field + ""
+      const parts = newFieldName.split(":")
+      parts.shift()
+      newFieldName = parts.join(":")
+      return {
+        Field: newFieldName,
+        Operation: filterLookup[filter.operator],
+        "Field Value": filter.value || "",
+      }
+    })
+  }
+
+  const displaySchema = {
+    Field: {
+      type: "string",
+      fieldName: "Field",
+    },
+    Operation: {
+      type: "string",
+      fieldName: "Operation",
+    },
+    "Field Value": {
+      type: "string",
+      fieldName: "Value",
+    },
+  }
 
   async function exportView() {
     try {
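`buildFilterLookup` inverts `Constants.OperatorOptions` into a value-to-label map, and `filterDisplay` converts each applied filter into a row for the summary table shown in the modal. A worked example, assuming an operator entry of the form `{ value: "equal", label: "Equals" }`:

    // filterLookup => { equal: "Equals", ... }
    const filter = { field: "1:name", operator: "equal", value: "Alice" }
    // filterDisplay() drops everything before the first ":" and yields:
    // { Field: "name", Operation: "Equals", "Field Value": "Alice" }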
@@ -29,9 +93,74 @@
       notifications.error(`Unable to export ${exportFormat.toUpperCase()} data`)
     }
   }
+
+  async function exportRows() {
+    if (selectedRows?.length) {
+      const data = await API.exportRows({
+        tableId: view,
+        rows: selectedRows.map(row => row._id),
+        format: exportFormat,
+      })
+      download(data, `export.${exportFormat}`)
+    } else if (filters || sorting) {
+      const data = await API.exportRows({
+        tableId: view,
+        format: exportFormat,
+        search: {
+          query: luceneFilter,
+          sort: sorting?.sortColumn,
+          sortOrder: sorting?.sortOrder,
+          paginate: false,
+        },
+      })
+      download(data, `export.${exportFormat}`)
+    } else {
+      await exportView()
+    }
+  }
 </script>
 
-<ModalContent title="Export Data" confirmText="Export" onConfirm={exportView}>
+<ModalContent
+  title="Export Data"
+  confirmText="Export"
+  onConfirm={exportRows}
+  size={filters?.length || sorting ? "M" : "S"}
+>
+  {#if selectedRows?.length}
+    <Body size="S">
+      <strong>{selectedRows?.length}</strong>
+      {`row${selectedRows?.length > 1 ? "s" : ""} will be exported`}
+    </Body>
+  {:else if filters || (sorting?.sortOrder && sorting?.sortColumn)}
+    <Body size="S">
+      {#if !filters}
+        Exporting <strong>all</strong> rows
+      {:else}
+        Filters applied
+      {/if}
+    </Body>
+
+    <div class="table-wrap">
+      <Table
+        schema={displaySchema}
+        data={exportOpDisplay}
+        {filters}
+        loading={false}
+        rowCount={filters?.length + 1}
+        disableSorting={true}
+        allowSelectRows={false}
+        allowEditRows={false}
+        allowEditColumns={false}
+        quiet={true}
+        compact={true}
+      />
+    </div>
+  {:else}
+    <Body size="S">
+      Exporting <strong>all</strong> rows
+    </Body>
+  {/if}
+
   <Select
     label="Format"
     bind:value={exportFormat}
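`exportRows` picks one of three paths: export the selected rows by `_id`, run a filtered/sorted server-side search, or fall back to the original whole-view export. For the middle path, the call would look roughly like this (field values illustrative):

    // Illustrative call for the "filters or sorting" branch
    const data = await API.exportRows({
      tableId: view,            // the table/view id held by the modal
      format: "csv",
      search: {
        query: luceneFilter,    // LuceneUtils.buildLuceneQuery(filters)
        sort: "name",
        sortOrder: "descending",
        paginate: false,        // export every match, not a single page
      },
    })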
@@ -41,3 +170,9 @@
     getOptionValue={x => x.key}
   />
 </ModalContent>
+
+<style>
+  .table-wrap :global(.wrapper) {
+    max-width: 400px;
+  }
+</style>
@@ -67,12 +67,13 @@ export const buildRowEndpoints = API => ({
    * @param format the format to export (csv or json)
    * @param columns which columns to export (all if undefined)
    */
-  exportRows: async ({ tableId, rows, format, columns }) => {
+  exportRows: async ({ tableId, rows, format, columns, search }) => {
     return await API.post({
       url: `/api/${tableId}/rows/exportRows?format=${format}`,
       body: {
         rows,
         columns,
+        ...search,
       },
       parseResponse: async response => {
         return await response.text()
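Because `search` is spread into the body, its keys (`query`, `sort`, `sortOrder`, `paginate`) land at the top level beside `rows` and `columns`. A sketch of the resulting POST body for a filtered export (the query shape is an assumption based on `buildLuceneQuery`):

    // POST /api/<tableId>/rows/exportRows?format=csv
    const body = {
      rows: undefined,      // absent for a search-based export
      columns: undefined,
      query: luceneFilter,  // e.g. { equal: { name: "Alice" } } (shape assumed)
      sort: "name",
      sortOrder: "descending",
      paginate: false,
    }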
@@ -20,6 +20,8 @@ import {
   Datasource,
 } from "@budibase/types"
 
+const { cleanExportRows } = require("../row/utils")
+
 export async function handleRequest(
   operation: Operation,
   tableId: string,
@@ -178,7 +180,7 @@ export async function validate(ctx: BBContext) {
 }
 
 export async function exportRows(ctx: BBContext) {
-  const { datasourceId } = breakExternalTableId(ctx.params.tableId)
+  const { datasourceId, tableName } = breakExternalTableId(ctx.params.tableId)
   const db = context.getAppDB()
   const format = ctx.query.format
   const { columns } = ctx.request.body
@@ -186,20 +188,24 @@ export async function exportRows(ctx: BBContext) {
   if (!datasource || !datasource.entities) {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
-  ctx.request.body = {
-    query: {
-      oneOf: {
-        _id: ctx.request.body.rows.map(
-          (row: string) => JSON.parse(decodeURI(row))[0]
-        ),
-      },
-    },
-  }
+
+  if (ctx.request.body.rows) {
+    ctx.request.body = {
+      query: {
+        oneOf: {
+          _id: ctx.request.body.rows.map(
+            (row: string) => JSON.parse(decodeURI(row))[0]
+          ),
+        },
+      },
+    }
+  }
+
   let result = await search(ctx)
   let rows: Row[] = []
 
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.rows.length; i++) {
       rows[i] = {}
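The `oneOf._id` mapping exists because external (SQL) row `_id`s arrive URI-encoded as JSON arrays of primary-key values, as the `JSON.parse(decodeURI(row))[0]` call implies. A hedged sketch of the decoding step:

    // Assumed example: a row whose primary key is 42
    const encoded = "%5B42%5D"
    decodeURI(encoded)                 // => "[42]"
    JSON.parse(decodeURI(encoded))[0]  // => 42
    // One key per selected row builds query.oneOf._id = [42, ...]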
@@ -211,14 +217,19 @@ export async function exportRows(ctx: BBContext) {
     rows = result.rows
   }
 
-  let headers = Object.keys(rows[0])
+  // @ts-ignore
+  let schema = datasource.entities[tableName].schema
+  let exportRows = cleanExportRows(rows, schema, format, columns)
+
+  let headers = Object.keys(schema)
+
   // @ts-ignore
   const exporter = exporters[format]
   const filename = `export.${format}`
 
   // send down the file
   ctx.attachment(filename)
-  return apiFileReturn(exporter(headers, rows))
+  return apiFileReturn(exporter(headers, exportRows))
 }
 
 export async function fetchEnrichedRow(ctx: BBContext) {
@@ -13,7 +13,7 @@ import {
   cleanupAttachments,
 } from "../../../utilities/rowProcessor"
 import { FieldTypes } from "../../../constants"
-import { validate as rowValidate, findRow } from "./utils"
+import { validate as rowValidate, findRow, cleanExportRows } from "./utils"
 import { fullSearch, paginatedSearch } from "./internalSearch"
 import { getGlobalUsersFromMetadata } from "../../../utilities/global"
 import * as inMemoryViews from "../../../db/inMemoryView"
@@ -356,6 +356,14 @@ export async function search(ctx: BBContext) {
   params.version = ctx.version
   params.tableId = tableId
 
+  let table
+  if (params.sort && !params.sortType) {
+    table = await db.get(tableId)
+    const schema = table.schema
+    const sortField = schema[params.sort]
+    params.sortType = sortField.type == "number" ? "number" : "string"
+  }
+
   let response
   if (paginate) {
     response = await paginatedSearch(query, params)
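`search` now infers `sortType` from the table schema whenever a caller supplies `sort` without it, which is exactly what the export modal does. In short:

    // Caller sends only: { sort: "age", sortOrder: "descending" }
    // If table.schema.age.type === "number", search() fills in
    // params.sortType = "number"; every other type falls back to "string".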
@@ -369,7 +377,7 @@ export async function search(ctx: BBContext) {
   if (tableId === InternalTables.USER_METADATA) {
     response.rows = await getGlobalUsersFromMetadata(response.rows)
   }
-  const table = await db.get(tableId)
+  table = table || (await db.get(tableId))
   response.rows = await outputProcessing(table, response.rows)
 }
@@ -388,18 +396,27 @@ export async function exportRows(ctx: BBContext) {
   const table = await db.get(ctx.params.tableId)
   const rowIds = ctx.request.body.rows
   let format = ctx.query.format
-  const { columns } = ctx.request.body
-  let response = (
-    await db.allDocs({
-      include_docs: true,
-      keys: rowIds,
-    })
-  ).rows.map(row => row.doc)
+  const { columns, query } = ctx.request.body
+
+  let result
+  if (rowIds) {
+    let response = (
+      await db.allDocs({
+        include_docs: true,
+        keys: rowIds,
+      })
+    ).rows.map(row => row.doc)
+
+    result = await outputProcessing(table, response)
+  } else if (query) {
+    let searchResponse = await exports.search(ctx)
+    result = searchResponse.rows
+  }
 
-  let result = (await outputProcessing(table, response)) as Row[]
   let rows: Row[] = []
+  let schema = table.schema
 
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.length; i++) {
       rows[i] = {}
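The internal `exportRows` endpoint now accepts two mutually exclusive body shapes (values illustrative; the internal row id format is an assumption):

    // 1. Explicit selection — fetched via db.allDocs:
    //    { rows: ["ro_ta_abc_1", "ro_ta_abc_2"], columns: ["name"] }

    // 2. Filtered/sorted export — delegated to search(ctx):
    //    { query: { equal: { name: "Alice" } },
    //      sort: "name", sortOrder: "descending", paginate: false }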
@@ -411,14 +428,17 @@ export async function exportRows(ctx: BBContext) {
     rows = result
   }
 
-  let headers = Object.keys(rows[0])
+  let exportRows = cleanExportRows(rows, schema, format, columns) // this isn't correct
+
+  let headers = Object.keys(schema)
+
   // @ts-ignore
   const exporter = exporters[format]
   const filename = `export.${format}`
 
   // send down the file
   ctx.attachment(filename)
-  return apiFileReturn(exporter(headers, rows))
+  return apiFileReturn(exporter(headers, exportRows))
 }
 
 export async function fetchEnrichedRow(ctx: BBContext) {
@@ -7,6 +7,7 @@ import { BBContext, Row, Table } from "@budibase/types"
 export { removeKeyNumbering } from "../../../integrations/base/utils"
 const validateJs = require("validate.js")
 const { cloneDeep } = require("lodash/fp")
+import { ExportFormats } from "../view/exporters"
 
 validateJs.extend(validateJs.validators.datetime, {
   parse: function (value: string) {
@@ -116,3 +117,40 @@ export async function validate({
   }
   return { valid: Object.keys(errors).length === 0, errors }
 }
+
+export function cleanExportRows(
+  rows: any[],
+  schema: any,
+  format: string,
+  columns: string[]
+) {
+  let cleanRows = [...rows]
+
+  const relationships = Object.entries(schema)
+    .filter((entry: any[]) => entry[1].type === FieldTypes.LINK)
+    .map(entry => entry[0])
+
+  relationships.forEach(column => {
+    cleanRows.forEach(row => {
+      delete row[column]
+    })
+    delete schema[column]
+  })
+
+  // Intended to avoid 'undefined' in export
+  if (format === ExportFormats.CSV) {
+    const schemaKeys = Object.keys(schema)
+    for (let key of schemaKeys) {
+      if (columns?.length && columns.indexOf(key) > 0) {
+        continue
+      }
+      for (let row of cleanRows) {
+        if (row[key] == null) {
+          row[key] = ""
+        }
+      }
+    }
+  }
+
+  return cleanRows
+}
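`cleanExportRows` is the helper factored out of the view exporter below: it strips relationship (`LINK`) columns from both rows and schema, then blank-fills missing values for CSV so no cell renders as "undefined". A usage sketch with invented data:

    const schema = {
      name: { type: "string" },
      age: { type: "number" },
      owner: { type: "link" }, // matches FieldTypes.LINK, so it is stripped
    }
    const rows = [
      { name: "Alice", age: 30, owner: ["ro_abc"] },
      { name: "Bob" }, // age missing
    ]

    cleanExportRows(rows, schema, "csv", [])
    // => [{ name: "Alice", age: 30 }, { name: "Bob", age: "" }]
    // `owner` is deleted from every row and from the schema itself.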
@@ -15,6 +15,7 @@ import {
   TableSchema,
   View,
 } from "@budibase/types"
+import { cleanExportRows } from "../row/utils"
 
 const { cloneDeep, isEqual } = require("lodash")
@@ -158,29 +159,7 @@ export async function exportView(ctx: BBContext) {
     schema = table.schema
   }
 
-  // remove any relationships
-  const relationships = Object.entries(schema)
-    .filter(entry => entry[1].type === FieldTypes.LINK)
-    .map(entry => entry[0])
-  // iterate relationship columns and remove from and row and schema
-  relationships.forEach(column => {
-    rows.forEach(row => {
-      delete row[column]
-    })
-    delete schema[column]
-  })
-
-  // make sure no "undefined" entries appear in the CSV
-  if (format === exporters.ExportFormats.CSV) {
-    const schemaKeys = Object.keys(schema)
-    for (let key of schemaKeys) {
-      for (let row of rows) {
-        if (row[key] == null) {
-          row[key] = ""
-        }
-      }
-    }
-  }
+  let exportRows = cleanExportRows(rows, schema, format, [])
 
   // Export part
   let headers = Object.keys(schema)
@@ -188,7 +167,7 @@ export async function exportView(ctx: BBContext) {
   const filename = `${viewName}.${format}`
   // send down the file
   ctx.attachment(filename)
-  ctx.body = apiFileReturn(exporter(headers, rows))
+  ctx.body = apiFileReturn(exporter(headers, exportRows))
 
   if (viewName.startsWith(DocumentType.TABLE)) {
     await events.table.exported(table, format as TableExportFormat)