Add infinite scroll, improve row fetching, add error handling, fix svelte store updates
parent b45ba0eba7
commit 385e9eadb0
@@ -24,132 +24,132 @@
  import { fetchData, Sheet } from "@budibase/frontend-core"
  import { API } from "api"

  let hideAutocolumns = true
  let filters

  $: isUsersTable = $tables.selected?._id === TableNames.USERS
  $: type = $tables.selected?.type
  $: isInternal = type !== "external"
  $: schema = $tables.selected?.schema
  $: enrichedSchema = enrichSchema($tables.selected?.schema)
  $: id = $tables.selected?._id
  $: fetch = createFetch(id)
  $: hasCols = checkHasCols(schema)
  $: hasRows = !!$fetch.rows?.length
  $: showError($fetch.error)
  $: id, (filters = null)

  let appliedFilter
  let rawFilter
  let appliedSort
  let selectedRows = []

  $: enrichedSchema,
    () => {
      appliedFilter = null
      rawFilter = null
      appliedSort = null
      selectedRows = []
    }

  $: if (Number.isInteger($fetch.pageNumber)) {
    selectedRows = []
  }

  const showError = error => {
    if (error) {
      notifications.error(error?.message || "Unable to fetch data.")
    }
  }

  const enrichSchema = schema => {
    let tempSchema = { ...schema }
    tempSchema._id = {
      type: "internal",
      editable: false,
      displayName: "ID",
      autocolumn: true,
    }
    if (isInternal) {
      tempSchema._rev = {
        type: "internal",
        editable: false,
        displayName: "Revision",
        autocolumn: true,
      }
    }

    return tempSchema
  }

  const checkHasCols = schema => {
    if (!schema || Object.keys(schema).length === 0) {
      return false
    }
    let fields = Object.values(schema)
    for (let field of fields) {
      if (!field.autocolumn) {
        return true
      }
    }
    return false
  }

  // Fetches new data whenever the table changes
  const createFetch = tableId => {
    return fetchData({
      API,
      datasource: {
        tableId,
        type: "table",
      },
      options: {
        schema,
        limit: 10,
        paginate: true,
      },
    })
  }

  // Fetch data whenever sorting option changes
  const onSort = async e => {
    const sort = {
      sortColumn: e.detail.column,
      sortOrder: e.detail.order,
    }
    await fetch.update(sort)
    appliedSort = { ...sort }
    appliedSort.sortOrder = appliedSort.sortOrder.toLowerCase()
    selectedRows = []
  }

  // Fetch data whenever filters change
  const onFilter = e => {
    filters = e.detail
    fetch.update({
      filter: filters,
    })
    appliedFilter = e.detail
  }

  // Fetch data whenever schema changes
  const onUpdateColumns = () => {
    selectedRows = []
    fetch.refresh()
  }

  // Fetch data whenever rows are modified. Unfortunately we have to lose
  // our pagination place, as our bookmarks will have shifted.
  const onUpdateRows = () => {
    selectedRows = []
    fetch.refresh()
  }

  // When importing new rows it is better to reinitialise request/paging data.
  // Not doing so causes inconsistency in paging behaviour and content.
  const onImportData = () => {
    fetch.getInitialData()
  }
  // let hideAutocolumns = true
  // let filters
  //
  // $: isUsersTable = $tables.selected?._id === TableNames.USERS
  // $: type = $tables.selected?.type
  // $: isInternal = type !== "external"
  // $: schema = $tables.selected?.schema
  // $: enrichedSchema = enrichSchema($tables.selected?.schema)
  // $: id = $tables.selected?._id
  // $: fetch = createFetch(id)
  // $: hasCols = checkHasCols(schema)
  // $: hasRows = !!$fetch.rows?.length
  // $: showError($fetch.error)
  // $: id, (filters = null)
  //
  // let appliedFilter
  // let rawFilter
  // let appliedSort
  // let selectedRows = []
  //
  // $: enrichedSchema,
  // () => {
  // appliedFilter = null
  // rawFilter = null
  // appliedSort = null
  // selectedRows = []
  // }
  //
  // $: if (Number.isInteger($fetch.pageNumber)) {
  // selectedRows = []
  // }
  //
  // const showError = error => {
  // if (error) {
  // notifications.error(error?.message || "Unable to fetch data.")
  // }
  // }
  //
  // const enrichSchema = schema => {
  // let tempSchema = { ...schema }
  // tempSchema._id = {
  // type: "internal",
  // editable: false,
  // displayName: "ID",
  // autocolumn: true,
  // }
  // if (isInternal) {
  // tempSchema._rev = {
  // type: "internal",
  // editable: false,
  // displayName: "Revision",
  // autocolumn: true,
  // }
  // }
  //
  // return tempSchema
  // }
  //
  // const checkHasCols = schema => {
  // if (!schema || Object.keys(schema).length === 0) {
  // return false
  // }
  // let fields = Object.values(schema)
  // for (let field of fields) {
  // if (!field.autocolumn) {
  // return true
  // }
  // }
  // return false
  // }
  //
  // // Fetches new data whenever the table changes
  // const createFetch = tableId => {
  // return fetchData({
  // API,
  // datasource: {
  // tableId,
  // type: "table",
  // },
  // options: {
  // schema,
  // limit: 10,
  // paginate: true,
  // },
  // })
  // }
  //
  // // Fetch data whenever sorting option changes
  // const onSort = async e => {
  // const sort = {
  // sortColumn: e.detail.column,
  // sortOrder: e.detail.order,
  // }
  // await fetch.update(sort)
  // appliedSort = { ...sort }
  // appliedSort.sortOrder = appliedSort.sortOrder.toLowerCase()
  // selectedRows = []
  // }
  //
  // // Fetch data whenever filters change
  // const onFilter = e => {
  // filters = e.detail
  // fetch.update({
  // filter: filters,
  // })
  // appliedFilter = e.detail
  // }
  //
  // // Fetch data whenever schema changes
  // const onUpdateColumns = () => {
  // selectedRows = []
  // fetch.refresh()
  // }
  //
  // // Fetch data whenever rows are modified. Unfortunately we have to lose
  // // our pagination place, as our bookmarks will have shifted.
  // const onUpdateRows = () => {
  // selectedRows = []
  // fetch.refresh()
  // }
  //
  // // When importing new rows it is better to reinitialise request/paging data.
  // // Not doing so causes inconsistency in paging behaviour and content.
  // const onImportData = () => {
  // fetch.getInitialData()
  // }
</script>

<div>
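
The component above drives all data loading through the object returned by fetchData: onSort and onFilter call update, onUpdateColumns and onUpdateRows call refresh, and onImportData calls getInitialData, while the reactive statements read $fetch.rows, $fetch.error and $fetch.pageNumber. A minimal sketch of that assumed interface, for illustration only; the real implementation lives in @budibase/frontend-core and is not part of this diff:

// Rough stand-in for the object fetchData is assumed to return.
// loadPage is a hypothetical callback supplied by the caller.
import { writable } from "svelte/store"

export const makeFakeFetch = loadPage => {
  // Store shape mirrors what the component reads: rows, error, pageNumber
  const store = writable({ rows: [], error: null, pageNumber: 0 })
  let options = {}

  const run = async () => {
    try {
      const rows = await loadPage(options)
      store.update(state => ({ ...state, rows, error: null }))
    } catch (error) {
      store.update(state => ({ ...state, error }))
    }
  }

  return {
    subscribe: store.subscribe,
    // Merge new sort/filter options and re-run, as onSort/onFilter do
    update: async newOptions => {
      options = { ...options, ...newOptions }
      await run()
    },
    // Re-run with the current options, as onUpdateColumns/onUpdateRows do
    refresh: run,
    // Reset paging entirely, as onImportData does
    getInitialData: run,
  }
}
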
@@ -24,12 +24,11 @@
    if (allSelected) {
      $selectedRows = {}
    } else {
      selectedRows.update(state => {
        $rows.forEach(row => {
          state[row._id] = true
        })
        return state
      let allRows = {}
      $rows.forEach(row => {
        allRows[row._id] = true
      })
      $selectedRows = allRows
    }
  }
</script>
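
This hunk swaps an in-place mutation inside selectedRows.update for building a fresh allRows object and assigning it, so subscribers always receive a new reference. The allSelected flag used above is not defined in this hunk; a plausible derivation, shown purely as an assumption and not taken from this commit, is that it is true when every visible row ID is already in the selection map:

// Hypothetical derivation of allSelected from the rows and selectedRows
// stores used above. Not part of this diff.
import { derived } from "svelte/store"

const allSelected = derived([rows, selectedRows], ([$rows, $selectedRows]) => {
  return $rows.length > 0 && $rows.every(row => $selectedRows[row._id])
})
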
@@ -8,7 +8,9 @@

  const addRow = async field => {
    const newRow = await rows.actions.addRow()
    $selectedCellId = `${newRow._id}-${field.name}`
    if (newRow) {
      $selectedCellId = `${newRow._id}-${field.name}`
    }
  }
</script>
@@ -1,3 +1,5 @@
<svelte:options immutable={true} />

<script>
  import { setContext } from "svelte"
  import { writable } from "svelte/store"
@@ -24,9 +26,9 @@
  const rand = Math.random()

  // State stores
  const tableIdStore = writable(tableId)
  const tableIdStore = writable()
  const columns = writable([])
  const selectedCellId = writable(null)
  const selectedCellId = writable()
  const selectedRows = writable({})
  const scroll = writable({
    left: 0,
@@ -1,3 +1,5 @@
<svelte:options immutable={true} />

<script>
  export let header = false
  export let label = false
@@ -1,3 +1,5 @@
<svelte:options immutable={true} />

<script>
  import { getContext } from "svelte"
  import SpreadsheetCell from "./SheetCell.svelte"
@@ -38,10 +40,10 @@
  }

  const selectRow = id => {
    selectedRows.update(state => {
      state[id] = !state[id]
      return state
    })
    selectedRows.update(state => ({
      ...state,
      [id]: !state[id],
    }))
  }
</script>
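
With <svelte:options immutable={true} /> the compiler assumes values are never mutated and compares props by reference. A writable store still notifies its subscribers after update, but a callback that mutates the existing state object and returns it hands immutable components the same reference, so they may skip re-rendering; returning a new object, as the replacement code above does, guarantees the change is seen. A minimal standalone illustration of the two patterns:

import { writable } from "svelte/store"

const selected = writable({})

// Pattern replaced by this commit: mutates state and returns the same
// reference, which components compiled with immutable={true} can ignore.
const toggleMutating = id => {
  selected.update(state => {
    state[id] = !state[id]
    return state
  })
}

// Pattern introduced by this commit: spread into a new object so the
// store value is a fresh reference on every toggle.
const toggleImmutable = id => {
  selected.update(state => ({
    ...state,
    [id]: !state[id],
  }))
}
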
@@ -104,7 +106,9 @@
    color: var(--spectrum-global-color-gray-500);
  }
  .row:hover .checkbox,
  .checkbox.visible,
  .checkbox.visible {
    display: flex;
  }
  .number.visible {
    display: block;
  }
@@ -58,10 +58,10 @@ export const createReorderStores = context => {
    })

    if (swapColumnIdx !== $reorder.swapColumnIdx) {
      reorder.update(state => {
        state.swapColumnIdx = swapColumnIdx
        return state
      })
      reorder.update(state => ({
        ...state,
        swapColumnIdx: swapColumnIdx,
      }))
    }
  }

@@ -1,14 +1,34 @@
import { writable, derived, get } from "svelte/store"
import { buildLuceneQuery } from "../../../utils/lucene"
import { fetchData } from "../../../fetch/fetchData"
import { notifications } from "@budibase/bbui"

export const createRowsStore = context => {
  const { tableId, filter, API } = context

  // Flag for whether this is the first time loading our fetch
  let loaded = false

  // Local cache of row IDs to speed up checking if a row exists
  let rowCacheMap = {}

  // Exported stores
  const rows = writable([])
  const schema = writable({})
  const primaryDisplay = writable(null)

  // Local stores for managing fetching data
  const query = derived(filter, $filter => buildLuceneQuery($filter))
  const fetch = derived(tableId, $tableId => {
    if (!$tableId) {
      return null
    }
    // Wipe state and fully hydrate next time our fetch returns data
    loaded = false
    rowCacheMap = {}
    rows.set([])

    // Create fetch and load initial data
    return fetchData({
      API,
      datasource: {
@@ -27,30 +47,66 @@ export const createRowsStore = context => {

  // Update fetch when query changes
  query.subscribe($query => {
    get(fetch).update({
    get(fetch)?.update({
      query: $query,
    })
  })

  // Observe each data fetch and extract some data
  fetch.subscribe($fetch => {
    if (!$fetch) {
      return
    }
    $fetch.subscribe($$fetch => {
      console.log("new fetch")
      rows.set($$fetch.rows.map((row, idx) => ({ ...row, __idx: idx })))
      schema.set($$fetch.schema)
      primaryDisplay.set($$fetch.definition?.primaryDisplay)
      if ($$fetch.loaded) {
        if (!loaded) {
          // Hydrate initial data
          loaded = true
          console.log("instantiate new fetch data")
          schema.set($$fetch.schema)
          primaryDisplay.set($$fetch.definition?.primaryDisplay)
        }

        // Process new rows
        handleNewRows($$fetch.rows)
      }
    })
  })

  // Local handler to process new rows inside the fetch, and append any new
  // rows to state that we haven't encountered before
  const handleNewRows = newRows => {
    let rowsToAppend = []
    let newRow
    for (let i = 0; i < newRows.length; i++) {
      newRow = newRows[i]
      if (!rowCacheMap[newRow._id]) {
        rowCacheMap[newRow._id] = true
        rowsToAppend.push(newRow)
      }
    }
    if (rowsToAppend.length) {
      rows.update($rows => {
        return [
          ...$rows,
          ...rowsToAppend.map((row, idx) => ({
            ...row,
            __idx: $rows.length + idx,
          })),
        ]
      })
    }
  }

  // Adds a new empty row
  const addRow = async () => {
    let newRow = await API.saveRow({ tableId: get(tableId) })
    newRow.__idx = get(rows).length
    rows.update(state => {
      state.push(newRow)
      return state
    })
    return newRow
    try {
      const newRow = await API.saveRow({ tableId: get(tableId) })
      handleNewRows([newRow])
      return newRow
    } catch (error) {
      notifications.error(`Error adding row: ${error?.message}`)
    }
  }

  // Updates a value of a row
@@ -71,7 +127,11 @@ export const createRowsStore = context => {

    // Save change
    delete newRow.__idx
    await API.saveRow(newRow)
    try {
      await API.saveRow(newRow)
    } catch (error) {
      notifications.error(`Error saving row: ${error?.message}`)
    }

    // Fetch row from the server again
    newRow = await API.fetchRow({
@@ -103,11 +163,25 @@ export const createRowsStore = context => {
    })

    // Update state
    // We deliberately do not remove IDs from the cache map as the data may
    // still exist inside the fetch, but we don't want to add it again
    rows.update(state => {
      return state
        .filter(row => !deletedIds.includes(row._id))
        .map((row, idx) => ({ ...row, __idx: idx }))
    })

    // If we ended up with no rows, try getting the next page
    if (!get(rows).length) {
      loadNextPage()
    }
  }

  // Loads the next page of data if available
  const loadNextPage = () => {
    const $fetch = get(fetch)
    console.log("fetch next page")
    $fetch?.nextPage()
  }

  return {
@@ -117,6 +191,7 @@ export const createRowsStore = context => {
        addRow,
        updateRow,
        deleteRows,
        loadNextPage,
      },
    },
    schema,
@@ -1,4 +1,4 @@
import { writable, derived } from "svelte/store"
import { writable, derived, get } from "svelte/store"

export const createViewportStores = context => {
  const { cellHeight, columns, rows, scroll, bounds } = context
@@ -18,7 +18,7 @@ export const createViewportStores = context => {
  scroll.subscribe(({ left, top }) => {
    window.requestAnimationFrame(() => {
      // Only update local state when big changes occur
      if (Math.abs(top - scrollTop) > cellHeight * 2) {
      if (Math.abs(top - scrollTop) > cellHeight * 4) {
        scrollTop = top
        scrollTopStore.set(top)
      }
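
Raising the threshold from two to four row heights means the cached scrollTop, and everything derived from it, is republished half as often while scrolling, at the cost of the visible window lagging the real scroll position by up to four rows before it snaps. A small self-contained version of the same thresholded-update idea; the names here are illustrative and not taken from this file:

// Only propagate a new scroll position once it has moved further than
// `threshold` pixels from the last value we published.
const makeThresholdedSetter = (store, threshold) => {
  let last = 0
  return value => {
    if (Math.abs(value - last) > threshold) {
      last = value
      store.set(value)
    }
  }
}

// e.g. with 32px rows, cellHeight * 4 = 128px between published updates
// const setScrollTop = makeThresholdedSetter(scrollTopStore, 32 * 4)
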
@@ -67,5 +67,15 @@ export const createViewportStores = context => {
    }
  )

  // Fetch next page when approaching end of data
  visibleRows.subscribe($visibleRows => {
    const lastVisible = $visibleRows[$visibleRows.length - 1]
    const $rows = get(rows)
    const lastRow = $rows[$rows.length - 1]
    if (lastVisible && lastRow && lastVisible._id === lastRow._id) {
      rows.actions.loadNextPage()
    }
  })

  return { visibleRows, visibleColumns }
}
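
Putting the pieces of this commit together: when the last rendered row is also the last row held in the store, the viewport asks the rows store for the next page, the fetch appends another page, and handleNewRows deduplicates against rowCacheMap before appending so overlapping pages never produce duplicate rows. A condensed, self-contained sketch of that append-with-dedup step, using mock data rather than the real paginated fetch:

import { writable, get } from "svelte/store"

const rows = writable([])
const rowCacheMap = {}

// Same idea as handleNewRows above: only append rows we haven't seen,
// and keep __idx contiguous so the virtual list can index into the store.
const appendNewRows = newRows => {
  const fresh = newRows.filter(row => !rowCacheMap[row._id])
  fresh.forEach(row => (rowCacheMap[row._id] = true))
  if (fresh.length) {
    rows.update($rows => [
      ...$rows,
      ...fresh.map((row, idx) => ({ ...row, __idx: $rows.length + idx })),
    ])
  }
}

// Simulate two overlapping pages coming back from paginated fetches
appendNewRows([{ _id: "a" }, { _id: "b" }])
appendNewRows([{ _id: "b" }, { _id: "c" }])
console.log(get(rows).map(row => row._id)) // ["a", "b", "c"]
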