Add core API implementation and update most of client library to use it
This commit is contained in:
parent 9d4f18f1ce
commit 6694bdc651
@@ -0,0 +1,120 @@
import { createAPIClient } from "@budibase/frontend-core"
import { notificationStore } from "./stores"
import { FieldTypes } from "./constants"
import { TableNames } from "@budibase/frontend-core/src/constants.js"

export const API = createAPIClient({
  // Attach client specific headers
  attachHeaders: headers => {
    // Attach app ID header
    headers["x-budibase-app-id"] = window["##BUDIBASE_APP_ID##"]

    // Attach client header if not inside the builder preview
    if (!window["##BUDIBASE_IN_BUILDER##"]) {
      headers["x-budibase-type"] = "client"
    }
  },

  // Show an error notification for all API failures.
  // We could also log these to sentry.
  // Or we could check error.status and redirect to login on a 403 etc.
  onError: error => {
    notificationStore.actions.error(error.message)
  },

  // Patch certain endpoints with functionality specific to client apps
  patches: {
    // Enrich rows so they properly handle client bindings
    fetchSelf: async ({ output }) => {
      const user = output
      if (user && user._id) {
        if (user.roleId === "PUBLIC") {
          // Don't try to enrich a public user as it will 403
          return user
        } else {
          return (await enrichRows([user], TableNames.USERS))[0]
        }
      } else {
        return null
      }
    },
    fetchRelationshipData: async ({ params, output }) => {
      const tableId = params[0]?.tableId
      return await enrichRows(output, tableId)
    },
    fetchTableData: async ({ params, output }) => {
      const tableId = params[0]
      return await enrichRows(output, tableId)
    },
    searchTable: async ({ params, output }) => {
      const tableId = params[0]?.tableId
      return {
        ...output,
        rows: await enrichRows(output?.rows, tableId),
      }
    },
    fetchViewData: async ({ params, output }) => {
      const tableId = params[0]?.tableId
      return await enrichRows(output, tableId)
    },

    // Wipe any HBS formulae from table definitions, as these interfere with
    // handlebars enrichment
    fetchTableDefinition: async ({ output }) => {
      Object.keys(output?.schema || {}).forEach(field => {
        if (output.schema[field]?.type === "formula") {
          delete output.schema[field].formula
        }
      })
      return output
    },
  },
})

/**
 * Enriches rows which contain certain field types so that they can
 * be properly displayed.
 * The ability to create these bindings has been removed, but they will still
 * exist in client apps to support backwards compatibility.
 */
const enrichRows = async (rows, tableId) => {
  if (!Array.isArray(rows)) {
    return []
  }
  if (rows.length) {
    const tables = {}
    for (let row of rows) {
      // Fall back to passed in tableId if row doesn't have it specified
      let rowTableId = row.tableId || tableId
      let table = tables[rowTableId]
      if (!table) {
        // Fetch table schema so we can check column types
        table = await API.fetchTableDefinition(rowTableId)
        tables[rowTableId] = table
      }
      const schema = table?.schema
      if (schema) {
        const keys = Object.keys(schema)
        for (let key of keys) {
          const type = schema[key].type
          if (type === FieldTypes.LINK && Array.isArray(row[key])) {
            // Enrich row with a string join of relationship fields
            row[`${key}_text`] =
              row[key]
                ?.map(option => option?.primaryDisplay)
                .filter(option => !!option)
                .join(", ") || ""
          } else if (type === "attachment") {
            // Enrich row with the first image URL for any attachment fields
            let url = null
            if (Array.isArray(row[key]) && row[key][0] != null) {
              url = row[key][0].url
            }
            row[`${key}_first`] = url
          }
        }
      }
    }
  }
  return rows
}
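For orientation, here is a minimal sketch of how a client component might consume this patched client and the derived fields that enrichRows adds. The table and column names are illustrative and not taken from this commit.

  import { API } from "./api"

  // Illustrative table and column names only
  const rows = await API.fetchTableData("ta_people")
  // For a relationship column "friends", enrichment adds:
  //   row.friends_text === "Alice, Bob"
  // For an attachment column "photo", enrichment adds:
  //   row.photo_first === URL of the first attached file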
@@ -2,6 +2,7 @@
  import { writable, get } from "svelte/store"
  import { setContext, onMount } from "svelte"
  import { Layout, Heading, Body } from "@budibase/bbui"
  import ErrorSVG from "@budibase/frontend-core/assets/error.svg"
  import Component from "./Component.svelte"
  import SDK from "sdk"
  import {
@@ -24,7 +25,6 @@
  import HoverIndicator from "components/preview/HoverIndicator.svelte"
  import CustomThemeWrapper from "./CustomThemeWrapper.svelte"
  import DNDHandler from "components/preview/DNDHandler.svelte"
  import ErrorSVG from "builder/assets/error.svg"
  import KeyboardManager from "components/preview/KeyboardManager.svelte"

  // Provide contexts
@@ -29,7 +29,10 @@
    for (let i = 0; i < fileList.length; i++) {
      data.append("file", fileList[i])
    }
    return await API.uploadAttachment(data, formContext?.dataSource?.tableId)
    return await API.uploadAttachment({
      data,
      tableId: formContext?.dataSource?.tableId,
    })
  }
</script>

@@ -1,7 +1,3 @@
export const TableNames = {
  USERS: "ta_users",
}

export const FieldTypes = {
  STRING: "string",
  LONGFORM: "longform",
@@ -1,4 +1,5 @@
import * as API from "./api"
import { SchemaUtils } from "@budibase/frontend-core"
import { API } from "./api.js"
import {
  authStore,
  notificationStore,
@@ -9,7 +10,6 @@ import {
import { styleable } from "utils/styleable"
import { linkable } from "utils/linkable"
import { getAction } from "utils/getAction"
import { fetchDatasourceSchema } from "utils/schema.js"
import Provider from "components/context/Provider.svelte"
import { ActionTypes } from "constants"

@@ -23,7 +23,7 @@ export default {
  styleable,
  linkable,
  getAction,
  fetchDatasourceSchema,
  fetchDatasourceSchema: SchemaUtils.fetchDatasourceSchema,
  Provider,
  ActionTypes,
}
@@ -1,8 +1,8 @@
import * as API from "../api"
import { API } from "../api"
import { get, writable } from "svelte/store"

const createAppStore = () => {
  const store = writable({})
  const store = writable(null)

  // Fetches the app definition including screens, layouts and theme
  const fetchAppDefinition = async () => {
@@ -10,17 +10,25 @@ const createAppStore = () => {
    if (!appId) {
      throw "Cannot fetch app definition without app ID set"
    }
    try {
      const appDefinition = await API.fetchAppPackage(appId)
      store.set({
        ...appDefinition,
        appId: appDefinition?.application?.appId,
      })
    } catch (error) {
      store.set(null)
    }
  }

  // Sets the initial app ID
  const setAppID = id => {
    store.update(state => {
      if (state) {
        state.appId = id
      } else {
        state = { appId: id }
      }
      return state
    })
  }
@@ -1,4 +1,4 @@
import * as API from "../api"
import { API } from "../api"
import { writable } from "svelte/store"

const createAuthStore = () => {
@@ -6,8 +6,12 @@ const createAuthStore = () => {

  // Fetches the user object if someone is logged in and has reloaded the page
  const fetchUser = async () => {
    try {
      const user = await API.fetchSelf()
      store.set(user)
    } catch (error) {
      store.set(null)
    }
  }

  const logOut = async () => {
@@ -1,7 +1,7 @@
import { writable, derived, get } from "svelte/store"
import Manifest from "manifest.json"
import { findComponentById, findComponentPathById } from "../utils/components"
import { pingEndUser } from "../api"
import { API } from "../api"

const dispatchEvent = (type, data = {}) => {
  window.parent.postMessage({ type, data })
@@ -65,8 +65,12 @@ const createBuilderStore = () => {
    notifyLoaded: () => {
      dispatchEvent("preview-loaded")
    },
    pingEndUser: () => {
      pingEndUser()
    pingEndUser: async () => {
      try {
        await API.pingEndUser()
      } catch (error) {
        // Do nothing
      }
    },
    setSelectedPath: path => {
      writableStore.update(state => ({ ...state, selectedPath: path }))
@@ -1,5 +1,5 @@
import { writable, derived } from "svelte/store"
import { hashString } from "../utils/helpers"
import { Helpers } from "@budibase/frontend-core"

export const createContextStore = oldContext => {
  const newContext = writable({})
@@ -10,7 +10,9 @@ export const createContextStore = oldContext => {
    for (let i = 0; i < $contexts.length - 1; i++) {
      key += $contexts[i].key
    }
    key = hashString(key + JSON.stringify($contexts[$contexts.length - 1]))
    key = Helpers.hashString(
      key + JSON.stringify($contexts[$contexts.length - 1])
    )

    // Reduce global state
    const reducer = (total, context) => ({ ...total, ...context })
@@ -1,5 +1,5 @@
import { writable, get } from "svelte/store"
import { fetchTableDefinition } from "../api"
import { API } from "../api"
import { FieldTypes } from "../constants"
import { routeStore } from "./routes"

@@ -72,8 +72,14 @@ export const createDataSourceStore = () => {
    let invalidations = [dataSourceId]

    // Fetch related table IDs from table schema
    const definition = await fetchTableDefinition(dataSourceId)
    const schema = definition?.schema
    let schema
    try {
      const definition = await API.fetchTableDefinition(dataSourceId)
      schema = definition?.schema
    } catch (error) {
      schema = null
    }

    if (schema) {
      Object.values(schema).forEach(fieldSchema => {
        if (
@@ -1,6 +1,6 @@
import { get, writable } from "svelte/store"
import { push } from "svelte-spa-router"
import * as API from "../api"
import { API } from "../api"
import { peekStore } from "./peek"
import { builderStore } from "./builder"

@@ -16,10 +16,15 @@ const createRouteStore = () => {
  const store = writable(initialState)

  const fetchRoutes = async () => {
    const routeConfig = await API.fetchRoutes()
    let routeConfig
    try {
      routeConfig = await API.fetchRoutes()
    } catch (error) {
      routeConfig = null
    }
    let routes = []
    Object.values(routeConfig.routes).forEach(route => {
      Object.entries(route.subpaths).forEach(([path, config]) => {
    Object.values(routeConfig?.routes || {}).forEach(route => {
      Object.entries(route.subpaths || {}).forEach(([path, config]) => {
        routes.push({
          path,
          screenId: config.screenId,
@@ -5,8 +5,10 @@ import {
  confirmationStore,
  authStore,
  stateStore,
  notificationStore,
  dataSourceStore,
} from "stores"
import { saveRow, deleteRow, executeQuery, triggerAutomation } from "api"
import { API } from "api"
import { ActionTypes } from "constants"
import { enrichDataBindings } from "./enrichDataBinding"
import { deepSet } from "@budibase/bbui"
@@ -27,9 +29,17 @@ const saveRowHandler = async (action, context) => {
  if (tableId) {
    payload.tableId = tableId
  }
  const row = await saveRow(payload)
  return {
    row,
  try {
    const row = await API.saveRow(payload)
    notificationStore.actions.success("Row saved")

    // Refresh related datasources
    await dataSourceStore.actions.invalidateDataSource(row.tableId)

    return { row }
  } catch (error) {
    // Abort next actions
    return false
  }
}

@@ -47,9 +57,17 @@ const duplicateRowHandler = async (action, context) => {
    }
    delete payload._id
    delete payload._rev
    const row = await saveRow(payload)
    return {
      row,
    try {
      const row = await API.saveRow(payload)
      notificationStore.actions.success("Row saved")

      // Refresh related datasources
      await dataSourceStore.actions.invalidateDataSource(row.tableId)

      return { row }
    } catch (error) {
      // Abort next actions
      return false
    }
  }
}
@@ -57,14 +75,32 @@ const duplicateRowHandler = async (action, context) => {
const deleteRowHandler = async action => {
  const { tableId, revId, rowId } = action.parameters
  if (tableId && revId && rowId) {
    await deleteRow({ tableId, rowId, revId })
    try {
      await API.deleteRow({ tableId, rowId, revId })
      notificationStore.actions.success("Row deleted")

      // Refresh related datasources
      await dataSourceStore.actions.invalidateDataSource(tableId)
    } catch (error) {
      // Abort next actions
      return false
    }
  }
}

const triggerAutomationHandler = async action => {
  const { fields } = action.parameters
  if (fields) {
    await triggerAutomation(action.parameters.automationId, fields)
    try {
      await API.triggerAutomation({
        automationId: action.parameters.automationId,
        fields,
      })
      notificationStore.actions.success("Automation triggered")
    } catch (error) {
      // Abort next actions
      return false
    }
  }
}

@@ -75,12 +111,30 @@ const navigationHandler = action => {

const queryExecutionHandler = async action => {
  const { datasourceId, queryId, queryParams } = action.parameters
  const result = await executeQuery({
  try {
    const query = await API.fetchQueryDefinition(queryId)
    if (query?.datasourceId == null) {
      notificationStore.actions.error("That query couldn't be found")
      return false
    }
    const result = await API.executeQuery({
      datasourceId,
      queryId,
      parameters: queryParams,
    })

    // Trigger a notification and invalidate the datasource as long as this
    // was not a readable query
    if (!query.readable) {
      notificationStore.actions.success("Query executed successfully")
      await dataSourceStore.actions.invalidateDataSource(query.datasourceId)
    }

    return { result }
  } catch (error) {
    // Abort next actions
    return false
  }
}

const executeActionHandler = async (
Binary image asset: 2.4 KiB before, 2.4 KiB after (unchanged)
@@ -1,10 +1,10 @@
import API from "./api"

export const buildAnalyticsEndpoints = API => ({
  /**
   * Notifies that an end user client app has been loaded.
   */
  export const pingEndUser = async () => {
  pingEndUser: async () => {
    return await API.post({
      url: `/api/analytics/ping`,
    })
  }
  },
})
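Each endpoint module in this commit follows the same shape: a buildXEndpoints function that receives the core client and returns an object of named methods. As a hedged illustration of the convention, a hypothetical extra module (the name and URL below are not part of this commit) would look like:

  // Hypothetical module following the buildXEndpoints convention
  export const buildHealthEndpoints = API => ({
    /**
     * Pings a hypothetical health-check endpoint.
     */
    fetchHealth: async () => {
      return await API.get({ url: "/api/health" })
    },
  })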
@@ -1,145 +0,0 @@
import { ApiVersion } from "../constants"

const defaultAPIClientConfig = {
  attachHeaders: null,
  onError: null,
}

export const createAPIClient = config => {
  config = {
    ...defaultAPIClientConfig,
    ...config,
  }

  /**
   * API cache for cached request responses.
   */
  let cache = {}

  /**
   * Handler for API errors.
   */
  const makeErrorFromResponse = async response => {
    // Try to read a message from the error
    let message
    try {
      const json = await response.json()
      if (json?.error) {
        message = json.error
      }
    } catch (error) {
      // Do nothing
    }
    console.log("building error from", response)
    return {
      message,
      status: response.status,
    }
  }

  const makeError = message => {
    return {
      message,
      status: 400,
    }
  }

  /**
   * Performs an API call to the server.
   * App ID header is always correctly set.
   */
  const makeApiCall = async ({
    method,
    url,
    body,
    json = true,
    external = false,
  }) => {
    // Build headers
    let headers = { Accept: "application/json" }
    if (!external) {
      headers["x-budibase-api-version"] = ApiVersion
    }
    if (json) {
      headers["Content-Type"] = "application/json"
    }
    if (config?.attachHeaders) {
      config.attachHeaders(headers)
    }

    // Build request body
    let requestBody = body
    if (json) {
      try {
        requestBody = JSON.stringify(body)
      } catch (error) {
        throw makeError("Invalid JSON body")
      }
    }

    // Make request
    let response
    try {
      response = await fetch(url, {
        method,
        headers,
        body: requestBody,
        credentials: "same-origin",
      })
    } catch (error) {
      throw makeError("Failed to send request")
    }

    // Handle response
    if (response.status >= 200 && response.status < 400) {
      try {
        return await response.json()
      } catch (error) {
        return null
      }
    } else {
      const error = await makeErrorFromResponse(response)
      if (config?.onError) {
        config.onError(error)
      }
      throw error
    }
  }

  /**
   * Performs an API call to the server and caches the response.
   * Future invocation for this URL will return the cached result instead of
   * hitting the server again.
   */
  const makeCachedApiCall = async params => {
    const identifier = params.url
    if (!identifier) {
      return null
    }
    if (!cache[identifier]) {
      cache[identifier] = makeApiCall(params)
      cache[identifier] = await cache[identifier]
    }
    return await cache[identifier]
  }

  /**
   * Constructs an API call function for a particular HTTP method.
   */
  const requestApiCall = method => async params => {
    let { url, cache = false, external = false } = params
    if (!external) {
      url = `/${url}`.replace("//", "/")
    }
    const enrichedParams = { ...params, method, url }
    return await (cache ? makeCachedApiCall : makeApiCall)(enrichedParams)
  }

  return {
    post: requestApiCall("POST"),
    get: requestApiCall("GET"),
    patch: requestApiCall("PATCH"),
    delete: requestApiCall("DELETE"),
    error: message => { throw makeError(message) },
  }
}
@@ -1,10 +1,10 @@
import API from "./api"

export const buildAppEndpoints = API => ({
  /**
   * Fetches screen definition for an app.
   */
  export const fetchAppPackage = async appId => {
  fetchAppPackage: async appId => {
    return await API.get({
      url: `/api/applications/${appId}/appPackage`,
    })
  }
  },
})
@@ -1,12 +1,12 @@
import API from "./api"

export const buildAttachmentEndpoints = API => ({
  /**
   * Uploads an attachment to the server.
   */
  export const uploadAttachment = async (data, tableId = "") => {
  uploadAttachment: async ({ data, tableId }) => {
    return await API.post({
      url: `/api/attachments/${tableId}/upload`,
      body: data,
      json: false,
    })
  }
  },
})
@@ -1,11 +1,8 @@
import API from "./api"
import { enrichRows } from "./rows"
import { TableNames } from "../constants"

export const buildAuthEndpoints = API => ({
  /**
   * Performs a log in request.
   */
  export const logIn = async ({ email, password }) => {
  logIn: async ({ email, password }) => {
    if (!email) {
      return API.error("Please enter your email")
    }
@@ -14,23 +11,17 @@ export const logIn = async ({ email, password }) => {
    }
    return await API.post({
      url: "/api/global/auth",
      body: { username: email, password },
      body: {
        username: email,
        password,
      },
    })
  }
  },

  /**
   * Fetches the currently logged in user object
   */
  export const fetchSelf = async () => {
    const user = await API.get({ url: "/api/self" })
    if (user && user._id) {
      if (user.roleId === "PUBLIC") {
        // Don't try to enrich a public user as it will 403
        return user
      } else {
        return (await enrichRows([user], TableNames.USERS))[0]
      }
    } else {
      return null
    }
  }
  fetchSelf: async () => {
    return await API.get({ url: "/api/self" })
  },
})
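A small usage sketch of the reshaped auth endpoints as they would be called through the assembled client (credentials are placeholders; failures surface through the client's onError hook):

  // Illustrative call sites for the object-style endpoints
  const user = await API.fetchSelf()
  await API.logIn({ email: "user@example.com", password: "..." })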
@@ -1,16 +1,11 @@
import { notificationStore } from "stores/notification"
import API from "./api"

export const buildAutomationEndpoints = API => ({
  /**
   * Executes an automation. Must have "App Action" trigger.
   */
  export const triggerAutomation = async (automationId, fields) => {
    const res = await API.post({
  triggerAutomation: async ({ automationId, fields }) => {
    return await API.post({
      url: `/api/automations/${automationId}/trigger`,
      body: { fields },
    })
    res.error
      ? notificationStore.actions.error("An error has occurred")
      : notificationStore.actions.success("Automation triggered")
    return res
  }
  },
})
@@ -1,11 +1,188 @@
export * from "./rows"
export * from "./auth"
export * from "./tables"
export * from "./attachments"
export * from "./views"
export * from "./relationships"
export * from "./routes"
export * from "./queries"
export * from "./app"
export * from "./automations"
export * from "./analytics"
import { ApiVersion } from "../constants"
import { buildAnalyticsEndpoints } from "./analytics"
import { buildAppEndpoints } from "./app"
import { buildAttachmentEndpoints } from "./attachments"
import { buildAuthEndpoints } from "./auth"
import { buildAutomationEndpoints } from "./automations"
import { buildQueryEndpoints } from "./queries"
import { buildRelationshipEndpoints } from "./relationships"
import { buildRouteEndpoints } from "./routes"
import { buildRowEndpoints } from "./rows"
import { buildTableEndpoints } from "./tables"
import { buildViewEndpoints } from "./views"

const defaultAPIClientConfig = {
  attachHeaders: null,
  onError: null,
  patches: null,
}

/**
 * Constructs an API client with the provided configuration.
 * @param config the API client configuration
 * @return {object} the API client
 */
export const createAPIClient = config => {
  config = {
    ...defaultAPIClientConfig,
    ...config,
  }

  /**
   * Handler for API errors.
   */
  const makeErrorFromResponse = async response => {
    // Try to read a message from the error
    let message
    try {
      const json = await response.json()
      if (json?.error) {
        message = json.error
      }
    } catch (error) {
      // Do nothing
    }
    console.log("building error from", response)
    return {
      message,
      status: response.status,
    }
  }

  const makeError = message => {
    return {
      message,
      status: 400,
    }
  }

  /**
   * Performs an API call to the server.
   * App ID header is always correctly set.
   */
  const makeApiCall = async ({
    method,
    url,
    body,
    json = true,
    external = false,
  }) => {
    // Build headers
    let headers = { Accept: "application/json" }
    if (!external) {
      headers["x-budibase-api-version"] = ApiVersion
    }
    if (json) {
      headers["Content-Type"] = "application/json"
    }
    if (config?.attachHeaders) {
      config.attachHeaders(headers)
    }

    // Build request body
    let requestBody = body
    if (json) {
      try {
        requestBody = JSON.stringify(body)
      } catch (error) {
        throw makeError("Invalid JSON body")
      }
    }

    // Make request
    let response
    try {
      response = await fetch(url, {
        method,
        headers,
        body: requestBody,
        credentials: "same-origin",
      })
    } catch (error) {
      throw makeError("Failed to send request")
    }

    // Handle response
    if (response.status >= 200 && response.status < 400) {
      try {
        return await response.json()
      } catch (error) {
        return null
      }
    } else {
      const error = await makeErrorFromResponse(response)
      if (config?.onError) {
        config.onError(error)
      }
      throw error
    }
  }

  /**
   * Performs an API call to the server and caches the response.
   * Future invocation for this URL will return the cached result instead of
   * hitting the server again.
   */
  let cache = {}
  const makeCachedApiCall = async params => {
    const identifier = params.url
    if (!identifier) {
      return null
    }
    if (!cache[identifier]) {
      cache[identifier] = makeApiCall(params)
      cache[identifier] = await cache[identifier]
    }
    return await cache[identifier]
  }

  /**
   * Constructs an API call function for a particular HTTP method.
   */
  const requestApiCall = method => async params => {
    let { url, cache = false, external = false } = params
    if (!external) {
      url = `/${url}`.replace("//", "/")
    }
    const enrichedParams = { ...params, method, url }
    return await (cache ? makeCachedApiCall : makeApiCall)(enrichedParams)
  }

  // Build the underlying core API methods
  let API = {
    post: requestApiCall("POST"),
    get: requestApiCall("GET"),
    patch: requestApiCall("PATCH"),
    delete: requestApiCall("DELETE"),
    error: message => { throw makeError(message) },
  }

  // Attach all other endpoints
  API = {
    ...API,
    ...buildAnalyticsEndpoints(API),
    ...buildAppEndpoints(API),
    ...buildAttachmentEndpoints(API),
    ...buildAuthEndpoints(API),
    ...buildAutomationEndpoints(API),
    ...buildQueryEndpoints(API),
    ...buildRelationshipEndpoints(API),
    ...buildRouteEndpoints(API),
    ...buildRowEndpoints(API),
    ...buildTableEndpoints(API),
    ...buildViewEndpoints(API),
  }

  // Assign any patches
  const patches = Object.entries(config.patches || {})
  if (patches.length) {
    patches.forEach(([method, fn]) => {
      // Capture the unpatched implementation so the wrapper below does not
      // call itself recursively
      const baseFn = API[method]
      API[method] = async (...params) => {
        const output = await baseFn(...params)
        return await fn({ params, output })
      }
    })
  }

  return API
}
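To make the patching contract concrete, here is a minimal sketch of a consumer-defined patch and what callers then observe. The patch below is illustrative and is not one of the patches added in this commit.

  // Illustrative: wrap a built-in endpoint so callers see post-processed output
  const API = createAPIClient({
    patches: {
      // Receives the original call's params and output; the return value is
      // what callers of API.fetchSelf() will get back
      fetchSelf: async ({ output }) => ({ ...output, enriched: true }),
    },
  })

  const user = await API.fetchSelf() // user.enriched === true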
@@ -1,34 +1,24 @@
import { notificationStore, dataSourceStore } from "stores"
import API from "./api"

export const buildQueryEndpoints = API => ({
  /**
   * Executes a query against an external data connector.
   */
  export const executeQuery = async ({ queryId, pagination, parameters }) => {
    const query = await fetchQueryDefinition(queryId)
    if (query?.datasourceId == null) {
      notificationStore.actions.error("That query couldn't be found")
      return
    }
    const res = await API.post({
  executeQuery: async ({ queryId, pagination, parameters }) => {
    return await API.post({
      url: `/api/v2/queries/${queryId}`,
      body: {
        parameters,
        pagination,
      },
    })
    if (res.error) {
      notificationStore.actions.error("An error has occurred")
    } else if (!query.readable) {
      notificationStore.actions.success("Query executed successfully")
      await dataSourceStore.actions.invalidateDataSource(query.datasourceId)
    }
    return res
  }
  },

  /**
   * Fetches the definition of an external query.
   */
  export const fetchQueryDefinition = async queryId => {
    return await API.get({ url: `/api/queries/${queryId}`, cache: true })
  }
  fetchQueryDefinition: async queryId => {
    return await API.get({
      url: `/api/queries/${queryId}`,
      cache: true,
    })
  },
})
@@ -1,14 +1,12 @@
import API from "./api"
import { enrichRows } from "./rows"

export const buildRelationshipEndpoints = API => ({
  /**
   * Fetches related rows for a certain field of a certain row.
   */
  export const fetchRelationshipData = async ({ tableId, rowId, fieldName }) => {
  fetchRelationshipData: async ({ tableId, rowId, fieldName }) => {
    if (!tableId || !rowId || !fieldName) {
      return []
    }
    const response = await API.get({ url: `/api/${tableId}/${rowId}/enrich` })
    const rows = response[fieldName] || []
    return await enrichRows(rows, tableId)
  }
    return response[fieldName] || []
  },
})
@@ -1,10 +1,10 @@
import API from "./api"

export const buildRouteEndpoints = API => ({
  /**
   * Fetches available routes for the client app.
   */
  export const fetchRoutes = async () => {
  fetchClientAppRoutes: async () => {
    return await API.get({
      url: `/api/routing/client`,
    })
  }
  },
})
@@ -1,155 +1,43 @@
import { notificationStore, dataSourceStore } from "stores"
import API from "./api"
import { fetchTableDefinition } from "./tables"
import { FieldTypes } from "../constants"

export const buildRowEndpoints = API => ({
  /**
   * Fetches data about a certain row in a table.
   */
  export const fetchRow = async ({ tableId, rowId }) => {
  fetchRow: async ({ tableId, rowId }) => {
    if (!tableId || !rowId) {
      return
      return null
    }
    const row = await API.get({
      url: `/api/${tableId}/rows/${rowId}`,
    })
    return (await enrichRows([row], tableId))[0]
  }
    return (await API.enrichRows([row], tableId))[0]
  },

  /**
   * Creates a row in a table.
   */
  export const saveRow = async row => {
  saveRow: async row => {
    if (!row?.tableId) {
      return
    }
    const res = await API.post({
    return await API.post({
      url: `/api/${row.tableId}/rows`,
      body: row,
    })
    res.error
      ? notificationStore.actions.error("An error has occurred")
      : notificationStore.actions.success("Row saved")

    // Refresh related datasources
    await dataSourceStore.actions.invalidateDataSource(row.tableId)

    return res
  }

  /**
   * Updates a row in a table.
   */
  export const updateRow = async row => {
    if (!row?.tableId || !row?._id) {
      return
    }
    const res = await API.patch({
      url: `/api/${row.tableId}/rows`,
      body: row,
    })
    res.error
      ? notificationStore.actions.error("An error has occurred")
      : notificationStore.actions.success("Row updated")

    // Refresh related datasources
    await dataSourceStore.actions.invalidateDataSource(row.tableId)

    return res
  }
  },

  /**
   * Deletes a row from a table.
   */
  export const deleteRow = async ({ tableId, rowId, revId }) => {
  deleteRow: async ({ tableId, rowId, revId }) => {
    if (!tableId || !rowId || !revId) {
      return
    }
    const res = await API.del({
    return await API.delete({
      url: `/api/${tableId}/rows`,
      body: {
        _id: rowId,
        _rev: revId,
      },
    })
    res.error
      ? notificationStore.actions.error("An error has occurred")
      : notificationStore.actions.success("Row deleted")

    // Refresh related datasources
    await dataSourceStore.actions.invalidateDataSource(tableId)

    return res
  }

  /**
   * Deletes many rows from a table.
   */
  export const deleteRows = async ({ tableId, rows }) => {
    if (!tableId || !rows) {
      return
    }
    const res = await API.del({
      url: `/api/${tableId}/rows`,
      body: {
        rows,
      },
    })
    res.error
      ? notificationStore.actions.error("An error has occurred")
      : notificationStore.actions.success(`${rows.length} row(s) deleted`)

    // Refresh related datasources
    await dataSourceStore.actions.invalidateDataSource(tableId)

    return res
  }

  /**
   * Enriches rows which contain certain field types so that they can
   * be properly displayed.
   * The ability to create these bindings has been removed, but they will still
   * exist in client apps to support backwards compatibility.
   */
  export const enrichRows = async (rows, tableId) => {
    if (!Array.isArray(rows)) {
      return []
    }
    if (rows.length) {
      // map of tables, in case a row being loaded is not from the same table
      const tables = {}
      for (let row of rows) {
        // fallback to passed in tableId if row doesn't have it specified
        let rowTableId = row.tableId || tableId
        let table = tables[rowTableId]
        if (!table) {
          // Fetch table schema so we can check column types
          table = await fetchTableDefinition(rowTableId)
          tables[rowTableId] = table
        }
        const schema = table?.schema
        if (schema) {
          const keys = Object.keys(schema)
          for (let key of keys) {
            const type = schema[key].type
            if (type === FieldTypes.LINK && Array.isArray(row[key])) {
              // Enrich row with a string join of relationship fields
              row[`${key}_text`] =
                row[key]
                  ?.map(option => option?.primaryDisplay)
                  .filter(option => !!option)
                  .join(", ") || ""
            } else if (type === "attachment") {
              // Enrich row with the first image URL for any attachment fields
              let url = null
              if (Array.isArray(row[key]) && row[key][0] != null) {
                url = row[key][0].url
              }
              row[`${key}_first`] = url
            }
          }
        }
      }
    }
    return rows
  }
})
@@ -1,35 +1,26 @@
import API from "./api"
import { enrichRows } from "./rows"

export const buildTableEndpoints = API => ({
  /**
   * Fetches a table definition.
   * Since definitions cannot change at runtime, the result is cached.
   */
  export const fetchTableDefinition = async tableId => {
    const res = await API.get({ url: `/api/tables/${tableId}`, cache: true })

    // Wipe any HBS formulae, as these interfere with handlebars enrichment
    Object.keys(res?.schema || {}).forEach(field => {
      if (res.schema[field]?.type === "formula") {
        delete res.schema[field].formula
      }
  fetchTableDefinition: async tableId => {
    return await API.get({
      url: `/api/tables/${tableId}`,
      cache: true,
    })

    return res
  }
  },

  /**
   * Fetches all rows from a table.
   */
  export const fetchTableData = async tableId => {
    const rows = await API.get({ url: `/api/${tableId}/rows` })
    return await enrichRows(rows, tableId)
  }
  fetchTableData: async tableId => {
    return await API.get({ url: `/api/${tableId}/rows` })
  },

  /**
   * Searches a table using Lucene.
   */
  export const searchTable = async ({
  searchTable: async ({
    tableId,
    query,
    bookmark,
@@ -44,7 +35,7 @@ export const searchTable = async ({
        rows: [],
      }
    }
    const res = await API.post({
    return await API.post({
      url: `/api/${tableId}/search`,
      body: {
        query,
@@ -56,8 +47,5 @@ export const searchTable = async ({
        paginate,
      },
    })
    return {
      ...res,
      rows: await enrichRows(res?.rows, tableId),
    }
  }
  },
})
@@ -1,18 +1,9 @@
import API from "./api"
import { enrichRows } from "./rows"

export const buildViewEndpoints = API => ({
  /**
   * Fetches all rows in a view.
   */
  export const fetchViewData = async ({
    name,
    field,
    groupBy,
    calculation,
    tableId,
  }) => {
  fetchViewData: async ({ name, field, groupBy, calculation }) => {
    const params = new URLSearchParams()

    if (calculation) {
      params.set("field", field)
      params.set("calculation", calculation)
@@ -20,11 +11,9 @@ export const fetchViewData = async ({
    if (groupBy) {
      params.set("group", groupBy ? "true" : "false")
    }

    const QUERY_VIEW_URL = field
      ? `/api/views/${name}?${params}`
      : `/api/views/${name}`

    const rows = await API.get({ url: QUERY_VIEW_URL })
    return await enrichRows(rows, tableId)
  }
    return await API.get({ url: QUERY_VIEW_URL })
  },
})
@@ -1,5 +1,8 @@
export const ApiVersion = "1"
export const TableNames = {
  USERS: "ta_users",
}

export const ApiVersion = "1"
/**
 * API Version Changelog
 * v1:
@@ -2,10 +2,9 @@ import { writable, derived, get } from "svelte/store"
import {
  buildLuceneQuery,
  luceneLimit,
  luceneQuery,
  runLuceneQuery,
  luceneSort,
} from "../utils/lucene"
import { fetchTableDefinition } from "../api"

/**
 * Parent class which handles the implementation of fetching data from an
@@ -13,6 +12,9 @@ import { fetchTableDefinition } from "../api"
 * For other types of datasource, this class is overridden and extended.
 */
export default class DataFetch {
  // API client
  API = null

  // Feature flags
  featureStore = writable({
    supportsSearch: false,
@@ -57,10 +59,14 @@
   */
  constructor(opts) {
    // Merge options with their default values
    this.API = opts?.API
    this.options = {
      ...this.options,
      ...opts,
    }
    if (!this.API) {
      throw "An API client is required for fetching data"
    }

    // Bind all functions to properly scope "this"
    this.getData = this.getData.bind(this)
@@ -110,12 +116,6 @@
   */
  async getInitialData() {
    const { datasource, filter, sortColumn, paginate } = this.options
    const tableId = datasource?.tableId

    // Ensure table ID exists
    if (!tableId) {
      return
    }

    // Fetch datasource definition and determine feature flags
    const definition = await this.constructor.getDefinition(datasource)
@@ -184,7 +184,7 @@

    // If we don't support searching, do a client search
    if (!features.supportsSearch) {
      rows = luceneQuery(rows, query)
      rows = runLuceneQuery(rows, query)
    }

    // If we don't support sorting, do a client-side sort
@@ -228,7 +228,11 @@
    if (!datasource?.tableId) {
      return null
    }
    return await fetchTableDefinition(datasource.tableId)
    try {
      return await this.API.fetchTableDefinition(datasource.tableId)
    } catch (error) {
      return null
    }
  }

  /**
@@ -28,7 +28,7 @@ export default class FieldFetch extends DataFetch {

    // These sources will be available directly from context
    const data = datasource?.value || []
    let rows = []
    let rows
    if (Array.isArray(data) && data[0] && typeof data[0] !== "object") {
      rows = data.map(value => ({ value }))
    } else {
@@ -1,13 +1,16 @@
import FieldFetch from "./FieldFetch.js"
import { fetchTableDefinition } from "../api"
import { getJSONArrayDatasourceSchema } from "../utils/json"

export default class JSONArrayFetch extends FieldFetch {
  static async getDefinition(datasource) {
    // JSON arrays need their table definitions fetched.
    // We can then extract their schema as a subset of the table schema.
    const table = await fetchTableDefinition(datasource.tableId)
    try {
      const table = await this.API.fetchTableDefinition(datasource.tableId)
      const schema = getJSONArrayDatasourceSchema(table?.schema, datasource)
      return { schema }
    } catch (error) {
      return null
    }
  }
}
@@ -1,5 +1,4 @@
import DataFetch from "./DataFetch.js"
import { executeQuery, fetchQueryDefinition } from "../api"
import { cloneDeep } from "lodash/fp"
import { get } from "svelte/store"

@@ -16,7 +15,11 @@ export default class QueryFetch extends DataFetch {
    if (!datasource?._id) {
      return null
    }
    return await fetchQueryDefinition(datasource._id)
    try {
      return await this.API.fetchQueryDefinition(datasource._id)
    } catch (error) {
      return null
    }
  }

  async getData() {
@@ -41,7 +44,9 @@ export default class QueryFetch extends DataFetch {
    }

    // Execute query
    const { data, pagination, ...rest } = await executeQuery(queryPayload)
    try {
      const res = await this.API.executeQuery(queryPayload)
      const { data, pagination, ...rest } = res

      // Derive pagination info from response
      let nextCursor = null
@@ -64,5 +69,11 @@ export default class QueryFetch extends DataFetch {
        cursor: nextCursor,
        hasNextPage,
      }
    } catch (error) {
      return {
        rows: [],
        hasNextPage: false,
      }
    }
  }
}
@@ -1,16 +1,17 @@
import DataFetch from "./DataFetch.js"
import { fetchRelationshipData } from "../api"

export default class RelationshipFetch extends DataFetch {
  async getData() {
    const { datasource } = this.options
    const res = await fetchRelationshipData({
    try {
      const res = await this.API.fetchRelationshipData({
        rowId: datasource?.rowId,
        tableId: datasource?.rowTableId,
        fieldName: datasource?.fieldName,
      })
      return {
        rows: res || [],
      return { rows: res || [] }
    } catch (error) {
      return { rows: [] }
    }
  }
}
@@ -1,6 +1,5 @@
import { get } from "svelte/store"
import DataFetch from "./DataFetch.js"
import { searchTable } from "../api"

export default class TableFetch extends DataFetch {
  determineFeatureFlags() {
@@ -18,7 +17,8 @@ export default class TableFetch extends DataFetch {
    const { cursor, query } = get(this.store)

    // Search table
    const res = await searchTable({
    try {
      const res = await this.API.searchTable({
        tableId,
        query,
        limit,
@@ -33,5 +33,11 @@ export default class TableFetch extends DataFetch {
        hasNextPage: res?.hasNextPage || false,
        cursor: res?.bookmark || null,
      }
    } catch (error) {
      return {
        rows: [],
        hasNextPage: false,
      }
    }
  }
}
@@ -1,5 +1,4 @@
import DataFetch from "./DataFetch.js"
import { fetchViewData } from "../api"

export default class ViewFetch extends DataFetch {
  static getSchema(datasource, definition) {
@@ -8,9 +7,11 @@ export default class ViewFetch extends DataFetch {

  async getData() {
    const { datasource } = this.options
    const res = await fetchViewData(datasource)
    return {
      rows: res || [],
    try {
      const res = await this.API.fetchViewData(datasource)
      return { rows: res || [] }
    } catch (error) {
      return { rows: [] }
    }
  }
}
@@ -18,7 +18,7 @@ const DataFetchMap = {
  jsonarray: JSONArrayFetch,
}

export const fetchData = (datasource, options) => {
export const fetchData = ({ API, datasource, options }) => {
  const Fetch = DataFetchMap[datasource?.type] || TableFetch
  return new Fetch({ datasource, ...options })
  return new Fetch({ API, datasource, ...options })
}
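Finally, a minimal sketch of constructing a fetch instance with the shared client under the new object-style signature. The import paths and IDs are illustrative.

  // Illustrative usage of the new fetchData signature
  import { fetchData } from "@budibase/frontend-core"
  import { API } from "./api"

  const fetch = fetchData({
    API,
    datasource: { type: "table", tableId: "ta_people" },
    options: { limit: 10 },
  })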