Merge branch 'master' into cheeks-fixes
Commit: adc0d7869b
@@ -70,6 +70,10 @@ export function encodeTableId(tableId: string) {
   }
 }
 
+export function encodeViewId(viewId: string) {
+  return encodeURIComponent(viewId)
+}
+
 export function breakExternalTableId(tableId: string) {
   const parts = tableId.split(DOUBLE_SEPARATOR)
   let datasourceId = parts.shift()
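
The hunk above adds a tiny helper next to `encodeTableId`. A minimal, self-contained sketch of why it exists (IDs that end up as URL path segments need percent-encoding, for example once view names may contain whitespace); the helper is re-implemented locally rather than imported, and the sample ID is illustrative only:

```ts
// Self-contained sketch; mirrors the one-line helper added above.
function encodeViewId(viewId: string): string {
  return encodeURIComponent(viewId)
}

// Illustrative view ID containing whitespace (not a guaranteed Budibase ID format).
const viewId = "ta_users_view name with spaces"
console.log(`/api/v2/views/${encodeViewId(viewId)}`)
// -> /api/v2/views/ta_users_view%20name%20with%20spaces
```
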
@@ -7,7 +7,7 @@
     "build": "routify -b && NODE_OPTIONS=\"--max_old_space_size=4096\" vite build --emptyOutDir",
     "start": "routify -c rollup",
     "dev": "routify -c dev:vite",
-    "dev:vite": "vite --host 0.0.0.0",
+    "dev:vite": "vite --host 0.0.0.0 --mode=dev",
     "rollup": "rollup -c -w",
    "test": "vitest run",
    "test:watch": "vitest",
@@ -33,7 +33,7 @@
       ...datasource,
       name,
     }
-    await datasources.update({
+    await datasources.save({
       datasource: updatedDatasource,
       integration: integrationForDatasource(get(integrations), datasource),
     })
@@ -41,7 +41,7 @@
         get(integrations),
         datasource
       )
-      await datasources.update({ datasource, integration })
+      await datasources.save({ datasource, integration })
 
       await afterSave({ datasource, action })
     } catch (err) {
@@ -176,7 +176,7 @@
       notifications.success(`Request saved successfully`)
       if (dynamicVariables) {
         datasource.config.dynamicVariables = rebuildVariables(saveId)
-        datasource = await datasources.update({
+        datasource = await datasources.save({
           integration: integrationInfo,
           datasource,
         })
@@ -13,7 +13,7 @@
 
   async function saveDatasource({ config, name }) {
     try {
-      await datasources.update({
+      await datasources.save({
         integration,
         datasource: { ...datasource, config, name },
       })
@@ -16,7 +16,7 @@
       get(integrations),
       updatedDatasource
     )
-    await datasources.update({ datasource: updatedDatasource, integration })
+    await datasources.save({ datasource: updatedDatasource, integration })
     notifications.success(
       `Datasource ${updatedDatasource.name} updated successfully`
     )
@@ -1,4 +1,4 @@
-import { writable, derived, get } from "svelte/store"
+import { derived, get } from "svelte/store"
 import {
   IntegrationTypes,
   DEFAULT_BB_DATASOURCE_ID,
@@ -17,6 +17,7 @@ import {
 } from "@budibase/types"
 // @ts-ignore
 import { TableNames } from "constants"
+import BudiStore from "stores/BudiStore"
 
 // when building the internal DS - seems to represent it slightly differently to the backend typing of a DS
 interface InternalDatasource extends Omit<Datasource, "entities"> {
@@ -41,102 +42,131 @@ class TableImportError extends Error {
   }
 }
 
-interface DatasourceStore {
+interface BuilderDatasourceStore {
   list: Datasource[]
   selectedDatasourceId: null | string
 }
 
-export function createDatasourcesStore() {
-  const store = writable<DatasourceStore>({
-    list: [],
-    selectedDatasourceId: null,
-  })
+interface DerivedDatasourceStore extends Omit<BuilderDatasourceStore, "list"> {
+  list: (Datasource | InternalDatasource)[]
+  selected?: Datasource | InternalDatasource
+  hasDefaultData: boolean
+  hasData: boolean
+}
 
-  const derivedStore = derived([store, tables], ([$store, $tables]) => {
-    // Set the internal datasource entities from the table list, which we're
-    // able to keep updated unlike the egress generated definition of the
-    // internal datasource
-    let internalDS: Datasource | InternalDatasource | undefined =
-      $store.list?.find(ds => ds._id === BUDIBASE_INTERNAL_DB_ID)
-    let otherDS = $store.list?.filter(ds => ds._id !== BUDIBASE_INTERNAL_DB_ID)
-    if (internalDS) {
-      const tables: Table[] = $tables.list?.filter((table: Table) => {
-        return (
-          table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
-          table._id !== TableNames.USERS
-        )
-      })
-      internalDS = {
-        ...internalDS,
-        entities: tables,
+export class DatasourceStore extends BudiStore<DerivedDatasourceStore> {
+  constructor() {
+    super({
+      list: [],
+      selectedDatasourceId: null,
+      hasDefaultData: false,
+      hasData: false,
+    })
+
+    const derivedStore = derived<
+      [DatasourceStore, BudiStore<any>],
+      DerivedDatasourceStore
+    >([this, tables as any], ([$store, $tables]) => {
+      // Set the internal datasource entities from the table list, which we're
+      // able to keep updated unlike the egress generated definition of the
+      // internal datasource
+      let internalDS: Datasource | InternalDatasource | undefined =
+        $store.list?.find(ds => ds._id === BUDIBASE_INTERNAL_DB_ID)
+      let otherDS = $store.list?.filter(
+        ds => ds._id !== BUDIBASE_INTERNAL_DB_ID
+      )
+      if (internalDS) {
+        const tables: Table[] = $tables.list?.filter((table: Table) => {
+          return (
+            table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
+            table._id !== TableNames.USERS
+          )
+        })
+        internalDS = {
+          ...internalDS,
+          entities: tables,
+        }
       }
-    }
 
-    // Build up enriched DS list
-    // Only add the internal DS if we have at least one non-users table
-    let list: (InternalDatasource | Datasource)[] = []
-    if (internalDS?.entities?.length) {
-      list.push(internalDS)
-    }
-    list = list.concat(otherDS || [])
+      // Build up enriched DS list
+      // Only add the internal DS if we have at least one non-users table
+      let list: (InternalDatasource | Datasource)[] = []
+      if (internalDS?.entities?.length) {
+        list.push(internalDS)
+      }
+      list = list.concat(otherDS || [])
 
-    return {
-      ...$store,
-      list,
-      selected: list?.find(ds => ds._id === $store.selectedDatasourceId),
-      hasDefaultData: list?.some(ds => ds._id === DEFAULT_BB_DATASOURCE_ID),
-      hasData: list?.length > 0,
-    }
-  })
+      return {
+        ...$store,
+        list,
+        selected: list?.find(ds => ds._id === $store.selectedDatasourceId),
+        hasDefaultData: list?.some(ds => ds._id === DEFAULT_BB_DATASOURCE_ID),
+        hasData: list?.length > 0,
+      }
+    })
 
-  const fetch = async () => {
+    this.fetch = this.fetch.bind(this)
+    this.init = this.fetch.bind(this)
+    this.select = this.select.bind(this)
+    this.updateSchema = this.updateSchema.bind(this)
+    this.create = this.create.bind(this)
+    this.delete = this.deleteDatasource.bind(this)
+    this.save = this.save.bind(this)
+    this.replaceDatasource = this.replaceDatasource.bind(this)
+    this.getTableNames = this.getTableNames.bind(this)
+    this.subscribe = derivedStore.subscribe
+  }
+
+  async fetch() {
     const datasources = await API.getDatasources()
-    store.update(state => ({
+    this.store.update(state => ({
       ...state,
       list: datasources,
     }))
   }
 
-  const select = (id: string) => {
-    store.update(state => ({
+  async init() {
+    return this.fetch()
+  }
 
+  select(id: string) {
+    this.store.update(state => ({
       ...state,
       selectedDatasourceId: id,
     }))
   }
 
-  const updateDatasource = (
+  private updateDatasourceInStore(
     response: { datasource: Datasource; errors?: Record<string, string> },
     { ignoreErrors }: { ignoreErrors?: boolean } = {}
-  ) => {
+  ) {
     const { datasource, errors } = response
     if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
       throw new TableImportError(errors)
     }
-    replaceDatasource(datasource._id!, datasource)
-    select(datasource._id!)
+    this.replaceDatasource(datasource._id!, datasource)
+    this.select(datasource._id!)
     return datasource
   }
 
-  const updateSchema = async (
-    datasource: Datasource,
-    tablesFilter: string[]
-  ) => {
+  async updateSchema(datasource: Datasource, tablesFilter: string[]) {
     const response = await API.buildDatasourceSchema(
       datasource?._id!,
       tablesFilter
     )
-    updateDatasource(response)
+    this.updateDatasourceInStore(response)
   }
 
-  const sourceCount = (source: string) => {
-    return get(store).list.filter(datasource => datasource.source === source)
-      .length
+  sourceCount(source: string) {
+    return get(this.store).list.filter(
+      datasource => datasource.source === source
+    ).length
   }
 
-  const checkDatasourceValidity = async (
+  async checkDatasourceValidity(
     integration: Integration,
     datasource: Datasource
-  ): Promise<{ valid: boolean; error?: string }> => {
+  ): Promise<{ valid: boolean; error?: string }> {
     if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
       const { connected, error } = await API.validateDatasource(datasource)
       if (connected) {
@@ -148,14 +178,14 @@ export function createDatasourcesStore() {
     return { valid: true }
   }
 
-  const create = async ({
+  async create({
     integration,
     config,
   }: {
     integration: UIIntegration
     config: Record<string, any>
-  }) => {
-    const count = sourceCount(integration.name)
+  }) {
+    const count = this.sourceCount(integration.name)
     const nameModifier = count === 0 ? "" : ` ${count + 1}`
 
     const datasource: Datasource = {
@@ -167,7 +197,7 @@ export function createDatasourcesStore() {
       isSQL: integration.isSQL,
     }
 
-    const { valid, error } = await checkDatasourceValidity(
+    const { valid, error } = await this.checkDatasourceValidity(
       integration,
       datasource
     )
@@ -180,41 +210,45 @@ export function createDatasourcesStore() {
       fetchSchema: integration.plus,
     })
 
-    return updateDatasource(response, { ignoreErrors: true })
+    return this.updateDatasourceInStore(response, { ignoreErrors: true })
   }
 
-  const update = async ({
+  async save({
     integration,
     datasource,
   }: {
     integration: Integration
     datasource: Datasource
-  }) => {
-    if (await checkDatasourceValidity(integration, datasource)) {
+  }) {
+    if (await this.checkDatasourceValidity(integration, datasource)) {
       throw new Error("Unable to connect")
     }
 
     const response = await API.updateDatasource(datasource)
 
-    return updateDatasource(response)
+    return this.updateDatasourceInStore(response)
   }
 
-  const deleteDatasource = async (datasource: Datasource) => {
+  async deleteDatasource(datasource: Datasource) {
     if (!datasource?._id || !datasource?._rev) {
       return
     }
     await API.deleteDatasource(datasource._id, datasource._rev)
-    replaceDatasource(datasource._id)
+    this.replaceDatasource(datasource._id)
   }
 
-  const replaceDatasource = (datasourceId: string, datasource?: Datasource) => {
+  async delete(datasource: Datasource) {
+    return this.deleteDatasource(datasource)
+  }
+
+  replaceDatasource(datasourceId: string, datasource?: Datasource) {
     if (!datasourceId) {
       return
     }
 
     // Handle deletion
     if (!datasource) {
-      store.update(state => ({
+      this.store.update(state => ({
         ...state,
         list: state.list.filter(x => x._id !== datasourceId),
       }))
@@ -224,9 +258,9 @@ export function createDatasourcesStore() {
     }
 
     // Add new datasource
-    const index = get(store).list.findIndex(x => x._id === datasource._id)
+    const index = get(this.store).list.findIndex(x => x._id === datasource._id)
     if (index === -1) {
-      store.update(state => ({
+      this.store.update(state => ({
        ...state,
        list: [...state.list, datasource],
      }))
@@ -238,30 +272,21 @@ export function createDatasourcesStore() {
 
     // Update existing datasource
     else if (datasource) {
-      store.update(state => {
+      this.store.update(state => {
         state.list[index] = datasource
         return state
       })
     }
   }
 
-  const getTableNames = async (datasource: Datasource) => {
+  async getTableNames(datasource: Datasource) {
     const info = await API.fetchInfoForDatasource(datasource)
     return info.tableNames || []
   }
 
-  return {
-    subscribe: derivedStore.subscribe,
-    fetch,
-    init: fetch,
-    select,
-    updateSchema,
-    create,
-    update,
-    delete: deleteDatasource,
-    replaceDatasource,
-    getTableNames,
-  }
+  // subscribe() {
+  //   return this.derivedStore.subscribe()
+  // }
 }
 
-export const datasources = createDatasourcesStore()
+export const datasources = new DatasourceStore()
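
Taken together, the store hunks above replace the `createDatasourcesStore` factory with a `BudiStore` subclass and rename `update` to `save`, which is why the earlier call-site hunks switch to `datasources.save(...)`. A hedged sketch of one such call site under the new API — the method names come from the class above, but the import paths and helper locations are assumptions, not taken from this diff:

```ts
// Sketch of a caller after the refactor; import paths are illustrative.
import { get } from "svelte/store"
import type { Datasource } from "@budibase/types"
import { datasources, integrations } from "stores/builder" // assumed path
import { integrationForDatasource } from "stores/selectors" // assumed path

async function renameDatasource(datasource: Datasource, name: string) {
  const updatedDatasource = { ...datasource, name }
  // Previously datasources.update({ ... }); the payload shape is unchanged.
  await datasources.save({
    datasource: updatedDatasource,
    integration: integrationForDatasource(get(integrations), datasource),
  })
}
```
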
@@ -3,6 +3,7 @@ import { derived } from "svelte/store"
 
 import { DatasourceTypes } from "constants/backend"
 import { UIIntegration, Integration } from "@budibase/types"
+import BudiStore from "stores/BudiStore"
 
 const getIntegrationOrder = (type: string | undefined) => {
   // if type is not known, sort to end
@@ -18,29 +19,35 @@ const getIntegrationOrder = (type: string | undefined) => {
   return type.charCodeAt(0) + 4
 }
 
-export const createSortedIntegrationsStore = () => {
-  return derived<typeof integrations, UIIntegration[]>(
-    integrations,
-    $integrations => {
-      const entries: [string, Integration][] = Object.entries($integrations)
-      const integrationsAsArray = entries.map(([name, integration]) => ({
-        name,
-        ...integration,
-      }))
+export class SortedIntegrationStore extends BudiStore<UIIntegration[]> {
+  constructor() {
+    super([])
 
-      return integrationsAsArray.sort((integrationA, integrationB) => {
-        const integrationASortOrder = getIntegrationOrder(integrationA.type)
-        const integrationBSortOrder = getIntegrationOrder(integrationB.type)
-        if (integrationASortOrder === integrationBSortOrder) {
-          return integrationA.friendlyName.localeCompare(
-            integrationB.friendlyName
-          )
-        }
+    const derivedStore = derived<typeof integrations, UIIntegration[]>(
+      integrations,
+      $integrations => {
+        const entries: [string, Integration][] = Object.entries($integrations)
+        const integrationsAsArray = entries.map(([name, integration]) => ({
+          name,
+          ...integration,
+        }))
 
-        return integrationASortOrder < integrationBSortOrder ? -1 : 1
-      })
-    }
-  )
+        return integrationsAsArray.sort((integrationA, integrationB) => {
+          const integrationASortOrder = getIntegrationOrder(integrationA.type)
+          const integrationBSortOrder = getIntegrationOrder(integrationB.type)
+          if (integrationASortOrder === integrationBSortOrder) {
+            return integrationA.friendlyName.localeCompare(
+              integrationB.friendlyName
+            )
+          }
+
+          return integrationASortOrder < integrationBSortOrder ? -1 : 1
+        })
+      }
+    )
+
+    this.subscribe = derivedStore.subscribe
+  }
 }
 
-export const sortedIntegrations = createSortedIntegrationsStore()
+export const sortedIntegrations = new SortedIntegrationStore()
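
The class above uses the same trick as the datasource store: it keeps exposing a derived view of another store by assigning `derivedStore.subscribe` to `this.subscribe` in the constructor, so existing `$sortedIntegrations` auto-subscriptions keep working. A generic, hedged sketch of that pattern (the names here are illustrative stand-ins, not the real Budibase classes):

```ts
import { derived, writable, type Readable } from "svelte/store"

// Stand-in for BudiStore: a writable wrapped in a class exposing `subscribe`.
class ExampleStore<T> {
  store = writable<T>(undefined as unknown as T)
  subscribe = this.store.subscribe
}

// Subclass that re-points `subscribe` at a derived store, the same shape as
// SortedIntegrationStore above.
class UppercasedStore extends ExampleStore<string[]> {
  constructor(source: Readable<string[]>) {
    super()
    const derivedStore = derived(source, $values =>
      $values.map(value => value.toUpperCase())
    )
    // Consumers that auto-subscribe now read the derived value.
    this.subscribe = derivedStore.subscribe
  }
}

// Usage sketch
const source = writable(["postgres", "mysql"])
const upper = new UppercasedStore(source)
upper.subscribe(values => console.log(values)) // ["POSTGRES", "MYSQL"]
```
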
@@ -1,12 +1,14 @@
 import { it, expect, describe, beforeEach, vi } from "vitest"
-import { createSortedIntegrationsStore } from "stores/builder/sortedIntegrations"
+import { SortedIntegrationStore } from "stores/builder/sortedIntegrations"
 import { DatasourceTypes } from "constants/backend"
 
 import { derived } from "svelte/store"
 import { integrations } from "stores/builder/integrations"
 
 vi.mock("svelte/store", () => ({
-  derived: vi.fn(),
+  derived: vi.fn(() => ({
+    subscribe: vi.fn(),
+  })),
   writable: vi.fn(() => ({
     subscribe: vi.fn(),
   })),
@@ -14,6 +16,8 @@ vi.mock("svelte/store", () => ({
 
 vi.mock("stores/builder/integrations", () => ({ integrations: vi.fn() }))
 
+const mockedDerived = vi.mocked(derived)
+
 const inputA = {
   nonRelationalA: {
     friendlyName: "non-relational A",
@@ -104,25 +108,28 @@ const expectedOutput = [
 ]
 
 describe("sorted integrations store", () => {
-  beforeEach(ctx => {
+  interface LocalContext {
+    returnedStore: SortedIntegrationStore
+    derivedCallback: any
+  }
+
+  beforeEach<LocalContext>(ctx => {
     vi.clearAllMocks()
 
-    ctx.returnedStore = createSortedIntegrationsStore()
-    ctx.derivedCallback = derived.mock.calls[0][1]
+    ctx.returnedStore = new SortedIntegrationStore()
+    ctx.derivedCallback = mockedDerived.mock.calls[0]?.[1]
   })
 
   it("calls derived with the correct parameters", () => {
-    expect(derived).toHaveBeenCalledTimes(1)
-    expect(derived).toHaveBeenCalledWith(integrations, expect.toBeFunc())
+    expect(mockedDerived).toHaveBeenCalledTimes(1)
+    expect(mockedDerived).toHaveBeenCalledWith(
+      integrations,
+      expect.any(Function)
+    )
   })
 
   describe("derived callback", () => {
-    it("When no integrations are loaded", ctx => {
-      expect(ctx.derivedCallback({})).toEqual([])
-    })
-
-    it("When integrations are present", ctx => {
+    it<LocalContext>("When integrations are present", ctx => {
       expect(ctx.derivedCallback(inputA)).toEqual(expectedOutput)
       expect(ctx.derivedCallback(inputB)).toEqual(expectedOutput)
     })
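
The reworked test above leans on two vitest features: `vi.mocked()` for a typed handle on the mocked `derived` import, and `it<Ctx>` / `beforeEach<Ctx>` to type the shared test context. A hedged, stand-alone sketch of the same shape (module and variable names are illustrative, not the real test file):

```ts
import { describe, it, beforeEach, expect, vi } from "vitest"
import { derived } from "svelte/store"

// Mock the module so `derived` records its calls and returns a subscribable stub.
vi.mock("svelte/store", () => ({
  derived: vi.fn(() => ({ subscribe: vi.fn() })),
}))

// vi.mocked() narrows `derived` to its mock type, so `.mock.calls` is available.
const mockedDerived = vi.mocked(derived)

// Inline stand-in for a module under test that registers a derived store.
function createUppercasedStore(source: any) {
  return derived(source, ($values: string[]) => $values.map(v => v.toUpperCase()))
}

interface LocalContext {
  derivedCallback: (values: string[]) => string[]
}

describe("typed test context", () => {
  beforeEach<LocalContext>(ctx => {
    vi.clearAllMocks()
    createUppercasedStore([])
    // Capture the callback the code under test handed to `derived`.
    ctx.derivedCallback = mockedDerived.mock.calls[0]?.[1] as any
  })

  it<LocalContext>("captures and exercises the derived callback", ctx => {
    expect(mockedDerived).toHaveBeenCalledTimes(1)
    expect(ctx.derivedCallback(["a", "b"])).toEqual(["A", "B"])
  })
})
```
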
@@ -102,9 +102,8 @@
     lastSearchId = Math.random()
     searching = true
     const thisSearchId = lastSearchId
-    const results = await searchFunction({
+    const results = await searchFunction(schema.tableId, {
       paginate: false,
-      tableId: schema.tableId,
       limit: 20,
       query: {
         string: {
@@ -52,10 +52,22 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const table = await utils.getTableFromSource(source)
   const { _id, ...rowData } = ctx.request.body
 
-  const dataToUpdate = await inputProcessing(
+  const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
+    relationships: true,
+  })
+
+  let dataToUpdate = cloneDeep(beforeRow)
+  const allowedField = utils.getSourceFields(source)
+  for (const key of Object.keys(rowData)) {
+    if (!allowedField.includes(key)) continue
+
+    dataToUpdate[key] = rowData[key]
+  }
+
+  dataToUpdate = await inputProcessing(
     ctx.user?._id,
     cloneDeep(source),
-    rowData
+    dataToUpdate
   )
 
   const validateResult = await sdk.rows.utils.validate({
@@ -66,10 +78,6 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     throw { validation: validateResult.errors }
   }
 
-  const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
-    relationships: true,
-  })
-
   const response = await handleRequest(Operation.UPDATE, source, {
     id: breakRowIdField(_id),
     row: dataToUpdate,
@@ -66,7 +66,7 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
   if (docIds.isViewId(sourceId)) {
     return {
       tableId: utils.extractViewInfoFromID(sourceId).tableId,
-      viewId: sourceId,
+      viewId: sql.utils.encodeViewId(sourceId),
     }
   }
   return { tableId: sql.utils.encodeTableId(ctx.params.sourceId) }
@@ -110,6 +110,21 @@ function fixBooleanFields(row: Row, table: Table) {
   return row
 }
 
+export function getSourceFields(source: Table | ViewV2): string[] {
+  const isView = sdk.views.isView(source)
+  if (isView) {
+    const fields = Object.keys(
+      helpers.views.basicFields(source, { visible: true })
+    )
+    return fields
+  }
+
+  const fields = Object.entries(source.schema)
+    .filter(([_, field]) => field.visible !== false)
+    .map(([columnName]) => columnName)
+  return fields
+}
+
 export async function sqlOutputProcessing(
   rows: DatasourcePlusQueryResponse,
   source: Table | ViewV2,
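
The patch handler hunks earlier use `getSourceFields` to copy only the fields the source actually exposes from the request body onto the stored row, which is how hidden columns (for example links trimmed out of a view) survive an update. A hedged sketch of that merge step; `getSourceFields` here is a simplified stand-in for the helper added above, not the real implementation:

```ts
// Sketch of the merge step: copy only allowed keys onto the existing row.
type Row = Record<string, any>

function getSourceFields(schema: Record<string, { visible?: boolean }>): string[] {
  return Object.entries(schema)
    .filter(([_, field]) => field.visible !== false)
    .map(([name]) => name)
}

function mergePatch(beforeRow: Row, rowData: Row, allowedFields: string[]): Row {
  const dataToUpdate: Row = { ...beforeRow }
  for (const key of Object.keys(rowData)) {
    if (!allowedFields.includes(key)) continue
    dataToUpdate[key] = rowData[key]
  }
  return dataToUpdate
}

// Example: a view only exposes `name`, so `relationship` on the stored row survives.
const schema = { name: { visible: true }, relationship: { visible: false } }
const before = { name: "main", relationship: ["row_2"] }
const patch = { name: "test2", relationship: [] }
console.log(mergePatch(before, patch, getSourceFields(schema)))
// -> { name: "test2", relationship: ["row_2"] }
```
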
@@ -1333,6 +1333,62 @@ if (descriptions.length) {
         expect(resp.relationship.length).toBe(1)
       })
 
+      it("should be able to keep linked data when updating from views that trims links from the main table", async () => {
+        let row = await config.api.row.save(table._id!, {
+          name: "main",
+          description: "main description",
+        })
+        const row2 = await config.api.row.save(otherTable._id!, {
+          name: "link",
+          description: "link description",
+          relationship: [row._id],
+        })
+
+        const view = await config.api.viewV2.create({
+          tableId: table._id!,
+          name: "view",
+          schema: {
+            name: { visible: true },
+          },
+        })
+        const resp = await config.api.row.patch(view.id, {
+          _id: row._id!,
+          _rev: row._rev!,
+          tableId: row.tableId!,
+          name: "test2",
+          relationship: [row2._id],
+        })
+        expect(resp.relationship).toBeUndefined()
+
+        const updatedRow = await config.api.row.get(table._id!, row._id!)
+        expect(updatedRow.relationship.length).toBe(1)
+      })
+
+      it("should be able to keep linked data when updating from views that trims links from the foreign table", async () => {
+        let row = await config.api.row.save(table._id!, {
+          name: "main",
+          description: "main description",
+        })
+        const row2 = await config.api.row.save(otherTable._id!, {
+          name: "link",
+          description: "link description",
+          relationship: [row._id],
+        })
+
+        const view = await config.api.viewV2.create({
+          tableId: otherTable._id!,
+          name: "view",
+        })
+        await config.api.row.patch(view.id, {
+          _id: row2._id!,
+          _rev: row2._rev!,
+          tableId: row2.tableId!,
+        })
+
+        const updatedRow = await config.api.row.get(table._id!, row._id!)
+        expect(updatedRow.relationship.length).toBe(1)
+      })
+
       !isInternal &&
         // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
        // to identity columns. This is not something Budibase does currently.
@@ -55,7 +55,7 @@ if (descriptions.length) {
     let datasource: Datasource | undefined
 
     function saveTableRequest(
-      ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
+      ...overrides: Partial<SaveTableRequest>[]
     ): SaveTableRequest {
       const req: SaveTableRequest = {
         name: generator.guid().replaceAll("-", "").substring(0, 16),
@@ -1898,6 +1898,36 @@ if (descriptions.length) {
          }
          expect(view.queryUI).toEqual(expected)
        })
+
+        it("tables and views can contain whitespaces", async () => {
+          const table = await config.api.table.save(
+            saveTableRequest({
+              name: `table with spaces ${generator.hash()}`,
+              schema: {
+                name: {
+                  type: FieldType.STRING,
+                  name: "name",
+                },
+              },
+            })
+          )
+
+          const view = await config.api.viewV2.create({
+            tableId: table._id!,
+            name: `view name with spaces`,
+            schema: {
+              name: { visible: true },
+            },
+          })
+
+          expect(await getDelegate(view)).toEqual({
+            ...view,
+            schema: {
+              id: { ...table.schema["id"], visible: false },
+              name: { ...table.schema["name"], visible: true },
+            },
+          })
+        })
      })
 
      describe("updating table schema", () => {
@@ -1,10 +1,4 @@
-import {
-  context,
-  db as dbCore,
-  docIds,
-  utils,
-  sql,
-} from "@budibase/backend-core"
+import { context, db as dbCore, docIds, utils } from "@budibase/backend-core"
 import {
   DatabaseQueryOpts,
   Datasource,
@@ -334,7 +328,7 @@ export function extractViewInfoFromID(viewId: string) {
   const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`)
   const res = regex.exec(viewId)
   return {
-    tableId: sql.utils.encodeTableId(res!.groups!["tableId"]),
+    tableId: res!.groups!["tableId"],
   }
 }
 
@@ -46,8 +46,11 @@ export class ViewV2API extends TestAPI {
   }
 
   get = async (viewId: string) => {
-    return (await this._get<ViewResponseEnriched>(`/api/v2/views/${viewId}`))
-      .data
+    return (
+      await this._get<ViewResponseEnriched>(
+        `/api/v2/views/${encodeURIComponent(viewId)}`
+      )
+    ).data
   }
 
   fetch = async (expectations?: Expectations) => {