Merge branch 'master' of github.com:Budibase/budibase into chore/aws-v2-to-v3
commit f41052058c
@@ -30,7 +30,7 @@ env:
 
 jobs:
   lint:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -47,7 +47,7 @@ jobs:
       - run: yarn lint
 
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -76,7 +76,7 @@ jobs:
          fi
 
   helm-lint:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -88,7 +88,7 @@ jobs:
       - run: cd charts/budibase && helm lint .
 
   test-libraries:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -122,7 +122,7 @@ jobs:
          fi
 
   test-worker:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -151,7 +151,7 @@ jobs:
          yarn test --verbose --reporters=default --reporters=github-actions
 
   test-server:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    strategy:
      matrix:
        datasource:
@@ -237,7 +237,7 @@ jobs:
          yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions
 
   check-pro-submodule:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
    steps:
      - name: Checkout repo and submodules
@@ -296,7 +296,7 @@ jobs:
          fi
 
   check-lockfile:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
    steps:
      - name: Checkout repo

@@ -4,8 +4,8 @@ import {
   getContainerRuntimeClient,
 } from "testcontainers"
 import { ContainerInfo } from "dockerode"
-import path from "path"
-import lockfile from "proper-lockfile"
+import * as path from "path"
+import * as lockfile from "proper-lockfile"
 import { execSync } from "child_process"
 
 interface DockerContext {
@@ -29,8 +29,8 @@ function getCurrentDockerContext(): DockerContext {
 
 async function getBudibaseContainers() {
   const client = await getContainerRuntimeClient()
-  const conatiners = await client.container.list()
-  return conatiners.filter(
+  const containers = await client.container.list()
+  return containers.filter(
     container =>
       container.Labels["com.budibase"] === "true" &&
       container.Labels["org.testcontainers"] === "true"

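Aside: a brief note on the import change above. "path" and "proper-lockfile" are CommonJS modules, so under TypeScript without esModuleInterop a default import may not type-check, while the namespace form always does. A minimal, hypothetical sketch (the lock target and options are illustrative only):

import * as path from "path"
import * as lockfile from "proper-lockfile"

async function withGlobalLock() {
  // The namespace object mirrors the CommonJS exports directly, so this works
  // regardless of interop settings.
  const lockTarget = path.join(process.cwd(), "package.json")
  const release = await lockfile.lock(lockTarget, { retries: 5 })
  try {
    // ... do work that must not run concurrently ...
  } finally {
    await release()
  }
}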
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.28",
+  "version": "3.2.29",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -59,11 +59,19 @@ export function isExternalTable(table: Table) {
 }
 
 export function buildExternalTableId(datasourceId: string, tableName: string) {
-  // encode spaces
-  if (tableName.includes(" ")) {
-    tableName = encodeURIComponent(tableName)
+  return `${datasourceId}${DOUBLE_SEPARATOR}${encodeURIComponent(tableName)}`
+}
+
+export function encodeTableId(tableId: string) {
+  if (isExternalTableID(tableId)) {
+    return encodeURIComponent(tableId)
+  } else {
+    return tableId
   }
-  return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
 }
 
+export function encodeViewId(viewId: string) {
+  return encodeURIComponent(viewId)
+}
+
 export function breakExternalTableId(tableId: string) {

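A rough sketch of what the reworked helpers above produce. The DOUBLE_SEPARATOR value and the datasource ID are assumptions for illustration, not taken from the diff:

const DOUBLE_SEPARATOR = "__" // assumed value; the real constant lives elsewhere in the package

function buildExternalTableId(datasourceId: string, tableName: string) {
  return `${datasourceId}${DOUBLE_SEPARATOR}${encodeURIComponent(tableName)}`
}

// The whole table name is now URI-encoded, not just spaces:
buildExternalTableId("datasource_plus_abc123", "Customer Orders")
// => "datasource_plus_abc123__Customer%20Orders"

// encodeTableId then encodes the full external ID once more, so it is safe to
// drop into a URL path segment; already-encoded characters are escaped again.
encodeURIComponent("datasource_plus_abc123__Customer%20Orders")
// => "datasource_plus_abc123__Customer%2520Orders"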
@@ -43,12 +43,11 @@
   flex-direction: row;
   justify-content: flex-start;
   align-items: stretch;
-  overflow-y: scroll !important;
   flex: 1 1 auto;
   overflow-x: hidden;
 }
 .main {
-  overflow: auto;
+  overflow-y: scroll;
 }
 .content {
   display: flex;

@@ -7,7 +7,7 @@
     "build": "routify -b && NODE_OPTIONS=\"--max_old_space_size=4096\" vite build --emptyOutDir",
     "start": "routify -c rollup",
     "dev": "routify -c dev:vite",
-    "dev:vite": "vite --host 0.0.0.0",
+    "dev:vite": "vite --host 0.0.0.0 --mode=dev",
     "rollup": "rollup -c -w",
     "test": "vitest run",
    "test:watch": "vitest",

@@ -8,7 +8,7 @@ import { get } from "svelte/store"
 import { auth, navigation } from "./stores/portal"
 
 export const API = createAPIClient({
-  attachHeaders: (headers: Record<string, string>) => {
+  attachHeaders: headers => {
     // Attach app ID header from store
     let appId = get(appStore).appId
     if (appId) {
@@ -22,7 +22,7 @@ export const API = createAPIClient({
     }
   },
 
-  onError: (error: any) => {
+  onError: error => {
     const { url, message, status, method, handled } = error || {}
 
     // Log any errors that we haven't manually handled
@@ -45,7 +45,7 @@ export const API = createAPIClient({
       }
     }
   },
-  onMigrationDetected: (appId: string) => {
+  onMigrationDetected: appId => {
     const updatingUrl = `/builder/app/updating/${appId}`
 
     if (window.location.pathname === updatingUrl) {

@@ -49,7 +49,7 @@
   const disabled = () => {
     return {
       SEND_EMAIL_SMTP: {
-        disabled: !$admin.checklist.smtp.checked,
+        disabled: !$admin.checklist?.smtp?.checked,
         message: "Please configure SMTP",
       },
       COLLECT: {

@@ -98,9 +98,7 @@
   async function generateAICronExpression() {
     loadingAICronExpression = true
     try {
-      const response = await API.generateCronExpression({
-        prompt: aiCronPrompt,
-      })
+      const response = await API.generateCronExpression(aiCronPrompt)
       cronExpression = response.message
       dispatch("change", response.message)
     } catch (err) {

@@ -56,28 +56,19 @@
   }
 
   const exportAllData = async () => {
-    return await API.exportView({
-      viewName: view,
-      format: exportFormat,
-    })
+    return await API.exportView(view, exportFormat)
   }
 
   const exportFilteredData = async () => {
-    let payload = {
-      tableId: view,
-      format: exportFormat,
-      search: {
-        paginate: false,
-      },
-    }
+    let payload = {}
     if (selectedRows?.length) {
       payload.rows = selectedRows.map(row => row._id)
     }
     if (sorting) {
-      payload.search.sort = sorting.sortColumn
-      payload.search.sortOrder = sorting.sortOrder
+      payload.sort = sorting.sortColumn
+      payload.sortOrder = sorting.sortOrder
     }
-    return await API.exportRows(payload)
+    return await API.exportRows(view, exportFormat, payload)
   }
 
   const exportData = async () => {

@@ -30,11 +30,7 @@
   const importData = async () => {
     try {
       loading = true
-      await API.importTableData({
-        tableId,
-        rows,
-        identifierFields,
-      })
+      await API.importTableData(tableId, rows, identifierFields)
       notifications.success("Rows successfully imported")
       popover.hide()
     } catch (error) {

@@ -39,9 +39,9 @@
 
   const toggleAction = async (action, enabled) => {
     if (enabled) {
-      await rowActions.enableView(tableId, viewId, action.id)
+      await rowActions.enableView(tableId, action.id, viewId)
     } else {
-      await rowActions.disableView(tableId, viewId, action.id)
+      await rowActions.disableView(tableId, action.id, viewId)
     }
   }
 

@@ -33,7 +33,7 @@
       ...datasource,
       name,
     }
-    await datasources.update({
+    await datasources.save({
       datasource: updatedDatasource,
       integration: integrationForDatasource(get(integrations), datasource),
     })

@@ -41,7 +41,7 @@
       get(integrations),
       datasource
     )
-    await datasources.update({ datasource, integration })
+    await datasources.save({ datasource, integration })
 
     await afterSave({ datasource, action })
   } catch (err) {

@@ -128,11 +128,7 @@
         allValid = false
 
         if (rows.length > 0) {
-          const response = await API.validateExistingTableImport({
-            rows,
-            tableId,
-          })
-
+          const response = await API.validateExistingTableImport(rows, tableId)
           validation = response.schemaValidation
           invalidColumns = response.invalidColumns
           allValid = response.allValid

@@ -147,7 +147,7 @@
     loading = true
     try {
       if (rows.length > 0) {
-        const response = await API.validateNewTableImport({ rows, schema })
+        const response = await API.validateNewTableImport(rows, schema)
         validation = response.schemaValidation
         allValid = response.allValid
         errors = response.errors

@@ -176,7 +176,7 @@
       notifications.success(`Request saved successfully`)
       if (dynamicVariables) {
         datasource.config.dynamicVariables = rebuildVariables(saveId)
-        datasource = await datasources.update({
+        datasource = await datasources.save({
           integration: integrationInfo,
           datasource,
         })

@@ -26,6 +26,7 @@
   const values = writable({ name: "", url: null })
   const validation = createValidationStore()
   const encryptionValidation = createValidationStore()
+  const isEncryptedRegex = /^.*\.enc.*\.tar\.gz$/gm
 
   $: {
     const { url } = $values
@@ -37,7 +38,9 @@
     encryptionValidation.check({ ...$values })
   }
 
-  $: encryptedFile = $values.file?.name?.endsWith(".enc.tar.gz")
+  // filename should be separated to avoid updates everytime any other form element changes
+  $: filename = $values.file?.name
+  $: encryptedFile = isEncryptedRegex.test(filename)
 
   onMount(async () => {
     const lastChar = $auth.user?.firstName
@@ -171,7 +174,7 @@
       try {
         await createNewApp()
       } catch (error) {
-        notifications.error("Error creating app")
+        notifications.error(`Error creating app - ${error.message}`)
       }
     }
   },

@@ -68,7 +68,7 @@
   }
 
   try {
-    const app = await API.duplicateApp(data, appId)
+    const app = await API.duplicateApp(appId, data)
     appsStore.load()
     if (!sdk.users.isBuilder($auth.user, app?.duplicateAppId)) {
       // Refresh for access to created applications

@@ -61,7 +61,7 @@ a {
   height: 8px;
 }
 ::-webkit-scrollbar-track {
-  background: var(--spectrum-alias-background-color-default);
+  background: transparent;
 }
 ::-webkit-scrollbar-thumb {
   background-color: var(--spectrum-global-color-gray-400);
@@ -71,6 +71,5 @@ a {
   background: var(--spectrum-alias-background-color-default);
 }
 html * {
-  scrollbar-color: var(--spectrum-global-color-gray-400)
-    var(--spectrum-alias-background-color-default);
+  scrollbar-color: var(--spectrum-global-color-gray-400) transparent;
 }

@@ -1,8 +0,0 @@
-declare module "api" {
-  const API: {
-    getPlugins: () => Promise<any>
-    createPlugin: (plugin: object) => Promise<any>
-    uploadPlugin: (plugin: FormData) => Promise<any>
-    deletePlugin: (id: string) => Promise<void>
-  }
-}

@@ -9,7 +9,7 @@
   $: useAccountPortal = cloud && !$admin.disableAccountPortal
 
   onMount(() => {
-    if ($admin?.checklist?.adminUser.checked || useAccountPortal) {
+    if ($admin?.checklist?.adminUser?.checked || useAccountPortal) {
       $redirect("../")
     } else {
       loaded = true

@@ -36,10 +36,7 @@
       await API.createAdminUser(adminUser)
       notifications.success("Admin user created")
       await admin.init()
-      await auth.login({
-        username: formData?.email.trim(),
-        password: formData?.password,
-      })
+      await auth.login(formData?.email.trim(), formData?.password)
       $goto("../portal")
     } catch (error) {
       submitted = false

@@ -368,20 +368,22 @@
     const payload = [
       {
         email: newUserEmail,
-        builder: {
-          global: creationRoleType === Constants.BudibaseRoles.Admin,
-          creator: creationRoleType === Constants.BudibaseRoles.Creator,
+        userInfo: {
+          builder: {
+            global: creationRoleType === Constants.BudibaseRoles.Admin,
+            creator: creationRoleType === Constants.BudibaseRoles.Creator,
+          },
+          admin: { global: creationRoleType === Constants.BudibaseRoles.Admin },
         },
-        admin: { global: creationRoleType === Constants.BudibaseRoles.Admin },
       },
     ]
 
     const notCreatingAdmin = creationRoleType !== Constants.BudibaseRoles.Admin
     const isCreator = creationAccessType === Constants.Roles.CREATOR
     if (notCreatingAdmin && isCreator) {
-      payload[0].builder.apps = [prodAppId]
+      payload[0].userInfo.builder.apps = [prodAppId]
     } else if (notCreatingAdmin && !isCreator) {
-      payload[0].apps = { [prodAppId]: creationAccessType }
+      payload[0].userInfo.apps = { [prodAppId]: creationAccessType }
     }
 
     let userInviteResponse

@@ -105,9 +105,6 @@
     if (!hasSynced && application) {
       try {
         await API.syncApp(application)
-        // check if user has beta access
-        // const betaResponse = await API.checkBetaAccess($auth?.user?.email)
-        // betaAccess = betaResponse.access
       } catch (error) {
         notifications.error("Failed to sync with production database")
       }

@@ -1,5 +1,5 @@
 <script>
-  import { Heading, Body, Layout, Button, Modal, Icon } from "@budibase/bbui"
+  import { Heading, Body, Layout, Button, Modal } from "@budibase/bbui"
   import AutomationPanel from "components/automation/AutomationPanel/AutomationPanel.svelte"
   import CreateAutomationModal from "components/automation/AutomationPanel/CreateAutomationModal.svelte"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@@ -12,13 +12,10 @@
     automationStore,
     selectedAutomation,
   } from "stores/builder"
-  import { createLocalStorageStore } from "@budibase/frontend-core"
-  import { fly } from "svelte/transition"
 
   $: automationId = $selectedAutomation?.data?._id
   $: builderStore.selectResource(automationId)
 
-  const surveyDismissed = createLocalStorageStore("automation-survey", false)
   const stopSyncing = syncURLToState({
     urlParam: "automationId",
     stateKey: "selectedAutomationId",
@@ -31,11 +28,9 @@
 
   let modal
   let webhookModal
-  let mounted = false
 
   onMount(() => {
     $automationStore.showTestPanel = false
-    mounted = true
   })
 
   onDestroy(stopSyncing)
@@ -83,43 +78,6 @@
   </Modal>
 </div>
 
-{#if !$surveyDismissed && mounted}
-  <div
-    class="survey"
-    in:fly={{ x: 600, duration: 260, delay: 1000 }}
-    out:fly={{ x: 600, duration: 260 }}
-  >
-    <div class="survey__body">
-      <div class="survey__title">We value your feedback!</div>
-      <div class="survey__text">
-        <a
-          href="https://t.maze.co/310149185"
-          target="_blank"
-          rel="noopener noreferrer"
-          on:click={() => surveyDismissed.set(true)}
-        >
-          Complete our survey on Automations</a
-        >
-        and receive a $20 thank-you gift.
-        <a
-          href="https://drive.google.com/file/d/12-qk_2F9g5PdbM6wuKoz2KkIyLI-feMX/view?usp=sharing"
-          target="_blank"
-          rel="noopener noreferrer"
-        >
-          Terms apply.
-        </a>
-      </div>
-    </div>
-    <Icon
-      name="Close"
-      hoverable
-      color="var(--spectrum-global-color-static-gray-300)"
-      hoverColor="var(--spectrum-global-color-static-gray-100)"
-      on:click={() => surveyDismissed.set(true)}
-    />
-  </div>
-{/if}
-
 <style>
   .root {
     flex: 1 1 auto;
@@ -164,39 +122,4 @@
     grid-column: 3;
     overflow: auto;
   }
-
-  /* Survey */
-  .survey {
-    position: absolute;
-    bottom: 32px;
-    right: 32px;
-    background: var(--spectrum-semantic-positive-color-background);
-    display: flex;
-    flex-direction: row;
-    padding: var(--spacing-l) var(--spacing-xl);
-    border-radius: 4px;
-    gap: var(--spacing-xl);
-  }
-  .survey * {
-    color: var(--spectrum-global-color-static-gray-300);
-    white-space: nowrap;
-  }
-  .survey a {
-    text-decoration: underline;
-    transition: color 130ms ease-out;
-  }
-  .survey a:hover {
-    color: var(--spectrum-global-color-static-gray-100);
-    cursor: pointer;
-  }
-  .survey__body {
-    flex: 1 1 auto;
-    display: flex;
-    flex-direction: column;
-    gap: 2px;
-  }
-  .survey__title {
-    font-weight: 600;
-    font-size: 15px;
-  }
 </style>

@@ -13,7 +13,7 @@
 
   async function saveDatasource({ config, name }) {
     try {
-      await datasources.update({
+      await datasources.save({
         integration,
         datasource: { ...datasource, config, name },
       })

@@ -16,7 +16,7 @@
       get(integrations),
       updatedDatasource
     )
-    await datasources.update({ datasource: updatedDatasource, integration })
+    await datasources.save({ datasource: updatedDatasource, integration })
     notifications.success(
       `Datasource ${updatedDatasource.name} updated successfully`
     )

@@ -43,8 +43,7 @@
       return
     }
     try {
-      data = await API.fetchViewData({
-        name,
+      data = await API.fetchViewData(name, {
         calculation,
         field,
         groupBy,

@@ -99,21 +99,18 @@
   }
 
   async function fetchBackups(filters, page, dateRange = []) {
-    const body = {
-      appId: $appStore.appId,
+    const opts = {
       ...filters,
       page,
     }
 
     const [startDate, endDate] = dateRange
     if (startDate) {
-      body.startDate = startDate
+      opts.startDate = startDate
     }
     if (endDate) {
-      body.endDate = endDate
+      opts.endDate = endDate
     }
 
-    const response = await backups.searchBackups(body)
+    const response = await backups.searchBackups($appStore.appId, opts)
     pageInfo.fetched(response.hasNextPage, response.nextPage)
 
     // flatten so we have an easier structure to use for the table schema
@@ -123,9 +120,7 @@
   async function createManualBackup() {
     try {
       loading = true
-      let response = await backups.createManualBackup({
-        appId: $appStore.appId,
-      })
+      let response = await backups.createManualBackup($appStore.appId)
       await fetchBackups(filterOpt, page)
       notifications.success(response.message)
     } catch (err) {
@@ -149,24 +144,14 @@
 
   async function handleButtonClick({ detail }) {
     if (detail.type === "backupDelete") {
-      await backups.deleteBackup({
-        appId: $appStore.appId,
-        backupId: detail.backupId,
-      })
+      await backups.deleteBackup($appStore.appId, detail.backupId)
       await fetchBackups(filterOpt, page)
     } else if (detail.type === "backupRestore") {
-      await backups.restoreBackup({
-        appId: $appStore.appId,
-        backupId: detail.backupId,
-        name: detail.restoreBackupName,
-      })
-      await fetchBackups(filterOpt, page)
-    } else if (detail.type === "backupUpdate") {
-      await backups.updateBackup({
-        appId: $appStore.appId,
-        backupId: detail.backupId,
-        name: detail.name,
-      })
+      await backups.restoreBackup(
+        $appStore.appId,
+        detail.backupId,
+        detail.restoreBackupName
+      )
       await fetchBackups(filterOpt, page)
     }
   }

@@ -35,10 +35,7 @@
       return
     }
     try {
-      await auth.login({
-        username: formData?.username.trim(),
-        password: formData?.password,
-      })
+      await auth.login(formData?.username.trim(), formData?.password)
       if ($auth?.user?.forceResetPassword) {
         $goto("./reset")
       } else {

@@ -66,10 +66,7 @@
 
   async function login() {
     try {
-      await auth.login({
-        username: formData.email.trim(),
-        password: formData.password.trim(),
-      })
+      await auth.login(formData.email.trim(), formData.password.trim())
       notifications.success("Logged in successfully")
       $goto("../portal")
     } catch (err) {

@@ -152,16 +152,16 @@
       logsPageInfo.loading()
       await auditLogs.search({
         bookmark: logsPage,
-        startDate: dateRange[0],
-        endDate: dateRange[1],
+        startDate: dateRange[0] || undefined,
+        endDate: dateRange[1] || undefined,
         fullSearch: logSearchTerm,
         userIds: selectedUsers,
         appIds: selectedApps,
         events: selectedEvents,
       })
       logsPageInfo.fetched(
-        $auditLogs.logs.hasNextPage,
-        $auditLogs.logs.bookmark
+        $auditLogs.logs?.hasNextPage,
+        $auditLogs.logs?.bookmark
       )
     } catch (error) {
       notifications.error(`Error getting audit logs - ${error}`)
@@ -200,6 +200,8 @@
       return Object.entries(obj).map(([id, label]) => {
        return { id, label }
      })
+    } else {
+      return []
     }
   }
 
@@ -316,7 +318,7 @@
       <Table
         on:click={({ detail }) => viewDetails(detail)}
         {customRenderers}
-        data={$auditLogs.logs.data}
+        data={$auditLogs.logs?.data}
         allowEditColumns={false}
         allowEditRows={false}
         allowSelectRows={false}

@@ -64,7 +64,7 @@
 
   const activateLicenseKey = async () => {
     try {
-      await API.activateLicenseKey({ licenseKey })
+      await API.activateLicenseKey(licenseKey)
       await auth.getSelf()
       await getLicenseKey()
       notifications.success("Successfully activated")
@@ -119,7 +119,7 @@
 
   async function activateOfflineLicense(offlineLicenseToken) {
     try {
-      await API.activateOfflineLicense({ offlineLicenseToken })
+      await API.activateOfflineLicense(offlineLicenseToken)
       await auth.getSelf()
       await getOfflineLicense()
       notifications.success("Successfully activated")

@@ -139,7 +139,7 @@
       await auth.setInitInfo({})
       $goto(`/builder/app/${createdApp.instance._id}`)
     } catch (error) {
-      notifications.error("Error creating app")
+      notifications.error(`Error creating app - ${error.message}`)
     }
   }
 

@@ -140,10 +140,7 @@
     if (image) {
       let data = new FormData()
       data.append("file", image)
-      await API.uploadOIDCLogo({
-        name: image.name,
-        data,
-      })
+      await API.uploadOIDCLogo(image.name, data)
     }
   }
 

@@ -69,10 +69,7 @@
   async function deleteSmtp() {
     // Delete the SMTP config
     try {
-      await API.deleteConfig({
-        id: smtpConfig._id,
-        rev: smtpConfig._rev,
-      })
+      await API.deleteConfig(smtpConfig._id, smtpConfig._rev)
       smtpConfig = {
         type: ConfigTypes.SMTP,
         config: {
@@ -180,7 +177,7 @@
         <Button
           secondary
           on:click={deleteSmtp}
-          disabled={!$admin.checklist.smtp.checked}
+          disabled={!$admin.checklist?.smtp?.checked}
         >
           Reset
         </Button>

@@ -1,35 +0,0 @@
-import { writable } from "svelte/store"
-
-export default class BudiStore {
-  constructor(init, opts) {
-    const store = writable({ ...init })
-
-    /**
-     * Internal Svelte store
-     */
-    this.store = store
-
-    /**
-     * Exposes the svelte subscribe fn to allow $ notation access
-     * @example
-     * $navigation.selectedScreenId
-     */
-    this.subscribe = this.store.subscribe
-
-    /**
-     * Exposes the svelte update fn.
-     * *Store modification should be kept to a minimum
-     */
-    this.update = this.store.update
-    this.set = this.store.set
-
-    /**
-     * Optional debug mode to output the store updates to console
-     */
-    if (opts?.debug) {
-      this.subscribe(state => {
-        console.warn(`${this.constructor.name}`, state)
-      })
-    }
-  }
-}

@@ -0,0 +1,41 @@
+import { writable, Writable, Readable } from "svelte/store"
+
+interface BudiStoreOpts {
+  debug?: boolean
+}
+
+export class BudiStore<T> {
+  store: Writable<T>
+  subscribe: Writable<T>["subscribe"]
+  update: Writable<T>["update"]
+  set: Writable<T>["set"]
+
+  constructor(init: T, opts?: BudiStoreOpts) {
+    this.store = writable<T>(init)
+    this.subscribe = this.store.subscribe
+    this.update = this.store.update
+    this.set = this.store.set
+
+    // Optional debug mode to output the store updates to console
+    if (opts?.debug) {
+      this.subscribe(state => {
+        console.warn(`${this.constructor.name}`, state)
+      })
+    }
+  }
+}
+
+export class DerivedBudiStore<T, DerivedT extends T> extends BudiStore<T> {
+  derivedStore: Readable<DerivedT>
+  subscribe: Readable<DerivedT>["subscribe"]
+
+  constructor(
+    init: T,
+    makeDerivedStore: (store: Writable<T>) => Readable<DerivedT>,
+    opts?: BudiStoreOpts
+  ) {
+    super(init, opts)
+    this.derivedStore = makeDerivedStore(this.store)
+    this.subscribe = this.derivedStore.subscribe
+  }
+}

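To make the new typed store classes above concrete, here is a minimal usage sketch; the CounterStore example is hypothetical and not part of the commit:

import { derived, Readable, Writable } from "svelte/store"
import { BudiStore, DerivedBudiStore } from "./BudiStore"

interface CounterState {
  count: number
}
interface DerivedCounterState extends CounterState {
  doubled: number
}

// A plain typed store: `$counter.count` works in Svelte because BudiStore
// exposes the writable's subscribe function.
class CounterStore extends BudiStore<CounterState> {
  constructor() {
    super({ count: 0 }, { debug: false })
  }
  increment() {
    this.update(state => ({ ...state, count: state.count + 1 }))
  }
}

// The derived variant: subscribers receive the enriched shape, while
// this.store still holds the raw writable state for updates.
class DoubledCounterStore extends DerivedBudiStore<CounterState, DerivedCounterState> {
  constructor() {
    const makeDerivedStore = (store: Writable<CounterState>): Readable<DerivedCounterState> =>
      derived(store, $state => ({ ...$state, doubled: $state.count * 2 }))
    super({ count: 0 }, makeDerivedStore)
  }
}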
@@ -1,5 +1,5 @@
 import { API } from "api"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 
 export const INITIAL_APP_META_STATE = {
   appId: "",

@@ -751,10 +751,7 @@ const automationActions = store => ({
         automation.definition.trigger.inputs.rowActionId
       )
     } else {
-      await API.deleteAutomation({
-        automationId: automation?._id,
-        automationRev: automation?._rev,
-      })
+      await API.deleteAutomation(automation?._id, automation?._rev)
     }
 
     store.update(state => {
@@ -836,10 +833,7 @@ const automationActions = store => ({
   test: async (automation, testData) => {
     let result
     try {
-      result = await API.testAutomation({
-        automationId: automation?._id,
-        testData,
-      })
+      result = await API.testAutomation(automation?._id, testData)
     } catch (err) {
       const message = err.message || err.status || JSON.stringify(err)
       throw `Automation test failed - ${message}`
@@ -893,10 +887,7 @@ const automationActions = store => ({
     })
   },
   clearLogErrors: async ({ automationId, appId } = {}) => {
-    return await API.clearAutomationLogErrors({
-      automationId,
-      appId,
-    })
+    return await API.clearAutomationLogErrors(automationId, appId)
   },
   addTestDataToAutomation: data => {
     let newAutomation = cloneDeep(get(selectedAutomation).data)

@@ -1,7 +1,7 @@
 import { get } from "svelte/store"
 import { createBuilderWebsocket } from "./websocket.js"
 import { BuilderSocketEvent } from "@budibase/shared-core"
-import BudiStore from "../BudiStore.js"
+import { BudiStore } from "../BudiStore.js"
 import { TOUR_KEYS } from "components/portal/onboarding/tours.js"
 
 export const INITIAL_BUILDER_STATE = {

@@ -28,7 +28,7 @@ import {
   DB_TYPE_INTERNAL,
   DB_TYPE_EXTERNAL,
 } from "constants/backend"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 import { Utils } from "@budibase/frontend-core"
 import { FieldType } from "@budibase/types"
 import { utils } from "@budibase/shared-core"

@@ -1,222 +0,0 @@
-import { writable, derived, get } from "svelte/store"
-import {
-  IntegrationTypes,
-  DEFAULT_BB_DATASOURCE_ID,
-  BUDIBASE_INTERNAL_DB_ID,
-} from "constants/backend"
-import { tables } from "./tables"
-import { queries } from "./queries"
-import { API } from "api"
-import { DatasourceFeature } from "@budibase/types"
-import { TableNames } from "constants"
-
-class TableImportError extends Error {
-  constructor(errors) {
-    super()
-    this.name = "TableImportError"
-    this.errors = errors
-  }
-
-  get description() {
-    let message = ""
-    for (const key in this.errors) {
-      message += `${key}: ${this.errors[key]}\n`
-    }
-    return message
-  }
-}
-
-export function createDatasourcesStore() {
-  const store = writable({
-    list: [],
-    selectedDatasourceId: null,
-  })
-
-  const derivedStore = derived([store, tables], ([$store, $tables]) => {
-    // Set the internal datasource entities from the table list, which we're
-    // able to keep updated unlike the egress generated definition of the
-    // internal datasource
-    let internalDS = $store.list?.find(ds => ds._id === BUDIBASE_INTERNAL_DB_ID)
-    let otherDS = $store.list?.filter(ds => ds._id !== BUDIBASE_INTERNAL_DB_ID)
-    if (internalDS) {
-      internalDS = {
-        ...internalDS,
-        entities: $tables.list?.filter(table => {
-          return (
-            table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
-            table._id !== TableNames.USERS
-          )
-        }),
-      }
-    }
-
-    // Build up enriched DS list
-    // Only add the internal DS if we have at least one non-users table
-    let list = []
-    if (internalDS?.entities?.length) {
-      list.push(internalDS)
-    }
-    list = list.concat(otherDS || [])
-
-    return {
-      ...$store,
-      list,
-      selected: list?.find(ds => ds._id === $store.selectedDatasourceId),
-      hasDefaultData: list?.some(ds => ds._id === DEFAULT_BB_DATASOURCE_ID),
-      hasData: list?.length > 0,
-    }
-  })
-
-  const fetch = async () => {
-    const datasources = await API.getDatasources()
-    store.update(state => ({
-      ...state,
-      list: datasources,
-    }))
-  }
-
-  const select = id => {
-    store.update(state => ({
-      ...state,
-      selectedDatasourceId: id,
-    }))
-  }
-
-  const updateDatasource = (response, { ignoreErrors } = {}) => {
-    const { datasource, errors } = response
-    if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
-      throw new TableImportError(errors)
-    }
-    replaceDatasource(datasource._id, datasource)
-    select(datasource._id)
-    return datasource
-  }
-
-  const updateSchema = async (datasource, tablesFilter) => {
-    const response = await API.buildDatasourceSchema({
-      datasourceId: datasource?._id,
-      tablesFilter,
-    })
-    updateDatasource(response)
-  }
-
-  const sourceCount = source => {
-    return get(store).list.filter(datasource => datasource.source === source)
-      .length
-  }
-
-  const checkDatasourceValidity = async (integration, datasource) => {
-    if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
-      const { connected, error } = await API.validateDatasource(datasource)
-      if (connected) {
-        return
-      }
-
-      throw new Error(`Unable to connect: ${error}`)
-    }
-  }
-
-  const create = async ({ integration, config }) => {
-    const count = sourceCount(integration.name)
-    const nameModifier = count === 0 ? "" : ` ${count + 1}`
-
-    const datasource = {
-      type: "datasource",
-      source: integration.name,
-      config,
-      name: `${integration.friendlyName}${nameModifier}`,
-      plus: integration.plus && integration.name !== IntegrationTypes.REST,
-      isSQL: integration.isSQL,
-    }
-
-    if (await checkDatasourceValidity(integration, datasource)) {
-      throw new Error("Unable to connect")
-    }
-
-    const response = await API.createDatasource({
-      datasource,
-      fetchSchema: integration.plus,
-    })
-
-    return updateDatasource(response, { ignoreErrors: true })
-  }
-
-  const update = async ({ integration, datasource }) => {
-    if (await checkDatasourceValidity(integration, datasource)) {
-      throw new Error("Unable to connect")
-    }
-
-    const response = await API.updateDatasource(datasource)
-
-    return updateDatasource(response)
-  }
-
-  const deleteDatasource = async datasource => {
-    if (!datasource?._id || !datasource?._rev) {
-      return
-    }
-    await API.deleteDatasource({
-      datasourceId: datasource._id,
-      datasourceRev: datasource._rev,
-    })
-    replaceDatasource(datasource._id, null)
-  }
-
-  const replaceDatasource = (datasourceId, datasource) => {
-    if (!datasourceId) {
-      return
-    }
-
-    // Handle deletion
-    if (!datasource) {
-      store.update(state => ({
-        ...state,
-        list: state.list.filter(x => x._id !== datasourceId),
-      }))
-      tables.removeDatasourceTables(datasourceId)
-      queries.removeDatasourceQueries(datasourceId)
-      return
-    }
-
-    // Add new datasource
-    const index = get(store).list.findIndex(x => x._id === datasource._id)
-    if (index === -1) {
-      store.update(state => ({
-        ...state,
-        list: [...state.list, datasource],
-      }))
-
-      // If this is a new datasource then we should refresh the tables list,
-      // because otherwise we'll never see the new tables
-      tables.fetch()
-    }
-
-    // Update existing datasource
-    else if (datasource) {
-      store.update(state => {
-        state.list[index] = datasource
-        return state
-      })
-    }
-  }
-
-  const getTableNames = async datasource => {
-    const info = await API.fetchInfoForDatasource(datasource)
-    return info.tableNames || []
-  }
-
-  return {
-    subscribe: derivedStore.subscribe,
-    fetch,
-    init: fetch,
-    select,
-    updateSchema,
-    create,
-    update,
-    delete: deleteDatasource,
-    replaceDatasource,
-    getTableNames,
-  }
-}
-
-export const datasources = createDatasourcesStore()

@@ -0,0 +1,292 @@
+import { derived, get, Writable } from "svelte/store"
+import {
+  IntegrationTypes,
+  DEFAULT_BB_DATASOURCE_ID,
+  BUDIBASE_INTERNAL_DB_ID,
+} from "constants/backend"
+import { tables } from "./tables"
+import { queries } from "./queries"
+import { API } from "api"
+import {
+  DatasourceFeature,
+  Datasource,
+  Table,
+  Integration,
+  UIIntegration,
+  SourceName,
+} from "@budibase/types"
+// @ts-ignore
+import { TableNames } from "constants"
+import { DerivedBudiStore } from "stores/BudiStore"
+
+class TableImportError extends Error {
+  errors: Record<string, string>
+
+  constructor(errors: Record<string, string>) {
+    super()
+    this.name = "TableImportError"
+    this.errors = errors
+  }
+
+  get description() {
+    let message = ""
+    for (const key in this.errors) {
+      message += `${key}: ${this.errors[key]}\n`
+    }
+    return message
+  }
+}
+
+// when building the internal DS - seems to represent it slightly differently to the backend typing of a DS
+interface InternalDatasource extends Omit<Datasource, "entities"> {
+  entities: Table[]
+}
+
+interface BuilderDatasourceStore {
+  rawList: Datasource[]
+  selectedDatasourceId: null | string
+}
+
+interface DerivedDatasourceStore extends BuilderDatasourceStore {
+  list: (Datasource | InternalDatasource)[]
+  selected?: Datasource | InternalDatasource
+  hasDefaultData: boolean
+  hasData: boolean
+}
+
+export class DatasourceStore extends DerivedBudiStore<
+  BuilderDatasourceStore,
+  DerivedDatasourceStore
+> {
+  constructor() {
+    const makeDerivedStore = (store: Writable<BuilderDatasourceStore>) => {
+      return derived([store, tables], ([$store, $tables]) => {
+        // Set the internal datasource entities from the table list, which we're
+        // able to keep updated unlike the egress generated definition of the
+        // internal datasource
+        let internalDS: Datasource | InternalDatasource | undefined =
+          $store.rawList?.find(ds => ds._id === BUDIBASE_INTERNAL_DB_ID)
+        let otherDS = $store.rawList?.filter(
+          ds => ds._id !== BUDIBASE_INTERNAL_DB_ID
+        )
+        if (internalDS) {
+          const tables: Table[] = $tables.list?.filter((table: Table) => {
+            return (
+              table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
+              table._id !== TableNames.USERS
+            )
+          })
+          internalDS = {
+            ...internalDS,
+            entities: tables,
+          }
+        }
+
+        // Build up enriched DS list
+        // Only add the internal DS if we have at least one non-users table
+        let list: (InternalDatasource | Datasource)[] = []
+        if (internalDS?.entities?.length) {
+          list.push(internalDS)
+        }
+        list = list.concat(otherDS || [])
+
+        return {
+          ...$store,
+          list,
+          selected: list?.find(ds => ds._id === $store.selectedDatasourceId),
+          hasDefaultData: list?.some(ds => ds._id === DEFAULT_BB_DATASOURCE_ID),
+          hasData: list?.length > 0,
+        }
+      })
+    }
+
+    super(
+      {
+        rawList: [],
+        selectedDatasourceId: null,
+      },
+      makeDerivedStore
+    )
+
+    this.fetch = this.fetch.bind(this)
+    this.init = this.fetch.bind(this)
+    this.select = this.select.bind(this)
+    this.updateSchema = this.updateSchema.bind(this)
+    this.create = this.create.bind(this)
+    this.delete = this.deleteDatasource.bind(this)
+    this.save = this.save.bind(this)
+    this.replaceDatasource = this.replaceDatasource.bind(this)
+    this.getTableNames = this.getTableNames.bind(this)
+  }
+
+  async fetch() {
+    const datasources = await API.getDatasources()
+    this.store.update(state => ({
+      ...state,
+      rawList: datasources,
+    }))
+  }
+
+  async init() {
+    return this.fetch()
+  }
+
+  select(id: string) {
+    this.store.update(state => ({
+      ...state,
+      selectedDatasourceId: id,
+    }))
+  }
+
+  private updateDatasourceInStore(
+    response: { datasource: Datasource; errors?: Record<string, string> },
+    { ignoreErrors }: { ignoreErrors?: boolean } = {}
+  ) {
+    const { datasource, errors } = response
+    if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
+      throw new TableImportError(errors)
+    }
+    this.replaceDatasource(datasource._id!, datasource)
+    this.select(datasource._id!)
+    return datasource
+  }
+
+  async updateSchema(datasource: Datasource, tablesFilter: string[]) {
+    const response = await API.buildDatasourceSchema(
+      datasource?._id!,
+      tablesFilter
+    )
+    this.updateDatasourceInStore(response)
+  }
+
+  sourceCount(source: string) {
+    return get(this.store).rawList.filter(
+      datasource => datasource.source === source
+    ).length
+  }
+
+  async checkDatasourceValidity(
+    integration: Integration,
+    datasource: Datasource
+  ): Promise<{ valid: boolean; error?: string }> {
+    if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
+      const { connected, error } = await API.validateDatasource(datasource)
+      if (connected) {
+        return { valid: true }
+      } else {
+        return { valid: false, error }
+      }
+    }
+    return { valid: true }
+  }
+
+  async create({
+    integration,
+    config,
+  }: {
+    integration: UIIntegration
+    config: Record<string, any>
+  }) {
+    const count = this.sourceCount(integration.name)
+    const nameModifier = count === 0 ? "" : ` ${count + 1}`
+
+    const datasource: Datasource = {
+      type: "datasource",
+      source: integration.name as SourceName,
+      config,
+      name: `${integration.friendlyName}${nameModifier}`,
+      plus: integration.plus && integration.name !== IntegrationTypes.REST,
+      isSQL: integration.isSQL,
+    }
+
+    const { valid, error } = await this.checkDatasourceValidity(
+      integration,
+      datasource
+    )
+    if (!valid) {
+      throw new Error(`Unable to connect - ${error}`)
+    }
+
+    const response = await API.createDatasource({
+      datasource,
+      fetchSchema: integration.plus,
+    })
+
+    return this.updateDatasourceInStore(response, { ignoreErrors: true })
+  }
+
+  async save({
+    integration,
+    datasource,
+  }: {
+    integration: Integration
+    datasource: Datasource
+  }) {
+    if (!(await this.checkDatasourceValidity(integration, datasource)).valid) {
+      throw new Error("Unable to connect")
+    }
+
+    const response = await API.updateDatasource(datasource)
+
+    return this.updateDatasourceInStore(response)
+  }
+
+  async deleteDatasource(datasource: Datasource) {
+    if (!datasource?._id || !datasource?._rev) {
+      return
+    }
+    await API.deleteDatasource(datasource._id, datasource._rev)
+    this.replaceDatasource(datasource._id)
+  }
+
+  async delete(datasource: Datasource) {
+    return this.deleteDatasource(datasource)
+  }
+
+  replaceDatasource(datasourceId: string, datasource?: Datasource) {
+    if (!datasourceId) {
+      return
+    }
+
+    // Handle deletion
+    if (!datasource) {
+      this.store.update(state => ({
+        ...state,
+        rawList: state.rawList.filter(x => x._id !== datasourceId),
+      }))
+      tables.removeDatasourceTables(datasourceId)
+      queries.removeDatasourceQueries(datasourceId)
+      return
+    }
+
+    // Add new datasource
+    const index = get(this.store).rawList.findIndex(
+      x => x._id === datasource._id
+    )
+    if (index === -1) {
+      this.store.update(state => ({
+        ...state,
+        rawList: [...state.rawList, datasource],
+      }))
+
+      // If this is a new datasource then we should refresh the tables list,
+      // because otherwise we'll never see the new tables
+      tables.fetch()
+    }
+
+    // Update existing datasource
+    else if (datasource) {
+      this.store.update(state => {
+        state.rawList[index] = datasource
+        return state
+      })
+    }
+  }
+
+  async getTableNames(datasource: Datasource) {
+    const info = await API.fetchInfoForDatasource(datasource)
+    return info.tableNames || []
+  }
+}
+
+export const datasources = new DatasourceStore()

@@ -10,14 +10,11 @@ export function createFlagsStore() {
       set(flags)
     },
     updateFlag: async (flag, value) => {
-      await API.updateFlag({
-        flag,
-        value,
-      })
+      await API.updateFlag(flag, value)
       await actions.fetch()
     },
     toggleUiFeature: async feature => {
-      await API.toggleUiFeature({ value: feature })
+      await API.toggleUiFeature(feature)
    },
  }
 

@@ -1,6 +1,6 @@
 import { get } from "svelte/store"
 import { previewStore } from "stores/builder"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 
 export const INITIAL_HOVER_STATE = {
   componentId: null,

@@ -1,6 +1,6 @@
 import { derived, get } from "svelte/store"
 import { componentStore } from "stores/builder"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 import { API } from "api"
 
 export const INITIAL_LAYOUT_STATE = {
@@ -59,10 +59,7 @@ export class LayoutStore extends BudiStore {
     if (!layout?._id) {
       return
     }
-    await API.deleteLayout({
-      layoutId: layout._id,
-      layoutRev: layout._rev,
-    })
+    await API.deleteLayout(layout._id, layout._rev)
     this.update(state => {
       state.layouts = state.layouts.filter(x => x._id !== layout._id)
       return state

@@ -1,7 +1,7 @@
 import { get } from "svelte/store"
 import { API } from "api"
 import { appStore } from "stores/builder"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 
 export const INITIAL_NAVIGATION_STATE = {
   navigation: "Top",
@@ -35,10 +35,7 @@ export class NavigationStore extends BudiStore {
 
   async save(navigation) {
     const appId = get(appStore).appId
-    const app = await API.saveAppMetadata({
-      appId,
-      metadata: { navigation },
-    })
+    const app = await API.saveAppMetadata(appId, { navigation })
     this.syncAppNavigation(app.navigation)
   }
 

@@ -7,18 +7,10 @@ export function createPermissionStore() {
   return {
     subscribe,
     save: async ({ level, role, resource }) => {
-      return await API.updatePermissionForResource({
-        resourceId: resource,
-        roleId: role,
-        level,
-      })
+      return await API.updatePermissionForResource(resource, role, level)
     },
     remove: async ({ level, role, resource }) => {
-      return await API.removePermissionFromResource({
-        resourceId: resource,
-        roleId: role,
-        level,
-      })
+      return await API.removePermissionFromResource(resource, role, level)
     },
     forResource: async resourceId => {
       return (await API.getPermissionForResource(resourceId)).permissions

@@ -62,10 +62,7 @@ export function createQueriesStore() {
   }
 
   const importQueries = async ({ data, datasourceId }) => {
-    return await API.importQueries({
-      datasourceId,
-      data,
-    })
+    return await API.importQueries(datasourceId, data)
   }
 
   const select = id => {
@@ -87,10 +84,7 @@ export function createQueriesStore() {
   }
 
   const deleteQuery = async query => {
-    await API.deleteQuery({
-      queryId: query?._id,
-      queryRev: query?._rev,
-    })
+    await API.deleteQuery(query._id, query._rev)
     store.update(state => {
       state.list = state.list.filter(existing => existing._id !== query._id)
       return state

@@ -43,10 +43,7 @@ export function createRolesStore() {
      setRoles(roles)
    },
    delete: async role => {
-      await API.deleteRole({
-        roleId: role?._id,
-        roleRev: role?._rev,
-      })
+      await API.deleteRole(role._id, role._rev)
      await actions.fetch()
    },
    save: async role => {

@@ -1,5 +1,5 @@
 import { get, derived } from "svelte/store"
-import BudiStore from "stores/BudiStore"
+import { BudiStore } from "stores/BudiStore"
 import { tables } from "./tables"
 import { viewsV2 } from "./viewsV2"
 import { automationStore } from "./automations"
@@ -55,15 +55,12 @@ export class RowActionStore extends BudiStore {
     }
 
     // Create the action
-    const res = await API.rowActions.create({
-      name,
-      tableId,
-    })
+    const res = await API.rowActions.create(tableId, name)
 
     // Enable action on this view if adding via a view
     if (viewId) {
       await Promise.all([
-        this.enableView(tableId, viewId, res.id),
+        this.enableView(tableId, res.id, viewId),
         automationStore.actions.fetch(),
       ])
     } else {
@@ -76,21 +73,13 @@ export class RowActionStore extends BudiStore {
     return res
   }
 
-  enableView = async (tableId, viewId, rowActionId) => {
-    await API.rowActions.enableView({
-      tableId,
-      viewId,
-      rowActionId,
-    })
+  enableView = async (tableId, rowActionId, viewId) => {
+    await API.rowActions.enableView(tableId, rowActionId, viewId)
     await this.refreshRowActions(tableId)
   }
 
-  disableView = async (tableId, viewId, rowActionId) => {
-    await API.rowActions.disableView({
-      tableId,
-      viewId,
-      rowActionId,
-    })
+  disableView = async (tableId, rowActionId, viewId) => {
+    await API.rowActions.disableView(tableId, rowActionId, viewId)
     await this.refreshRowActions(tableId)
   }
 
@@ -105,21 +94,14 @@ export class RowActionStore extends BudiStore {
   }
 
   delete = async (tableId, rowActionId) => {
-    await API.rowActions.delete({
-      tableId,
-      rowActionId,
-    })
+    await API.rowActions.delete(tableId, rowActionId)
     await this.refreshRowActions(tableId)
     // We don't need to refresh automations as we can only delete row actions
     // from the automations store, so we already handle the state update there
   }
 
   trigger = async (sourceId, rowActionId, rowId) => {
-    await API.rowActions.trigger({
-      sourceId,
-      rowActionId,
-      rowId,
-    })
+    await API.rowActions.trigger(sourceId, rowActionId, rowId)
   }
 }
 

@@ -12,7 +12,7 @@ import {
 } from "stores/builder"
 import { createHistoryStore } from "stores/builder/history"
 import { API } from "api"
-import BudiStore from "../BudiStore"
+import { BudiStore } from "../BudiStore"
 
 export const INITIAL_SCREENS_STATE = {
   screens: [],
@@ -344,12 +344,7 @@ export class ScreenStore extends BudiStore {
     let deleteUrls = []
     screensToDelete.forEach(screen => {
       // Delete the screen
-      promises.push(
-        API.deleteScreen({
-          screenId: screen._id,
-          screenRev: screen._rev,
-        })
-      )
+      promises.push(API.deleteScreen(screen._id, screen._rev))
       // Remove links to this screen
       deleteUrls.push(screen.routing.route)
     })

@@ -14,19 +14,13 @@ const createsnippets = () => {
       ...get(store).filter(snippet => snippet.name !== updatedSnippet.name),
       updatedSnippet,
     ]
-    const app = await API.saveAppMetadata({
-      appId: get(appStore).appId,
-      metadata: { snippets },
-    })
+    const app = await API.saveAppMetadata(get(appStore).appId, { snippets })
     syncMetadata(app)
   }
 
   const deleteSnippet = async snippetName => {
     const snippets = get(store).filter(snippet => snippet.name !== snippetName)
-    const app = await API.saveAppMetadata({
-      appId: get(appStore).appId,
-      metadata: { snippets },
-    })
+    const app = await API.saveAppMetadata(get(appStore).appId, { snippets })
     syncMetadata(app)
   }
 

@@ -1,39 +0,0 @@
-import { integrations } from "./integrations"
-import { derived } from "svelte/store"
-
-import { DatasourceTypes } from "constants/backend"
-
-const getIntegrationOrder = type => {
-  if (type === DatasourceTypes.API) return 1
-  if (type === DatasourceTypes.RELATIONAL) return 2
-  if (type === DatasourceTypes.NON_RELATIONAL) return 3
-
-  // Sort all others arbitrarily by the first character of their name.
-  // Character codes can technically be as low as 0, so make sure the number is at least 4
-  return type.charCodeAt(0) + 4
-}
-
-export const createSortedIntegrationsStore = () => {
-  return derived(integrations, $integrations => {
-    const integrationsAsArray = Object.entries($integrations).map(
-      ([name, integration]) => ({
-        name,
-        ...integration,
-      })
-    )
-
-    return integrationsAsArray.sort((integrationA, integrationB) => {
-      const integrationASortOrder = getIntegrationOrder(integrationA.type)
-      const integrationBSortOrder = getIntegrationOrder(integrationB.type)
-      if (integrationASortOrder === integrationBSortOrder) {
-        return integrationA.friendlyName.localeCompare(
-          integrationB.friendlyName
-        )
-      }
-
-      return integrationASortOrder < integrationBSortOrder ? -1 : 1
-    })
-  })
-}
-
-export const sortedIntegrations = createSortedIntegrationsStore()

@@ -0,0 +1,53 @@
+import { integrations } from "./integrations"
+import { derived } from "svelte/store"
+
+import { DatasourceTypes } from "constants/backend"
+import { UIIntegration, Integration } from "@budibase/types"
+import { BudiStore } from "stores/BudiStore"
+
+const getIntegrationOrder = (type: string | undefined) => {
+  // if type is not known, sort to end
+  if (!type) {
+    return Number.MAX_SAFE_INTEGER
+  }
+  if (type === DatasourceTypes.API) return 1
+  if (type === DatasourceTypes.RELATIONAL) return 2
+  if (type === DatasourceTypes.NON_RELATIONAL) return 3
+
+  // Sort all others arbitrarily by the first character of their name.
+  // Character codes can technically be as low as 0, so make sure the number is at least 4
+  return type.charCodeAt(0) + 4
+}
+
+export class SortedIntegrationStore extends BudiStore<UIIntegration[]> {
+  constructor() {
+    super([])
+
+    const derivedStore = derived<typeof integrations, UIIntegration[]>(
+      integrations,
+      $integrations => {
+        const entries: [string, Integration][] = Object.entries($integrations)
+        const integrationsAsArray = entries.map(([name, integration]) => ({
+          name,
+          ...integration,
+        }))
+
+        return integrationsAsArray.sort((integrationA, integrationB) => {
+          const integrationASortOrder = getIntegrationOrder(integrationA.type)
+          const integrationBSortOrder = getIntegrationOrder(integrationB.type)
+          if (integrationASortOrder === integrationBSortOrder) {
+            return integrationA.friendlyName.localeCompare(
+              integrationB.friendlyName
+            )
+          }
+
+          return integrationASortOrder < integrationBSortOrder ? -1 : 1
+        })
+      }
+    )
+
+    this.subscribe = derivedStore.subscribe
+  }
+}
+
+export const sortedIntegrations = new SortedIntegrationStore()

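As a hedged sketch of the ordering behaviour the new store relies on (the literal type strings below are placeholders; the real ones come from DatasourceTypes in constants/backend):

const order = (type?: string) => {
  if (!type) return Number.MAX_SAFE_INTEGER // unknown types now sort last
  if (type === "API") return 1
  if (type === "Relational") return 2
  if (type === "Non-relational") return 3
  // Everything else sorts by its first character, offset past the fixed slots
  return type.charCodeAt(0) + 4
}

const sorted = ["Graph", "API", undefined, "Relational"].sort(
  (a, b) => order(a) - order(b)
)
// => ["API", "Relational", "Graph", undefined]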
@@ -110,10 +110,7 @@ export function createTablesStore() {
     if (!table?._id) {
       return
     }
-    await API.deleteTable({
-      tableId: table._id,
-      tableRev: table._rev || "rev",
-    })
+    await API.deleteTable(table._id, table._rev || "rev")
     replaceTable(table._id, null)
   }
 

@@ -264,10 +264,7 @@ describe("Navigation store", () => {
 
     await ctx.test.navigationStore.save(update)
 
-    expect(saveSpy).toHaveBeenCalledWith({
-      appId: "testing_123",
-      metadata: { navigation: update },
-    })
+    expect(saveSpy).toHaveBeenCalledWith("testing_123", { navigation: update })
 
     expect(ctx.test.store.links.length).toBe(3)
 

@ -1,12 +1,14 @@
import { it, expect, describe, beforeEach, vi } from "vitest"
import { createSortedIntegrationsStore } from "stores/builder/sortedIntegrations"
import { SortedIntegrationStore } from "stores/builder/sortedIntegrations"
import { DatasourceTypes } from "constants/backend"

import { derived } from "svelte/store"
import { integrations } from "stores/builder/integrations"

vi.mock("svelte/store", () => ({
  derived: vi.fn(),
  derived: vi.fn(() => ({
    subscribe: vi.fn(),
  })),
  writable: vi.fn(() => ({
    subscribe: vi.fn(),
  })),
@ -14,6 +16,8 @@ vi.mock("svelte/store", () => ({

vi.mock("stores/builder/integrations", () => ({ integrations: vi.fn() }))

const mockedDerived = vi.mocked(derived)

const inputA = {
  nonRelationalA: {
    friendlyName: "non-relational A",
@ -104,25 +108,28 @@ const expectedOutput = [
]

describe("sorted integrations store", () => {
  beforeEach(ctx => {
  interface LocalContext {
    returnedStore: SortedIntegrationStore
    derivedCallback: any
  }

  beforeEach<LocalContext>(ctx => {
    vi.clearAllMocks()

    ctx.returnedStore = createSortedIntegrationsStore()

    ctx.derivedCallback = derived.mock.calls[0][1]
    ctx.returnedStore = new SortedIntegrationStore()
    ctx.derivedCallback = mockedDerived.mock.calls[0]?.[1]
  })

  it("calls derived with the correct parameters", () => {
    expect(derived).toHaveBeenCalledTimes(1)
    expect(derived).toHaveBeenCalledWith(integrations, expect.toBeFunc())
    expect(mockedDerived).toHaveBeenCalledTimes(1)
    expect(mockedDerived).toHaveBeenCalledWith(
      integrations,
      expect.any(Function)
    )
  })

  describe("derived callback", () => {
    it("When no integrations are loaded", ctx => {
      expect(ctx.derivedCallback({})).toEqual([])
    })

    it("When integrations are present", ctx => {
    it<LocalContext>("When integrations are present", ctx => {
      expect(ctx.derivedCallback(inputA)).toEqual(expectedOutput)
      expect(ctx.derivedCallback(inputB)).toEqual(expectedOutput)
    })
@ -20,10 +20,7 @@ export const createThemeStore = () => {
  }

  const save = async (theme, appId) => {
    const app = await API.saveAppMetadata({
      appId,
      metadata: { theme },
    })
    const app = await API.saveAppMetadata(appId, { theme })
    store.update(state => {
      state.theme = app.theme
      return state
@ -32,10 +29,7 @@ export const createThemeStore = () => {

  const saveCustom = async (theme, appId) => {
    const updated = { ...get(store).customTheme, ...theme }
    const app = await API.saveAppMetadata({
      appId,
      metadata: { customTheme: updated },
    })
    const app = await API.saveAppMetadata(appId, { customTheme: updated })
    store.update(state => {
      state.customTheme = app.customTheme
      return state
@ -1,5 +1,5 @@
import { it, expect, describe, beforeEach, vi } from "vitest"
import { DEFAULT_CONFIG, createAdminStore } from "./admin"
import { createAdminStore } from "./admin"

import { writable, get } from "svelte/store"
import { API } from "api"
@ -45,11 +45,6 @@ describe("admin store", () => {
    ctx.returnedStore = createAdminStore()
  })

  it("inits the writable store with the default config", () => {
    expect(writable).toHaveBeenCalledTimes(1)
    expect(writable).toHaveBeenCalledWith(DEFAULT_CONFIG)
  })

  it("returns the created store", ctx => {
    expect(ctx.returnedStore).toEqual({
      subscribe: expect.toBe(ctx.writableReturn.subscribe),
@ -2,27 +2,28 @@ import { writable, get } from "svelte/store"
import { API } from "api"
import { auth } from "stores/portal"
import { banner } from "@budibase/bbui"
import {
  ConfigChecklistResponse,
  GetEnvironmentResponse,
  SystemStatusResponse,
} from "@budibase/types"

export const DEFAULT_CONFIG = {
  loaded: false,
  multiTenancy: false,
  cloud: false,
  isDev: false,
  disableAccountPortal: false,
  accountPortalUrl: "",
  importComplete: false,
  checklist: {
    apps: { checked: false },
    smtp: { checked: false },
    adminUser: { checked: false },
    sso: { checked: false },
  },
  maintenance: [],
  offlineMode: false,
interface PortalAdminStore extends GetEnvironmentResponse {
  loaded: boolean
  checklist?: ConfigChecklistResponse
  status?: SystemStatusResponse
}

export function createAdminStore() {
  const admin = writable(DEFAULT_CONFIG)
  const admin = writable<PortalAdminStore>({
    loaded: false,
    multiTenancy: false,
    cloud: false,
    isDev: false,
    disableAccountPortal: false,
    offlineMode: false,
    maintenance: [],
  })

  async function init() {
    await getChecklist()
@ -1,19 +1,39 @@
import { derived } from "svelte/store"
// @ts-ignore
import { AppStatus } from "constants"
import { API } from "api"
import { auth } from "./auth"
import BudiStore from "../BudiStore" // move this
import { BudiStore } from "../BudiStore"
import { App, UpdateAppRequest } from "@budibase/types"

// properties that should always come from the dev app, not the deployed
const DEV_PROPS = ["updatedBy", "updatedAt"]

export const INITIAL_APPS_STATE = {
  apps: [],
interface AppIdentifierMetadata {
  devId?: string
  devRev?: string
  prodId?: string
  prodRev?: string
}

export class AppsStore extends BudiStore {
interface AppUIMetadata {
  deployed: boolean
  lockedYou: boolean
  lockedOther: boolean
  favourite: boolean
}

interface StoreApp extends App, AppIdentifierMetadata {}

interface EnrichedApp extends StoreApp, AppUIMetadata {}

interface PortalAppsStore {
  apps: StoreApp[]
  sortBy?: string
}

export class AppsStore extends BudiStore<PortalAppsStore> {
  constructor() {
    super({ ...INITIAL_APPS_STATE })
    super({
      apps: [],
    })

    this.extractAppId = this.extractAppId.bind(this)
    this.getProdAppID = this.getProdAppID.bind(this)
|
|||
this.save = this.save.bind(this)
|
||||
}
|
||||
|
||||
extractAppId(id) {
|
||||
const split = id?.split("_") || []
|
||||
extractAppId(appId?: string) {
|
||||
const split = appId?.split("_") || []
|
||||
return split.length ? split[split.length - 1] : null
|
||||
}
|
||||
|
||||
getProdAppID(appId) {
|
||||
getProdAppID(appId: string) {
|
||||
if (!appId) {
|
||||
return appId
|
||||
}
|
||||
|
@ -47,15 +67,15 @@ export class AppsStore extends BudiStore {
    return `app${separator}${rest}`
  }

  updateSort(sortBy) {
  async updateSort(sortBy: string) {
    this.update(state => ({
      ...state,
      sortBy,
    }))
    this.updateUserSort(sortBy)
    await this.updateUserSort(sortBy)
  }

  async updateUserSort(sortBy) {
  async updateUserSort(sortBy: string) {
    try {
      await auth.updateSelf({ appSort: sortBy })
    } catch (err) {
@ -64,16 +84,19 @@ export class AppsStore extends BudiStore {
  }

  async load() {
    const json = await API.getApps()
    const json = (await API.getApps()) as App[]
    if (Array.isArray(json)) {
      // Merge apps into one sensible list
      let appMap = {}
      let appMap: Record<string, StoreApp> = {}
      let devApps = json.filter(app => app.status === AppStatus.DEV)
      let deployedApps = json.filter(app => app.status === AppStatus.DEPLOYED)

      // First append all dev app version
      devApps.forEach(app => {
        const id = this.extractAppId(app.appId)
        if (!id) {
          return
        }
        appMap[id] = {
          ...app,
          devId: app.appId,
@ -84,20 +107,22 @@ export class AppsStore extends BudiStore {
      // Then merge with all prod app versions
      deployedApps.forEach(app => {
        const id = this.extractAppId(app.appId)
        if (!id) {
          return
        }

        // Skip any deployed apps which don't have a dev counterpart
        if (!appMap[id]) {
          return
        }

        let devProps = {}
        // Extract certain properties from the dev app to override the prod app
        let devProps: Pick<App, "updatedBy" | "updatedAt"> = {}
        if (appMap[id]) {
          const entries = Object.entries(appMap[id]).filter(
            ([key]) => DEV_PROPS.indexOf(key) !== -1
          )
          entries.forEach(entry => {
            devProps[entry[0]] = entry[1]
          })
          devProps = {
            updatedBy: appMap[id].updatedBy,
            updatedAt: appMap[id].updatedAt,
          }
        }
        appMap[id] = {
          ...appMap[id],
@ -111,7 +136,10 @@ export class AppsStore extends BudiStore {
      // Transform into an array and clean up
      const apps = Object.values(appMap)
      apps.forEach(app => {
        app.appId = this.extractAppId(app.devId)
        const appId = this.extractAppId(app.devId)
        if (appId) {
          app.appId = appId
        }
        delete app._id
        delete app._rev
      })
@ -127,11 +155,8 @@ export class AppsStore extends BudiStore {
    }
  }

  async save(appId, value) {
    await API.saveAppMetadata({
      appId,
      metadata: value,
    })
  async save(appId: string, value: UpdateAppRequest) {
    await API.saveAppMetadata(appId, value)
    this.update(state => {
      const updatedAppIndex = state.apps.findIndex(
        app => app.instance._id === appId
@ -156,15 +181,16 @@ export const sortBy = derived([appsStore, auth], ([$store, $auth]) => {
export const enrichedApps = derived(
  [appsStore, auth, sortBy],
  ([$store, $auth, $sortBy]) => {
    const enrichedApps = $store.apps
      ? $store.apps.map(app => ({
          ...app,
          deployed: app.status === AppStatus.DEPLOYED,
          lockedYou: app.lockedBy && app.lockedBy.email === $auth.user?.email,
          lockedOther: app.lockedBy && app.lockedBy.email !== $auth.user?.email,
          favourite: $auth.user?.appFavourites?.includes(app.appId),
        }))
      : []
    const enrichedApps: EnrichedApp[] = $store.apps.map(app => {
      const user = $auth.user
      return {
        ...app,
        deployed: app.status === AppStatus.DEPLOYED,
        lockedYou: app.lockedBy != null && app.lockedBy.email === user?.email,
        lockedOther: app.lockedBy != null && app.lockedBy.email !== user?.email,
        favourite: !!user?.appFavourites?.includes(app.appId),
      }
    })

    if ($sortBy === "status") {
      return enrichedApps.sort((a, b) => {
@ -1,43 +0,0 @@
import { writable, get } from "svelte/store"
import { API } from "api"
import { licensing } from "stores/portal"

export function createAuditLogsStore() {
  const { subscribe, update } = writable({
    events: {},
    logs: {},
  })

  async function search(opts = {}) {
    if (get(licensing).auditLogsEnabled) {
      const paged = await API.searchAuditLogs(opts)

      update(state => {
        return { ...state, logs: { ...paged, opts } }
      })

      return paged
    }
  }

  async function getEventDefinitions() {
    const events = await API.getEventDefinitions()

    update(state => {
      return { ...state, ...events }
    })
  }

  function getDownloadUrl(opts = {}) {
    return API.getDownloadUrl(opts)
  }

  return {
    subscribe,
    search,
    getEventDefinitions,
    getDownloadUrl,
  }
}

export const auditLogs = createAuditLogsStore()
@ -0,0 +1,45 @@
import { get } from "svelte/store"
import { API } from "api"
import { licensing } from "./licensing"
import { BudiStore } from "../BudiStore"
import {
  DownloadAuditLogsRequest,
  SearchAuditLogsRequest,
  SearchAuditLogsResponse,
} from "@budibase/types"

interface PortalAuditLogsStore {
  events?: Record<string, string>
  logs?: SearchAuditLogsResponse
}

export class AuditLogsStore extends BudiStore<PortalAuditLogsStore> {
  constructor() {
    super({})
  }

  async search(opts: SearchAuditLogsRequest = {}) {
    if (get(licensing).auditLogsEnabled) {
      const res = await API.searchAuditLogs(opts)
      this.update(state => ({
        ...state,
        logs: res,
      }))
      return res
    }
  }

  async getEventDefinitions() {
    const res = await API.getEventDefinitions()
    this.update(state => ({
      ...state,
      events: res.events,
    }))
  }

  getDownloadUrl(opts: DownloadAuditLogsRequest = {}) {
    return API.getDownloadUrl(opts)
  }
}

export const auditLogs = new AuditLogsStore()
@ -1,172 +0,0 @@
|
|||
import { derived, writable, get } from "svelte/store"
|
||||
import { API } from "api"
|
||||
import { admin } from "stores/portal"
|
||||
import analytics from "analytics"
|
||||
|
||||
export function createAuthStore() {
|
||||
const auth = writable({
|
||||
user: null,
|
||||
accountPortalAccess: false,
|
||||
tenantId: "default",
|
||||
tenantSet: false,
|
||||
loaded: false,
|
||||
postLogout: false,
|
||||
})
|
||||
const store = derived(auth, $store => {
|
||||
return {
|
||||
user: $store.user,
|
||||
accountPortalAccess: $store.accountPortalAccess,
|
||||
tenantId: $store.tenantId,
|
||||
tenantSet: $store.tenantSet,
|
||||
loaded: $store.loaded,
|
||||
postLogout: $store.postLogout,
|
||||
isSSO: !!$store.user?.provider,
|
||||
}
|
||||
})
|
||||
|
||||
function setUser(user) {
|
||||
auth.update(store => {
|
||||
store.loaded = true
|
||||
store.user = user
|
||||
store.accountPortalAccess = user?.accountPortalAccess
|
||||
if (user) {
|
||||
store.tenantId = user.tenantId || "default"
|
||||
store.tenantSet = true
|
||||
}
|
||||
return store
|
||||
})
|
||||
|
||||
if (user) {
|
||||
analytics
|
||||
.activate()
|
||||
.then(() => {
|
||||
analytics.identify(user._id)
|
||||
})
|
||||
.catch(() => {
|
||||
// This request may fail due to browser extensions blocking requests
|
||||
// containing the word analytics, so we don't want to spam users with
|
||||
// an error here.
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function setOrganisation(tenantId) {
|
||||
const prevId = get(store).tenantId
|
||||
auth.update(store => {
|
||||
store.tenantId = tenantId
|
||||
store.tenantSet = !!tenantId
|
||||
return store
|
||||
})
|
||||
if (prevId !== tenantId) {
|
||||
// re-init admin after setting org
|
||||
await admin.init()
|
||||
}
|
||||
}
|
||||
|
||||
async function setInitInfo(info) {
|
||||
await API.setInitInfo(info)
|
||||
auth.update(store => {
|
||||
store.initInfo = info
|
||||
return store
|
||||
})
|
||||
return info
|
||||
}
|
||||
|
||||
function setPostLogout() {
|
||||
auth.update(store => {
|
||||
store.postLogout = true
|
||||
return store
|
||||
})
|
||||
}
|
||||
|
||||
async function getInitInfo() {
|
||||
const info = await API.getInitInfo()
|
||||
auth.update(store => {
|
||||
store.initInfo = info
|
||||
return store
|
||||
})
|
||||
return info
|
||||
}
|
||||
|
||||
const actions = {
|
||||
checkQueryString: async () => {
|
||||
const urlParams = new URLSearchParams(window.location.search)
|
||||
if (urlParams.has("tenantId")) {
|
||||
const tenantId = urlParams.get("tenantId")
|
||||
await setOrganisation(tenantId)
|
||||
}
|
||||
},
|
||||
setOrg: async tenantId => {
|
||||
await setOrganisation(tenantId)
|
||||
},
|
||||
getSelf: async () => {
|
||||
// We need to catch this locally as we never want this to fail, even
|
||||
// though normally we never want to swallow API errors at the store level.
|
||||
// We're either logged in or we aren't.
|
||||
// We also need to always update the loaded flag.
|
||||
try {
|
||||
const user = await API.fetchBuilderSelf()
|
||||
setUser(user)
|
||||
} catch (error) {
|
||||
setUser(null)
|
||||
}
|
||||
},
|
||||
login: async creds => {
|
||||
const tenantId = get(store).tenantId
|
||||
await API.logIn({
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
tenantId,
|
||||
})
|
||||
await actions.getSelf()
|
||||
},
|
||||
logout: async () => {
|
||||
await API.logOut()
|
||||
setPostLogout()
|
||||
setUser(null)
|
||||
await setInitInfo({})
|
||||
},
|
||||
updateSelf: async fields => {
|
||||
await API.updateSelf({ ...fields })
|
||||
// Refetch to enrich after update.
|
||||
try {
|
||||
const user = await API.fetchBuilderSelf()
|
||||
setUser(user)
|
||||
} catch (error) {
|
||||
setUser(null)
|
||||
}
|
||||
},
|
||||
forgotPassword: async email => {
|
||||
const tenantId = get(store).tenantId
|
||||
await API.requestForgotPassword({
|
||||
tenantId,
|
||||
email,
|
||||
})
|
||||
},
|
||||
resetPassword: async (password, resetCode) => {
|
||||
const tenantId = get(store).tenantId
|
||||
await API.resetPassword({
|
||||
tenantId,
|
||||
password,
|
||||
resetCode,
|
||||
})
|
||||
},
|
||||
generateAPIKey: async () => {
|
||||
return API.generateAPIKey()
|
||||
},
|
||||
fetchAPIKey: async () => {
|
||||
const info = await API.fetchDeveloperInfo()
|
||||
return info?.apiKey
|
||||
},
|
||||
}
|
||||
|
||||
return {
|
||||
subscribe: store.subscribe,
|
||||
setOrganisation,
|
||||
getInitInfo,
|
||||
setInitInfo,
|
||||
...actions,
|
||||
}
|
||||
}
|
||||
|
||||
export const auth = createAuthStore()
|
|
@ -0,0 +1,168 @@
|
|||
import { get } from "svelte/store"
|
||||
import { API } from "api"
|
||||
import { admin } from "stores/portal"
|
||||
import analytics from "analytics"
|
||||
import { BudiStore } from "stores/BudiStore"
|
||||
import {
|
||||
isSSOUser,
|
||||
SetInitInfoRequest,
|
||||
UpdateSelfRequest,
|
||||
User,
|
||||
} from "@budibase/types"
|
||||
|
||||
interface PortalAuthStore {
|
||||
user?: User
|
||||
initInfo?: Record<string, any>
|
||||
accountPortalAccess: boolean
|
||||
loaded: boolean
|
||||
isSSO: boolean
|
||||
tenantId: string
|
||||
tenantSet: boolean
|
||||
postLogout: boolean
|
||||
}
|
||||
|
||||
class AuthStore extends BudiStore<PortalAuthStore> {
|
||||
constructor() {
|
||||
super({
|
||||
accountPortalAccess: false,
|
||||
tenantId: "default",
|
||||
tenantSet: false,
|
||||
loaded: false,
|
||||
postLogout: false,
|
||||
isSSO: false,
|
||||
})
|
||||
}
|
||||
|
||||
setUser(user?: User) {
|
||||
this.set({
|
||||
loaded: true,
|
||||
user: user,
|
||||
accountPortalAccess: !!user?.accountPortalAccess,
|
||||
tenantId: user?.tenantId || "default",
|
||||
tenantSet: !!user,
|
||||
isSSO: user != null && isSSOUser(user),
|
||||
postLogout: false,
|
||||
})
|
||||
|
||||
if (user) {
|
||||
analytics
|
||||
.activate()
|
||||
.then(() => {
|
||||
analytics.identify(user._id)
|
||||
})
|
||||
.catch(() => {
|
||||
// This request may fail due to browser extensions blocking requests
|
||||
// containing the word analytics, so we don't want to spam users with
|
||||
// an error here.
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async setOrganisation(tenantId: string) {
|
||||
const prevId = get(this.store).tenantId
|
||||
auth.update(store => {
|
||||
store.tenantId = tenantId
|
||||
store.tenantSet = !!tenantId
|
||||
return store
|
||||
})
|
||||
if (prevId !== tenantId) {
|
||||
// re-init admin after setting org
|
||||
await admin.init()
|
||||
}
|
||||
}
|
||||
|
||||
async setInitInfo(info: SetInitInfoRequest) {
|
||||
await API.setInitInfo(info)
|
||||
auth.update(store => {
|
||||
store.initInfo = info
|
||||
return store
|
||||
})
|
||||
return info
|
||||
}
|
||||
|
||||
setPostLogout() {
|
||||
auth.update(store => {
|
||||
store.postLogout = true
|
||||
return store
|
||||
})
|
||||
}
|
||||
|
||||
async getInitInfo() {
|
||||
const info = await API.getInitInfo()
|
||||
auth.update(store => {
|
||||
store.initInfo = info
|
||||
return store
|
||||
})
|
||||
return info
|
||||
}
|
||||
|
||||
async checkQueryString() {
|
||||
const urlParams = new URLSearchParams(window.location.search)
|
||||
const tenantId = urlParams.get("tenantId")
|
||||
if (tenantId) {
|
||||
await this.setOrganisation(tenantId)
|
||||
}
|
||||
}
|
||||
|
||||
async setOrg(tenantId: string) {
|
||||
await this.setOrganisation(tenantId)
|
||||
}
|
||||
|
||||
async getSelf() {
|
||||
// We need to catch this locally as we never want this to fail, even
|
||||
// though normally we never want to swallow API errors at the store level.
|
||||
// We're either logged in or we aren't.
|
||||
// We also need to always update the loaded flag.
|
||||
try {
|
||||
const user = await API.fetchBuilderSelf()
|
||||
this.setUser(user)
|
||||
} catch (error) {
|
||||
this.setUser()
|
||||
}
|
||||
}
|
||||
|
||||
async login(username: string, password: string) {
|
||||
const tenantId = get(this.store).tenantId
|
||||
await API.logIn(tenantId, username, password)
|
||||
await this.getSelf()
|
||||
}
|
||||
|
||||
async logout() {
|
||||
await API.logOut()
|
||||
this.setPostLogout()
|
||||
this.setUser()
|
||||
await this.setInitInfo({})
|
||||
}
|
||||
|
||||
async updateSelf(fields: UpdateSelfRequest) {
|
||||
await API.updateSelf(fields)
|
||||
// Refetch to enrich after update.
|
||||
try {
|
||||
const user = await API.fetchBuilderSelf()
|
||||
this.setUser(user)
|
||||
} catch (error) {
|
||||
this.setUser()
|
||||
}
|
||||
}
|
||||
|
||||
async forgotPassword(email: string) {
|
||||
const tenantId = get(this.store).tenantId
|
||||
await API.requestForgotPassword(tenantId, email)
|
||||
}
|
||||
|
||||
async resetPassword(password: string, resetCode: string) {
|
||||
const tenantId = get(this.store).tenantId
|
||||
await API.resetPassword(tenantId, password, resetCode)
|
||||
}
|
||||
|
||||
async generateAPIKey() {
|
||||
return API.generateAPIKey()
|
||||
}
|
||||
|
||||
async fetchAPIKey() {
|
||||
const info = await API.fetchDeveloperInfo()
|
||||
return info?.apiKey
|
||||
}
|
||||
}
|
||||
|
||||
export const auth = new AuthStore()
|
|
@ -11,40 +11,28 @@ export function createBackupsStore() {
|
|||
})
|
||||
}
|
||||
|
||||
async function searchBackups({
|
||||
appId,
|
||||
trigger,
|
||||
type,
|
||||
page,
|
||||
startDate,
|
||||
endDate,
|
||||
}) {
|
||||
return API.searchBackups({ appId, trigger, type, page, startDate, endDate })
|
||||
async function searchBackups(appId, opts) {
|
||||
return API.searchBackups(appId, opts)
|
||||
}
|
||||
|
||||
async function restoreBackup({ appId, backupId, name }) {
|
||||
return API.restoreBackup({ appId, backupId, name })
|
||||
async function restoreBackup(appId, backupId, name) {
|
||||
return API.restoreBackup(appId, backupId, name)
|
||||
}
|
||||
|
||||
async function deleteBackup({ appId, backupId }) {
|
||||
return API.deleteBackup({ appId, backupId })
|
||||
async function deleteBackup(appId, backupId) {
|
||||
return API.deleteBackup(appId, backupId)
|
||||
}
|
||||
|
||||
async function createManualBackup(appId) {
|
||||
return API.createManualBackup(appId)
|
||||
}
|
||||
|
||||
async function updateBackup({ appId, backupId, name }) {
|
||||
return API.updateBackup({ appId, backupId, name })
|
||||
}
|
||||
|
||||
return {
|
||||
createManualBackup,
|
||||
searchBackups,
|
||||
selectBackup,
|
||||
deleteBackup,
|
||||
restoreBackup,
|
||||
updateBackup,
|
||||
subscribe: store.subscribe,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@ vi.mock("api", () => {
|
|||
restoreBackup: vi.fn(() => "restoreBackupReturn"),
|
||||
deleteBackup: vi.fn(() => "deleteBackupReturn"),
|
||||
createManualBackup: vi.fn(() => "createManualBackupReturn"),
|
||||
updateBackup: vi.fn(() => "updateBackupReturn"),
|
||||
},
|
||||
}
|
||||
})
|
||||
|
@ -61,8 +60,7 @@ describe("backups store", () => {
|
|||
ctx.page = "page"
|
||||
ctx.startDate = "startDate"
|
||||
ctx.endDate = "endDate"
|
||||
ctx.value = await ctx.returnedStore.searchBackups({
|
||||
appId: ctx.appId,
|
||||
ctx.value = await ctx.returnedStore.searchBackups(ctx.appId, {
|
||||
trigger: ctx.trigger,
|
||||
type: ctx.type,
|
||||
page: ctx.page,
|
||||
|
@ -73,8 +71,7 @@ describe("backups store", () => {
|
|||
|
||||
it("calls and returns the API searchBackups method", ctx => {
|
||||
expect(API.searchBackups).toHaveBeenCalledTimes(1)
|
||||
expect(API.searchBackups).toHaveBeenCalledWith({
|
||||
appId: ctx.appId,
|
||||
expect(API.searchBackups).toHaveBeenCalledWith(ctx.appId, {
|
||||
trigger: ctx.trigger,
|
||||
type: ctx.type,
|
||||
page: ctx.page,
|
||||
|
@ -103,18 +100,12 @@ describe("backups store", () => {
|
|||
beforeEach(async ctx => {
|
||||
ctx.appId = "appId"
|
||||
ctx.backupId = "backupId"
|
||||
ctx.value = await ctx.returnedStore.deleteBackup({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
})
|
||||
ctx.value = await ctx.returnedStore.deleteBackup(ctx.appId, ctx.backupId)
|
||||
})
|
||||
|
||||
it("calls and returns the API deleteBackup method", ctx => {
|
||||
expect(API.deleteBackup).toHaveBeenCalledTimes(1)
|
||||
expect(API.deleteBackup).toHaveBeenCalledWith({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
})
|
||||
expect(API.deleteBackup).toHaveBeenCalledWith(ctx.appId, ctx.backupId)
|
||||
expect(ctx.value).toBe("deleteBackupReturn")
|
||||
})
|
||||
})
|
||||
|
@ -124,47 +115,24 @@ describe("backups store", () => {
|
|||
ctx.appId = "appId"
|
||||
ctx.backupId = "backupId"
|
||||
ctx.$name = "name" // `name` is used by some sort of internal ctx thing and is readonly
|
||||
ctx.value = await ctx.returnedStore.restoreBackup({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
name: ctx.$name,
|
||||
})
|
||||
ctx.value = await ctx.returnedStore.restoreBackup(
|
||||
ctx.appId,
|
||||
ctx.backupId,
|
||||
ctx.$name
|
||||
)
|
||||
})
|
||||
|
||||
it("calls and returns the API restoreBackup method", ctx => {
|
||||
expect(API.restoreBackup).toHaveBeenCalledTimes(1)
|
||||
expect(API.restoreBackup).toHaveBeenCalledWith({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
name: ctx.$name,
|
||||
})
|
||||
expect(API.restoreBackup).toHaveBeenCalledWith(
|
||||
ctx.appId,
|
||||
ctx.backupId,
|
||||
ctx.$name
|
||||
)
|
||||
expect(ctx.value).toBe("restoreBackupReturn")
|
||||
})
|
||||
})
|
||||
|
||||
describe("updateBackup", () => {
|
||||
beforeEach(async ctx => {
|
||||
ctx.appId = "appId"
|
||||
ctx.backupId = "backupId"
|
||||
ctx.$name = "name" // `name` is used by some sort of internal ctx thing and is readonly
|
||||
ctx.value = await ctx.returnedStore.updateBackup({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
name: ctx.$name,
|
||||
})
|
||||
})
|
||||
|
||||
it("calls and returns the API updateBackup method", ctx => {
|
||||
expect(API.updateBackup).toHaveBeenCalledTimes(1)
|
||||
expect(API.updateBackup).toHaveBeenCalledWith({
|
||||
appId: ctx.appId,
|
||||
backupId: ctx.backupId,
|
||||
name: ctx.$name,
|
||||
})
|
||||
expect(ctx.value).toBe("updateBackupReturn")
|
||||
})
|
||||
})
|
||||
|
||||
describe("subscribe", () => {
|
||||
it("calls and returns the API updateBackup method", ctx => {
|
||||
expect(ctx.returnedStore.subscribe).toBe(ctx.writableReturn.subscribe)
|
||||
|
|
|
@ -46,10 +46,7 @@ export function createGroupsStore() {
|
|||
},
|
||||
|
||||
delete: async group => {
|
||||
await API.deleteGroup({
|
||||
id: group._id,
|
||||
rev: group._rev,
|
||||
})
|
||||
await API.deleteGroup(group._id, group._rev)
|
||||
store.update(state => {
|
||||
state = state.filter(state => state._id !== group._id)
|
||||
return state
|
||||
|
@ -89,11 +86,11 @@ export function createGroupsStore() {
|
|||
},
|
||||
|
||||
addGroupAppBuilder: async (groupId, appId) => {
|
||||
return await API.addGroupAppBuilder({ groupId, appId })
|
||||
return await API.addGroupAppBuilder(groupId, appId)
|
||||
},
|
||||
|
||||
removeGroupAppBuilder: async (groupId, appId) => {
|
||||
return await API.removeGroupAppBuilder({ groupId, appId })
|
||||
return await API.removeGroupAppBuilder(groupId, appId)
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -24,6 +24,7 @@ export const createLicensingStore = () => {
|
|||
scimEnabled: false,
|
||||
budibaseAIEnabled: false,
|
||||
customAIConfigsEnabled: false,
|
||||
auditLogsEnabled: false,
|
||||
// the currently used quotas from the db
|
||||
quotaUsage: undefined,
|
||||
// derived quota metrics for percentages used
|
||||
|
|
|
@ -2,13 +2,13 @@ import { writable } from "svelte/store"
|
|||
|
||||
type GotoFuncType = (path: string) => void
|
||||
|
||||
interface Store {
|
||||
interface PortalNavigationStore {
|
||||
initialisated: boolean
|
||||
goto: GotoFuncType
|
||||
}
|
||||
|
||||
export function createNavigationStore() {
|
||||
const store = writable<Store>({
|
||||
const store = writable<PortalNavigationStore>({
|
||||
initialisated: false,
|
||||
goto: undefined as any,
|
||||
})
|
||||
|
|
|
@ -1,17 +1,13 @@
|
|||
import { writable } from "svelte/store"
|
||||
import { PluginSource } from "constants/index"
|
||||
|
||||
import { Plugin } from "@budibase/types"
|
||||
import { API } from "api"
|
||||
|
||||
interface Plugin {
|
||||
_id: string
|
||||
}
|
||||
|
||||
export function createPluginsStore() {
|
||||
const { subscribe, set, update } = writable<Plugin[]>([])
|
||||
|
||||
async function load() {
|
||||
const plugins = await API.getPlugins()
|
||||
const plugins: Plugin[] = await API.getPlugins()
|
||||
set(plugins)
|
||||
}
|
||||
|
||||
|
|
|
@ -35,7 +35,24 @@ export function createUsersStore() {
|
|||
}
|
||||
|
||||
async function invite(payload) {
|
||||
return API.inviteUsers(payload)
|
||||
const users = payload.map(user => {
|
||||
let builder = undefined
|
||||
if (user.admin || user.builder) {
|
||||
builder = { global: true }
|
||||
} else if (user.creator) {
|
||||
builder = { creator: true }
|
||||
}
|
||||
return {
|
||||
email: user.email,
|
||||
userInfo: {
|
||||
admin: user.admin ? { global: true } : undefined,
|
||||
builder,
|
||||
userGroups: user.groups,
|
||||
roles: user.apps ? user.apps : undefined,
|
||||
},
|
||||
}
|
||||
})
|
||||
return API.inviteUsers(users)
|
||||
}
|
||||
|
||||
async function removeInvites(payload) {
|
||||
|
@ -60,7 +77,7 @@ export function createUsersStore() {
|
|||
}
|
||||
|
||||
async function updateInvite(invite) {
|
||||
return API.updateUserInvite(invite)
|
||||
return API.updateUserInvite(invite.code, invite)
|
||||
}
|
||||
|
||||
async function create(data) {
|
||||
|
@ -93,10 +110,7 @@ export function createUsersStore() {
|
|||
|
||||
return body
|
||||
})
|
||||
const response = await API.createUsers({
|
||||
users: mappedUsers,
|
||||
groups: data.groups,
|
||||
})
|
||||
const response = await API.createUsers(mappedUsers, data.groups)
|
||||
|
||||
// re-search from first page
|
||||
await search()
|
||||
|
@ -108,8 +122,8 @@ export function createUsersStore() {
|
|||
update(users => users.filter(user => user._id !== id))
|
||||
}
|
||||
|
||||
async function getUserCountByApp({ appId }) {
|
||||
return await API.getUserCountByApp({ appId })
|
||||
async function getUserCountByApp(appId) {
|
||||
return await API.getUserCountByApp(appId)
|
||||
}
|
||||
|
||||
async function bulkDelete(users) {
|
||||
|
@ -121,11 +135,11 @@ export function createUsersStore() {
|
|||
}
|
||||
|
||||
async function addAppBuilder(userId, appId) {
|
||||
return await API.addAppBuilder({ userId, appId })
|
||||
return await API.addAppBuilder(userId, appId)
|
||||
}
|
||||
|
||||
async function removeAppBuilder(userId, appId) {
|
||||
return await API.removeAppBuilder({ userId, appId })
|
||||
return await API.removeAppBuilder(userId, appId)
|
||||
}
|
||||
|
||||
async function getAccountHolder() {
|
||||
|
|
|
@ -77,12 +77,11 @@ export const patchAPI = API => {
|
|||
return await enrichRows(rows, tableId)
|
||||
}
|
||||
const searchTable = API.searchTable
|
||||
API.searchTable = async params => {
|
||||
const tableId = params?.tableId
|
||||
const output = await searchTable(params)
|
||||
API.searchTable = async (sourceId, opts) => {
|
||||
const output = await searchTable(sourceId, opts)
|
||||
return {
|
||||
...output,
|
||||
rows: await enrichRows(output?.rows, tableId),
|
||||
rows: await enrichRows(output.rows, sourceId),
|
||||
}
|
||||
}
|
||||
const fetchViewData = API.fetchViewData
|
||||
|
|
|
@ -49,10 +49,7 @@
|
|||
data.append("file", fileList[i])
|
||||
}
|
||||
try {
|
||||
return await API.uploadAttachment({
|
||||
data,
|
||||
tableId: formContext?.dataSource?.tableId,
|
||||
})
|
||||
return await API.uploadAttachment(formContext?.dataSource?.tableId, data)
|
||||
} catch (error) {
|
||||
return []
|
||||
}
|
||||
|
|
|
@ -80,12 +80,7 @@
|
|||
const upload = async () => {
|
||||
loading = true
|
||||
try {
|
||||
const res = await API.externalUpload({
|
||||
datasourceId,
|
||||
bucket,
|
||||
key,
|
||||
data,
|
||||
})
|
||||
const res = await API.externalUpload(datasourceId, bucket, key, data)
|
||||
notificationStore.actions.success("File uploaded successfully")
|
||||
loading = false
|
||||
return res
|
||||
|
|
|
@ -31,10 +31,10 @@
|
|||
let attachRequest = new FormData()
|
||||
attachRequest.append("file", signatureFile)
|
||||
|
||||
const resp = await API.uploadAttachment({
|
||||
data: attachRequest,
|
||||
tableId: formContext?.dataSource?.tableId,
|
||||
})
|
||||
const resp = await API.uploadAttachment(
|
||||
formContext?.dataSource?.tableId,
|
||||
attachRequest
|
||||
)
|
||||
const [signatureAttachment] = resp
|
||||
updateValue = signatureAttachment
|
||||
} else {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { makePropSafe as safe } from "@budibase/string-templates"
|
||||
import { API } from "../api/index.js"
|
||||
import { UILogicalOperator } from "@budibase/types"
|
||||
import { OnEmptyFilter } from "@budibase/frontend-core/src/constants.js"
|
||||
import { Constants } from "@budibase/frontend-core"
|
||||
|
||||
// Map of data types to component types for search fields inside blocks
|
||||
const schemaComponentMap = {
|
||||
|
@ -108,7 +108,7 @@ export const enrichFilter = (filter, columns, formId) => {
|
|||
|
||||
return {
|
||||
logicalOperator: UILogicalOperator.ALL,
|
||||
onEmptyFilter: OnEmptyFilter.RETURN_ALL,
|
||||
onEmptyFilter: Constants.OnEmptyFilter.RETURN_ALL,
|
||||
groups: [
|
||||
...(filter?.groups || []),
|
||||
{
|
||||
|
|
|
@ -147,7 +147,7 @@ const fetchRowHandler = async action => {
|
|||
|
||||
if (tableId && rowId) {
|
||||
try {
|
||||
const row = await API.fetchRow({ tableId, rowId })
|
||||
const row = await API.fetchRow(tableId, rowId)
|
||||
|
||||
return { row }
|
||||
} catch (error) {
|
||||
|
@ -192,7 +192,7 @@ const deleteRowHandler = async action => {
|
|||
return false
|
||||
}
|
||||
|
||||
const resp = await API.deleteRows({ tableId, rows: requestConfig })
|
||||
const resp = await API.deleteRows(tableId, requestConfig)
|
||||
|
||||
if (!notificationOverride) {
|
||||
notificationStore.actions.success(
|
||||
|
@ -251,17 +251,14 @@ const navigationHandler = action => {
|
|||
}
|
||||
|
||||
const queryExecutionHandler = async action => {
|
||||
const { datasourceId, queryId, queryParams, notificationOverride } =
|
||||
action.parameters
|
||||
const { queryId, queryParams, notificationOverride } = action.parameters
|
||||
try {
|
||||
const query = await API.fetchQueryDefinition(queryId)
|
||||
if (query?.datasourceId == null) {
|
||||
notificationStore.actions.error("That query couldn't be found")
|
||||
return false
|
||||
}
|
||||
const result = await API.executeQuery({
|
||||
datasourceId,
|
||||
queryId,
|
||||
const result = await API.executeQuery(queryId, {
|
||||
parameters: queryParams,
|
||||
})
|
||||
|
||||
|
@ -381,10 +378,8 @@ const exportDataHandler = async action => {
|
|||
if (typeof rows[0] !== "string") {
|
||||
rows = rows.map(row => row._id)
|
||||
}
|
||||
const data = await API.exportRows({
|
||||
tableId,
|
||||
const data = await API.exportRows(tableId, type, {
|
||||
rows,
|
||||
format: type,
|
||||
columns: columns?.map(column => column.name || column),
|
||||
delimiter,
|
||||
customHeaders,
|
||||
|
@ -454,12 +449,7 @@ const downloadFileHandler = async action => {
|
|||
const { type } = action.parameters
|
||||
if (type === "attachment") {
|
||||
const { tableId, rowId, attachmentColumn } = action.parameters
|
||||
const res = await API.downloadAttachment(
|
||||
tableId,
|
||||
rowId,
|
||||
attachmentColumn,
|
||||
{ suppressErrors: true }
|
||||
)
|
||||
const res = await API.downloadAttachment(tableId, rowId, attachmentColumn)
|
||||
await downloadStream(res)
|
||||
return
|
||||
}
|
||||
|
@ -495,11 +485,7 @@ const downloadFileHandler = async action => {
|
|||
|
||||
const rowActionHandler = async action => {
|
||||
const { resourceId, rowId, rowActionId } = action.parameters
|
||||
await API.rowActions.trigger({
|
||||
rowActionId,
|
||||
sourceId: resourceId,
|
||||
rowId,
|
||||
})
|
||||
await API.rowActions.trigger(resourceId, rowActionId, rowId)
|
||||
// Refresh related datasources
|
||||
await dataSourceStore.actions.invalidateDataSource(resourceId, {
|
||||
invalidateRelationships: true,
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
"description": "Budibase frontend core libraries used in builder and client",
|
||||
"author": "Budibase",
|
||||
"license": "MPL-2.0",
|
||||
"svelte": "src/index.js",
|
||||
"svelte": "./src/index.ts",
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "*",
|
||||
"@budibase/shared-core": "*",
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
export const buildAIEndpoints = API => ({
|
||||
/**
|
||||
* Generates a cron expression from a prompt
|
||||
*/
|
||||
generateCronExpression: async ({ prompt }) => {
|
||||
return await API.post({
|
||||
url: "/api/ai/cron",
|
||||
body: { prompt },
|
||||
})
|
||||
},
|
||||
})
|
|
@ -0,0 +1,17 @@
|
|||
import { BaseAPIClient } from "./types"
|
||||
|
||||
export interface AIEndpoints {
|
||||
generateCronExpression: (prompt: string) => Promise<{ message: string }>
|
||||
}
|
||||
|
||||
export const buildAIEndpoints = (API: BaseAPIClient): AIEndpoints => ({
|
||||
/**
|
||||
* Generates a cron expression from a prompt
|
||||
*/
|
||||
generateCronExpression: async prompt => {
|
||||
return await API.post({
|
||||
url: "/api/ai/cron",
|
||||
body: { prompt },
|
||||
})
|
||||
},
|
||||
})
|
|
@ -1,17 +0,0 @@
|
|||
export const buildAnalyticsEndpoints = API => ({
|
||||
/**
|
||||
* Gets the current status of analytics for this environment
|
||||
*/
|
||||
getAnalyticsStatus: async () => {
|
||||
return await API.get({
|
||||
url: "/api/bbtel",
|
||||
})
|
||||
},
|
||||
analyticsPing: async ({ source, embedded }) => {
|
||||
const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone
|
||||
return await API.post({
|
||||
url: "/api/bbtel/ping",
|
||||
body: { source, timezone, embedded },
|
||||
})
|
||||
},
|
||||
})
|
|
@ -0,0 +1,39 @@
|
|||
import { BaseAPIClient } from "./types"
|
||||
import {
|
||||
AnalyticsEnabledResponse,
|
||||
AnalyticsPingRequest,
|
||||
AnalyticsPingResponse,
|
||||
} from "@budibase/types"
|
||||
|
||||
export interface AnalyticsEndpoints {
|
||||
getAnalyticsStatus: () => Promise<AnalyticsEnabledResponse>
|
||||
analyticsPing: (
|
||||
payload: Omit<AnalyticsPingRequest, "timezone">
|
||||
) => Promise<AnalyticsPingResponse>
|
||||
}
|
||||
|
||||
export const buildAnalyticsEndpoints = (
|
||||
API: BaseAPIClient
|
||||
): AnalyticsEndpoints => ({
|
||||
/**
|
||||
* Gets the current status of analytics for this environment
|
||||
*/
|
||||
getAnalyticsStatus: async () => {
|
||||
return await API.get({
|
||||
url: "/api/bbtel",
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Notifies analytics of a certain environment
|
||||
*/
|
||||
analyticsPing: async request => {
|
||||
return await API.post<AnalyticsPingRequest, AnalyticsPingResponse>({
|
||||
url: "/api/bbtel/ping",
|
||||
body: {
|
||||
...request,
|
||||
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
|
||||
},
|
||||
})
|
||||
},
|
||||
})
|
|
@ -1,6 +1,72 @@
|
|||
import { sdk } from "@budibase/shared-core"
|
||||
import { BaseAPIClient } from "./types"
|
||||
import {
|
||||
AddAppSampleDataResponse,
|
||||
ClearDevLockResponse,
|
||||
CreateAppRequest,
|
||||
CreateAppResponse,
|
||||
DeleteAppResponse,
|
||||
DuplicateAppRequest,
|
||||
DuplicateAppResponse,
|
||||
FetchAppDefinitionResponse,
|
||||
FetchAppPackageResponse,
|
||||
FetchAppsResponse,
|
||||
FetchDeploymentResponse,
|
||||
GetDiagnosticsResponse,
|
||||
ImportToUpdateAppRequest,
|
||||
ImportToUpdateAppResponse,
|
||||
PublishAppResponse,
|
||||
RevertAppClientResponse,
|
||||
RevertAppResponse,
|
||||
SetRevertableAppVersionRequest,
|
||||
SetRevertableAppVersionResponse,
|
||||
SyncAppResponse,
|
||||
UnpublishAppResponse,
|
||||
UpdateAppClientResponse,
|
||||
UpdateAppRequest,
|
||||
UpdateAppResponse,
|
||||
} from "@budibase/types"
|
||||
|
||||
export const buildAppEndpoints = API => ({
|
||||
export interface AppEndpoints {
|
||||
fetchAppPackage: (appId: string) => Promise<FetchAppPackageResponse>
|
||||
saveAppMetadata: (
|
||||
appId: string,
|
||||
metadata: UpdateAppRequest
|
||||
) => Promise<UpdateAppResponse>
|
||||
unpublishApp: (appId: string) => Promise<UnpublishAppResponse>
|
||||
publishAppChanges: (appId: string) => Promise<PublishAppResponse>
|
||||
revertAppChanges: (appId: string) => Promise<RevertAppResponse>
|
||||
updateAppClientVersion: (appId: string) => Promise<UpdateAppClientResponse>
|
||||
revertAppClientVersion: (appId: string) => Promise<RevertAppClientResponse>
|
||||
releaseAppLock: (appId: string) => Promise<ClearDevLockResponse>
|
||||
getAppDeployments: () => Promise<FetchDeploymentResponse>
|
||||
createApp: (app: CreateAppRequest) => Promise<CreateAppResponse>
|
||||
deleteApp: (appId: string) => Promise<DeleteAppResponse>
|
||||
duplicateApp: (
|
||||
appId: string,
|
||||
app: DuplicateAppRequest
|
||||
) => Promise<DuplicateAppResponse>
|
||||
updateAppFromExport: (
|
||||
appId: string,
|
||||
body: ImportToUpdateAppRequest
|
||||
) => Promise<ImportToUpdateAppResponse>
|
||||
fetchSystemDebugInfo: () => Promise<GetDiagnosticsResponse>
|
||||
syncApp: (appId: string) => Promise<SyncAppResponse>
|
||||
getApps: () => Promise<FetchAppsResponse>
|
||||
fetchComponentLibDefinitions: (
|
||||
appId: string
|
||||
) => Promise<FetchAppDefinitionResponse>
|
||||
setRevertableVersion: (
|
||||
appId: string,
|
||||
revertableVersion: string
|
||||
) => Promise<SetRevertableAppVersionResponse>
|
||||
addSampleData: (appId: string) => Promise<AddAppSampleDataResponse>
|
||||
|
||||
// Missing request or response types
|
||||
importApps: (apps: any) => Promise<any>
|
||||
}
|
||||
|
||||
export const buildAppEndpoints = (API: BaseAPIClient): AppEndpoints => ({
|
||||
/**
|
||||
* Fetches screen definition for an app.
|
||||
* @param appId the ID of the app to fetch from
|
||||
|
@ -16,7 +82,7 @@ export const buildAppEndpoints = API => ({
|
|||
* @param appId the ID of the app to update
|
||||
* @param metadata the app metadata to save
|
||||
*/
|
||||
saveAppMetadata: async ({ appId, metadata }) => {
|
||||
saveAppMetadata: async (appId, metadata) => {
|
||||
return await API.put({
|
||||
url: `/api/applications/${appId}`,
|
||||
body: metadata,
|
||||
|
@ -87,7 +153,7 @@ export const buildAppEndpoints = API => ({
|
|||
* Duplicate an existing app
|
||||
* @param app the app to dupe
|
||||
*/
|
||||
duplicateApp: async (app, appId) => {
|
||||
duplicateApp: async (appId, app) => {
|
||||
return await API.post({
|
||||
url: `/api/applications/${appId}/duplicate`,
|
||||
body: app,
|
||||
|
@ -184,7 +250,7 @@ export const buildAppEndpoints = API => ({
|
|||
/**
|
||||
* Fetches the definitions for component library components. This includes
|
||||
* their props and other metadata from components.json.
|
||||
* @param {string} appId - ID of the currently running app
|
||||
* @param appId ID of the currently running app
|
||||
*/
|
||||
fetchComponentLibDefinitions: async appId => {
|
||||
return await API.get({
|
||||
|
@ -192,14 +258,27 @@ export const buildAppEndpoints = API => ({
|
|||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds sample data to an app
|
||||
* @param appId the app ID
|
||||
*/
|
||||
addSampleData: async appId => {
|
||||
return await API.post({
|
||||
url: `/api/applications/${appId}/sample`,
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Sets the revertable version of an app.
|
||||
* Used when manually reverting to older client versions.
|
||||
* @param appId the app ID
|
||||
* @param revertableVersion the version number
|
||||
*/
|
||||
setRevertableVersion: async (appId, revertableVersion) => {
|
||||
return await API.post({
|
||||
return await API.post<
|
||||
SetRevertableAppVersionRequest,
|
||||
SetRevertableAppVersionResponse
|
||||
>({
|
||||
url: `/api/applications/${appId}/setRevertableVersion`,
|
||||
body: {
|
||||
revertableVersion,
|
|
@ -1,78 +0,0 @@
|
|||
export const buildAttachmentEndpoints = API => {
|
||||
/**
|
||||
* Generates a signed URL to upload a file to an external datasource.
|
||||
* @param datasourceId the ID of the datasource to upload to
|
||||
* @param bucket the name of the bucket to upload to
|
||||
* @param key the name of the file to upload to
|
||||
*/
|
||||
const getSignedDatasourceURL = async ({ datasourceId, bucket, key }) => {
|
||||
return await API.post({
|
||||
url: `/api/attachments/${datasourceId}/url`,
|
||||
body: { bucket, key },
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
getSignedDatasourceURL,
|
||||
|
||||
/**
|
||||
* Uploads an attachment to the server.
|
||||
* @param data the attachment to upload
|
||||
* @param tableId the table ID to upload to
|
||||
*/
|
||||
uploadAttachment: async ({ data, tableId }) => {
|
||||
return await API.post({
|
||||
url: `/api/attachments/${tableId}/upload`,
|
||||
body: data,
|
||||
json: false,
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Uploads an attachment to the server as a builder user from the builder.
|
||||
* @param data the data to upload
|
||||
*/
|
||||
uploadBuilderAttachment: async data => {
|
||||
return await API.post({
|
||||
url: "/api/attachments/process",
|
||||
body: data,
|
||||
json: false,
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Uploads a file to an external datasource.
|
||||
* @param datasourceId the ID of the datasource to upload to
|
||||
* @param bucket the name of the bucket to upload to
|
||||
* @param key the name of the file to upload to
|
||||
* @param data the file to upload
|
||||
*/
|
||||
externalUpload: async ({ datasourceId, bucket, key, data }) => {
|
||||
const { signedUrl, publicUrl } = await getSignedDatasourceURL({
|
||||
datasourceId,
|
||||
bucket,
|
||||
key,
|
||||
})
|
||||
await API.put({
|
||||
url: signedUrl,
|
||||
body: data,
|
||||
json: false,
|
||||
external: true,
|
||||
})
|
||||
return { publicUrl }
|
||||
},
|
||||
/**
|
||||
* Download an attachment from a row given its column name.
|
||||
* @param datasourceId the ID of the datasource to download from
|
||||
* @param rowId the ID of the row to download from
|
||||
* @param columnName the column name to download
|
||||
*/
|
||||
downloadAttachment: async (datasourceId, rowId, columnName, options) => {
|
||||
return await API.get({
|
||||
url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
|
||||
parseResponse: response => response,
|
||||
suppressErrors: options?.suppressErrors,
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
|
@ -0,0 +1,121 @@
|
|||
import {
|
||||
DownloadAttachmentResponse,
|
||||
GetSignedUploadUrlRequest,
|
||||
GetSignedUploadUrlResponse,
|
||||
ProcessAttachmentResponse,
|
||||
} from "@budibase/types"
|
||||
import { BaseAPIClient } from "./types"
|
||||
|
||||
export interface AttachmentEndpoints {
|
||||
downloadAttachment: (
|
||||
datasourceId: string,
|
||||
rowId: string,
|
||||
columnName: string
|
||||
) => Promise<DownloadAttachmentResponse>
|
||||
getSignedDatasourceURL: (
|
||||
datasourceId: string,
|
||||
bucket: string,
|
||||
key: string
|
||||
) => Promise<GetSignedUploadUrlResponse>
|
||||
uploadAttachment: (
|
||||
tableId: string,
|
||||
data: any
|
||||
) => Promise<ProcessAttachmentResponse>
|
||||
uploadBuilderAttachment: (data: any) => Promise<ProcessAttachmentResponse>
|
||||
externalUpload: (
|
||||
datasourceId: string,
|
||||
bucket: string,
|
||||
key: string,
|
||||
data: any
|
||||
) => Promise<{ publicUrl: string | undefined }>
|
||||
}
|
||||
|
||||
export const buildAttachmentEndpoints = (
|
||||
API: BaseAPIClient
|
||||
): AttachmentEndpoints => {
|
||||
const endpoints: Pick<AttachmentEndpoints, "getSignedDatasourceURL"> = {
|
||||
/**
|
||||
* Generates a signed URL to upload a file to an external datasource.
|
||||
* @param datasourceId the ID of the datasource to upload to
|
||||
* @param bucket the name of the bucket to upload to
|
||||
* @param key the name of the file to upload to
|
||||
*/
|
||||
getSignedDatasourceURL: async (datasourceId, bucket, key) => {
|
||||
return await API.post<
|
||||
GetSignedUploadUrlRequest,
|
||||
GetSignedUploadUrlResponse
|
||||
>({
|
||||
url: `/api/attachments/${datasourceId}/url`,
|
||||
body: { bucket, key },
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
return {
|
||||
...endpoints,
|
||||
|
||||
/**
|
||||
* Uploads an attachment to the server.
|
||||
* @param data the attachment to upload
|
||||
* @param tableId the table ID to upload to
|
||||
*/
|
||||
uploadAttachment: async (tableId, data) => {
|
||||
return await API.post({
|
||||
url: `/api/attachments/${tableId}/upload`,
|
||||
body: data,
|
||||
json: false,
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Uploads an attachment to the server as a builder user from the builder.
|
||||
* @param data the data to upload
|
||||
*/
|
||||
uploadBuilderAttachment: async data => {
|
||||
return await API.post({
|
||||
url: "/api/attachments/process",
|
||||
body: data,
|
||||
json: false,
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Uploads a file to an external datasource.
|
||||
* @param datasourceId the ID of the datasource to upload to
|
||||
* @param bucket the name of the bucket to upload to
|
||||
* @param key the name of the file to upload to
|
||||
* @param data the file to upload
|
||||
*/
|
||||
externalUpload: async (datasourceId, bucket, key, data) => {
|
||||
const { signedUrl, publicUrl } = await endpoints.getSignedDatasourceURL(
|
||||
datasourceId,
|
||||
bucket,
|
||||
key
|
||||
)
|
||||
if (!signedUrl) {
|
||||
return { publicUrl: undefined }
|
||||
}
|
||||
await API.put({
|
||||
url: signedUrl,
|
||||
body: data,
|
||||
json: false,
|
||||
external: true,
|
||||
})
|
||||
return { publicUrl }
|
||||
},
|
||||
|
||||
/**
|
||||
* Download an attachment from a row given its column name.
|
||||
* @param datasourceId the ID of the datasource to download from
|
||||
* @param rowId the ID of the row to download from
|
||||
* @param columnName the column name to download
|
||||
*/
|
||||
downloadAttachment: async (datasourceId, rowId, columnName) => {
|
||||
return await API.get({
|
||||
url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
|
||||
parseResponse: response => response as any,
|
||||
suppressErrors: true,
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
|
@ -1,63 +0,0 @@
|
|||
const buildOpts = ({
|
||||
bookmark,
|
||||
userIds,
|
||||
appIds,
|
||||
startDate,
|
||||
endDate,
|
||||
fullSearch,
|
||||
events,
|
||||
}) => {
|
||||
const opts = {}
|
||||
|
||||
if (bookmark) {
|
||||
opts.bookmark = bookmark
|
||||
}
|
||||
|
||||
if (startDate && endDate) {
|
||||
opts.startDate = startDate
|
||||
opts.endDate = endDate
|
||||
} else if (startDate && !endDate) {
|
||||
opts.startDate = startDate
|
||||
}
|
||||
|
||||
if (fullSearch) {
|
||||
opts.fullSearch = fullSearch
|
||||
}
|
||||
|
||||
if (events.length) {
|
||||
opts.events = events
|
||||
}
|
||||
|
||||
if (userIds.length) {
|
||||
opts.userIds = userIds
|
||||
}
|
||||
|
||||
if (appIds.length) {
|
||||
opts.appIds = appIds
|
||||
}
|
||||
|
||||
return opts
|
||||
}
|
||||
|
||||
export const buildAuditLogsEndpoints = API => ({
|
||||
/**
|
||||
* Gets a list of users in the current tenant.
|
||||
*/
|
||||
searchAuditLogs: async opts => {
|
||||
return await API.post({
|
||||
url: `/api/global/auditlogs/search`,
|
||||
body: buildOpts(opts),
|
||||
})
|
||||
},
|
||||
|
||||
getEventDefinitions: async () => {
|
||||
return await API.get({
|
||||
url: `/api/global/auditlogs/definitions`,
|
||||
})
|
||||
},
|
||||
|
||||
getDownloadUrl: opts => {
|
||||
const query = encodeURIComponent(JSON.stringify(opts))
|
||||
return `/api/global/auditlogs/download?query=${query}`
|
||||
},
|
||||
})
|
|
@ -0,0 +1,35 @@
|
|||
import {
|
||||
SearchAuditLogsRequest,
|
||||
SearchAuditLogsResponse,
|
||||
DefinitionsAuditLogsResponse,
|
||||
DownloadAuditLogsRequest,
|
||||
} from "@budibase/types"
|
||||
import { BaseAPIClient } from "./types"
|
||||
|
||||
export interface AuditLogEndpoints {
|
||||
searchAuditLogs: (
|
||||
opts: SearchAuditLogsRequest
|
||||
) => Promise<SearchAuditLogsResponse>
|
||||
getEventDefinitions: () => Promise<DefinitionsAuditLogsResponse>
|
||||
getDownloadUrl: (opts: DownloadAuditLogsRequest) => string
|
||||
}
|
||||
|
||||
export const buildAuditLogEndpoints = (
|
||||
API: BaseAPIClient
|
||||
): AuditLogEndpoints => ({
|
||||
searchAuditLogs: async opts => {
|
||||
return await API.post({
|
||||
url: `/api/global/auditlogs/search`,
|
||||
body: opts,
|
||||
})
|
||||
},
|
||||
getEventDefinitions: async () => {
|
||||
return await API.get({
|
||||
url: `/api/global/auditlogs/definitions`,
|
||||
})
|
||||
},
|
||||
getDownloadUrl: opts => {
|
||||
const query = encodeURIComponent(JSON.stringify(opts))
|
||||
return `/api/global/auditlogs/download?query=${query}`
|
||||
},
|
||||
})
|
|
@@ -1,12 +1,46 @@
-export const buildAuthEndpoints = API => ({
+import {
+  GetInitInfoResponse,
+  LoginRequest,
+  LoginResponse,
+  LogoutResponse,
+  PasswordResetRequest,
+  PasswordResetResponse,
+  PasswordResetUpdateRequest,
+  PasswordResetUpdateResponse,
+  SetInitInfoRequest,
+  SetInitInfoResponse,
+} from "@budibase/types"
+import { BaseAPIClient } from "./types"
+
+export interface AuthEndpoints {
+  logIn: (
+    tenantId: string,
+    username: string,
+    password: string
+  ) => Promise<LoginResponse>
+  logOut: () => Promise<LogoutResponse>
+  requestForgotPassword: (
+    tenantId: string,
+    email: string
+  ) => Promise<PasswordResetResponse>
+  resetPassword: (
+    tenantId: string,
+    password: string,
+    resetCode: string
+  ) => Promise<PasswordResetUpdateResponse>
+  setInitInfo: (info: SetInitInfoRequest) => Promise<SetInitInfoResponse>
+  getInitInfo: () => Promise<GetInitInfoResponse>
+}
+
+export const buildAuthEndpoints = (API: BaseAPIClient): AuthEndpoints => ({
   /**
    * Performs a login request.
    * @param tenantId the ID of the tenant to log in to
    * @param username the username (email)
    * @param password the password
    */
-  logIn: async ({ tenantId, username, password }) => {
-    return await API.post({
+  logIn: async (tenantId, username, password) => {
+    return await API.post<LoginRequest, LoginResponse>({
       url: `/api/global/auth/${tenantId}/login`,
       body: {
         username,

@@ -49,8 +83,8 @@ export const buildAuthEndpoints = API => ({
    * @param tenantId the ID of the tenant the user is in
    * @param email the email address of the user
    */
-  requestForgotPassword: async ({ tenantId, email }) => {
-    return await API.post({
+  requestForgotPassword: async (tenantId, email) => {
+    return await API.post<PasswordResetRequest, PasswordResetResponse>({
       url: `/api/global/auth/${tenantId}/reset`,
       body: {
         email,

@@ -64,8 +98,11 @@ export const buildAuthEndpoints = API => ({
    * @param password the new password to set
    * @param resetCode the reset code to authenticate the request
    */
-  resetPassword: async ({ tenantId, password, resetCode }) => {
-    return await API.post({
+  resetPassword: async (tenantId, password, resetCode) => {
+    return await API.post<
+      PasswordResetUpdateRequest,
+      PasswordResetUpdateResponse
+    >({
       url: `/api/global/auth/${tenantId}/reset/update`,
       body: {
         password,
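The practical effect of these signature changes shows up at the call site: positional arguments replace the old destructured-object style, and the API.post<Request, Response> generics type both the body and the resolved value. A brief usage sketch follows; the import path and surrounding wiring are assumptions, not part of this hunk.

// Usage sketch only — module path is hypothetical; the builder and types are from the diff above.
import { buildAuthEndpoints } from "./auth" // hypothetical path
import { BaseAPIClient } from "./types"

async function signInAndOut(API: BaseAPIClient) {
  const auth = buildAuthEndpoints(API)
  // Old style was logIn({ tenantId, username, password }); arguments are now positional.
  const session = await auth.logIn("default", "user@example.com", "hunter2")
  // `session` is a LoginResponse, so misspelled fields fail to compile.
  await auth.logOut()
  return session
}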
@@ -1,111 +0,0 @@
export const buildAutomationEndpoints = API => ({
  /**
   * Executes an automation. Must have "App Action" trigger.
   * @param automationId the ID of the automation to trigger
   * @param fields the fields to trigger the automation with
   */
  triggerAutomation: async ({ automationId, fields, timeout }) => {
    return await API.post({
      url: `/api/automations/${automationId}/trigger`,
      body: { fields, timeout },
    })
  },

  /**
   * Tests an automation with data.
   * @param automationId the ID of the automation to test
   * @param testData the test data to run against the automation
   */
  testAutomation: async ({ automationId, testData }) => {
    return await API.post({
      url: `/api/automations/${automationId}/test`,
      body: testData,
    })
  },

  /**
   * Gets a list of all automations.
   */
  getAutomations: async () => {
    return await API.get({
      url: "/api/automations",
    })
  },

  /**
   * Gets a list of all the definitions for blocks in automations.
   */
  getAutomationDefinitions: async () => {
    return await API.get({
      url: "/api/automations/definitions/list",
    })
  },

  /**
   * Creates an automation.
   * @param automation the automation to create
   */
  createAutomation: async automation => {
    return await API.post({
      url: "/api/automations",
      body: automation,
    })
  },

  /**
   * Updates an automation.
   * @param automation the automation to update
   */
  updateAutomation: async automation => {
    return await API.put({
      url: "/api/automations",
      body: automation,
    })
  },

  /**
   * Deletes an automation
   * @param automationId the ID of the automation to delete
   * @param automationRev the rev of the automation to delete
   */
  deleteAutomation: async ({ automationId, automationRev }) => {
    return await API.delete({
      url: `/api/automations/${automationId}/${automationRev}`,
    })
  },

  /**
   * Get the logs for the app, or by automation ID.
   * @param automationId The ID of the automation to get logs for.
   * @param startDate An ISO date string to state the start of the date range.
   * @param status The status, error or success.
   * @param page The page to retrieve.
   */
  getAutomationLogs: async ({ automationId, startDate, status, page }) => {
    return await API.post({
      url: "/api/automations/logs/search",
      body: {
        automationId,
        startDate,
        status,
        page,
      },
    })
  },

  /**
   * Clears automation log errors (which are creating notification) for
   * automation or the app.
   * @param automationId optional - the ID of the automation to clear errors for.
   * @param appId The app ID to clear errors for.
   */
  clearAutomationLogErrors: async ({ automationId, appId }) => {
    return await API.delete({
      url: "/api/automations/logs",
      body: {
        appId,
        automationId,
      },
    })
  },
})
@@ -0,0 +1,158 @@
import {
  ClearAutomationLogRequest,
  ClearAutomationLogResponse,
  CreateAutomationRequest,
  CreateAutomationResponse,
  DeleteAutomationResponse,
  FetchAutomationResponse,
  GetAutomationStepDefinitionsResponse,
  SearchAutomationLogsRequest,
  SearchAutomationLogsResponse,
  TestAutomationRequest,
  TestAutomationResponse,
  TriggerAutomationRequest,
  TriggerAutomationResponse,
  UpdateAutomationRequest,
  UpdateAutomationResponse,
} from "@budibase/types"
import { BaseAPIClient } from "./types"

export interface AutomationEndpoints {
  getAutomations: () => Promise<FetchAutomationResponse>
  createAutomation: (
    automation: CreateAutomationRequest
  ) => Promise<CreateAutomationResponse>
  updateAutomation: (
    automation: UpdateAutomationRequest
  ) => Promise<UpdateAutomationResponse>
  deleteAutomation: (
    automationId: string,
    automationRev: string
  ) => Promise<DeleteAutomationResponse>
  clearAutomationLogErrors: (
    automationId: string,
    appId: string
  ) => Promise<ClearAutomationLogResponse>
  triggerAutomation: (
    automationId: string,
    fields: Record<string, any>,
    timeout: number
  ) => Promise<TriggerAutomationResponse>
  testAutomation: (
    automationId: string,
    data: TestAutomationRequest
  ) => Promise<TestAutomationResponse>
  getAutomationDefinitions: () => Promise<GetAutomationStepDefinitionsResponse>
  getAutomationLogs: (
    options: SearchAutomationLogsRequest
  ) => Promise<SearchAutomationLogsResponse>
}

export const buildAutomationEndpoints = (
  API: BaseAPIClient
): AutomationEndpoints => ({
  /**
   * Executes an automation. Must have "App Action" trigger.
   * @param automationId the ID of the automation to trigger
   * @param fields the fields to trigger the automation with
   * @param timeout a timeout override
   */
  triggerAutomation: async (automationId, fields, timeout) => {
    return await API.post<TriggerAutomationRequest, TriggerAutomationResponse>({
      url: `/api/automations/${automationId}/trigger`,
      body: { fields, timeout },
    })
  },

  /**
   * Tests an automation with data.
   * @param automationId the ID of the automation to test
   * @param data the test data to run against the automation
   */
  testAutomation: async (automationId, data) => {
    return await API.post({
      url: `/api/automations/${automationId}/test`,
      body: data,
    })
  },

  /**
   * Gets a list of all automations.
   */
  getAutomations: async () => {
    return await API.get({
      url: "/api/automations",
    })
  },

  /**
   * Gets a list of all the definitions for blocks in automations.
   */
  getAutomationDefinitions: async () => {
    return await API.get({
      url: "/api/automations/definitions/list",
    })
  },

  /**
   * Creates an automation.
   * @param automation the automation to create
   */
  createAutomation: async automation => {
    return await API.post({
      url: "/api/automations",
      body: automation,
    })
  },

  /**
   * Updates an automation.
   * @param automation the automation to update
   */
  updateAutomation: async automation => {
    return await API.put({
      url: "/api/automations",
      body: automation,
    })
  },

  /**
   * Deletes an automation
   * @param automationId the ID of the automation to delete
   * @param automationRev the rev of the automation to delete
   */
  deleteAutomation: async (automationId, automationRev) => {
    return await API.delete({
      url: `/api/automations/${automationId}/${automationRev}`,
    })
  },

  /**
   * Get the logs for the app, or by automation ID.
   */
  getAutomationLogs: async data => {
    return await API.post({
      url: "/api/automations/logs/search",
      body: data,
    })
  },

  /**
   * Clears automation log errors (which create notifications) for an
   * automation or the app.
   * @param automationId optional - the ID of the automation to clear errors for.
   * @param appId The app ID to clear errors for.
   */
  clearAutomationLogErrors: async (automationId, appId) => {
    return await API.delete<
      ClearAutomationLogRequest,
      ClearAutomationLogResponse
    >({
      url: "/api/automations/logs",
      body: {
        appId,
        automationId,
      },
    })
  },
})
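As with auth, the automation endpoints move from a single destructured options object to positional arguments, while the log search now takes the whole SearchAutomationLogsRequest rather than loose fields. A short usage sketch follows; the module path is hypothetical and not shown in this diff.

// Usage sketch only — import path assumed; builder and types come from the file above.
import { SearchAutomationLogsRequest } from "@budibase/types"
import { buildAutomationEndpoints } from "./automations" // hypothetical path
import { BaseAPIClient } from "./types"

async function runAndInspect(
  API: BaseAPIClient,
  automationId: string,
  logQuery: SearchAutomationLogsRequest
) {
  const automations = buildAutomationEndpoints(API)
  // Positional arguments replace the old single-object signature: ID, trigger fields, timeout.
  await automations.triggerAutomation(automationId, { name: "test" }, 120)
  // Log search takes the whole request object instead of loose fields.
  return await automations.getAutomationLogs(logQuery)
}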
@@ -1,46 +0,0 @@
export const buildBackupsEndpoints = API => ({
  searchBackups: async ({ appId, trigger, type, page, startDate, endDate }) => {
    const opts = {}
    if (page) {
      opts.page = page
    }
    if (trigger && type) {
      opts.trigger = trigger.toLowerCase()
      opts.type = type.toLowerCase()
    }
    if (startDate && endDate) {
      opts.startDate = startDate
      opts.endDate = endDate
    }
    return await API.post({
      url: `/api/apps/${appId}/backups/search`,
      body: opts,
    })
  },

  createManualBackup: async ({ appId }) => {
    return await API.post({
      url: `/api/apps/${appId}/backups`,
    })
  },

  deleteBackup: async ({ appId, backupId }) => {
    return await API.delete({
      url: `/api/apps/${appId}/backups/${backupId}`,
    })
  },

  updateBackup: async ({ appId, backupId, name }) => {
    return await API.patch({
      url: `/api/apps/${appId}/backups/${backupId}`,
      body: { name },
    })
  },

  restoreBackup: async ({ appId, backupId, name }) => {
    return await API.post({
      url: `/api/apps/${appId}/backups/${backupId}/import`,
      body: { name },
    })
  },
})
@@ -0,0 +1,50 @@
import {
  CreateAppBackupResponse,
  ImportAppBackupResponse,
  SearchAppBackupsRequest,
} from "@budibase/types"
import { BaseAPIClient } from "./types"

export interface BackupEndpoints {
  createManualBackup: (appId: string) => Promise<CreateAppBackupResponse>
  restoreBackup: (
    appId: string,
    backupId: string,
    name?: string
  ) => Promise<ImportAppBackupResponse>

  // Missing request or response types
  searchBackups: (appId: string, opts: SearchAppBackupsRequest) => Promise<any>
  deleteBackup: (
    appId: string,
    backupId: string
  ) => Promise<{ message: string }>
}

export const buildBackupEndpoints = (API: BaseAPIClient): BackupEndpoints => ({
  createManualBackup: async appId => {
    return await API.post({
      url: `/api/apps/${appId}/backups`,
    })
  },
  searchBackups: async (appId, opts) => {
    return await API.post({
      url: `/api/apps/${appId}/backups/search`,
      body: opts,
    })
  },
  deleteBackup: async (appId, backupId) => {
    return await API.delete({
      url: `/api/apps/${appId}/backups/${backupId}`,
    })
  },
  restoreBackup: async (appId, backupId, name) => {
    return await API.post({
      url: `/api/apps/${appId}/backups/${backupId}/import`,
      // Name is a legacy thing, but unsure if it is needed for restoring.
      // Leaving this in just in case, but not type casting the body here
      // as we won't normally have it, but it's required in the type.
      body: { name },
    })
  },
})
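Note that the removed searchBackups used to normalise trigger/type to lower case and only sent date bounds when both were present; the typed replacement forwards the SearchAppBackupsRequest as-is, so any such normalisation now belongs to the caller. A usage sketch follows; the import path is hypothetical.

// Usage sketch only — import path assumed; endpoint shapes come from the interface above.
import { SearchAppBackupsRequest } from "@budibase/types"
import { buildBackupEndpoints } from "./backups" // hypothetical path
import { BaseAPIClient } from "./types"

async function snapshotThenList(
  API: BaseAPIClient,
  appId: string,
  query: SearchAppBackupsRequest
) {
  const backups = buildBackupEndpoints(API)
  const created = await backups.createManualBackup(appId)
  // Still loosely typed: searchBackups resolves to `any` until a response type exists.
  const existing = await backups.searchBackups(appId, query)
  return { created, existing }
}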
@@ -1,4 +1,32 @@
-export const buildConfigEndpoints = API => ({
+import {
+  Config,
+  ConfigChecklistResponse,
+  ConfigType,
+  DeleteConfigResponse,
+  FindConfigResponse,
+  GetPublicOIDCConfigResponse,
+  GetPublicSettingsResponse,
+  OIDCLogosConfig,
+  SaveConfigRequest,
+  SaveConfigResponse,
+  UploadConfigFileResponse,
+} from "@budibase/types"
+import { BaseAPIClient } from "./types"
+
+export interface ConfigEndpoints {
+  getConfig: (type: ConfigType) => Promise<FindConfigResponse>
+  getTenantConfig: (tenantId: string) => Promise<GetPublicSettingsResponse>
+  getOIDCConfig: (tenantId: string) => Promise<GetPublicOIDCConfigResponse>
+  getOIDCLogos: () => Promise<Config<OIDCLogosConfig>>
+  saveConfig: (config: SaveConfigRequest) => Promise<SaveConfigResponse>
+  deleteConfig: (id: string, rev: string) => Promise<DeleteConfigResponse>
+  getChecklist: (tenantId: string) => Promise<ConfigChecklistResponse>
+  uploadLogo: (data: any) => Promise<UploadConfigFileResponse>
+  uploadFavicon: (data: any) => Promise<UploadConfigFileResponse>
+  uploadOIDCLogo: (name: string, data: any) => Promise<UploadConfigFileResponse>
+}
+
+export const buildConfigEndpoints = (API: BaseAPIClient): ConfigEndpoints => ({
   /**
    * Saves a global config.
    * @param config the config to save

@@ -25,7 +53,7 @@ export const buildConfigEndpoints = API => ({
    * @param id the id of the config to delete
    * @param rev the revision of the config to delete
    */
-  deleteConfig: async ({ id, rev }) => {
+  deleteConfig: async (id, rev) => {
     return await API.delete({
       url: `/api/global/configs/${id}/${rev}`,
     })

@@ -90,7 +118,7 @@ export const buildConfigEndpoints = API => ({
    * @param name the name of the OIDC provider
    * @param data the logo form data to upload
    */
-  uploadOIDCLogo: async ({ name, data }) => {
+  uploadOIDCLogo: async (name, data) => {
     return await API.post({
       url: `/api/global/configs/upload/logos_oidc/${name}`,
       body: data,
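The config endpoints follow the same pattern: positional arguments and return types drawn from @budibase/types. A usage sketch follows; the import path is hypothetical and not shown in this diff.

// Usage sketch only — import path assumed; signatures come from the interface above.
import { ConfigType } from "@budibase/types"
import { buildConfigEndpoints } from "./configs" // hypothetical path
import { BaseAPIClient } from "./types"

async function readThenDelete(
  API: BaseAPIClient,
  type: ConfigType,
  id: string,
  rev: string
) {
  const configs = buildConfigEndpoints(API)
  const existing = await configs.getConfig(type)
  // deleteConfig now takes (id, rev) positionally instead of a destructured object.
  await configs.deleteConfig(id, rev)
  return existing
}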
@@ -1,92 +0,0 @@
export const buildDatasourceEndpoints = API => ({
  /**
   * Gets a list of datasources.
   */
  getDatasources: async () => {
    return await API.get({
      url: "/api/datasources",
    })
  },

  /**
   * Prompts the server to build the schema for a datasource.
   * @param datasourceId the datasource ID to build the schema for
   * @param tablesFilter list of specific table names to be build the schema
   */
  buildDatasourceSchema: async ({ datasourceId, tablesFilter }) => {
    return await API.post({
      url: `/api/datasources/${datasourceId}/schema`,
      body: {
        tablesFilter,
      },
    })
  },

  /**
   * Creates a datasource
   * @param datasource the datasource to create
   * @param fetchSchema whether to fetch the schema or not
   * @param tablesFilter a list of tables to actually fetch rather than simply
   * all that are accessible.
   */
  createDatasource: async ({ datasource, fetchSchema, tablesFilter }) => {
    return await API.post({
      url: "/api/datasources",
      body: {
        datasource,
        fetchSchema,
        tablesFilter,
      },
    })
  },

  /**
   * Updates a datasource
   * @param datasource the datasource to update
   */
  updateDatasource: async datasource => {
    return await API.put({
      url: `/api/datasources/${datasource._id}`,
      body: datasource,
    })
  },

  /**
   * Deletes a datasource.
   * @param datasourceId the ID of the ddtasource to delete
   * @param datasourceRev the rev of the datasource to delete
   */
  deleteDatasource: async ({ datasourceId, datasourceRev }) => {
    return await API.delete({
      url: `/api/datasources/${datasourceId}/${datasourceRev}`,
    })
  },

  /**
   * Validate a datasource configuration
   * @param datasource the datasource configuration to validate
   */
  validateDatasource: async datasource => {
    return await API.post({
      url: `/api/datasources/verify`,
      body: { datasource },
    })
  },

  /**
   * Fetch table names available within the datasource, for filtering out undesired tables
   * @param datasource the datasource configuration to use for fetching tables
   */
  fetchInfoForDatasource: async datasource => {
    return await API.post({
      url: `/api/datasources/info`,
      body: { datasource },
    })
  },

  fetchExternalSchema: async datasourceId => {
    return await API.get({
      url: `/api/datasources/${datasourceId}/schema/external`,
    })
  },
})
@@ -0,0 +1,132 @@
import {
  BuildSchemaFromSourceRequest,
  BuildSchemaFromSourceResponse,
  CreateDatasourceRequest,
  CreateDatasourceResponse,
  Datasource,
  DeleteDatasourceResponse,
  FetchDatasourceInfoRequest,
  FetchDatasourceInfoResponse,
  FetchExternalSchemaResponse,
  UpdateDatasourceRequest,
  UpdateDatasourceResponse,
  VerifyDatasourceRequest,
  VerifyDatasourceResponse,
} from "@budibase/types"
import { BaseAPIClient } from "./types"

export interface DatasourceEndpoints {
  getDatasources: () => Promise<Datasource[]>
  buildDatasourceSchema: (
    datasourceId: string,
    tablesFilter?: string[]
  ) => Promise<BuildSchemaFromSourceResponse>
  createDatasource: (
    data: CreateDatasourceRequest
  ) => Promise<CreateDatasourceResponse>
  updateDatasource: (
    datasource: Datasource
  ) => Promise<UpdateDatasourceResponse>
  deleteDatasource: (
    id: string,
    rev: string
  ) => Promise<DeleteDatasourceResponse>
  validateDatasource: (
    datasource: Datasource
  ) => Promise<VerifyDatasourceResponse>
  fetchInfoForDatasource: (
    datasource: Datasource
  ) => Promise<FetchDatasourceInfoResponse>
  fetchExternalSchema: (
    datasourceId: string
  ) => Promise<FetchExternalSchemaResponse>
}

export const buildDatasourceEndpoints = (
  API: BaseAPIClient
): DatasourceEndpoints => ({
  /**
   * Gets a list of datasources.
   */
  getDatasources: async () => {
    return await API.get({
      url: "/api/datasources",
    })
  },

  /**
   * Prompts the server to build the schema for a datasource.
   */
  buildDatasourceSchema: async (datasourceId, tablesFilter?) => {
    return await API.post<
      BuildSchemaFromSourceRequest,
      BuildSchemaFromSourceResponse
    >({
      url: `/api/datasources/${datasourceId}/schema`,
      body: {
        tablesFilter,
      },
    })
  },

  /**
   * Creates a datasource
   */
  createDatasource: async data => {
    return await API.post({
      url: "/api/datasources",
      body: data,
    })
  },

  /**
   * Updates a datasource
   */
  updateDatasource: async datasource => {
    return await API.put<UpdateDatasourceRequest, UpdateDatasourceResponse>({
      url: `/api/datasources/${datasource._id}`,
      body: datasource,
    })
  },

  /**
   * Deletes a datasource.
   */
  deleteDatasource: async (id: string, rev: string) => {
    return await API.delete({
      url: `/api/datasources/${id}/${rev}`,
    })
  },

  /**
   * Validate a datasource configuration
   */
  validateDatasource: async (datasource: Datasource) => {
    return await API.post<VerifyDatasourceRequest, VerifyDatasourceResponse>({
      url: `/api/datasources/verify`,
      body: { datasource },
    })
  },

  /**
   * Fetch table names available within the datasource, for filtering out undesired tables
   */
  fetchInfoForDatasource: async (datasource: Datasource) => {
    return await API.post<
      FetchDatasourceInfoRequest,
      FetchDatasourceInfoResponse
    >({
      url: `/api/datasources/info`,
      body: { datasource },
    })
  },

  /**
   * Fetches the external schema of a datasource
   */
  fetchExternalSchema: async (datasourceId: string) => {
    return await API.get({
      url: `/api/datasources/${datasourceId}/schema/external`,
    })
  },
})
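For the datasource endpoints, validateDatasource now takes the Datasource itself rather than a wrapper object, and createDatasource takes the whole CreateDatasourceRequest. A usage sketch follows; the import path is hypothetical and the verification response is treated opaquely because its fields are not shown in this hunk.

// Usage sketch only — import path assumed; builder and request types come from the file above.
import { CreateDatasourceRequest, Datasource } from "@budibase/types"
import { buildDatasourceEndpoints } from "./datasources" // hypothetical path
import { BaseAPIClient } from "./types"

async function verifyThenCreate(
  API: BaseAPIClient,
  datasource: Datasource,
  request: CreateDatasourceRequest
) {
  const datasources = buildDatasourceEndpoints(API)
  // validateDatasource takes the Datasource directly; its response shape is not shown here.
  const verification = await datasources.validateDatasource(datasource)
  console.log("verification result", verification)
  return await datasources.createDatasource(request)
}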
@@ -1,36 +0,0 @@
export const buildEnvironmentVariableEndpoints = API => ({
  checkEnvironmentVariableStatus: async () => {
    return await API.get({
      url: `/api/env/variables/status`,
    })
  },

  /**
   * Fetches a list of environment variables
   */
  fetchEnvironmentVariables: async () => {
    return await API.get({
      url: `/api/env/variables`,
      json: false,
    })
  },

  createEnvironmentVariable: async data => {
    return await API.post({
      url: `/api/env/variables`,
      body: data,
    })
  },
  deleteEnvironmentVariable: async varName => {
    return await API.delete({
      url: `/api/env/variables/${varName}`,
    })
  },

  updateEnvironmentVariable: async data => {
    return await API.patch({
      url: `/api/env/variables/${data.name}`,
      body: data,
    })
  },
})
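The typed replacement for these environment-variable endpoints is among the files omitted from this truncated diff. For orientation, here is a sketch of how builders like the ones above are typically combined into a single client object; the index module, its path, and its exact exports are assumptions and are not part of this diff.

// Composition sketch only — paths and wiring are hypothetical.
import { buildAuditLogEndpoints } from "./auditLogs" // hypothetical paths throughout
import { buildAuthEndpoints } from "./auth"
import { buildAutomationEndpoints } from "./automations"
import { BaseAPIClient } from "./types"

// Each builder contributes its slice of typed endpoints to one flat client object.
export const attachEndpoints = (API: BaseAPIClient) => ({
  ...API,
  ...buildAuditLogEndpoints(API),
  ...buildAuthEndpoints(API),
  ...buildAutomationEndpoints(API),
  // ...backups, configs, datasources and environment variables follow the same pattern
})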
Some files were not shown because too many files have changed in this diff.