Merge remote-tracking branch 'origin/develop' into feature/app-settings-section

commit c2f8d54a8a
Dean, 2023-06-15 10:59:28 +01:00
74 changed files with 1174 additions and 355 deletions


@@ -12,31 +12,22 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      # - name: Fail if not a tag
-      #   run: |
-      #     if [[ $GITHUB_REF != refs/tags/* ]]; then
-      #       echo "Workflow Dispatch can only be run on tags"
-      #       exit 1
-      #     fi
-      - uses: actions/checkout@v2
-      #   with:
-      #     fetch-depth: 0
-      # - name: Fail if tag is not in master
-      #   run: |
-      #     if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
-      #       echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
-      #       exit 1
-      #     fi
-      - name: Pull values.yaml from budibase-infra
-        run: |
-          curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
-          -H 'Accept: application/vnd.github.v3.raw' \
-          -o values.production.yaml \
-          -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/values.yaml
-          wc -l values.production.yaml
+      - name: Fail if not a tag
+        run: |
+          if [[ $GITHUB_REF != refs/tags/* ]]; then
+            echo "Workflow Dispatch can only be run on tags"
+            exit 1
+          fi
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Fail if tag is not in master
+        run: |
+          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
+            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
+            exit 1
+          fi

       - name: Get the latest budibase release version
         id: version
@@ -48,29 +39,10 @@ jobs:
           fi
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: eu-west-1
-      - name: Deploy to EKS
-        uses: craftech-io/eks-helm-deploy-action@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS__KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: eu-west-1
-          cluster-name: budibase-eks-production
-          config-files: values.production.yaml
-          chart-path: charts/budibase
-          namespace: budibase
-          values: globals.appVersion=v${{ env.RELEASE_VERSION }},services.couchdb.url=${{ secrets.PRODUCTION_COUCHDB_URL }},services.couchdb.password=${{ secrets.PRODUCTION_COUCHDB_PASSWORD }}
-          name: budibase-prod
-      - name: Discord Webhook Action
-        uses: tsickert/discord-webhook@v4.0.0
-        with:
-          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
-          content: "Production Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Cloud."
-          embed-title: ${{ env.RELEASE_VERSION }}
+      - uses: passeidireto/trigger-external-workflow-action@main
+        env:
+          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
+        with:
+          repository: budibase/budibase-deploys
+          event: budicloud-prod-deploy
+          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}


@@ -24,51 +24,18 @@ jobs:
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi
       - name: Get the latest budibase release version
         id: version
         run: |
           release_version=$(cat lerna.json | jq -r '.version')
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: eu-west-1
-      - name: Pull values.yaml from budibase-infra
-        run: |
-          curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
-          -H 'Accept: application/vnd.github.v3.raw' \
-          -o values.preprod.yaml \
-          -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
-          wc -l values.preprod.yaml
-      - name: Deploy to Preprod Environment
-        uses: budibase/helm@v1.8.0
-        with:
-          release: budibase-preprod
-          namespace: budibase
-          chart: charts/budibase
-          token: ${{ github.token }}
-          helm: helm3
-          values: |
-            globals:
-              appVersion: v${{ env.RELEASE_VERSION }}
-            ingress:
-              enabled: true
-              nginx: true
-          value-files: >-
-            [
-              "values.preprod.yaml"
-            ]
-        env:
-          KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}'
-      - name: Discord Webhook Action
-        uses: tsickert/discord-webhook@v4.0.0
-        with:
-          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
-          content: "Preprod Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Pre-prod."
-          embed-title: ${{ env.RELEASE_VERSION }}
+      - uses: passeidireto/trigger-external-workflow-action@main
+        env:
+          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
+        with:
+          repository: budibase/budibase-deploys
+          event: budicloud-preprod-deploy
+          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}


@@ -1,5 +1,5 @@
 {
-  "version": "2.7.7-alpha.2",
+  "version": "2.7.16-alpha.2",
   "npmClient": "yarn",
   "packages": [
     "packages/backend-core",


@@ -343,6 +343,9 @@ export class QueryBuilder<T> {
     }
     const oneOf = (key: string, value: any) => {
+      if (!value) {
+        return `*:*`
+      }
       if (!Array.isArray(value)) {
         if (typeof value === "string") {
           value = value.split(",")


@@ -114,6 +114,25 @@ describe("lucene", () => {
       expect(resp.rows.length).toBe(2)
     })

+    it("should return all rows when doing a one of search against falsey value", async () => {
+      const builder = new QueryBuilder(dbName, INDEX_NAME)
+      builder.addOneOf("property", null)
+      let resp = await builder.run()
+      expect(resp.rows.length).toBe(3)
+
+      builder.addOneOf("property", undefined)
+      resp = await builder.run()
+      expect(resp.rows.length).toBe(3)
+
+      builder.addOneOf("property", "")
+      resp = await builder.run()
+      expect(resp.rows.length).toBe(3)
+
+      builder.addOneOf("property", [])
+      resp = await builder.run()
+      expect(resp.rows.length).toBe(0)
+    })
     it("should be able to perform a contains search", async () => {
       const builder = new QueryBuilder(dbName, INDEX_NAME)
       builder.addContains("property", ["word"])


@@ -1,12 +1,17 @@
 import crypto from "crypto"
+import fs from "fs"
+import zlib from "zlib"
 import env from "../environment"
+import { join } from "path"

 const ALGO = "aes-256-ctr"
 const SEPARATOR = "-"
 const ITERATIONS = 10000
-const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
+const SALT_LENGTH = 16
+const IV_LENGTH = 16

 export enum SecretOption {
   API = "api",
   ENCRYPTION = "encryption",
@@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
   return secret
 }

-function stretchString(string: string, salt: Buffer) {
-  return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
+function stretchString(secret: string, salt: Buffer) {
+  return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }

 export function encrypt(
   input: string,
   secretOption: SecretOption = SecretOption.API
 ) {
-  const salt = crypto.randomBytes(RANDOM_BYTES)
+  const salt = crypto.randomBytes(SALT_LENGTH)
   const stretched = stretchString(getSecret(secretOption), salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
@@ -60,3 +65,115 @@ export function decrypt(
   const final = decipher.final()
   return Buffer.concat([base, final]).toString()
 }
export async function encryptFile(
{ dir, filename }: { dir: string; filename: string },
secret: string
) {
const outputFileName = `${filename}.enc`
const filePath = join(dir, filename)
const inputFile = fs.createReadStream(filePath)
const outputFile = fs.createWriteStream(join(dir, outputFileName))
const salt = crypto.randomBytes(SALT_LENGTH)
const iv = crypto.randomBytes(IV_LENGTH)
const stretched = stretchString(secret, salt)
const cipher = crypto.createCipheriv(ALGO, stretched, iv)
outputFile.write(salt)
outputFile.write(iv)
inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
return new Promise<{ filename: string; dir: string }>(r => {
outputFile.on("finish", () => {
r({
filename: outputFileName,
dir,
})
})
})
}
async function getSaltAndIV(path: string) {
const fileStream = fs.createReadStream(path)
const salt = await readBytes(fileStream, SALT_LENGTH)
const iv = await readBytes(fileStream, IV_LENGTH)
fileStream.close()
return { salt, iv }
}
export async function decryptFile(
inputPath: string,
outputPath: string,
secret: string
) {
const { salt, iv } = await getSaltAndIV(inputPath)
const inputFile = fs.createReadStream(inputPath, {
start: SALT_LENGTH + IV_LENGTH,
})
const outputFile = fs.createWriteStream(outputPath)
const stretched = stretchString(secret, salt)
const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
const unzip = zlib.createGunzip()
inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
return new Promise<void>((res, rej) => {
outputFile.on("finish", () => {
outputFile.close()
res()
})
inputFile.on("error", e => {
outputFile.close()
rej(e)
})
decipher.on("error", e => {
outputFile.close()
rej(e)
})
unzip.on("error", e => {
outputFile.close()
rej(e)
})
outputFile.on("error", e => {
outputFile.close()
rej(e)
})
})
}
function readBytes(stream: fs.ReadStream, length: number) {
return new Promise<Buffer>((resolve, reject) => {
let bytesRead = 0
const data: Buffer[] = []
stream.on("readable", () => {
let chunk
while ((chunk = stream.read(length - bytesRead)) !== null) {
data.push(chunk)
bytesRead += chunk.length
}
resolve(Buffer.concat(data))
})
stream.on("end", () => {
reject(new Error("Insufficient data in the stream."))
})
stream.on("error", error => {
reject(error)
})
})
}
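
Note: a minimal usage sketch of the new file helpers above (the paths and secret below are illustrative, and the exact import path depends on how backend-core re-exports this module):

  import { encryptFile, decryptFile } from "../security/encryption" // hypothetical import path
  // gzip + encrypt /tmp/exports/app.tar into /tmp/exports/app.tar.enc,
  // prefixed with the random salt and IV that decryptFile later reads back
  const { dir, filename } = await encryptFile(
    { dir: "/tmp/exports", filename: "app.tar" },
    "my-secret"
  )
  // reverse the process into a fresh output file
  await decryptFile(`${dir}/${filename}`, "/tmp/exports/app.decrypted.tar", "my-secret")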


@@ -140,9 +140,13 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
  * Gets the role object, this is mainly useful for two purposes, to check if the level exists and
  * to check if the role inherits any others.
  * @param {string|null} roleId The level ID to lookup.
+ * @param {object|null} opts options for the function, like whether to halt errors, instead return public.
  * @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
  */
-export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
+export async function getRole(
+  roleId?: string,
+  opts?: { defaultPublic?: boolean }
+): Promise<RoleDoc | undefined> {
   if (!roleId) {
     return undefined
   }
@@ -161,6 +165,9 @@ export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
     // finalise the ID
     role._id = getExternalRoleID(role._id)
   } catch (err) {
+    if (!isBuiltin(roleId) && opts?.defaultPublic) {
+      return cloneDeep(BUILTIN_ROLES.PUBLIC)
+    }
     // only throw an error if there is no role at all
     if (Object.keys(role).length === 0) {
       throw err


@@ -8,6 +8,8 @@
   export let disabled = false
   export let error = null
   export let validate = null
+  export let indeterminate = false
+  export let compact = false

   const dispatch = createEventDispatcher()
@@ -21,11 +23,19 @@
   }
 </script>

-<FancyField {error} {value} {validate} {disabled} clickable on:click={onChange}>
+<FancyField
+  {error}
+  {value}
+  {validate}
+  {disabled}
+  {compact}
+  clickable
+  on:click={onChange}
+>
   <span>
-    <Checkbox {disabled} {value} />
+    <Checkbox {disabled} {value} {indeterminate} />
   </span>
-  <div class="text">
+  <div class="text" class:compact>
     {#if text}
       {text}
     {/if}
@@ -47,6 +57,10 @@
     line-clamp: 2;
     -webkit-box-orient: vertical;
   }
+  .text.compact {
+    font-size: 13px;
+    line-height: 15px;
+  }
   .text > :global(*) {
     font-size: inherit !important;
   }


@@ -0,0 +1,68 @@
<script>
import FancyCheckbox from "./FancyCheckbox.svelte"
import FancyForm from "./FancyForm.svelte"
import { createEventDispatcher } from "svelte"
export let options = []
export let selected = []
export let showSelectAll = true
export let selectAllText = "Select all"
let selectedBooleans = reset()
const dispatch = createEventDispatcher()
$: updateSelected(selectedBooleans)
$: dispatch("change", selected)
$: allSelected = selected?.length === options.length
$: noneSelected = !selected?.length
function reset() {
return Array(options.length).fill(true)
}
function updateSelected(selectedArr) {
const array = []
for (let [i, isSelected] of Object.entries(selectedArr)) {
if (isSelected) {
array.push(options[i])
}
}
selected = array
}
function toggleSelectAll() {
if (allSelected === true) {
selectedBooleans = []
} else {
selectedBooleans = reset()
}
}
</script>
{#if options && Array.isArray(options)}
<div class="checkbox-group" class:has-select-all={showSelectAll}>
<FancyForm on:change>
{#if showSelectAll}
<FancyCheckbox
bind:value={allSelected}
on:change={toggleSelectAll}
text={selectAllText}
indeterminate={!allSelected && !noneSelected}
compact
/>
{/if}
{#each options as option, i}
<FancyCheckbox bind:value={selectedBooleans[i]} text={option} compact />
{/each}
</FancyForm>
</div>
{/if}
<style>
.checkbox-group.has-select-all :global(.fancy-field:first-of-type) {
background: var(--spectrum-global-color-gray-100);
}
.checkbox-group.has-select-all :global(.fancy-field:first-of-type:hover) {
background: var(--spectrum-global-color-gray-200);
}
</style>


@@ -11,6 +11,7 @@
   export let value
   export let ref
   export let autoHeight
+  export let compact = false

   const formContext = getContext("fancy-form")
   const id = Math.random()
@@ -42,6 +43,7 @@
   class:disabled
   class:focused
   class:clickable
+  class:compact
   class:auto-height={autoHeight}
 >
   <div class="content" on:click>
@@ -61,7 +63,6 @@
 <style>
   .fancy-field {
-    max-width: 400px;
     background: var(--spectrum-global-color-gray-75);
     border: 1px solid var(--spectrum-global-color-gray-300);
     border-radius: 4px;
@@ -69,6 +70,12 @@
     transition: border-color 130ms ease-out, background 130ms ease-out,
       background 130ms ease-out;
     color: var(--spectrum-global-color-gray-800);
+    --padding: 16px;
+    --height: 64px;
+  }
+  .fancy-field.compact {
+    --padding: 8px;
+    --height: 36px;
   }
   .fancy-field:hover {
     border-color: var(--spectrum-global-color-gray-400);
@@ -91,8 +98,8 @@
   }
   .content {
     position: relative;
-    height: 64px;
-    padding: 0 16px;
+    height: var(--height);
+    padding: 0 var(--padding);
   }
   .fancy-field.auto-height .content {
     height: auto;
@@ -103,7 +110,7 @@
     flex-direction: row;
     justify-content: flex-start;
     align-items: center;
-    gap: 16px;
+    gap: var(--padding);
   }
   .field {
     flex: 1 1 auto;


@@ -4,4 +4,5 @@ export { default as FancySelect } from "./FancySelect.svelte"
 export { default as FancyButton } from "./FancyButton.svelte"
 export { default as FancyForm } from "./FancyForm.svelte"
 export { default as FancyButtonRadio } from "./FancyButtonRadio.svelte"
+export { default as FancyCheckboxGroup } from "./FancyCheckboxGroup.svelte"
 export { default as ErrorMessage } from "./ErrorMessage.svelte"


@@ -9,6 +9,7 @@
   export let text = null
   export let disabled = false
   export let size
+  export let indeterminate = false

   const dispatch = createEventDispatcher()
   const onChange = event => {
@@ -22,6 +23,7 @@
   class="spectrum-Checkbox spectrum-Checkbox--emphasized {sizeClass}"
   class:is-invalid={!!error}
   class:checked={value}
+  class:is-indeterminate={indeterminate}
 >
   <input
     checked={value}


@@ -8,6 +8,7 @@
   export let fixed = false
   export let inline = false
+  export let disableCancel = false

   const dispatch = createEventDispatcher()
   let visible = fixed || inline
@@ -38,7 +39,7 @@
   }

   export function cancel() {
-    if (!visible) {
+    if (!visible || disableCancel) {
       return
     }
     dispatch("cancel")


@@ -204,6 +204,12 @@
     })
     return columns
       .sort((a, b) => {
+        if (a.divider) {
+          return a
+        }
+        if (b.divider) {
+          return b
+        }
         const orderA = a.order || Number.MAX_SAFE_INTEGER
         const orderB = b.order || Number.MAX_SAFE_INTEGER
         const nameA = getDisplayName(a)


@@ -23,10 +23,11 @@ function prepareData(config) {
   return datasource
 }

-export async function saveDatasource(config, skipFetch = false) {
+export async function saveDatasource(config, { skipFetch, tablesFilter } = {}) {
   const datasource = prepareData(config)
   // Create datasource
-  const resp = await datasources.save(datasource, !skipFetch && datasource.plus)
+  const fetchSchema = !skipFetch && datasource.plus
+  const resp = await datasources.save(datasource, { fetchSchema, tablesFilter })

   // update the tables incase datasource plus
   await tables.fetch()
@@ -41,6 +42,13 @@ export async function createRestDatasource(integration) {
 export async function validateDatasourceConfig(config) {
   const datasource = prepareData(config)
-  const resp = await API.validateDatasource(datasource)
-  return resp
+  return await API.validateDatasource(datasource)
+}
+
+export async function getDatasourceInfo(config) {
+  let datasource = config
+  if (!config._id) {
+    datasource = prepareData(config)
+  }
+  return await API.fetchInfoForDatasource(datasource)
 }


@@ -74,6 +74,7 @@ const INITIAL_FRONTEND_STATE = {
   propertyFocus: null,
   builderSidePanel: false,
   hasLock: true,
+  showPreview: false,

   // URL params
   selectedScreenId: null,


@@ -12,7 +12,7 @@
   import { automationStore, selectedAutomation } from "builderStore"
   import { admin, licensing } from "stores/portal"
   import { externalActions } from "./ExternalActions"
-  import { TriggerStepID } from "constants/backend/automations"
+  import { TriggerStepID, ActionStepID } from "constants/backend/automations"
   import { checkForCollectStep } from "builderStore/utils"

   export let blockIdx
@@ -149,7 +149,7 @@
         <div class="item-body">
           <Icon name={action.icon} />
           <Body size="XS">{action.name}</Body>
-          {#if isDisabled && !syncAutomationsEnabled}
+          {#if isDisabled && !syncAutomationsEnabled && action.stepId === ActionStepID.COLLECT}
             <div class="tag-color">
               <Tags>
                 <Tag icon="LockClosed">Business</Tag>


@@ -76,6 +76,10 @@ export function getBindings({
       // will be replaced by the main array binding
       readableBinding: label,
       runtimeBinding: binding,
+      display: {
+        name: label,
+        type: field.name === FIELDS.LINK.name ? "Array" : field.name,
+      },
     })
   }
   return bindings


@@ -8,7 +8,7 @@
     notifications,
     Modal,
     Table,
-    Toggle,
+    FancyCheckboxGroup,
   } from "@budibase/bbui"
   import { datasources, integrations, tables } from "stores/backend"
   import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
@@ -16,7 +16,7 @@
   import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
   import { goto } from "@roxi/routify"
-  import ValuesList from "components/common/ValuesList.svelte"
+  import { getDatasourceInfo } from "builderStore/datasource"

   export let datasource
   export let save
@@ -34,7 +34,7 @@
   let selectedFromRelationship, selectedToRelationship
   let confirmDialog
   let specificTables = null
-  let requireSpecificTables = false
+  let tableList

   $: integration = datasource && $integrations[datasource.source]
   $: plusTables = datasource?.plus
@@ -153,30 +153,28 @@
   warning={false}
   title="Confirm table fetch"
 >
-  <Toggle
-    bind:value={requireSpecificTables}
-    on:change={e => {
-      requireSpecificTables = e.detail
-      specificTables = null
-    }}
-    thin
-    text="Fetch listed tables only (one per line)"
-  />
-  {#if requireSpecificTables}
-    <ValuesList label="" bind:values={specificTables} />
-  {/if}
-  <br />
   <Body>
     If you have fetched tables from this database before, this action may
     overwrite any changes you made after your initial fetch.
   </Body>
+  <br />
+  <div class="table-checkboxes">
+    <FancyCheckboxGroup options={tableList} bind:selected={specificTables} />
+  </div>
 </ConfirmDialog>

 <Divider />
 <div class="query-header">
   <Heading size="S">Tables</Heading>
   <div class="table-buttons">
-    <Button secondary on:click={() => confirmDialog.show()}>
+    <Button
+      secondary
+      on:click={async () => {
+        const info = await getDatasourceInfo(datasource)
+        tableList = info.tableNames
+        confirmDialog.show()
+      }}
+    >
       Fetch tables
     </Button>
     <Button cta icon="Add" on:click={createNewTable}>New table</Button>
@@ -246,4 +244,8 @@
     display: flex;
     gap: var(--spacing-m);
   }
+  .table-checkboxes {
+    width: 100%;
+  }
 </style>


@@ -44,6 +44,9 @@ export default ICONS

 export function getIcon(integrationType, schema) {
   const integrationList = get(integrations)
+  if (!integrationList) {
+    return
+  }
   if (integrationList[integrationType]?.iconUrl) {
     return { url: integrationList[integrationType].iconUrl }
   } else if (schema?.custom || !ICONS[integrationType]) {


@@ -1,12 +1,19 @@
 <script>
   import { goto } from "@roxi/routify"
-  import { ModalContent, notifications, Body, Layout } from "@budibase/bbui"
+  import {
+    ModalContent,
+    notifications,
+    Body,
+    Layout,
+    FancyCheckboxGroup,
+  } from "@budibase/bbui"
   import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
   import { IntegrationNames } from "constants/backend"
   import cloneDeep from "lodash/cloneDeepWith"
   import {
     saveDatasource as save,
     validateDatasourceConfig,
+    getDatasourceInfo,
   } from "builderStore/datasource"
   import { DatasourceFeature } from "@budibase/types"
@@ -15,11 +22,24 @@
   // kill the reference so the input isn't saved
   let datasource = cloneDeep(integration)
   let isValid = false
+  let fetchTableStep = false
+  let selectedTables = []
+  let tableList = []

   $: name =
-    IntegrationNames[datasource.type] || datasource.name || datasource.type
+    IntegrationNames[datasource?.type] || datasource?.name || datasource?.type
+  $: datasourcePlus = datasource?.plus
+  $: title = fetchTableStep ? "Fetch your tables" : `Connect to ${name}`
+  $: confirmText = fetchTableStep
+    ? "Continue"
+    : datasourcePlus
+    ? "Connect"
+    : "Save and continue to query"

   async function validateConfig() {
+    if (!integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
+      return true
+    }
     const displayError = message =>
       notifications.error(message ?? "Error validating datasource")
@@ -37,7 +57,7 @@
   }

   async function saveDatasource() {
-    if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
+    if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
       const valid = await validateConfig()
       if (!valid) {
         return false
@@ -47,35 +67,75 @@
       if (!datasource.name) {
         datasource.name = name
       }
-      const resp = await save(datasource)
+      const opts = {}
+      if (datasourcePlus && selectedTables) {
+        opts.tablesFilter = selectedTables
+      }
+      const resp = await save(datasource, opts)
       $goto(`./datasource/${resp._id}`)
-      notifications.success(`Datasource created successfully.`)
+      notifications.success("Datasource created successfully.")
     } catch (err) {
       notifications.error(err?.message ?? "Error saving datasource")
       // prevent the modal from closing
       return false
     }
   }
+
+  async function nextStep() {
+    let connected = true
+    if (datasourcePlus) {
+      connected = await validateConfig()
+    }
+    if (!connected) {
+      return false
+    }
+    if (datasourcePlus && !fetchTableStep) {
+      notifications.success("Connected to datasource successfully.")
+      const info = await getDatasourceInfo(datasource)
+      tableList = info.tableNames
+      fetchTableStep = true
+      return false
+    } else {
+      await saveDatasource()
+      return true
+    }
+  }
 </script>

 <ModalContent
-  title={`Connect to ${name}`}
-  onConfirm={() => saveDatasource()}
-  confirmText={datasource.plus ? "Connect" : "Save and continue to query"}
-  cancelText="Back"
-  showSecondaryButton={datasource.plus}
+  {title}
+  onConfirm={() => nextStep()}
+  {confirmText}
+  cancelText={fetchTableStep ? "Cancel" : "Back"}
+  showSecondaryButton={datasourcePlus}
   size="L"
   disabled={!isValid}
 >
   <Layout noPadding>
-    <Body size="XS"
-      >Connect your database to Budibase using the config below.
+    <Body size="XS">
+      {#if !fetchTableStep}
+        Connect your database to Budibase using the config below
+      {:else}
+        Choose what tables you want to sync with Budibase
+      {/if}
     </Body>
   </Layout>

-  <IntegrationConfigForm
-    schema={datasource.schema}
-    bind:datasource
-    creating={true}
-    on:valid={e => (isValid = e.detail)}
-  />
+  {#if !fetchTableStep}
+    <IntegrationConfigForm
+      schema={datasource?.schema}
+      bind:datasource
+      creating={true}
+      on:valid={e => (isValid = e.detail)}
+    />
+  {:else}
+    <div class="table-checkboxes">
+      <FancyCheckboxGroup options={tableList} bind:selected={selectedTables} />
+    </div>
+  {/if}
 </ModalContent>
+
+<style>
+  .table-checkboxes {
+    width: 100%;
+  }
+</style>


@@ -1,22 +1,27 @@
 <script>
   import {
-    ModalContent,
     Body,
+    FancyCheckboxGroup,
+    InlineAlert,
     Layout,
     Link,
+    ModalContent,
     notifications,
   } from "@budibase/bbui"
   import { IntegrationNames, IntegrationTypes } from "constants/backend"
   import GoogleButton from "../_components/GoogleButton.svelte"
   import { organisation } from "stores/portal"
-  import { onMount } from "svelte"
-  import { validateDatasourceConfig } from "builderStore/datasource"
+  import { onDestroy, onMount } from "svelte"
+  import {
+    getDatasourceInfo,
+    saveDatasource,
+    validateDatasourceConfig,
+  } from "builderStore/datasource"
   import cloneDeep from "lodash/cloneDeepWith"
   import IntegrationConfigForm from "../TableIntegrationMenu/IntegrationConfigForm.svelte"
   import { goto } from "@roxi/routify"
-  import { saveDatasource } from "builderStore/datasource"
   import { DatasourceFeature } from "@budibase/types"
+  import { API } from "api"

   export let integration
   export let continueSetupId = false
@@ -24,16 +29,20 @@
   let datasource = cloneDeep(integration)
   datasource.config.continueSetupId = continueSetupId

+  let { schema } = datasource
+
   $: isGoogleConfigured = !!$organisation.googleDatasourceConfigured

   onMount(async () => {
     await organisation.init()
   })

   const integrationName = IntegrationNames[IntegrationTypes.GOOGLE_SHEETS]

   export const GoogleDatasouceConfigStep = {
-    AUTH: "Auth",
-    SET_URL: "Set_url",
+    AUTH: "auth",
+    SET_URL: "set_url",
+    SET_SHEETS: "set_sheets",
   }

   let step = continueSetupId
@@ -42,12 +51,21 @@
   let isValid = false

-  const modalConfig = {
-    [GoogleDatasouceConfigStep.AUTH]: {},
+  let allSheets
+  let selectedSheets
+  let setSheetsErrorTitle, setSheetsErrorMessage
+
+  $: modalConfig = {
+    [GoogleDatasouceConfigStep.AUTH]: {
+      title: `Connect to ${integrationName}`,
+    },
     [GoogleDatasouceConfigStep.SET_URL]: {
+      title: `Connect your spreadsheet`,
       confirmButtonText: "Connect",
       onConfirm: async () => {
-        if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
+        const checkConnection =
+          integration.features[DatasourceFeature.CONNECTION_CHECKING]
+        if (checkConnection) {
           const resp = await validateDatasourceConfig(datasource)
           if (!resp.connected) {
             notifications.error(`Unable to connect - ${resp.error}`)
@@ -56,21 +74,81 @@
         }

         try {
-          const resp = await saveDatasource(datasource)
-          $goto(`./datasource/${resp._id}`)
-          notifications.success(`Datasource created successfully.`)
+          datasource = await saveDatasource(datasource, {
+            tablesFilter: selectedSheets,
+            skipFetch: true,
+          })
         } catch (err) {
           notifications.error(err?.message ?? "Error saving datasource")
           // prevent the modal from closing
           return false
         }
+
+        if (!integration.features[DatasourceFeature.FETCH_TABLE_NAMES]) {
+          notifications.success(`Datasource created successfully.`)
+          return
+        }
+
+        const info = await getDatasourceInfo(datasource)
+        allSheets = info.tableNames
+
+        step = GoogleDatasouceConfigStep.SET_SHEETS
+        notifications.success(
+          checkConnection
+            ? "Connection Successful"
+            : `Datasource created successfully.`
+        )
+        // prevent the modal from closing
+        return false
+      },
+    },
+    [GoogleDatasouceConfigStep.SET_SHEETS]: {
+      title: `Choose your sheets`,
+      confirmButtonText: selectedSheets?.length
+        ? "Fetch sheets"
+        : "Continue without fetching",
+      onConfirm: async () => {
+        try {
+          if (selectedSheets.length) {
+            await API.buildDatasourceSchema({
+              datasourceId: datasource._id,
+              tablesFilter: selectedSheets,
+            })
+          }
+          return
+        } catch (err) {
+          const message = err?.message ?? "Error fetching the sheets"
+          // Handling message with format: Error title - error description
+          const indexSeparator = message.indexOf(" - ")
+          if (indexSeparator >= 0) {
+            setSheetsErrorTitle = message.substr(0, indexSeparator)
+            setSheetsErrorMessage =
+              message[indexSeparator + 3].toUpperCase() +
+              message.substr(indexSeparator + 4)
+          } else {
+            setSheetsErrorTitle = null
+            setSheetsErrorMessage = message
+          }
+          // prevent the modal from closing
+          return false
+        }
       },
     },
   }
+
+  // This will handle the user closing the modal pressing outside the modal
+  onDestroy(() => {
+    if (step === GoogleDatasouceConfigStep.SET_SHEETS) {
+      $goto(`./datasource/${datasource._id}`)
+    }
+  })
 </script>

 <ModalContent
-  title={`Connect to ${integrationName}`}
+  title={modalConfig[step].title}
   cancelText="Cancel"
   size="L"
   confirmText={modalConfig[step].confirmButtonText}
@@ -100,11 +178,30 @@
       <Body size="S">Add the URL of the sheet you want to connect.</Body>

       <IntegrationConfigForm
-        schema={datasource.schema}
+        {schema}
         bind:datasource
         creating={true}
         on:valid={e => (isValid = e.detail)}
       />
     </Layout>
   {/if}
+  {#if step === GoogleDatasouceConfigStep.SET_SHEETS}
+    <Layout noPadding no>
+      <Body size="S">Select which spreadsheets you want to connect.</Body>
+      <FancyCheckboxGroup
+        options={allSheets}
+        bind:selected={selectedSheets}
+        selectAllText="Select all sheets"
+      />
+      {#if setSheetsErrorTitle || setSheetsErrorMessage}
+        <InlineAlert
+          type="error"
+          header={setSheetsErrorTitle}
+          message={setSheetsErrorMessage}
+        />
+      {/if}
+    </Layout>
+  {/if}
 </ModalContent>


@@ -69,7 +69,7 @@
     name: "App",
     description: "",
     icon: "Play",
-    action: () => window.open(`/${$store.appId}`),
+    action: () => store.update(state => ({ ...state, showPreview: true })),
   },
   {
     type: "Preview",


@@ -19,7 +19,7 @@
     readableToRuntimeBinding,
     runtimeToReadableBinding,
   } from "builderStore/dataBinding"
-  import { store } from "builderStore"
   import { convertToJS } from "@budibase/string-templates"
   import { admin } from "stores/portal"
   import CodeEditor from "../CodeEditor/CodeEditor.svelte"
@@ -339,25 +339,28 @@
       </Tab>
     {/if}
     <div class="drawer-actions">
-      <Button
-        secondary
-        quiet
-        on:click={() => {
-          store.actions.settings.propertyFocus(null)
-          drawerActions.hide()
-        }}
-      >
-        Cancel
-      </Button>
-      <Button
-        cta
-        disabled={!valid}
-        on:click={() => {
-          bindingDrawerActions.save()
-        }}
-      >
-        Save
-      </Button>
+      {#if drawerActions?.hide}
+        <Button
+          secondary
+          quiet
+          on:click={() => {
+            drawerActions.hide()
+          }}
+        >
+          Cancel
+        </Button>
+      {/if}
+      {#if bindingDrawerActions?.save}
+        <Button
+          cta
+          disabled={!valid}
+          on:click={() => {
+            bindingDrawerActions.save()
+          }}
+        >
+          Save
+        </Button>
+      {/if}
     </div>
   </Tabs>
 </div>


@@ -36,7 +36,7 @@
     .map(([name, categoryBindings]) => ({
       name,
       bindings: categoryBindings?.filter(binding => {
-        return binding.readableBinding.match(searchRgx)
+        return !search || binding.readableBinding.match(searchRgx)
       }),
     }))
     .filter(category => {
@@ -46,7 +46,11 @@
       )
     })
   $: filteredHelpers = helpers?.filter(helper => {
-    return helper.label.match(searchRgx) || helper.description.match(searchRgx)
+    return (
+      !search ||
+      helper.label.match(searchRgx) ||
+      helper.description.match(searchRgx)
+    )
   })

   const getHelperExample = (helper, js) => {
@@ -124,9 +128,6 @@
       <span
         class="search-input-icon"
         on:click={() => {
-          if (!search) {
-            return
-          }
           search = null
         }}
         class:searching={search}


@@ -76,7 +76,7 @@
   {/if}
 </div>

-<Drawer bind:this={bindingDrawer} {title}>
+<Drawer bind:this={bindingDrawer} {title} headless>
   <svelte:fragment slot="description">
     Add the objects on the left to enrich your text.
   </svelte:fragment>


@@ -5,8 +5,6 @@
     runtimeToReadableBinding,
   } from "builderStore/dataBinding"

-  import { store } from "builderStore"
-
   import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
   import { createEventDispatcher, setContext } from "svelte"
   import { isJSBinding } from "@budibase/string-templates"
@@ -36,7 +34,6 @@
   const saveBinding = () => {
     onChange(tempValue)
-    store.actions.settings.propertyFocus(null)
     onBlur()
     bindingDrawer.hide()
   }
@@ -70,7 +67,6 @@
   <div
     class="icon"
     on:click={() => {
-      store.actions.settings.propertyFocus(key)
       bindingDrawer.show()
     }}
   >


@@ -97,7 +97,10 @@
   }

   const previewApp = () => {
-    window.open(`/${application}`)
+    store.update(state => ({
+      ...state,
+      showPreview: true,
+    }))
   }

   const viewApp = () => {


@@ -73,10 +73,6 @@
     if (highlighted) {
       store.actions.settings.highlight(null)
     }
-    // To fix focus 'affect' when property is target of a drawer other actions in the builder.
-    if (propertyFocus) {
-      store.actions.settings.propertyFocus(null)
-    }
   })
 </script>


@@ -186,7 +186,6 @@
   }

   div :global(.CodeMirror) {
-    width: var(--code-mirror-width) !important;
     height: var(--code-mirror-height) !important;
     border-radius: var(--border-radius-s);
     font-family: var(--font-mono);


@@ -1,5 +1,11 @@
 <script>
-  import { ModalContent, Toggle, Body, InlineAlert } from "@budibase/bbui"
+  import {
+    ModalContent,
+    Toggle,
+    Body,
+    InlineAlert,
+    notifications,
+  } from "@budibase/bbui"

   export let app
   export let published
@@ -8,10 +14,45 @@
   $: title = published ? "Export published app" : "Export latest app"
   $: confirmText = published ? "Export published" : "Export latest"

-  const exportApp = () => {
+  const exportApp = async () => {
     const id = published ? app.prodId : app.devId
-    const appName = encodeURIComponent(app.name)
-    window.location = `/api/backups/export?appId=${id}&appname=${appName}&excludeRows=${excludeRows}`
+    const url = `/api/backups/export?appId=${id}`
+    await downloadFile(url, { excludeRows })
+  }
+
+  async function downloadFile(url, body) {
+    try {
+      const response = await fetch(url, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+        },
+        body: JSON.stringify(body),
+      })
+
+      if (response.ok) {
+        const contentDisposition = response.headers.get("Content-Disposition")
+
+        const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
+          contentDisposition
+        )
+
+        const filename = matches[1].replace(/['"]/g, "")
+
+        const url = URL.createObjectURL(await response.blob())
+
+        const link = document.createElement("a")
+        link.href = url
+        link.download = filename
+        link.click()
+
+        URL.revokeObjectURL(url)
+      } else {
+        notifications.error("Error exporting the app.")
+      }
+    } catch (error) {
+      notifications.error(error.message || "Error downloading the exported app")
+    }
   }
 </script>


@@ -0,0 +1,91 @@
<script>
import { onMount } from "svelte"
import { fade, fly } from "svelte/transition"
import { store, selectedScreen } from "builderStore"
import { ProgressCircle } from "@budibase/bbui"
$: route = $selectedScreen?.routing.route || "/"
$: src = `/${$store.appId}#${route}`
const close = () => {
store.update(state => ({
...state,
showPreview: false,
}))
}
onMount(() => {
window.closePreview = () => {
store.update(state => ({
...state,
showPreview: false,
}))
}
})
</script>
<div
class="preview-overlay"
transition:fade={{ duration: 260 }}
on:click|self={close}
>
<div
class="container spectrum {$store.theme}"
transition:fly={{ duration: 260, y: 130 }}
>
<div class="header placeholder" />
<div class="loading placeholder">
<ProgressCircle />
</div>
<iframe title="Budibase App Preview" {src} />
</div>
</div>
<style>
.preview-overlay {
top: 0;
right: 0;
left: 0;
bottom: 0;
z-index: 999;
position: absolute;
background: rgba(255, 255, 255, 0.1);
display: flex;
align-items: stretch;
padding: 48px;
}
.container {
flex: 1 1 auto;
background: var(--spectrum-global-color-gray-75);
border-radius: 4px;
display: flex;
flex-direction: column;
overflow: hidden;
position: relative;
box-shadow: 0 0 80px 0 rgba(0, 0, 0, 0.5);
}
iframe {
position: absolute;
height: 100%;
width: 100%;
border: none;
outline: none;
z-index: 1;
}
.header {
height: 60px;
width: 100%;
background: black;
top: 0;
position: absolute;
}
.loading {
position: absolute;
top: 50%;
left: 50%;
transform: translateY(-50%) translateX(-50%);
}
.placeholder {
z-index: 0;
}
</style>


@@ -25,6 +25,7 @@
   import BuilderSidePanel from "./_components/BuilderSidePanel.svelte"
   import UserAvatars from "./_components/UserAvatars.svelte"
   import { TOUR_KEYS, TOURS } from "components/portal/onboarding/tours.js"
+  import PreviewOverlay from "./_components/PreviewOverlay.svelte"

   export let application
@@ -141,7 +142,7 @@
   <BuilderSidePanel />
 {/if}

-<div class="root">
+<div class="root" class:blur={$store.showPreview}>
   <div class="top-nav">
     {#if $store.initialised}
       <div class="topleftnav">
@@ -197,6 +198,10 @@
     {/await}
   </div>

+{#if $store.showPreview}
+  <PreviewOverlay />
+{/if}
+
 <svelte:window on:keydown={handleKeyDown} />
 <Modal bind:this={commandPaletteModal}>
   <CommandPalette />
@@ -222,6 +227,10 @@
     width: 100%;
     display: flex;
     flex-direction: column;
+    transition: filter 260ms ease-out;
+  }
+  .root.blur {
+    filter: blur(8px);
   }

   .top-nav {


@@ -65,7 +65,7 @@
   }

   const saveDatasource = async () => {
-    if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
+    if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
       const valid = await validateConfig()
       if (!valid) {
         return false


@@ -373,7 +373,7 @@
   <OnboardingTypeModal {chooseCreationType} />
 </Modal>

-<Modal bind:this={passwordModal}>
+<Modal bind:this={passwordModal} disableCancel={true}>
   <PasswordModal
     createUsersResponse={bulkSaveResponse}
     userData={userData.users}


@@ -57,7 +57,10 @@ export function createDatasourcesStore() {
     return updateDatasource(response)
   }

-  const save = async (body, fetchSchema = false) => {
+  const save = async (body, { fetchSchema, tablesFilter } = {}) => {
+    if (fetchSchema == null) {
+      fetchSchema = false
+    }
     let response
     if (body._id) {
       response = await API.updateDatasource(body)
@@ -65,6 +68,7 @@ export function createDatasourcesStore() {
       response = await API.createDatasource({
         datasource: body,
         fetchSchema,
+        tablesFilter,
       })
     }
     return updateDatasource(response)


@@ -49,7 +49,7 @@
     "pouchdb": "7.3.0",
     "pouchdb-replication-stream": "1.2.9",
     "randomstring": "1.1.5",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "yaml": "^2.1.1"
   },
   "devDependencies": {


@@ -1,7 +1,6 @@
 import { createAPIClient } from "@budibase/frontend-core"
-import { notificationStore } from "../stores/notification.js"
 import { authStore } from "../stores/auth.js"
-import { devToolsStore } from "../stores/devTools.js"
+import { notificationStore, devToolsEnabled, devToolsStore } from "../stores/"
 import { get } from "svelte/store"

 export const API = createAPIClient({
@@ -25,9 +24,10 @@ export const API = createAPIClient({
     }

     // Add role header
-    const devToolsState = get(devToolsStore)
-    if (devToolsState.enabled && devToolsState.role) {
-      headers["x-budibase-role"] = devToolsState.role
+    const $devToolsStore = get(devToolsStore)
+    const $devToolsEnabled = get(devToolsEnabled)
+    if ($devToolsEnabled && $devToolsStore.role) {
+      headers["x-budibase-role"] = $devToolsStore.role
     }
   },


@@ -17,6 +17,7 @@
     appStore,
     devToolsStore,
     environmentStore,
+    devToolsEnabled,
   } from "stores"
   import NotificationDisplay from "components/overlay/NotificationDisplay.svelte"
   import ConfirmationDisplay from "components/overlay/ConfirmationDisplay.svelte"
@@ -47,10 +48,7 @@
   let permissionError = false

   // Determine if we should show devtools or not
-  $: showDevTools =
-    !$builderStore.inBuilder &&
-    $devToolsStore.enabled &&
-    !$routeStore.queryParams?.peek
+  $: showDevTools = $devToolsEnabled && !$routeStore.queryParams?.peek

   // Handle no matching route
   $: {
@@ -107,6 +105,7 @@
   lang="en"
   dir="ltr"
   class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
+  class:builder={$builderStore.inBuilder}
 >
   <DeviceBindingsProvider>
     <UserBindingsProvider>
@@ -223,12 +222,14 @@
     overflow: hidden;
     height: 100%;
     width: 100%;
-    background: transparent;
     display: flex;
     flex-direction: row;
     justify-content: center;
     align-items: center;
   }
+  #spectrum-root.builder {
+    background: transparent;
+  }

   #clip-root {
     max-width: 100%;


@@ -1,5 +1,5 @@
 <script>
-  import { Heading, Button, Select } from "@budibase/bbui"
+  import { Heading, Select, ActionButton } from "@budibase/bbui"
   import { devToolsStore } from "../../stores"
   import { getContext } from "svelte"
@@ -30,7 +30,7 @@
 </script>

 <div class="dev-preview-header" class:mobile={$context.device.mobile}>
-  <Heading size="XS">Budibase App Preview</Heading>
+  <Heading size="XS">Preview</Heading>
   <Select
     quiet
     options={previewOptions}
@@ -40,36 +40,57 @@
     on:change={e => devToolsStore.actions.changeRole(e.detail)}
   />
   {#if !$context.device.mobile}
-    <Button
+    <ActionButton
       quiet
-      overBackground
       icon="Code"
       on:click={() => devToolsStore.actions.setVisible(!$devToolsStore.visible)}
     >
       {$devToolsStore.visible ? "Close" : "Open"} DevTools
-    </Button>
+    </ActionButton>
   {/if}
+  <ActionButton
+    quiet
+    icon="Close"
+    on:click={() => window.parent.closePreview?.()}
+  >
+    Close preview
+  </ActionButton>
 </div>

 <style>
   .dev-preview-header {
-    flex: 0 0 50px;
-    height: 50px;
+    flex: 0 0 60px;
     display: grid;
     align-items: center;
-    background-color: var(--spectrum-global-color-blue-400);
+    background-color: black;
     padding: 0 var(--spacing-xl);
-    grid-template-columns: 1fr auto auto;
+    grid-template-columns: 1fr auto auto auto;
     grid-gap: var(--spacing-xl);
   }
   .dev-preview-header.mobile {
-    flex: 0 0 50px;
-    grid-template-columns: 1fr auto;
+    grid-template-columns: 1fr auto auto;
   }
   .dev-preview-header :global(.spectrum-Heading),
   .dev-preview-header :global(.spectrum-Picker-menuIcon),
-  .dev-preview-header :global(.spectrum-Picker-label) {
-    color: white !important;
+  .dev-preview-header :global(.spectrum-Icon),
+  .dev-preview-header :global(.spectrum-Picker-label),
+  .dev-preview-header :global(.spectrum-ActionButton) {
+    font-weight: 600;
+    color: white;
+  }
+  .dev-preview-header :global(.spectrum-Picker) {
+    padding-left: 8px;
+    padding-right: 8px;
+    transition: background 130ms ease-out;
+    border-radius: 4px;
+  }
+  .dev-preview-header :global(.spectrum-ActionButton:hover),
+  .dev-preview-header :global(.spectrum-Picker:hover),
+  .dev-preview-header :global(.spectrum-Picker.is-open) {
+    background: rgba(255, 255, 255, 0.1);
+  }
+  .dev-preview-header :global(.spectrum-ActionButton:active) {
+    background: rgba(255, 255, 255, 0.2);
   }
   @media print {
     .dev-preview-header {


@@ -2,7 +2,6 @@ import ClientApp from "./components/ClientApp.svelte"
 import {
   builderStore,
   appStore,
-  devToolsStore,
   blockStore,
   componentStore,
   environmentStore,
@@ -51,11 +50,6 @@ const loadBudibase = async () => {
     await environmentStore.actions.fetchEnvironment()
   }

-  // Enable dev tools or not. We need to be using a dev app and not inside
-  // the builder preview to enable them.
-  const enableDevTools = !get(builderStore).inBuilder && get(appStore).isDevApp
-  devToolsStore.actions.setEnabled(enableDevTools)
-
   // Register handler for runtime events from the builder
   window.handleBuilderRuntimeEvent = (type, data) => {
     if (!window["##BUDIBASE_IN_BUILDER##"]) {


@@ -2,13 +2,14 @@ import { derived } from "svelte/store"
 import { Constants } from "@budibase/frontend-core"
 import { devToolsStore } from "../devTools.js"
 import { authStore } from "../auth.js"
+import { devToolsEnabled } from "./devToolsEnabled.js"

 // Derive the current role of the logged-in user
 export const currentRole = derived(
-  [devToolsStore, authStore],
-  ([$devToolsStore, $authStore]) => {
+  [devToolsEnabled, devToolsStore, authStore],
+  ([$devToolsEnabled, $devToolsStore, $authStore]) => {
     return (
-      ($devToolsStore.enabled && $devToolsStore.role) ||
+      ($devToolsEnabled && $devToolsStore.role) ||
       $authStore?.roleId ||
       Constants.Roles.PUBLIC
     )


@@ -0,0 +1,10 @@
import { derived } from "svelte/store"
import { appStore } from "../app.js"
import { builderStore } from "../builder.js"
export const devToolsEnabled = derived(
[appStore, builderStore],
([$appStore, $builderStore]) => {
return !$builderStore.inBuilder && $appStore.isDevApp
}
)


@@ -3,3 +3,4 @@
 // separately we can keep our actual stores lean and performant.
 export { currentRole } from "./currentRole.js"
 export { dndComponentPath } from "./dndComponentPath.js"
+export { devToolsEnabled } from "./devToolsEnabled.js"


@@ -4,7 +4,6 @@ import { authStore } from "./auth"
 import { API } from "../api"

 const initialState = {
-  enabled: false,
   visible: false,
   allowSelection: false,
   role: null,
@@ -13,13 +12,6 @@ const initialState = {
 const createDevToolStore = () => {
   const store = createLocalStorageStore("bb-devtools", initialState)

-  const setEnabled = enabled => {
-    store.update(state => ({
-      ...state,
-      enabled,
-    }))
-  }
-
   const setVisible = visible => {
     store.update(state => ({
       ...state,
@@ -46,7 +38,7 @@ const createDevToolStore = () => {
   return {
     subscribe: store.subscribe,
-    actions: { setEnabled, setVisible, setAllowSelection, changeRole },
+    actions: { setVisible, setAllowSelection, changeRole },
   }
 }

View File

@@ -26,13 +26,16 @@ export const buildDatasourceEndpoints = API => ({
   * Creates a datasource
   * @param datasource the datasource to create
   * @param fetchSchema whether to fetch the schema or not
+  * @param tablesFilter a list of tables to actually fetch rather than simply
+  * all that are accessible.
   */
-  createDatasource: async ({ datasource, fetchSchema }) => {
+  createDatasource: async ({ datasource, fetchSchema, tablesFilter }) => {
     return await API.post({
       url: "/api/datasources",
       body: {
         datasource,
         fetchSchema,
+        tablesFilter,
       },
     })
   },
@@ -69,4 +72,15 @@ export const buildDatasourceEndpoints = API => ({
       body: { datasource },
     })
   },
+  /**
+   * Fetch table names available within the datasource, for filtering out undesired tables
+   * @param datasource the datasource configuration to use for fetching tables
+   */
+  fetchInfoForDatasource: async datasource => {
+    return await API.post({
+      url: `/api/datasources/info`,
+      body: { datasource },
+    })
+  },
 })
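For a sense of how these endpoints fit together, here is a minimal sketch of calling them from builder code; the surrounding variables and the chosen subset are assumptions, not part of the commit:

  // Sketch: discover table names first, then only pull the schemas the user picked
  const { tableNames } = await API.fetchInfoForDatasource(datasource)
  const tablesFilter = tableNames.slice(0, 5) // e.g. whatever the user selected

  await API.createDatasource({
    datasource,
    fetchSchema: true,
    tablesFilter,
  })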
View File

@@ -148,9 +148,9 @@
   class:floating={offset > 0}
   style="--offset:{offset}px; --sticky-width:{width}px;"
 >
-  <div class="underlay sticky" transition:fade={{ duration: 130 }} />
-  <div class="underlay" transition:fade={{ duration: 130 }} />
-  <div class="sticky-column" transition:fade={{ duration: 130 }}>
+  <div class="underlay sticky" transition:fade|local={{ duration: 130 }} />
+  <div class="underlay" transition:fade|local={{ duration: 130 }} />
+  <div class="sticky-column" transition:fade|local={{ duration: 130 }}>
     <GutterCell on:expand={addViaModal} rowHovered>
       <Icon name="Add" color="var(--spectrum-global-color-gray-500)" />
       {#if isAdding}
@@ -179,7 +179,7 @@
       </DataCell>
     {/if}
   </div>
-  <div class="normal-columns" transition:fade={{ duration: 130 }}>
+  <div class="normal-columns" transition:fade|local={{ duration: 130 }}>
     <GridScrollWrapper scrollHorizontally wheelInteractive>
       <div class="row">
         {#each $renderedColumns as column, columnIdx}
@@ -209,7 +209,7 @@
       </div>
     </GridScrollWrapper>
   </div>
-  <div class="buttons" transition:fade={{ duration: 130 }}>
+  <div class="buttons" transition:fade|local={{ duration: 130 }}>
     <Button size="M" cta on:click={addRow} disabled={isAdding}>
       <div class="button-with-keys">
         Save
@@ -1 +1 @@
-Subproject commit 01fbc8670021c5a275c2a1a36ee18b984eeafad5
+Subproject commit f4b8449aac9bd265214396afbdce7ff984a2ae34
View File

@@ -97,7 +97,7 @@
     "koa2-ratelimit": "1.1.1",
     "lodash": "4.17.21",
     "memorystream": "0.3.1",
-    "mongodb": "4.9",
+    "mongodb": "5.6",
     "mssql": "6.2.3",
     "mysql2": "2.3.3",
     "node-fetch": "2.6.7",
@@ -117,7 +117,7 @@
     "socket.io": "4.6.1",
     "svelte": "3.49.0",
     "swagger-parser": "10.0.3",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "to-json-schema": "0.2.5",
     "uuid": "3.3.2",
     "validate.js": "0.13.1",
@@ -150,7 +150,7 @@
     "@types/redis": "4.0.11",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.12",
-    "@types/tar": "6.1.3",
+    "@types/tar": "6.1.5",
    "@typescript-eslint/parser": "5.45.0",
    "apidoc": "0.50.4",
    "babel-jest": "29.5.0",
View File

@@ -1,17 +1,31 @@
 import sdk from "../../sdk"
-import { events, context } from "@budibase/backend-core"
+import { events, context, db } from "@budibase/backend-core"
 import { DocumentType } from "../../db/utils"
-import { isQsTrue } from "../../utilities"
+import { Ctx } from "@budibase/types"
+interface ExportAppDumpRequest {
+  excludeRows: boolean
+  encryptPassword?: string
+}
-export async function exportAppDump(ctx: any) {
-  let { appId, excludeRows } = ctx.query
+export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
+  const { appId } = ctx.query as any
+  const { excludeRows, encryptPassword } = ctx.request.body
+  const [app] = await db.getAppsByIDs([appId])
+  const appName = app.name
   // remove the 120 second limit for the request
   ctx.req.setTimeout(0)
-  const appName = decodeURI(ctx.query.appname)
-  excludeRows = isQsTrue(excludeRows)
-  const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
+  const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
+  const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
   ctx.attachment(backupIdentifier)
-  ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
+  ctx.body = await sdk.backups.streamExportApp({
+    appId,
+    excludeRows,
+    encryptPassword,
+  })
   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
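Put together, the endpoint now takes its options in a POST body rather than query parameters. A rough client-side sketch, with the appId and passphrase as placeholders:

  // Sketch of calling the updated export endpoint
  const res = await fetch(`/api/backups/export?appId=${appId}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      excludeRows: true,
      encryptPassword: "a-strong-passphrase", // optional; encrypted exports use the .enc.tar.gz extension
    }),
  })
  // The response streams the tarball; the filename is now derived from the app name.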
View File

@@ -103,6 +103,22 @@ async function buildSchemaHelper(datasource: Datasource) {
   return { tables: connector.tables, error }
 }
+async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
+  let { tables, error } = await buildSchemaHelper(datasource)
+  let finalTables = tables
+  if (filter) {
+    finalTables = {}
+    for (let key in tables) {
+      if (
+        filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
+      ) {
+        finalTables[key] = tables[key]
+      }
+    }
+  }
+  return { tables: finalTables, error }
+}
 export async function fetch(ctx: UserCtx) {
   // Get internal tables
   const db = context.getAppDB()
@@ -174,43 +190,28 @@ export async function information(
   }
   const tableNames = await connector.getTableNames()
   ctx.body = {
-    tableNames,
+    tableNames: tableNames.sort(),
   }
 }
 export async function buildSchemaFromDb(ctx: UserCtx) {
   const db = context.getAppDB()
-  const datasource = await sdk.datasources.get(ctx.params.datasourceId)
   const tablesFilter = ctx.request.body.tablesFilter
+  const datasource = await sdk.datasources.get(ctx.params.datasourceId)
-  let { tables, error } = await buildSchemaHelper(datasource)
-  if (tablesFilter) {
-    if (!datasource.entities) {
-      datasource.entities = {}
-    }
-    for (let key in tables) {
-      if (
-        tablesFilter.some(
-          (filter: any) => filter.toLowerCase() === key.toLowerCase()
-        )
-      ) {
-        datasource.entities[key] = tables[key]
-      }
-    }
-  } else {
-    datasource.entities = tables
-  }
+  const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
+  datasource.entities = tables
   setDefaultDisplayColumns(datasource)
   const dbResp = await db.put(datasource)
   datasource._rev = dbResp.rev
   const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
-  const response: any = { datasource: cleanedDatasource }
+  const res: any = { datasource: cleanedDatasource }
   if (error) {
-    response.error = error
+    res.error = error
   }
-  ctx.body = response
+  ctx.body = res
 }
 /**
@@ -320,6 +321,7 @@ export async function save(
   const db = context.getAppDB()
   const plus = ctx.request.body.datasource.plus
   const fetchSchema = ctx.request.body.fetchSchema
+  const tablesFilter = ctx.request.body.tablesFilter
   const datasource = {
     _id: generateDatasourceID({ plus }),
@@ -329,7 +331,10 @@
   let schemaError = null
   if (fetchSchema) {
-    const { tables, error } = await buildSchemaHelper(datasource)
+    const { tables, error } = await buildFilteredSchema(
+      datasource,
+      tablesFilter
+    )
     schemaError = error
     datasource.entities = tables
     setDefaultDisplayColumns(datasource)
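The filter comparison is case-insensitive, and an absent filter leaves the schema untouched. A standalone sketch of the same logic, with the helper name and inputs as illustrative assumptions:

  // Sketch: keep only the tables whose names appear in the filter, ignoring case
  function filterTables<T>(tables: Record<string, T>, filter?: string[]) {
    if (!filter) {
      return tables
    }
    const wanted = new Set(filter.map(name => name.toLowerCase()))
    return Object.fromEntries(
      Object.entries(tables).filter(([key]) => wanted.has(key.toLowerCase()))
    )
  }

  // filterTables({ Employees: {}, Orders: {} }, ["employees"]) -> { Employees: {} }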
View File

@@ -4,7 +4,7 @@ import {
   getUserMetadataParams,
   InternalTables,
 } from "../../db/utils"
-import { BBContext, Database } from "@budibase/types"
+import { UserCtx, Database } from "@budibase/types"
 const UpdateRolesOptions = {
   CREATED: "created",
@@ -38,15 +38,15 @@ async function updateRolesOnUserTable(
   }
 }
-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: UserCtx) {
   ctx.body = await roles.getAllRoles()
 }
-export async function find(ctx: BBContext) {
+export async function find(ctx: UserCtx) {
   ctx.body = await roles.getRole(ctx.params.roleId)
 }
-export async function save(ctx: BBContext) {
+export async function save(ctx: UserCtx) {
   const db = context.getAppDB()
   let { _id, name, inherits, permissionId } = ctx.request.body
   let isCreate = false
@@ -72,7 +72,7 @@ export async function save(ctx: BBContext) {
   ctx.message = `Role '${role.name}' created successfully.`
 }
-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: UserCtx) {
   const db = context.getAppDB()
   const roleId = ctx.params.roleId
   const role = await db.get(roleId)
View File

@@ -1,6 +1,6 @@
 import { getRoutingInfo } from "../../utilities/routing"
 import { roles } from "@budibase/backend-core"
-import { BBContext } from "@budibase/types"
+import { UserCtx } from "@budibase/types"
 const URL_SEPARATOR = "/"
@@ -56,11 +56,11 @@ async function getRoutingStructure() {
   return { routes: routing.json }
 }
-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: UserCtx) {
   ctx.body = await getRoutingStructure()
 }
-export async function clientFetch(ctx: BBContext) {
+export async function clientFetch(ctx: UserCtx) {
   const routing = await getRoutingStructure()
   let roleId = ctx.user?.role?._id
   const roleIds = (await roles.getUserRoleHierarchy(roleId, {
View File

@@ -237,9 +237,15 @@ export async function exportRows(ctx: UserCtx) {
     ctx.request.body = {
       query: {
         oneOf: {
-          _id: ctx.request.body.rows.map(
-            (row: string) => JSON.parse(decodeURI(row))[0]
-          ),
+          _id: ctx.request.body.rows.map((row: string) => {
+            const ids = JSON.parse(
+              decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
+            )
+            if (ids.length > 1) {
+              ctx.throw(400, "Export data does not support composite keys.")
+            }
+            return ids[0]
+          }),
         },
       },
     }
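Row IDs arrive from the frontend as URI-encoded, single-quoted arrays, so they are normalised into valid JSON before parsing; anything that parses to more than one id is treated as a composite key and rejected. A small sketch of that normalisation, using an assumed example input:

  // "['d001'%2C'10111']" is what a multi-part key looks like on the wire
  const raw = "['d001'%2C'10111']"
  const ids = JSON.parse(
    decodeURI(raw).replace(/'/g, `"`).replace(/%2C/g, ",")
  ) // -> ["d001", "10111"], which the endpoint now rejects with a 400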
View File

@@ -0,0 +1,120 @@
import { exportRows } from "../row/external"
import sdk from "../../../sdk"
import { ExternalRequest } from "../row/ExternalRequest"
// @ts-ignore
sdk.datasources = {
get: jest.fn(),
}
jest.mock("../row/ExternalRequest")
jest.mock("../view/exporters", () => ({
csv: jest.fn(),
Format: {
CSV: "csv",
},
}))
jest.mock("../../../utilities/fileSystem")
function getUserCtx() {
return {
params: {
tableId: "datasource__tablename",
},
query: {
format: "csv",
},
request: {
body: {},
},
throw: jest.fn(() => {
throw "Err"
}),
attachment: jest.fn(),
}
}
describe("external row controller", () => {
describe("exportRows", () => {
beforeAll(() => {
//@ts-ignore
jest.spyOn(ExternalRequest.prototype, "run").mockImplementation(() => [])
})
afterEach(() => {
jest.clearAllMocks()
})
it("should throw a 400 if no datasource entities are present", async () => {
let userCtx = getUserCtx()
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Datasource has not been configured for plus API."
)
}
})
it("should handle single quotes from a row ID", async () => {
//@ts-ignore
sdk.datasources.get.mockImplementation(() => ({
entities: {
tablename: {
schema: {},
},
},
}))
let userCtx = getUserCtx()
userCtx.request.body = {
rows: ["['d001']"],
}
//@ts-ignore
await exportRows(userCtx)
expect(userCtx.request.body).toEqual({
query: {
oneOf: {
_id: ["d001"],
},
},
})
})
it("should throw a 400 if any composite keys are present", async () => {
let userCtx = getUserCtx()
userCtx.request.body = {
rows: ["[123]", "['d001'%2C'10111']"],
}
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Export data does not support composite keys."
)
}
})
it("should throw a 400 if no table name was found", async () => {
let userCtx = getUserCtx()
userCtx.params.tableId = "datasource__"
userCtx.request.body = {
rows: ["[123]"],
}
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Could not find table name."
)
}
})
})
})
View File

@@ -5,7 +5,7 @@ import { permissions } from "@budibase/backend-core"
 const router: Router = new Router()
-router.get(
+router.post(
   "/api/backups/export",
   authorized(permissions.BUILDER),
   controller.exportAppDump
View File

@@ -1,7 +1,9 @@
+import tk from "timekeeper"
 import * as setup from "./utilities"
 import { events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
+import { mocks } from "@budibase/backend-core/tests"
 describe("/backups", () => {
   let request = setup.getRequest()
@@ -16,7 +18,7 @@ describe("/backups", () => {
   describe("exportAppDump", () => {
     it("should be able to export app", async () => {
       const res = await request
-        .get(`/api/backups/export?appId=${config.getAppId()}&appname=test`)
+        .post(`/api/backups/export?appId=${config.getAppId()}`)
         .set(config.defaultHeaders())
         .expect(200)
       expect(res.headers["content-type"]).toEqual("application/gzip")
@@ -26,10 +28,24 @@
     it("should apply authorization to endpoint", async () => {
       await checkBuilderEndpoint({
         config,
-        method: "GET",
+        method: "POST",
         url: `/api/backups/export?appId=${config.getAppId()}`,
       })
     })
+    it("should infer the app name from the app", async () => {
+      tk.freeze(mocks.date.MOCK_DATE)
+      const res = await request
+        .post(`/api/backups/export?appId=${config.getAppId()}`)
+        .set(config.defaultHeaders())
+      expect(res.headers["content-disposition"]).toEqual(
+        `attachment; filename="${
+          config.getApp()!.name
+        }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
+      )
+    })
   })
   describe("calculateBackupStats", () => {
View File

@@ -26,6 +26,10 @@ export default function process(updateCb?: UpdateCallback) {
       // if something not found - no changes to perform
       if (err?.status === 404) {
         return
+      }
+      // The user has already been sync in another process
+      else if (err?.status === 409) {
+        return
       } else {
         logging.logAlert("Failed to perform user/group app sync", err)
       }
View File

@@ -21,7 +21,7 @@ import { buildExternalTableId, finaliseExternalTables } from "./utils"
 import { GoogleSpreadsheet, GoogleSpreadsheetRow } from "google-spreadsheet"
 import fetch from "node-fetch"
 import { cache, configs, context, HTTPError } from "@budibase/backend-core"
-import { dataFilters } from "@budibase/shared-core"
+import { dataFilters, utils } from "@budibase/shared-core"
 import { GOOGLE_SHEETS_PRIMARY_KEY } from "../constants"
 import sdk from "../sdk"
@@ -150,7 +150,6 @@ class GoogleSheetsIntegration implements DatasourcePlus {
   async testConnection(): Promise<ConnectionInfo> {
     try {
-      await setupCreationAuth(this.config)
       await this.connect()
       return { connected: true }
     } catch (e: any) {
@@ -211,6 +210,8 @@
   async connect() {
     try {
+      await setupCreationAuth(this.config)
       // Initialise oAuth client
       let googleConfig = await configs.getGoogleDatasourceConfig()
       if (!googleConfig) {
@@ -273,24 +274,24 @@
   }
   async buildSchema(datasourceId: string, entities: Record<string, Table>) {
-    // not fully configured yet
-    if (!this.config.auth) {
-      return
-    }
     await this.connect()
     const sheets = this.client.sheetsByIndex
     const tables: Record<string, Table> = {}
-    for (let sheet of sheets) {
-      // must fetch rows to determine schema
-      await sheet.getRows()
+    await utils.parallelForeach(
+      sheets,
+      async sheet => {
+        // must fetch rows to determine schema
+        await sheet.getRows({ limit: 0, offset: 0 })
         const id = buildExternalTableId(datasourceId, sheet.title)
         tables[sheet.title] = this.getTableSchema(
          sheet.title,
          sheet.headerValues,
          id
        )
-    }
+      },
+      10
+    )
     const final = finaliseExternalTables(tables, entities)
     this.tables = final.tables
     this.schemaErrors = final.errors
View File

@@ -351,7 +351,7 @@ const SCHEMA: Integration = getSchema()
 class MongoIntegration implements IntegrationBase {
   private config: MongoDBConfig
-  private client: any
+  private client: MongoClient
   constructor(config: MongoDBConfig) {
     this.config = config
@@ -372,6 +372,8 @@ class MongoIntegration implements IntegrationBase {
       response.connected = true
     } catch (e: any) {
       response.error = e.message as string
+    } finally {
+      await this.client.close()
     }
     return response
   }
@@ -380,7 +382,7 @@
     return this.client.connect()
   }
-  createObjectIds(json: any): object {
+  createObjectIds(json: any) {
     const self = this
     function interpolateObjectIds(json: any) {
       for (let field of Object.keys(json)) {
View File

@@ -322,7 +322,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
       await this.openConnection()
       const columnsResponse: { rows: PostgresColumn[] } =
        await this.client.query(this.COLUMNS_SQL)
-      return columnsResponse.rows.map(row => row.table_name)
+      const names = columnsResponse.rows.map(row => row.table_name)
+      return [...new Set(names)]
     } finally {
       await this.closeConnection()
     }
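The columns query returns one row per column, so a table with several columns would otherwise appear several times in the result; the Set collapses those duplicates before the names are returned. In isolation, with assumed example values:

  // Sketch of the dedupe step
  const names = ["users", "users", "orders"]
  const tableNames = [...new Set(names)] // -> ["users", "orders"]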
View File

@@ -103,7 +103,7 @@ export default async (ctx: UserCtx, next: any) => {
       userId,
       globalId,
       roleId,
-      role: await roles.getRole(roleId),
+      role: await roles.getRole(roleId, { defaultPublic: true }),
     }
   }
View File

@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
@@ -18,7 +18,8 @@ import { join } from "path"
 import env from "../../../environment"
 const uuid = require("uuid/v4")
-const tar = require("tar")
+import tar from "tar"
 const MemoryStream = require("memorystream")
 interface DBDumpOpts {
@@ -30,16 +31,18 @@ interface ExportOpts extends DBDumpOpts {
   tar?: boolean
   excludeRows?: boolean
   excludeLogs?: boolean
+  encryptPassword?: string
 }
 function tarFilesToTmp(tmpDir: string, files: string[]) {
-  const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
+  const fileName = `${uuid()}.tar.gz`
+  const exportFile = join(budibaseTempDir(), fileName)
   tar.create(
     {
       sync: true,
       gzip: true,
       file: exportFile,
-      recursive: true,
+      noDirRecurse: false,
       cwd: tmpDir,
     },
     files
@@ -124,6 +127,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
       )
     }
   }
   const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
     const allFiles = fs.readdirSync(downloadedPath)
@@ -141,12 +145,27 @@ export async function exportApp(appId: string, config?: ExportOpts) {
     filter: defineFilter(config?.excludeRows, config?.excludeLogs),
     exportPath: dbPath,
   })
+  if (config?.encryptPassword) {
+    for (let file of fs.readdirSync(tmpPath)) {
+      const path = join(tmpPath, file)
+      await encryption.encryptFile(
+        { dir: tmpPath, filename: file },
+        config.encryptPassword
+      )
+      fs.rmSync(path)
+    }
+  }
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
     const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
     return tarPath
   }
   // tar not requested, turn the directory where export is
@@ -161,11 +180,20 @@ export async function exportApp(appId: string, config?: ExportOpts) {
  * @param {boolean} excludeRows Flag to state whether the export should include data.
  * @returns {*} a readable stream of the backup which is written in real time
  */
-export async function streamExportApp(appId: string, excludeRows: boolean) {
+export async function streamExportApp({
+  appId,
+  excludeRows,
+  encryptPassword,
+}: {
+  appId: string
+  excludeRows: boolean
+  encryptPassword?: string
+}) {
   const tmpPath = await exportApp(appId, {
     excludeRows,
     excludeLogs: true,
     tar: true,
+    encryptPassword,
  })
  return streamFile(tmpPath)
 }
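With the options object in place, the controller shown earlier drives the whole flow: files are encrypted in the temp directory first, then tarred and streamed. A minimal sketch of a caller, with the appId value as a placeholder:

  // Sketch: stream an optionally-encrypted export of an app
  ctx.body = await sdk.backups.streamExportApp({
    appId: "app_dev_1234",
    excludeRows: false,
    encryptPassword: process.env.EXPORT_PASSPHRASE, // omit to get a plain tar.gz
  })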
View File

@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { Database, Row } from "@budibase/types"
 import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
@@ -20,6 +20,7 @@ type TemplateType = {
   file?: {
     type: string
     path: string
+    password?: string
   }
   key?: string
 }
@@ -123,6 +124,22 @@ export function untarFile(file: { path: string }) {
   return tmpPath
 }
+async function decryptFiles(path: string, password: string) {
+  try {
+    for (let file of fs.readdirSync(path)) {
+      const inputPath = join(path, file)
+      const outputPath = inputPath.replace(/\.enc$/, "")
+      await encryption.decryptFile(inputPath, outputPath, password)
+      fs.rmSync(inputPath)
+    }
+  } catch (err: any) {
+    if (err.message === "incorrect header check") {
+      throw new Error("File cannot be imported")
+    }
+    throw err
+  }
+}
 export function getGlobalDBFile(tmpPath: string) {
   return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
 }
@@ -143,6 +160,9 @@ export async function importApp(
     template.file && fs.lstatSync(template.file.path).isDirectory()
   if (template.file && (isTar || isDirectory)) {
     const tmpPath = isTar ? untarFile(template.file) : template.file.path
+    if (isTar && template.file.password) {
+      await decryptFiles(tmpPath, template.file.password)
+    }
     const contents = fs.readdirSync(tmpPath)
     // have to handle object import
     if (contents.length) {
View File

@@ -135,7 +135,7 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
   // specific to REST datasources, fix the auth configs again if required
   if (hasAuthConfigs(update)) {
     const configs = update.config.authConfigs as RestAuthConfig[]
-    const oldConfigs = old.config?.authConfigs as RestAuthConfig[]
+    const oldConfigs = (old.config?.authConfigs as RestAuthConfig[]) || []
     for (let config of configs) {
       if (config.type !== RestAuthType.BASIC) {
         continue
@@ -164,5 +164,6 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
       delete update.config[key]
     }
   }
   return update
 }
View File

@@ -4,3 +4,42 @@ export function unreachable(
 ) {
   throw new Error(message)
 }
export async function parallelForeach<T>(
items: T[],
task: (item: T) => Promise<void>,
maxConcurrency: number
): Promise<void> {
const promises: Promise<void>[] = []
let index = 0
const processItem = async (item: T) => {
try {
await task(item)
} finally {
processNext()
}
}
const processNext = () => {
if (index >= items.length) {
// No more items to process
return
}
const item = items[index]
index++
const promise = processItem(item)
promises.push(promise)
if (promises.length >= maxConcurrency) {
Promise.race(promises).then(processNext)
} else {
processNext()
}
}
processNext()
await Promise.all(promises)
}
View File

@@ -12,6 +12,7 @@ export interface UpdateDatasourceResponse {
 export interface CreateDatasourceRequest {
   datasource: Datasource
   fetchSchema?: boolean
+  tablesFilter: string[]
 }
 export interface VerifyDatasourceRequest {
View File

@@ -10,7 +10,7 @@
   },
   "scripts": {
     "setup": "yarn && node scripts/createEnv.js",
-    "test": "jest --runInBand --json --outputFile=testResults.json",
+    "test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
     "test:watch": "yarn run test --watch",
     "test:debug": "DEBUG=1 yarn run test",
     "test:notify": "node scripts/testResultsWebhook",
View File

@@ -15,6 +15,12 @@ async function generateReport() {
   return JSON.parse(report)
 }
+const env = process.argv.slice(2)[0]
+if (!env) {
+  throw new Error("environment argument is required")
+}
 async function discordResultsNotification(report) {
   const {
     numTotalTestSuites,
@@ -39,8 +45,8 @@ async function discordResultsNotification(report) {
     content: `**Nightly Tests Status**: ${OUTCOME}`,
     embeds: [
       {
-        title: "Budi QA Bot",
-        description: `Nightly Tests`,
+        title: `Budi QA Bot - ${env}`,
+        description: `API Integration Tests`,
         url: GITHUB_ACTIONS_RUN_URL,
         color: OUTCOME === "success" ? 3066993 : 15548997,
         timestamp: new Date(),
View File

@@ -60,8 +60,16 @@ export default class AccountAPI {
   }
   async delete(accountID: string) {
-    const [response, json] = await this.client.del(`/api/accounts/${accountID}`)
-    expect(response).toHaveStatusCode(200)
+    const [response, json] = await this.client.del(
+      `/api/accounts/${accountID}`,
+      {
+        internal: true,
+      }
+    )
+    // can't use expect here due to use in global teardown
+    if (response.status !== 204) {
+      throw new Error(`Could not delete accountId=${accountID}`)
+    }
     return response
   }
 }
View File

@@ -93,7 +93,7 @@ describe("datasource validators", () => {
       const result = await integration.testConnection()
       expect(result).toEqual({
         connected: false,
-        error: "Error: getaddrinfo ENOTFOUND http",
+        error: "getaddrinfo ENOTFOUND http",
       })
     })
   })
View File

@@ -1,4 +1,5 @@
 import { GenericContainer } from "testcontainers"
+import postgres from "../../../../packages/server/src/integrations/postgres"
 jest.unmock("pg")
View File

@@ -10,6 +10,7 @@ const API_OPTS: APIRequestOpts = { doExpect: false }
 async function deleteAccount() {
   // @ts-ignore
   const accountID = global.qa.accountId
+  // can't run 'expect' blocks in teardown
   await accountsApi.accounts.delete(accountID)
 }
View File

@@ -6197,13 +6197,13 @@
   dependencies:
     "@types/node" "*"
-"@types/tar@6.1.3":
-  version "6.1.3"
-  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
-  integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
+"@types/tar@6.1.5":
+  version "6.1.5"
+  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.5.tgz#90ccb3b6a35430e7427410d50eed564e85feaaff"
+  integrity sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==
   dependencies:
     "@types/node" "*"
-    minipass "^3.3.5"
+    minipass "^4.0.0"
 "@types/tern@*":
   version "0.23.4"
@@ -7989,12 +7989,10 @@ bson@*:
   resolved "https://registry.yarnpkg.com/bson/-/bson-5.0.1.tgz#4cd3eeeabf6652ef0d6ab600f9a18212d39baac3"
   integrity sha512-y09gBGusgHtinMon/GVbv1J6FrXhnr/+6hqLlSmEFzkz6PodqF6TxjyvfvY3AfO+oG1mgUtbC86xSbOlwvM62Q==
-bson@^4.7.0:
-  version "4.7.2"
-  resolved "https://registry.yarnpkg.com/bson/-/bson-4.7.2.tgz#320f4ad0eaf5312dd9b45dc369cc48945e2a5f2e"
-  integrity sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==
-  dependencies:
-    buffer "^5.6.0"
+bson@^5.3.0:
+  version "5.3.0"
+  resolved "https://registry.yarnpkg.com/bson/-/bson-5.3.0.tgz#37b006df4cd91ed125cb686467c1dd6d4606b514"
+  integrity sha512-ukmCZMneMlaC5ebPHXIkP8YJzNl5DC41N5MAIvKDqLggdao342t4McltoJBQfQya/nHBWAcSsYRqlXPoQkTJag==
 buffer-alloc-unsafe@^1.1.0:
   version "1.1.0"
@@ -18036,7 +18034,7 @@ minipass-sized@^1.0.3:
   dependencies:
     minipass "^3.0.0"
-minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6, minipass@^3.3.5:
+minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6:
   version "3.3.6"
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
   integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
@@ -18210,7 +18208,7 @@ moment@^2.29.4:
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
   integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
-mongodb-connection-string-url@^2.5.3:
+mongodb-connection-string-url@^2.6.0:
   version "2.6.0"
   resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz#57901bf352372abdde812c81be47b75c6b2ec5cf"
   integrity sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==
@@ -18218,15 +18216,14 @@ mongodb-connection-string-url@^2.5.3:
     "@types/whatwg-url" "^8.2.1"
     whatwg-url "^11.0.0"
-mongodb@4.9:
-  version "4.9.1"
-  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-4.9.1.tgz#0c769448228bcf9a6aa7d16daa3625b48312479e"
-  integrity sha512-ZhgI/qBf84fD7sI4waZBoLBNJYPQN5IOC++SBCiPiyhzpNKOxN/fi0tBHvH2dEC42HXtNEbFB0zmNz4+oVtorQ==
+mongodb@5.6:
+  version "5.6.0"
+  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-5.6.0.tgz#caff5278341bfc0f1ef6f394bb403d207de03d1e"
+  integrity sha512-z8qVs9NfobHJm6uzK56XBZF8XwM9H294iRnB7wNjF0SnY93si5HPziIJn+qqvUR5QOff/4L0gCD6SShdR/GtVQ==
   dependencies:
-    bson "^4.7.0"
-    denque "^2.1.0"
-    mongodb-connection-string-url "^2.5.3"
-    socks "^2.7.0"
+    bson "^5.3.0"
+    mongodb-connection-string-url "^2.6.0"
+    socks "^2.7.1"
   optionalDependencies:
     saslprep "^1.0.3"
@@ -23100,7 +23097,7 @@ socks-proxy-agent@^7.0.0:
     debug "^4.3.3"
     socks "^2.6.2"
-socks@^2.3.3, socks@^2.6.2, socks@^2.7.0:
+socks@^2.3.3, socks@^2.6.2, socks@^2.7.1:
   version "2.7.1"
   resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55"
   integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==
@@ -24163,6 +24160,18 @@ tar@6.1.11:
     mkdirp "^1.0.3"
     yallist "^4.0.0"
+tar@6.1.15:
+  version "6.1.15"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
+  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
+  dependencies:
+    chownr "^2.0.0"
+    fs-minipass "^2.0.0"
+    minipass "^5.0.0"
+    minizlib "^2.1.1"
+    mkdirp "^1.0.3"
+    yallist "^4.0.0"
 tar@^6.1.11, tar@^6.1.2:
   version "6.1.13"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"