Merge remote-tracking branch 'origin/develop' into feature/app-settings-section
This commit is contained in:
commit
c2f8d54a8a
|
@ -12,31 +12,22 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
# - name: Fail if not a tag
|
||||
# run: |
|
||||
# if [[ $GITHUB_REF != refs/tags/* ]]; then
|
||||
# echo "Workflow Dispatch can only be run on tags"
|
||||
# exit 1
|
||||
# fi
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
# with:
|
||||
# fetch-depth: 0
|
||||
|
||||
# - name: Fail if tag is not in master
|
||||
# run: |
|
||||
# if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
||||
# echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
||||
# exit 1
|
||||
# fi
|
||||
|
||||
- name: Pull values.yaml from budibase-infra
|
||||
- name: Fail if not a tag
|
||||
run: |
|
||||
curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
|
||||
-H 'Accept: application/vnd.github.v3.raw' \
|
||||
-o values.production.yaml \
|
||||
-L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/values.yaml
|
||||
wc -l values.production.yaml
|
||||
if [[ $GITHUB_REF != refs/tags/* ]]; then
|
||||
echo "Workflow Dispatch can only be run on tags"
|
||||
exit 1
|
||||
fi
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Fail if tag is not in master
|
||||
run: |
|
||||
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
||||
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Get the latest budibase release version
|
||||
id: version
|
||||
|
@ -48,29 +39,10 @@ jobs:
|
|||
fi
|
||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
- uses: passeidireto/trigger-external-workflow-action@main
|
||||
env:
|
||||
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-1
|
||||
|
||||
- name: Deploy to EKS
|
||||
uses: craftech-io/eks-helm-deploy-action@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS__KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-1
|
||||
cluster-name: budibase-eks-production
|
||||
config-files: values.production.yaml
|
||||
chart-path: charts/budibase
|
||||
namespace: budibase
|
||||
values: globals.appVersion=v${{ env.RELEASE_VERSION }},services.couchdb.url=${{ secrets.PRODUCTION_COUCHDB_URL }},services.couchdb.password=${{ secrets.PRODUCTION_COUCHDB_PASSWORD }}
|
||||
name: budibase-prod
|
||||
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v4.0.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
|
||||
content: "Production Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Cloud."
|
||||
embed-title: ${{ env.RELEASE_VERSION }}
|
||||
repository: budibase/budibase-deploys
|
||||
event: budicloud-prod-deploy
|
||||
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
|
||||
|
|
|
@ -24,51 +24,18 @@ jobs:
|
|||
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
- name: Get the latest budibase release version
|
||||
id: version
|
||||
run: |
|
||||
release_version=$(cat lerna.json | jq -r '.version')
|
||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-1
|
||||
|
||||
- name: Pull values.yaml from budibase-infra
|
||||
run: |
|
||||
curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
|
||||
-H 'Accept: application/vnd.github.v3.raw' \
|
||||
-o values.preprod.yaml \
|
||||
-L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
|
||||
wc -l values.preprod.yaml
|
||||
- name: Deploy to Preprod Environment
|
||||
uses: budibase/helm@v1.8.0
|
||||
with:
|
||||
release: budibase-preprod
|
||||
namespace: budibase
|
||||
chart: charts/budibase
|
||||
token: ${{ github.token }}
|
||||
helm: helm3
|
||||
values: |
|
||||
globals:
|
||||
appVersion: v${{ env.RELEASE_VERSION }}
|
||||
ingress:
|
||||
enabled: true
|
||||
nginx: true
|
||||
value-files: >-
|
||||
[
|
||||
"values.preprod.yaml"
|
||||
]
|
||||
- uses: passeidireto/trigger-external-workflow-action@main
|
||||
env:
|
||||
KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}'
|
||||
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v4.0.0
|
||||
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
|
||||
with:
|
||||
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
|
||||
content: "Preprod Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Pre-prod."
|
||||
embed-title: ${{ env.RELEASE_VERSION }}
|
||||
repository: budibase/budibase-deploys
|
||||
event: budicloud-preprod-deploy
|
||||
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"version": "2.7.7-alpha.2",
|
||||
"version": "2.7.16-alpha.2",
|
||||
"npmClient": "yarn",
|
||||
"packages": [
|
||||
"packages/backend-core",
|
||||
|
|
|
@ -343,6 +343,9 @@ export class QueryBuilder<T> {
|
|||
}
|
||||
|
||||
const oneOf = (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return `*:*`
|
||||
}
|
||||
if (!Array.isArray(value)) {
|
||||
if (typeof value === "string") {
|
||||
value = value.split(",")
|
||||
|
|
|
@ -114,6 +114,25 @@ describe("lucene", () => {
|
|||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
|
||||
it("should return all rows when doing a one of search against falsey value", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addOneOf("property", null)
|
||||
let resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", undefined)
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", "")
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", [])
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(0)
|
||||
})
|
||||
|
||||
it("should be able to perform a contains search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addContains("property", ["word"])
|
||||
|
|
|
@ -1,12 +1,17 @@
|
|||
import crypto from "crypto"
|
||||
import fs from "fs"
|
||||
import zlib from "zlib"
|
||||
import env from "../environment"
|
||||
import { join } from "path"
|
||||
|
||||
const ALGO = "aes-256-ctr"
|
||||
const SEPARATOR = "-"
|
||||
const ITERATIONS = 10000
|
||||
const RANDOM_BYTES = 16
|
||||
const STRETCH_LENGTH = 32
|
||||
|
||||
const SALT_LENGTH = 16
|
||||
const IV_LENGTH = 16
|
||||
|
||||
export enum SecretOption {
|
||||
API = "api",
|
||||
ENCRYPTION = "encryption",
|
||||
|
@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
|
|||
return secret
|
||||
}
|
||||
|
||||
function stretchString(string: string, salt: Buffer) {
|
||||
return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
|
||||
function stretchString(secret: string, salt: Buffer) {
|
||||
return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
|
||||
}
|
||||
|
||||
export function encrypt(
|
||||
input: string,
|
||||
secretOption: SecretOption = SecretOption.API
|
||||
) {
|
||||
const salt = crypto.randomBytes(RANDOM_BYTES)
|
||||
const salt = crypto.randomBytes(SALT_LENGTH)
|
||||
const stretched = stretchString(getSecret(secretOption), salt)
|
||||
const cipher = crypto.createCipheriv(ALGO, stretched, salt)
|
||||
const base = cipher.update(input)
|
||||
|
@ -60,3 +65,115 @@ export function decrypt(
|
|||
const final = decipher.final()
|
||||
return Buffer.concat([base, final]).toString()
|
||||
}
|
||||
|
||||
export async function encryptFile(
|
||||
{ dir, filename }: { dir: string; filename: string },
|
||||
secret: string
|
||||
) {
|
||||
const outputFileName = `${filename}.enc`
|
||||
|
||||
const filePath = join(dir, filename)
|
||||
const inputFile = fs.createReadStream(filePath)
|
||||
const outputFile = fs.createWriteStream(join(dir, outputFileName))
|
||||
|
||||
const salt = crypto.randomBytes(SALT_LENGTH)
|
||||
const iv = crypto.randomBytes(IV_LENGTH)
|
||||
const stretched = stretchString(secret, salt)
|
||||
const cipher = crypto.createCipheriv(ALGO, stretched, iv)
|
||||
|
||||
outputFile.write(salt)
|
||||
outputFile.write(iv)
|
||||
|
||||
inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
|
||||
|
||||
return new Promise<{ filename: string; dir: string }>(r => {
|
||||
outputFile.on("finish", () => {
|
||||
r({
|
||||
filename: outputFileName,
|
||||
dir,
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function getSaltAndIV(path: string) {
|
||||
const fileStream = fs.createReadStream(path)
|
||||
|
||||
const salt = await readBytes(fileStream, SALT_LENGTH)
|
||||
const iv = await readBytes(fileStream, IV_LENGTH)
|
||||
fileStream.close()
|
||||
return { salt, iv }
|
||||
}
|
||||
|
||||
export async function decryptFile(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
secret: string
|
||||
) {
|
||||
const { salt, iv } = await getSaltAndIV(inputPath)
|
||||
const inputFile = fs.createReadStream(inputPath, {
|
||||
start: SALT_LENGTH + IV_LENGTH,
|
||||
})
|
||||
|
||||
const outputFile = fs.createWriteStream(outputPath)
|
||||
|
||||
const stretched = stretchString(secret, salt)
|
||||
const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
|
||||
|
||||
const unzip = zlib.createGunzip()
|
||||
|
||||
inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
|
||||
|
||||
return new Promise<void>((res, rej) => {
|
||||
outputFile.on("finish", () => {
|
||||
outputFile.close()
|
||||
res()
|
||||
})
|
||||
|
||||
inputFile.on("error", e => {
|
||||
outputFile.close()
|
||||
rej(e)
|
||||
})
|
||||
|
||||
decipher.on("error", e => {
|
||||
outputFile.close()
|
||||
rej(e)
|
||||
})
|
||||
|
||||
unzip.on("error", e => {
|
||||
outputFile.close()
|
||||
rej(e)
|
||||
})
|
||||
|
||||
outputFile.on("error", e => {
|
||||
outputFile.close()
|
||||
rej(e)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function readBytes(stream: fs.ReadStream, length: number) {
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
let bytesRead = 0
|
||||
const data: Buffer[] = []
|
||||
|
||||
stream.on("readable", () => {
|
||||
let chunk
|
||||
|
||||
while ((chunk = stream.read(length - bytesRead)) !== null) {
|
||||
data.push(chunk)
|
||||
bytesRead += chunk.length
|
||||
}
|
||||
|
||||
resolve(Buffer.concat(data))
|
||||
})
|
||||
|
||||
stream.on("end", () => {
|
||||
reject(new Error("Insufficient data in the stream."))
|
||||
})
|
||||
|
||||
stream.on("error", error => {
|
||||
reject(error)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
|
|
@ -140,9 +140,13 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
|
|||
* Gets the role object, this is mainly useful for two purposes, to check if the level exists and
|
||||
* to check if the role inherits any others.
|
||||
* @param {string|null} roleId The level ID to lookup.
|
||||
* @param {object|null} opts options for the function, like whether to halt errors, instead return public.
|
||||
* @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
|
||||
*/
|
||||
export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
|
||||
export async function getRole(
|
||||
roleId?: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc | undefined> {
|
||||
if (!roleId) {
|
||||
return undefined
|
||||
}
|
||||
|
@ -161,6 +165,9 @@ export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
|
|||
// finalise the ID
|
||||
role._id = getExternalRoleID(role._id)
|
||||
} catch (err) {
|
||||
if (!isBuiltin(roleId) && opts?.defaultPublic) {
|
||||
return cloneDeep(BUILTIN_ROLES.PUBLIC)
|
||||
}
|
||||
// only throw an error if there is no role at all
|
||||
if (Object.keys(role).length === 0) {
|
||||
throw err
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
export let disabled = false
|
||||
export let error = null
|
||||
export let validate = null
|
||||
export let indeterminate = false
|
||||
export let compact = false
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
|
@ -21,11 +23,19 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
<FancyField {error} {value} {validate} {disabled} clickable on:click={onChange}>
|
||||
<FancyField
|
||||
{error}
|
||||
{value}
|
||||
{validate}
|
||||
{disabled}
|
||||
{compact}
|
||||
clickable
|
||||
on:click={onChange}
|
||||
>
|
||||
<span>
|
||||
<Checkbox {disabled} {value} />
|
||||
<Checkbox {disabled} {value} {indeterminate} />
|
||||
</span>
|
||||
<div class="text">
|
||||
<div class="text" class:compact>
|
||||
{#if text}
|
||||
{text}
|
||||
{/if}
|
||||
|
@ -47,6 +57,10 @@
|
|||
line-clamp: 2;
|
||||
-webkit-box-orient: vertical;
|
||||
}
|
||||
.text.compact {
|
||||
font-size: 13px;
|
||||
line-height: 15px;
|
||||
}
|
||||
.text > :global(*) {
|
||||
font-size: inherit !important;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,68 @@
|
|||
<script>
|
||||
import FancyCheckbox from "./FancyCheckbox.svelte"
|
||||
import FancyForm from "./FancyForm.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
||||
export let options = []
|
||||
export let selected = []
|
||||
export let showSelectAll = true
|
||||
export let selectAllText = "Select all"
|
||||
|
||||
let selectedBooleans = reset()
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
$: updateSelected(selectedBooleans)
|
||||
$: dispatch("change", selected)
|
||||
$: allSelected = selected?.length === options.length
|
||||
$: noneSelected = !selected?.length
|
||||
|
||||
function reset() {
|
||||
return Array(options.length).fill(true)
|
||||
}
|
||||
|
||||
function updateSelected(selectedArr) {
|
||||
const array = []
|
||||
for (let [i, isSelected] of Object.entries(selectedArr)) {
|
||||
if (isSelected) {
|
||||
array.push(options[i])
|
||||
}
|
||||
}
|
||||
selected = array
|
||||
}
|
||||
|
||||
function toggleSelectAll() {
|
||||
if (allSelected === true) {
|
||||
selectedBooleans = []
|
||||
} else {
|
||||
selectedBooleans = reset()
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if options && Array.isArray(options)}
|
||||
<div class="checkbox-group" class:has-select-all={showSelectAll}>
|
||||
<FancyForm on:change>
|
||||
{#if showSelectAll}
|
||||
<FancyCheckbox
|
||||
bind:value={allSelected}
|
||||
on:change={toggleSelectAll}
|
||||
text={selectAllText}
|
||||
indeterminate={!allSelected && !noneSelected}
|
||||
compact
|
||||
/>
|
||||
{/if}
|
||||
{#each options as option, i}
|
||||
<FancyCheckbox bind:value={selectedBooleans[i]} text={option} compact />
|
||||
{/each}
|
||||
</FancyForm>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.checkbox-group.has-select-all :global(.fancy-field:first-of-type) {
|
||||
background: var(--spectrum-global-color-gray-100);
|
||||
}
|
||||
.checkbox-group.has-select-all :global(.fancy-field:first-of-type:hover) {
|
||||
background: var(--spectrum-global-color-gray-200);
|
||||
}
|
||||
</style>
|
|
@ -11,6 +11,7 @@
|
|||
export let value
|
||||
export let ref
|
||||
export let autoHeight
|
||||
export let compact = false
|
||||
|
||||
const formContext = getContext("fancy-form")
|
||||
const id = Math.random()
|
||||
|
@ -42,6 +43,7 @@
|
|||
class:disabled
|
||||
class:focused
|
||||
class:clickable
|
||||
class:compact
|
||||
class:auto-height={autoHeight}
|
||||
>
|
||||
<div class="content" on:click>
|
||||
|
@ -61,7 +63,6 @@
|
|||
|
||||
<style>
|
||||
.fancy-field {
|
||||
max-width: 400px;
|
||||
background: var(--spectrum-global-color-gray-75);
|
||||
border: 1px solid var(--spectrum-global-color-gray-300);
|
||||
border-radius: 4px;
|
||||
|
@ -69,6 +70,12 @@
|
|||
transition: border-color 130ms ease-out, background 130ms ease-out,
|
||||
background 130ms ease-out;
|
||||
color: var(--spectrum-global-color-gray-800);
|
||||
--padding: 16px;
|
||||
--height: 64px;
|
||||
}
|
||||
.fancy-field.compact {
|
||||
--padding: 8px;
|
||||
--height: 36px;
|
||||
}
|
||||
.fancy-field:hover {
|
||||
border-color: var(--spectrum-global-color-gray-400);
|
||||
|
@ -91,8 +98,8 @@
|
|||
}
|
||||
.content {
|
||||
position: relative;
|
||||
height: 64px;
|
||||
padding: 0 16px;
|
||||
height: var(--height);
|
||||
padding: 0 var(--padding);
|
||||
}
|
||||
.fancy-field.auto-height .content {
|
||||
height: auto;
|
||||
|
@ -103,7 +110,7 @@
|
|||
flex-direction: row;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
gap: var(--padding);
|
||||
}
|
||||
.field {
|
||||
flex: 1 1 auto;
|
||||
|
|
|
@ -4,4 +4,5 @@ export { default as FancySelect } from "./FancySelect.svelte"
|
|||
export { default as FancyButton } from "./FancyButton.svelte"
|
||||
export { default as FancyForm } from "./FancyForm.svelte"
|
||||
export { default as FancyButtonRadio } from "./FancyButtonRadio.svelte"
|
||||
export { default as FancyCheckboxGroup } from "./FancyCheckboxGroup.svelte"
|
||||
export { default as ErrorMessage } from "./ErrorMessage.svelte"
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
export let text = null
|
||||
export let disabled = false
|
||||
export let size
|
||||
export let indeterminate = false
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const onChange = event => {
|
||||
|
@ -22,6 +23,7 @@
|
|||
class="spectrum-Checkbox spectrum-Checkbox--emphasized {sizeClass}"
|
||||
class:is-invalid={!!error}
|
||||
class:checked={value}
|
||||
class:is-indeterminate={indeterminate}
|
||||
>
|
||||
<input
|
||||
checked={value}
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
|
||||
export let fixed = false
|
||||
export let inline = false
|
||||
export let disableCancel = false
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
let visible = fixed || inline
|
||||
|
@ -38,7 +39,7 @@
|
|||
}
|
||||
|
||||
export function cancel() {
|
||||
if (!visible) {
|
||||
if (!visible || disableCancel) {
|
||||
return
|
||||
}
|
||||
dispatch("cancel")
|
||||
|
|
|
@ -204,6 +204,12 @@
|
|||
})
|
||||
return columns
|
||||
.sort((a, b) => {
|
||||
if (a.divider) {
|
||||
return a
|
||||
}
|
||||
if (b.divider) {
|
||||
return b
|
||||
}
|
||||
const orderA = a.order || Number.MAX_SAFE_INTEGER
|
||||
const orderB = b.order || Number.MAX_SAFE_INTEGER
|
||||
const nameA = getDisplayName(a)
|
||||
|
|
|
@ -23,10 +23,11 @@ function prepareData(config) {
|
|||
return datasource
|
||||
}
|
||||
|
||||
export async function saveDatasource(config, skipFetch = false) {
|
||||
export async function saveDatasource(config, { skipFetch, tablesFilter } = {}) {
|
||||
const datasource = prepareData(config)
|
||||
// Create datasource
|
||||
const resp = await datasources.save(datasource, !skipFetch && datasource.plus)
|
||||
const fetchSchema = !skipFetch && datasource.plus
|
||||
const resp = await datasources.save(datasource, { fetchSchema, tablesFilter })
|
||||
|
||||
// update the tables incase datasource plus
|
||||
await tables.fetch()
|
||||
|
@ -41,6 +42,13 @@ export async function createRestDatasource(integration) {
|
|||
|
||||
export async function validateDatasourceConfig(config) {
|
||||
const datasource = prepareData(config)
|
||||
const resp = await API.validateDatasource(datasource)
|
||||
return resp
|
||||
return await API.validateDatasource(datasource)
|
||||
}
|
||||
|
||||
export async function getDatasourceInfo(config) {
|
||||
let datasource = config
|
||||
if (!config._id) {
|
||||
datasource = prepareData(config)
|
||||
}
|
||||
return await API.fetchInfoForDatasource(datasource)
|
||||
}
|
||||
|
|
|
@ -74,6 +74,7 @@ const INITIAL_FRONTEND_STATE = {
|
|||
propertyFocus: null,
|
||||
builderSidePanel: false,
|
||||
hasLock: true,
|
||||
showPreview: false,
|
||||
|
||||
// URL params
|
||||
selectedScreenId: null,
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
import { automationStore, selectedAutomation } from "builderStore"
|
||||
import { admin, licensing } from "stores/portal"
|
||||
import { externalActions } from "./ExternalActions"
|
||||
import { TriggerStepID } from "constants/backend/automations"
|
||||
import { TriggerStepID, ActionStepID } from "constants/backend/automations"
|
||||
import { checkForCollectStep } from "builderStore/utils"
|
||||
|
||||
export let blockIdx
|
||||
|
@ -149,7 +149,7 @@
|
|||
<div class="item-body">
|
||||
<Icon name={action.icon} />
|
||||
<Body size="XS">{action.name}</Body>
|
||||
{#if isDisabled && !syncAutomationsEnabled}
|
||||
{#if isDisabled && !syncAutomationsEnabled && action.stepId === ActionStepID.COLLECT}
|
||||
<div class="tag-color">
|
||||
<Tags>
|
||||
<Tag icon="LockClosed">Business</Tag>
|
||||
|
|
|
@ -76,6 +76,10 @@ export function getBindings({
|
|||
// will be replaced by the main array binding
|
||||
readableBinding: label,
|
||||
runtimeBinding: binding,
|
||||
display: {
|
||||
name: label,
|
||||
type: field.name === FIELDS.LINK.name ? "Array" : field.name,
|
||||
},
|
||||
})
|
||||
}
|
||||
return bindings
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
notifications,
|
||||
Modal,
|
||||
Table,
|
||||
Toggle,
|
||||
FancyCheckboxGroup,
|
||||
} from "@budibase/bbui"
|
||||
import { datasources, integrations, tables } from "stores/backend"
|
||||
import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
|
||||
|
@ -16,7 +16,7 @@
|
|||
import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
|
||||
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
|
||||
import { goto } from "@roxi/routify"
|
||||
import ValuesList from "components/common/ValuesList.svelte"
|
||||
import { getDatasourceInfo } from "builderStore/datasource"
|
||||
|
||||
export let datasource
|
||||
export let save
|
||||
|
@ -34,7 +34,7 @@
|
|||
let selectedFromRelationship, selectedToRelationship
|
||||
let confirmDialog
|
||||
let specificTables = null
|
||||
let requireSpecificTables = false
|
||||
let tableList
|
||||
|
||||
$: integration = datasource && $integrations[datasource.source]
|
||||
$: plusTables = datasource?.plus
|
||||
|
@ -153,30 +153,28 @@
|
|||
warning={false}
|
||||
title="Confirm table fetch"
|
||||
>
|
||||
<Toggle
|
||||
bind:value={requireSpecificTables}
|
||||
on:change={e => {
|
||||
requireSpecificTables = e.detail
|
||||
specificTables = null
|
||||
}}
|
||||
thin
|
||||
text="Fetch listed tables only (one per line)"
|
||||
/>
|
||||
{#if requireSpecificTables}
|
||||
<ValuesList label="" bind:values={specificTables} />
|
||||
{/if}
|
||||
<br />
|
||||
<Body>
|
||||
If you have fetched tables from this database before, this action may
|
||||
overwrite any changes you made after your initial fetch.
|
||||
</Body>
|
||||
<br />
|
||||
<div class="table-checkboxes">
|
||||
<FancyCheckboxGroup options={tableList} bind:selected={specificTables} />
|
||||
</div>
|
||||
</ConfirmDialog>
|
||||
|
||||
<Divider />
|
||||
<div class="query-header">
|
||||
<Heading size="S">Tables</Heading>
|
||||
<div class="table-buttons">
|
||||
<Button secondary on:click={() => confirmDialog.show()}>
|
||||
<Button
|
||||
secondary
|
||||
on:click={async () => {
|
||||
const info = await getDatasourceInfo(datasource)
|
||||
tableList = info.tableNames
|
||||
confirmDialog.show()
|
||||
}}
|
||||
>
|
||||
Fetch tables
|
||||
</Button>
|
||||
<Button cta icon="Add" on:click={createNewTable}>New table</Button>
|
||||
|
@ -246,4 +244,8 @@
|
|||
display: flex;
|
||||
gap: var(--spacing-m);
|
||||
}
|
||||
|
||||
.table-checkboxes {
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -44,6 +44,9 @@ export default ICONS
|
|||
|
||||
export function getIcon(integrationType, schema) {
|
||||
const integrationList = get(integrations)
|
||||
if (!integrationList) {
|
||||
return
|
||||
}
|
||||
if (integrationList[integrationType]?.iconUrl) {
|
||||
return { url: integrationList[integrationType].iconUrl }
|
||||
} else if (schema?.custom || !ICONS[integrationType]) {
|
||||
|
|
|
@ -1,12 +1,19 @@
|
|||
<script>
|
||||
import { goto } from "@roxi/routify"
|
||||
import { ModalContent, notifications, Body, Layout } from "@budibase/bbui"
|
||||
import {
|
||||
ModalContent,
|
||||
notifications,
|
||||
Body,
|
||||
Layout,
|
||||
FancyCheckboxGroup,
|
||||
} from "@budibase/bbui"
|
||||
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
|
||||
import { IntegrationNames } from "constants/backend"
|
||||
import cloneDeep from "lodash/cloneDeepWith"
|
||||
import {
|
||||
saveDatasource as save,
|
||||
validateDatasourceConfig,
|
||||
getDatasourceInfo,
|
||||
} from "builderStore/datasource"
|
||||
import { DatasourceFeature } from "@budibase/types"
|
||||
|
||||
|
@ -15,11 +22,24 @@
|
|||
// kill the reference so the input isn't saved
|
||||
let datasource = cloneDeep(integration)
|
||||
let isValid = false
|
||||
let fetchTableStep = false
|
||||
let selectedTables = []
|
||||
let tableList = []
|
||||
|
||||
$: name =
|
||||
IntegrationNames[datasource.type] || datasource.name || datasource.type
|
||||
IntegrationNames[datasource?.type] || datasource?.name || datasource?.type
|
||||
$: datasourcePlus = datasource?.plus
|
||||
$: title = fetchTableStep ? "Fetch your tables" : `Connect to ${name}`
|
||||
$: confirmText = fetchTableStep
|
||||
? "Continue"
|
||||
: datasourcePlus
|
||||
? "Connect"
|
||||
: "Save and continue to query"
|
||||
|
||||
async function validateConfig() {
|
||||
if (!integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
return true
|
||||
}
|
||||
const displayError = message =>
|
||||
notifications.error(message ?? "Error validating datasource")
|
||||
|
||||
|
@ -37,7 +57,7 @@
|
|||
}
|
||||
|
||||
async function saveDatasource() {
|
||||
if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
const valid = await validateConfig()
|
||||
if (!valid) {
|
||||
return false
|
||||
|
@ -47,35 +67,75 @@
|
|||
if (!datasource.name) {
|
||||
datasource.name = name
|
||||
}
|
||||
const resp = await save(datasource)
|
||||
const opts = {}
|
||||
if (datasourcePlus && selectedTables) {
|
||||
opts.tablesFilter = selectedTables
|
||||
}
|
||||
const resp = await save(datasource, opts)
|
||||
$goto(`./datasource/${resp._id}`)
|
||||
notifications.success(`Datasource created successfully.`)
|
||||
notifications.success("Datasource created successfully.")
|
||||
} catch (err) {
|
||||
notifications.error(err?.message ?? "Error saving datasource")
|
||||
// prevent the modal from closing
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async function nextStep() {
|
||||
let connected = true
|
||||
if (datasourcePlus) {
|
||||
connected = await validateConfig()
|
||||
}
|
||||
if (!connected) {
|
||||
return false
|
||||
}
|
||||
if (datasourcePlus && !fetchTableStep) {
|
||||
notifications.success("Connected to datasource successfully.")
|
||||
const info = await getDatasourceInfo(datasource)
|
||||
tableList = info.tableNames
|
||||
fetchTableStep = true
|
||||
return false
|
||||
} else {
|
||||
await saveDatasource()
|
||||
return true
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
title={`Connect to ${name}`}
|
||||
onConfirm={() => saveDatasource()}
|
||||
confirmText={datasource.plus ? "Connect" : "Save and continue to query"}
|
||||
cancelText="Back"
|
||||
showSecondaryButton={datasource.plus}
|
||||
{title}
|
||||
onConfirm={() => nextStep()}
|
||||
{confirmText}
|
||||
cancelText={fetchTableStep ? "Cancel" : "Back"}
|
||||
showSecondaryButton={datasourcePlus}
|
||||
size="L"
|
||||
disabled={!isValid}
|
||||
>
|
||||
<Layout noPadding>
|
||||
<Body size="XS"
|
||||
>Connect your database to Budibase using the config below.
|
||||
<Body size="XS">
|
||||
{#if !fetchTableStep}
|
||||
Connect your database to Budibase using the config below
|
||||
{:else}
|
||||
Choose what tables you want to sync with Budibase
|
||||
{/if}
|
||||
</Body>
|
||||
</Layout>
|
||||
<IntegrationConfigForm
|
||||
schema={datasource.schema}
|
||||
bind:datasource
|
||||
creating={true}
|
||||
on:valid={e => (isValid = e.detail)}
|
||||
/>
|
||||
{#if !fetchTableStep}
|
||||
<IntegrationConfigForm
|
||||
schema={datasource?.schema}
|
||||
bind:datasource
|
||||
creating={true}
|
||||
on:valid={e => (isValid = e.detail)}
|
||||
/>
|
||||
{:else}
|
||||
<div class="table-checkboxes">
|
||||
<FancyCheckboxGroup options={tableList} bind:selected={selectedTables} />
|
||||
</div>
|
||||
{/if}
|
||||
</ModalContent>
|
||||
|
||||
<style>
|
||||
.table-checkboxes {
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -1,22 +1,27 @@
|
|||
<script>
|
||||
import {
|
||||
ModalContent,
|
||||
Body,
|
||||
FancyCheckboxGroup,
|
||||
InlineAlert,
|
||||
Layout,
|
||||
Link,
|
||||
ModalContent,
|
||||
notifications,
|
||||
} from "@budibase/bbui"
|
||||
import { IntegrationNames, IntegrationTypes } from "constants/backend"
|
||||
import GoogleButton from "../_components/GoogleButton.svelte"
|
||||
import { organisation } from "stores/portal"
|
||||
import { onMount } from "svelte"
|
||||
import { validateDatasourceConfig } from "builderStore/datasource"
|
||||
import { onDestroy, onMount } from "svelte"
|
||||
import {
|
||||
getDatasourceInfo,
|
||||
saveDatasource,
|
||||
validateDatasourceConfig,
|
||||
} from "builderStore/datasource"
|
||||
import cloneDeep from "lodash/cloneDeepWith"
|
||||
import IntegrationConfigForm from "../TableIntegrationMenu/IntegrationConfigForm.svelte"
|
||||
import { goto } from "@roxi/routify"
|
||||
|
||||
import { saveDatasource } from "builderStore/datasource"
|
||||
import { DatasourceFeature } from "@budibase/types"
|
||||
import { API } from "api"
|
||||
|
||||
export let integration
|
||||
export let continueSetupId = false
|
||||
|
@ -24,16 +29,20 @@
|
|||
let datasource = cloneDeep(integration)
|
||||
datasource.config.continueSetupId = continueSetupId
|
||||
|
||||
let { schema } = datasource
|
||||
|
||||
$: isGoogleConfigured = !!$organisation.googleDatasourceConfigured
|
||||
|
||||
onMount(async () => {
|
||||
await organisation.init()
|
||||
})
|
||||
|
||||
const integrationName = IntegrationNames[IntegrationTypes.GOOGLE_SHEETS]
|
||||
|
||||
export const GoogleDatasouceConfigStep = {
|
||||
AUTH: "Auth",
|
||||
SET_URL: "Set_url",
|
||||
AUTH: "auth",
|
||||
SET_URL: "set_url",
|
||||
SET_SHEETS: "set_sheets",
|
||||
}
|
||||
|
||||
let step = continueSetupId
|
||||
|
@ -42,12 +51,21 @@
|
|||
|
||||
let isValid = false
|
||||
|
||||
const modalConfig = {
|
||||
[GoogleDatasouceConfigStep.AUTH]: {},
|
||||
let allSheets
|
||||
let selectedSheets
|
||||
let setSheetsErrorTitle, setSheetsErrorMessage
|
||||
|
||||
$: modalConfig = {
|
||||
[GoogleDatasouceConfigStep.AUTH]: {
|
||||
title: `Connect to ${integrationName}`,
|
||||
},
|
||||
[GoogleDatasouceConfigStep.SET_URL]: {
|
||||
title: `Connect your spreadsheet`,
|
||||
confirmButtonText: "Connect",
|
||||
onConfirm: async () => {
|
||||
if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
const checkConnection =
|
||||
integration.features[DatasourceFeature.CONNECTION_CHECKING]
|
||||
if (checkConnection) {
|
||||
const resp = await validateDatasourceConfig(datasource)
|
||||
if (!resp.connected) {
|
||||
notifications.error(`Unable to connect - ${resp.error}`)
|
||||
|
@ -56,21 +74,81 @@
|
|||
}
|
||||
|
||||
try {
|
||||
const resp = await saveDatasource(datasource)
|
||||
$goto(`./datasource/${resp._id}`)
|
||||
notifications.success(`Datasource created successfully.`)
|
||||
datasource = await saveDatasource(datasource, {
|
||||
tablesFilter: selectedSheets,
|
||||
skipFetch: true,
|
||||
})
|
||||
} catch (err) {
|
||||
notifications.error(err?.message ?? "Error saving datasource")
|
||||
// prevent the modal from closing
|
||||
return false
|
||||
}
|
||||
|
||||
if (!integration.features[DatasourceFeature.FETCH_TABLE_NAMES]) {
|
||||
notifications.success(`Datasource created successfully.`)
|
||||
return
|
||||
}
|
||||
|
||||
const info = await getDatasourceInfo(datasource)
|
||||
allSheets = info.tableNames
|
||||
|
||||
step = GoogleDatasouceConfigStep.SET_SHEETS
|
||||
notifications.success(
|
||||
checkConnection
|
||||
? "Connection Successful"
|
||||
: `Datasource created successfully.`
|
||||
)
|
||||
|
||||
// prevent the modal from closing
|
||||
return false
|
||||
},
|
||||
},
|
||||
[GoogleDatasouceConfigStep.SET_SHEETS]: {
|
||||
title: `Choose your sheets`,
|
||||
confirmButtonText: selectedSheets?.length
|
||||
? "Fetch sheets"
|
||||
: "Continue without fetching",
|
||||
onConfirm: async () => {
|
||||
try {
|
||||
if (selectedSheets.length) {
|
||||
await API.buildDatasourceSchema({
|
||||
datasourceId: datasource._id,
|
||||
tablesFilter: selectedSheets,
|
||||
})
|
||||
}
|
||||
|
||||
return
|
||||
} catch (err) {
|
||||
const message = err?.message ?? "Error fetching the sheets"
|
||||
// Handling message with format: Error title - error description
|
||||
const indexSeparator = message.indexOf(" - ")
|
||||
if (indexSeparator >= 0) {
|
||||
setSheetsErrorTitle = message.substr(0, indexSeparator)
|
||||
setSheetsErrorMessage =
|
||||
message[indexSeparator + 3].toUpperCase() +
|
||||
message.substr(indexSeparator + 4)
|
||||
} else {
|
||||
setSheetsErrorTitle = null
|
||||
setSheetsErrorMessage = message
|
||||
}
|
||||
|
||||
// prevent the modal from closing
|
||||
return false
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// This will handle the user closing the modal pressing outside the modal
|
||||
onDestroy(() => {
|
||||
if (step === GoogleDatasouceConfigStep.SET_SHEETS) {
|
||||
$goto(`./datasource/${datasource._id}`)
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
title={`Connect to ${integrationName}`}
|
||||
title={modalConfig[step].title}
|
||||
cancelText="Cancel"
|
||||
size="L"
|
||||
confirmText={modalConfig[step].confirmButtonText}
|
||||
|
@ -100,11 +178,30 @@
|
|||
<Body size="S">Add the URL of the sheet you want to connect.</Body>
|
||||
|
||||
<IntegrationConfigForm
|
||||
schema={datasource.schema}
|
||||
{schema}
|
||||
bind:datasource
|
||||
creating={true}
|
||||
on:valid={e => (isValid = e.detail)}
|
||||
/>
|
||||
</Layout>
|
||||
{/if}
|
||||
{#if step === GoogleDatasouceConfigStep.SET_SHEETS}
|
||||
<Layout noPadding no>
|
||||
<Body size="S">Select which spreadsheets you want to connect.</Body>
|
||||
|
||||
<FancyCheckboxGroup
|
||||
options={allSheets}
|
||||
bind:selected={selectedSheets}
|
||||
selectAllText="Select all sheets"
|
||||
/>
|
||||
|
||||
{#if setSheetsErrorTitle || setSheetsErrorMessage}
|
||||
<InlineAlert
|
||||
type="error"
|
||||
header={setSheetsErrorTitle}
|
||||
message={setSheetsErrorMessage}
|
||||
/>
|
||||
{/if}
|
||||
</Layout>
|
||||
{/if}
|
||||
</ModalContent>
|
||||
|
|
|
@ -69,7 +69,7 @@
|
|||
name: "App",
|
||||
description: "",
|
||||
icon: "Play",
|
||||
action: () => window.open(`/${$store.appId}`),
|
||||
action: () => store.update(state => ({ ...state, showPreview: true })),
|
||||
},
|
||||
{
|
||||
type: "Preview",
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
readableToRuntimeBinding,
|
||||
runtimeToReadableBinding,
|
||||
} from "builderStore/dataBinding"
|
||||
import { store } from "builderStore"
|
||||
|
||||
import { convertToJS } from "@budibase/string-templates"
|
||||
import { admin } from "stores/portal"
|
||||
import CodeEditor from "../CodeEditor/CodeEditor.svelte"
|
||||
|
@ -339,25 +339,28 @@
|
|||
</Tab>
|
||||
{/if}
|
||||
<div class="drawer-actions">
|
||||
<Button
|
||||
secondary
|
||||
quiet
|
||||
on:click={() => {
|
||||
store.actions.settings.propertyFocus(null)
|
||||
drawerActions.hide()
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
cta
|
||||
disabled={!valid}
|
||||
on:click={() => {
|
||||
bindingDrawerActions.save()
|
||||
}}
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
{#if drawerActions?.hide}
|
||||
<Button
|
||||
secondary
|
||||
quiet
|
||||
on:click={() => {
|
||||
drawerActions.hide()
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
{/if}
|
||||
{#if bindingDrawerActions?.save}
|
||||
<Button
|
||||
cta
|
||||
disabled={!valid}
|
||||
on:click={() => {
|
||||
bindingDrawerActions.save()
|
||||
}}
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
{/if}
|
||||
</div>
|
||||
</Tabs>
|
||||
</div>
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
.map(([name, categoryBindings]) => ({
|
||||
name,
|
||||
bindings: categoryBindings?.filter(binding => {
|
||||
return binding.readableBinding.match(searchRgx)
|
||||
return !search || binding.readableBinding.match(searchRgx)
|
||||
}),
|
||||
}))
|
||||
.filter(category => {
|
||||
|
@ -46,7 +46,11 @@
|
|||
)
|
||||
})
|
||||
$: filteredHelpers = helpers?.filter(helper => {
|
||||
return helper.label.match(searchRgx) || helper.description.match(searchRgx)
|
||||
return (
|
||||
!search ||
|
||||
helper.label.match(searchRgx) ||
|
||||
helper.description.match(searchRgx)
|
||||
)
|
||||
})
|
||||
|
||||
const getHelperExample = (helper, js) => {
|
||||
|
@ -124,9 +128,6 @@
|
|||
<span
|
||||
class="search-input-icon"
|
||||
on:click={() => {
|
||||
if (!search) {
|
||||
return
|
||||
}
|
||||
search = null
|
||||
}}
|
||||
class:searching={search}
|
||||
|
|
|
@ -76,7 +76,7 @@
|
|||
{/if}
|
||||
</div>
|
||||
|
||||
<Drawer bind:this={bindingDrawer} {title}>
|
||||
<Drawer bind:this={bindingDrawer} {title} headless>
|
||||
<svelte:fragment slot="description">
|
||||
Add the objects on the left to enrich your text.
|
||||
</svelte:fragment>
|
||||
|
|
|
@ -5,8 +5,6 @@
|
|||
runtimeToReadableBinding,
|
||||
} from "builderStore/dataBinding"
|
||||
|
||||
import { store } from "builderStore"
|
||||
|
||||
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
|
||||
import { createEventDispatcher, setContext } from "svelte"
|
||||
import { isJSBinding } from "@budibase/string-templates"
|
||||
|
@ -36,7 +34,6 @@
|
|||
|
||||
const saveBinding = () => {
|
||||
onChange(tempValue)
|
||||
store.actions.settings.propertyFocus(null)
|
||||
onBlur()
|
||||
bindingDrawer.hide()
|
||||
}
|
||||
|
@ -70,7 +67,6 @@
|
|||
<div
|
||||
class="icon"
|
||||
on:click={() => {
|
||||
store.actions.settings.propertyFocus(key)
|
||||
bindingDrawer.show()
|
||||
}}
|
||||
>
|
||||
|
|
|
@ -97,7 +97,10 @@
|
|||
}
|
||||
|
||||
const previewApp = () => {
|
||||
window.open(`/${application}`)
|
||||
store.update(state => ({
|
||||
...state,
|
||||
showPreview: true,
|
||||
}))
|
||||
}
|
||||
|
||||
const viewApp = () => {
|
||||
|
|
|
@ -73,10 +73,6 @@
|
|||
if (highlighted) {
|
||||
store.actions.settings.highlight(null)
|
||||
}
|
||||
// To fix focus 'affect' when property is target of a drawer other actions in the builder.
|
||||
if (propertyFocus) {
|
||||
store.actions.settings.propertyFocus(null)
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
|
|
|
@ -186,7 +186,6 @@
|
|||
}
|
||||
|
||||
div :global(.CodeMirror) {
|
||||
width: var(--code-mirror-width) !important;
|
||||
height: var(--code-mirror-height) !important;
|
||||
border-radius: var(--border-radius-s);
|
||||
font-family: var(--font-mono);
|
||||
|
|
|
@ -1,5 +1,11 @@
|
|||
<script>
|
||||
import { ModalContent, Toggle, Body, InlineAlert } from "@budibase/bbui"
|
||||
import {
|
||||
ModalContent,
|
||||
Toggle,
|
||||
Body,
|
||||
InlineAlert,
|
||||
notifications,
|
||||
} from "@budibase/bbui"
|
||||
|
||||
export let app
|
||||
export let published
|
||||
|
@ -8,10 +14,45 @@
|
|||
$: title = published ? "Export published app" : "Export latest app"
|
||||
$: confirmText = published ? "Export published" : "Export latest"
|
||||
|
||||
const exportApp = () => {
|
||||
const exportApp = async () => {
|
||||
const id = published ? app.prodId : app.devId
|
||||
const appName = encodeURIComponent(app.name)
|
||||
window.location = `/api/backups/export?appId=${id}&appname=${appName}&excludeRows=${excludeRows}`
|
||||
const url = `/api/backups/export?appId=${id}`
|
||||
await downloadFile(url, { excludeRows })
|
||||
}
|
||||
|
||||
async function downloadFile(url, body) {
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
const contentDisposition = response.headers.get("Content-Disposition")
|
||||
|
||||
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
|
||||
contentDisposition
|
||||
)
|
||||
|
||||
const filename = matches[1].replace(/['"]/g, "")
|
||||
|
||||
const url = URL.createObjectURL(await response.blob())
|
||||
|
||||
const link = document.createElement("a")
|
||||
link.href = url
|
||||
link.download = filename
|
||||
link.click()
|
||||
|
||||
URL.revokeObjectURL(url)
|
||||
} else {
|
||||
notifications.error("Error exporting the app.")
|
||||
}
|
||||
} catch (error) {
|
||||
notifications.error(error.message || "Error downloading the exported app")
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
|
|
|
@ -0,0 +1,91 @@
|
|||
<script>
|
||||
import { onMount } from "svelte"
|
||||
import { fade, fly } from "svelte/transition"
|
||||
import { store, selectedScreen } from "builderStore"
|
||||
import { ProgressCircle } from "@budibase/bbui"
|
||||
|
||||
$: route = $selectedScreen?.routing.route || "/"
|
||||
$: src = `/${$store.appId}#${route}`
|
||||
|
||||
const close = () => {
|
||||
store.update(state => ({
|
||||
...state,
|
||||
showPreview: false,
|
||||
}))
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
window.closePreview = () => {
|
||||
store.update(state => ({
|
||||
...state,
|
||||
showPreview: false,
|
||||
}))
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<div
|
||||
class="preview-overlay"
|
||||
transition:fade={{ duration: 260 }}
|
||||
on:click|self={close}
|
||||
>
|
||||
<div
|
||||
class="container spectrum {$store.theme}"
|
||||
transition:fly={{ duration: 260, y: 130 }}
|
||||
>
|
||||
<div class="header placeholder" />
|
||||
<div class="loading placeholder">
|
||||
<ProgressCircle />
|
||||
</div>
|
||||
<iframe title="Budibase App Preview" {src} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.preview-overlay {
|
||||
top: 0;
|
||||
right: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
z-index: 999;
|
||||
position: absolute;
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
display: flex;
|
||||
align-items: stretch;
|
||||
padding: 48px;
|
||||
}
|
||||
.container {
|
||||
flex: 1 1 auto;
|
||||
background: var(--spectrum-global-color-gray-75);
|
||||
border-radius: 4px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
box-shadow: 0 0 80px 0 rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
iframe {
|
||||
position: absolute;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
border: none;
|
||||
outline: none;
|
||||
z-index: 1;
|
||||
}
|
||||
.header {
|
||||
height: 60px;
|
||||
width: 100%;
|
||||
background: black;
|
||||
top: 0;
|
||||
position: absolute;
|
||||
}
|
||||
.loading {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translateY(-50%) translateX(-50%);
|
||||
}
|
||||
.placeholder {
|
||||
z-index: 0;
|
||||
}
|
||||
</style>
|
|
@ -25,6 +25,7 @@
|
|||
import BuilderSidePanel from "./_components/BuilderSidePanel.svelte"
|
||||
import UserAvatars from "./_components/UserAvatars.svelte"
|
||||
import { TOUR_KEYS, TOURS } from "components/portal/onboarding/tours.js"
|
||||
import PreviewOverlay from "./_components/PreviewOverlay.svelte"
|
||||
|
||||
export let application
|
||||
|
||||
|
@ -141,7 +142,7 @@
|
|||
<BuilderSidePanel />
|
||||
{/if}
|
||||
|
||||
<div class="root">
|
||||
<div class="root" class:blur={$store.showPreview}>
|
||||
<div class="top-nav">
|
||||
{#if $store.initialised}
|
||||
<div class="topleftnav">
|
||||
|
@ -197,6 +198,10 @@
|
|||
{/await}
|
||||
</div>
|
||||
|
||||
{#if $store.showPreview}
|
||||
<PreviewOverlay />
|
||||
{/if}
|
||||
|
||||
<svelte:window on:keydown={handleKeyDown} />
|
||||
<Modal bind:this={commandPaletteModal}>
|
||||
<CommandPalette />
|
||||
|
@ -222,6 +227,10 @@
|
|||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
transition: filter 260ms ease-out;
|
||||
}
|
||||
.root.blur {
|
||||
filter: blur(8px);
|
||||
}
|
||||
|
||||
.top-nav {
|
||||
|
|
|
@ -65,7 +65,7 @@
|
|||
}
|
||||
|
||||
const saveDatasource = async () => {
|
||||
if (integration.features[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
|
||||
const valid = await validateConfig()
|
||||
if (!valid) {
|
||||
return false
|
||||
|
|
|
@ -373,7 +373,7 @@
|
|||
<OnboardingTypeModal {chooseCreationType} />
|
||||
</Modal>
|
||||
|
||||
<Modal bind:this={passwordModal}>
|
||||
<Modal bind:this={passwordModal} disableCancel={true}>
|
||||
<PasswordModal
|
||||
createUsersResponse={bulkSaveResponse}
|
||||
userData={userData.users}
|
||||
|
|
|
@ -57,7 +57,10 @@ export function createDatasourcesStore() {
|
|||
return updateDatasource(response)
|
||||
}
|
||||
|
||||
const save = async (body, fetchSchema = false) => {
|
||||
const save = async (body, { fetchSchema, tablesFilter } = {}) => {
|
||||
if (fetchSchema == null) {
|
||||
fetchSchema = false
|
||||
}
|
||||
let response
|
||||
if (body._id) {
|
||||
response = await API.updateDatasource(body)
|
||||
|
@ -65,6 +68,7 @@ export function createDatasourcesStore() {
|
|||
response = await API.createDatasource({
|
||||
datasource: body,
|
||||
fetchSchema,
|
||||
tablesFilter,
|
||||
})
|
||||
}
|
||||
return updateDatasource(response)
|
||||
|
|
|
@ -49,7 +49,7 @@
|
|||
"pouchdb": "7.3.0",
|
||||
"pouchdb-replication-stream": "1.2.9",
|
||||
"randomstring": "1.1.5",
|
||||
"tar": "6.1.11",
|
||||
"tar": "6.1.15",
|
||||
"yaml": "^2.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import { createAPIClient } from "@budibase/frontend-core"
|
||||
import { notificationStore } from "../stores/notification.js"
|
||||
import { authStore } from "../stores/auth.js"
|
||||
import { devToolsStore } from "../stores/devTools.js"
|
||||
import { notificationStore, devToolsEnabled, devToolsStore } from "../stores/"
|
||||
import { get } from "svelte/store"
|
||||
|
||||
export const API = createAPIClient({
|
||||
|
@ -25,9 +24,10 @@ export const API = createAPIClient({
|
|||
}
|
||||
|
||||
// Add role header
|
||||
const devToolsState = get(devToolsStore)
|
||||
if (devToolsState.enabled && devToolsState.role) {
|
||||
headers["x-budibase-role"] = devToolsState.role
|
||||
const $devToolsStore = get(devToolsStore)
|
||||
const $devToolsEnabled = get(devToolsEnabled)
|
||||
if ($devToolsEnabled && $devToolsStore.role) {
|
||||
headers["x-budibase-role"] = $devToolsStore.role
|
||||
}
|
||||
},
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
appStore,
|
||||
devToolsStore,
|
||||
environmentStore,
|
||||
devToolsEnabled,
|
||||
} from "stores"
|
||||
import NotificationDisplay from "components/overlay/NotificationDisplay.svelte"
|
||||
import ConfirmationDisplay from "components/overlay/ConfirmationDisplay.svelte"
|
||||
|
@ -47,10 +48,7 @@
|
|||
let permissionError = false
|
||||
|
||||
// Determine if we should show devtools or not
|
||||
$: showDevTools =
|
||||
!$builderStore.inBuilder &&
|
||||
$devToolsStore.enabled &&
|
||||
!$routeStore.queryParams?.peek
|
||||
$: showDevTools = $devToolsEnabled && !$routeStore.queryParams?.peek
|
||||
|
||||
// Handle no matching route
|
||||
$: {
|
||||
|
@ -107,6 +105,7 @@
|
|||
lang="en"
|
||||
dir="ltr"
|
||||
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
|
||||
class:builder={$builderStore.inBuilder}
|
||||
>
|
||||
<DeviceBindingsProvider>
|
||||
<UserBindingsProvider>
|
||||
|
@ -223,12 +222,14 @@
|
|||
overflow: hidden;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
background: transparent;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
#spectrum-root.builder {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#clip-root {
|
||||
max-width: 100%;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import { Heading, Button, Select } from "@budibase/bbui"
|
||||
import { Heading, Select, ActionButton } from "@budibase/bbui"
|
||||
import { devToolsStore } from "../../stores"
|
||||
import { getContext } from "svelte"
|
||||
|
||||
|
@ -30,7 +30,7 @@
|
|||
</script>
|
||||
|
||||
<div class="dev-preview-header" class:mobile={$context.device.mobile}>
|
||||
<Heading size="XS">Budibase App Preview</Heading>
|
||||
<Heading size="XS">Preview</Heading>
|
||||
<Select
|
||||
quiet
|
||||
options={previewOptions}
|
||||
|
@ -40,36 +40,57 @@
|
|||
on:change={e => devToolsStore.actions.changeRole(e.detail)}
|
||||
/>
|
||||
{#if !$context.device.mobile}
|
||||
<Button
|
||||
<ActionButton
|
||||
quiet
|
||||
overBackground
|
||||
icon="Code"
|
||||
on:click={() => devToolsStore.actions.setVisible(!$devToolsStore.visible)}
|
||||
>
|
||||
{$devToolsStore.visible ? "Close" : "Open"} DevTools
|
||||
</Button>
|
||||
</ActionButton>
|
||||
{/if}
|
||||
<ActionButton
|
||||
quiet
|
||||
icon="Close"
|
||||
on:click={() => window.parent.closePreview?.()}
|
||||
>
|
||||
Close preview
|
||||
</ActionButton>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.dev-preview-header {
|
||||
flex: 0 0 50px;
|
||||
height: 50px;
|
||||
flex: 0 0 60px;
|
||||
display: grid;
|
||||
align-items: center;
|
||||
background-color: var(--spectrum-global-color-blue-400);
|
||||
background-color: black;
|
||||
padding: 0 var(--spacing-xl);
|
||||
grid-template-columns: 1fr auto auto;
|
||||
grid-template-columns: 1fr auto auto auto;
|
||||
grid-gap: var(--spacing-xl);
|
||||
}
|
||||
.dev-preview-header.mobile {
|
||||
flex: 0 0 50px;
|
||||
grid-template-columns: 1fr auto;
|
||||
grid-template-columns: 1fr auto auto;
|
||||
}
|
||||
.dev-preview-header :global(.spectrum-Heading),
|
||||
.dev-preview-header :global(.spectrum-Picker-menuIcon),
|
||||
.dev-preview-header :global(.spectrum-Picker-label) {
|
||||
color: white !important;
|
||||
.dev-preview-header :global(.spectrum-Icon),
|
||||
.dev-preview-header :global(.spectrum-Picker-label),
|
||||
.dev-preview-header :global(.spectrum-ActionButton) {
|
||||
font-weight: 600;
|
||||
color: white;
|
||||
}
|
||||
.dev-preview-header :global(.spectrum-Picker) {
|
||||
padding-left: 8px;
|
||||
padding-right: 8px;
|
||||
transition: background 130ms ease-out;
|
||||
border-radius: 4px;
|
||||
}
|
||||
.dev-preview-header :global(.spectrum-ActionButton:hover),
|
||||
.dev-preview-header :global(.spectrum-Picker:hover),
|
||||
.dev-preview-header :global(.spectrum-Picker.is-open) {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.dev-preview-header :global(.spectrum-ActionButton:active) {
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
}
|
||||
@media print {
|
||||
.dev-preview-header {
|
||||
|
|
|
@ -2,7 +2,6 @@ import ClientApp from "./components/ClientApp.svelte"
|
|||
import {
|
||||
builderStore,
|
||||
appStore,
|
||||
devToolsStore,
|
||||
blockStore,
|
||||
componentStore,
|
||||
environmentStore,
|
||||
|
@ -51,11 +50,6 @@ const loadBudibase = async () => {
|
|||
await environmentStore.actions.fetchEnvironment()
|
||||
}
|
||||
|
||||
// Enable dev tools or not. We need to be using a dev app and not inside
|
||||
// the builder preview to enable them.
|
||||
const enableDevTools = !get(builderStore).inBuilder && get(appStore).isDevApp
|
||||
devToolsStore.actions.setEnabled(enableDevTools)
|
||||
|
||||
// Register handler for runtime events from the builder
|
||||
window.handleBuilderRuntimeEvent = (type, data) => {
|
||||
if (!window["##BUDIBASE_IN_BUILDER##"]) {
|
||||
|
|
|
@ -2,13 +2,14 @@ import { derived } from "svelte/store"
|
|||
import { Constants } from "@budibase/frontend-core"
|
||||
import { devToolsStore } from "../devTools.js"
|
||||
import { authStore } from "../auth.js"
|
||||
import { devToolsEnabled } from "./devToolsEnabled.js"
|
||||
|
||||
// Derive the current role of the logged-in user
|
||||
export const currentRole = derived(
|
||||
[devToolsStore, authStore],
|
||||
([$devToolsStore, $authStore]) => {
|
||||
[devToolsEnabled, devToolsStore, authStore],
|
||||
([$devToolsEnabled, $devToolsStore, $authStore]) => {
|
||||
return (
|
||||
($devToolsStore.enabled && $devToolsStore.role) ||
|
||||
($devToolsEnabled && $devToolsStore.role) ||
|
||||
$authStore?.roleId ||
|
||||
Constants.Roles.PUBLIC
|
||||
)
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
import { derived } from "svelte/store"
|
||||
import { appStore } from "../app.js"
|
||||
import { builderStore } from "../builder.js"
|
||||
|
||||
export const devToolsEnabled = derived(
|
||||
[appStore, builderStore],
|
||||
([$appStore, $builderStore]) => {
|
||||
return !$builderStore.inBuilder && $appStore.isDevApp
|
||||
}
|
||||
)
|
|
@ -3,3 +3,4 @@
|
|||
// separately we can keep our actual stores lean and performant.
|
||||
export { currentRole } from "./currentRole.js"
|
||||
export { dndComponentPath } from "./dndComponentPath.js"
|
||||
export { devToolsEnabled } from "./devToolsEnabled.js"
|
||||
|
|
|
@ -4,7 +4,6 @@ import { authStore } from "./auth"
|
|||
import { API } from "../api"
|
||||
|
||||
const initialState = {
|
||||
enabled: false,
|
||||
visible: false,
|
||||
allowSelection: false,
|
||||
role: null,
|
||||
|
@ -13,13 +12,6 @@ const initialState = {
|
|||
const createDevToolStore = () => {
|
||||
const store = createLocalStorageStore("bb-devtools", initialState)
|
||||
|
||||
const setEnabled = enabled => {
|
||||
store.update(state => ({
|
||||
...state,
|
||||
enabled,
|
||||
}))
|
||||
}
|
||||
|
||||
const setVisible = visible => {
|
||||
store.update(state => ({
|
||||
...state,
|
||||
|
@ -46,7 +38,7 @@ const createDevToolStore = () => {
|
|||
|
||||
return {
|
||||
subscribe: store.subscribe,
|
||||
actions: { setEnabled, setVisible, setAllowSelection, changeRole },
|
||||
actions: { setVisible, setAllowSelection, changeRole },
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -26,13 +26,16 @@ export const buildDatasourceEndpoints = API => ({
|
|||
* Creates a datasource
|
||||
* @param datasource the datasource to create
|
||||
* @param fetchSchema whether to fetch the schema or not
|
||||
* @param tablesFilter a list of tables to actually fetch rather than simply
|
||||
* all that are accessible.
|
||||
*/
|
||||
createDatasource: async ({ datasource, fetchSchema }) => {
|
||||
createDatasource: async ({ datasource, fetchSchema, tablesFilter }) => {
|
||||
return await API.post({
|
||||
url: "/api/datasources",
|
||||
body: {
|
||||
datasource,
|
||||
fetchSchema,
|
||||
tablesFilter,
|
||||
},
|
||||
})
|
||||
},
|
||||
|
@ -69,4 +72,15 @@ export const buildDatasourceEndpoints = API => ({
|
|||
body: { datasource },
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetch table names available within the datasource, for filtering out undesired tables
|
||||
* @param datasource the datasource configuration to use for fetching tables
|
||||
*/
|
||||
fetchInfoForDatasource: async datasource => {
|
||||
return await API.post({
|
||||
url: `/api/datasources/info`,
|
||||
body: { datasource },
|
||||
})
|
||||
},
|
||||
})
|
||||
|
|
|
@ -148,9 +148,9 @@
|
|||
class:floating={offset > 0}
|
||||
style="--offset:{offset}px; --sticky-width:{width}px;"
|
||||
>
|
||||
<div class="underlay sticky" transition:fade={{ duration: 130 }} />
|
||||
<div class="underlay" transition:fade={{ duration: 130 }} />
|
||||
<div class="sticky-column" transition:fade={{ duration: 130 }}>
|
||||
<div class="underlay sticky" transition:fade|local={{ duration: 130 }} />
|
||||
<div class="underlay" transition:fade|local={{ duration: 130 }} />
|
||||
<div class="sticky-column" transition:fade|local={{ duration: 130 }}>
|
||||
<GutterCell on:expand={addViaModal} rowHovered>
|
||||
<Icon name="Add" color="var(--spectrum-global-color-gray-500)" />
|
||||
{#if isAdding}
|
||||
|
@@ -179,7 +179,7 @@
      </DataCell>
    {/if}
  </div>
- <div class="normal-columns" transition:fade={{ duration: 130 }}>
+ <div class="normal-columns" transition:fade|local={{ duration: 130 }}>
  <GridScrollWrapper scrollHorizontally wheelInteractive>
    <div class="row">
      {#each $renderedColumns as column, columnIdx}
@@ -209,7 +209,7 @@
    </div>
  </GridScrollWrapper>
</div>
- <div class="buttons" transition:fade={{ duration: 130 }}>
+ <div class="buttons" transition:fade|local={{ duration: 130 }}>
  <Button size="M" cta on:click={addRow} disabled={isAdding}>
    <div class="button-with-keys">
      Save
@@ -1 +1 @@
- Subproject commit 01fbc8670021c5a275c2a1a36ee18b984eeafad5
+ Subproject commit f4b8449aac9bd265214396afbdce7ff984a2ae34
@@ -97,7 +97,7 @@
    "koa2-ratelimit": "1.1.1",
    "lodash": "4.17.21",
    "memorystream": "0.3.1",
-   "mongodb": "4.9",
+   "mongodb": "5.6",
    "mssql": "6.2.3",
    "mysql2": "2.3.3",
    "node-fetch": "2.6.7",
@@ -117,7 +117,7 @@
    "socket.io": "4.6.1",
    "svelte": "3.49.0",
    "swagger-parser": "10.0.3",
-   "tar": "6.1.11",
+   "tar": "6.1.15",
    "to-json-schema": "0.2.5",
    "uuid": "3.3.2",
    "validate.js": "0.13.1",
@@ -150,7 +150,7 @@
    "@types/redis": "4.0.11",
    "@types/server-destroy": "1.0.1",
    "@types/supertest": "2.0.12",
-   "@types/tar": "6.1.3",
+   "@types/tar": "6.1.5",
    "@typescript-eslint/parser": "5.45.0",
    "apidoc": "0.50.4",
    "babel-jest": "29.5.0",
@@ -1,17 +1,31 @@
  import sdk from "../../sdk"
- import { events, context } from "@budibase/backend-core"
+ import { events, context, db } from "@budibase/backend-core"
  import { DocumentType } from "../../db/utils"
- import { isQsTrue } from "../../utilities"
+ import { Ctx } from "@budibase/types"

+ interface ExportAppDumpRequest {
+   excludeRows: boolean
+   encryptPassword?: string
+ }
+
+ export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
+   const { appId } = ctx.query as any
+   const { excludeRows, encryptPassword } = ctx.request.body
+
+   const [app] = await db.getAppsByIDs([appId])
+   const appName = app.name
+
- export async function exportAppDump(ctx: any) {
-   let { appId, excludeRows } = ctx.query
    // remove the 120 second limit for the request
    ctx.req.setTimeout(0)
-   const appName = decodeURI(ctx.query.appname)
-   excludeRows = isQsTrue(excludeRows)
-   const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
+
+   const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
+   const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
    ctx.attachment(backupIdentifier)
-   ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
+   ctx.body = await sdk.backups.streamExportApp({
+     appId,
+     excludeRows,
+     encryptPassword,
+   })

    await context.doInAppContext(appId, async () => {
      const appDb = context.getAppDB()
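For context, a hypothetical client request against the reworked endpoint (the route change from GET to POST appears later in this diff); the password value is a placeholder:

// assumed client-side call shape, not part of this diff
await API.post({
  url: `/api/backups/export?appId=${appId}`,
  body: { excludeRows: true, encryptPassword: "placeholder-password" },
})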
@@ -103,6 +103,22 @@ async function buildSchemaHelper(datasource: Datasource) {
  return { tables: connector.tables, error }
}

+ async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
+   let { tables, error } = await buildSchemaHelper(datasource)
+   let finalTables = tables
+   if (filter) {
+     finalTables = {}
+     for (let key in tables) {
+       if (
+         filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
+       ) {
+         finalTables[key] = tables[key]
+       }
+     }
+   }
+   return { tables: finalTables, error }
+ }
+
export async function fetch(ctx: UserCtx) {
  // Get internal tables
  const db = context.getAppDB()
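A minimal sketch of the case-insensitive matching used by buildFilteredSchema above, with made-up table names:

// the filter entries match table keys regardless of case
const tables: Record<string, object> = { Users: {}, Orders: {}, audit_log: {} }
const filter = ["users", "ORDERS"]
const finalTables: Record<string, object> = {}
for (const key in tables) {
  if (filter.some(f => f.toLowerCase() === key.toLowerCase())) {
    finalTables[key] = tables[key]
  }
}
// finalTables keeps Users and Orders; audit_log is dropped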
@@ -174,43 +190,28 @@ export async function information(
  }
  const tableNames = await connector.getTableNames()
  ctx.body = {
-   tableNames,
+   tableNames: tableNames.sort(),
  }
}

export async function buildSchemaFromDb(ctx: UserCtx) {
  const db = context.getAppDB()
- const datasource = await sdk.datasources.get(ctx.params.datasourceId)
+ const tablesFilter = ctx.request.body.tablesFilter
+ const datasource = await sdk.datasources.get(ctx.params.datasourceId)

- let { tables, error } = await buildSchemaHelper(datasource)
- if (tablesFilter) {
-   if (!datasource.entities) {
-     datasource.entities = {}
-   }
-   for (let key in tables) {
-     if (
-       tablesFilter.some(
-         (filter: any) => filter.toLowerCase() === key.toLowerCase()
-       )
-     ) {
-       datasource.entities[key] = tables[key]
-     }
-   }
- } else {
-   datasource.entities = tables
- }
+ const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
+ datasource.entities = tables

  setDefaultDisplayColumns(datasource)
  const dbResp = await db.put(datasource)
  datasource._rev = dbResp.rev
  const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)

- const response: any = { datasource: cleanedDatasource }
+ const res: any = { datasource: cleanedDatasource }
  if (error) {
-   response.error = error
+   res.error = error
  }
- ctx.body = response
+ ctx.body = res
}

/**
@@ -320,6 +321,7 @@ export async function save(
  const db = context.getAppDB()
  const plus = ctx.request.body.datasource.plus
  const fetchSchema = ctx.request.body.fetchSchema
+ const tablesFilter = ctx.request.body.tablesFilter

  const datasource = {
    _id: generateDatasourceID({ plus }),
@@ -329,7 +331,10 @@ export async function save(

  let schemaError = null
  if (fetchSchema) {
-   const { tables, error } = await buildSchemaHelper(datasource)
+   const { tables, error } = await buildFilteredSchema(
+     datasource,
+     tablesFilter
+   )
    schemaError = error
    datasource.entities = tables
    setDefaultDisplayColumns(datasource)
@@ -4,7 +4,7 @@ import {
  getUserMetadataParams,
  InternalTables,
} from "../../db/utils"
- import { BBContext, Database } from "@budibase/types"
+ import { UserCtx, Database } from "@budibase/types"

const UpdateRolesOptions = {
  CREATED: "created",
@@ -38,15 +38,15 @@ async function updateRolesOnUserTable(
  }
}

- export async function fetch(ctx: BBContext) {
+ export async function fetch(ctx: UserCtx) {
  ctx.body = await roles.getAllRoles()
}

- export async function find(ctx: BBContext) {
+ export async function find(ctx: UserCtx) {
  ctx.body = await roles.getRole(ctx.params.roleId)
}

- export async function save(ctx: BBContext) {
+ export async function save(ctx: UserCtx) {
  const db = context.getAppDB()
  let { _id, name, inherits, permissionId } = ctx.request.body
  let isCreate = false
@@ -72,7 +72,7 @@ export async function save(ctx: BBContext) {
  ctx.message = `Role '${role.name}' created successfully.`
}

- export async function destroy(ctx: BBContext) {
+ export async function destroy(ctx: UserCtx) {
  const db = context.getAppDB()
  const roleId = ctx.params.roleId
  const role = await db.get(roleId)
@@ -1,6 +1,6 @@
import { getRoutingInfo } from "../../utilities/routing"
import { roles } from "@budibase/backend-core"
- import { BBContext } from "@budibase/types"
+ import { UserCtx } from "@budibase/types"

const URL_SEPARATOR = "/"
@@ -56,11 +56,11 @@ async function getRoutingStructure() {
  return { routes: routing.json }
}

- export async function fetch(ctx: BBContext) {
+ export async function fetch(ctx: UserCtx) {
  ctx.body = await getRoutingStructure()
}

- export async function clientFetch(ctx: BBContext) {
+ export async function clientFetch(ctx: UserCtx) {
  const routing = await getRoutingStructure()
  let roleId = ctx.user?.role?._id
  const roleIds = (await roles.getUserRoleHierarchy(roleId, {
@@ -237,9 +237,15 @@ export async function exportRows(ctx: UserCtx) {
    ctx.request.body = {
      query: {
        oneOf: {
-         _id: ctx.request.body.rows.map(
-           (row: string) => JSON.parse(decodeURI(row))[0]
-         ),
+         _id: ctx.request.body.rows.map((row: string) => {
+           const ids = JSON.parse(
+             decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
+           )
+           if (ids.length > 1) {
+             ctx.throw(400, "Export data does not support composite keys.")
+           }
+           return ids[0]
+         }),
        },
      },
    }
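To illustrate the parsing added above (the row string is an example, not taken from real data):

const row = "['d001'%2C'10111']"
// decodeURI leaves %2C in place, so it is replaced explicitly along with single quotes
const ids = JSON.parse(decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ","))
// ids === ["d001", "10111"]; a length greater than 1 means a composite key,
// which the endpoint rejects with a 400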
@@ -0,0 +1,120 @@
import { exportRows } from "../row/external"
import sdk from "../../../sdk"
import { ExternalRequest } from "../row/ExternalRequest"

// @ts-ignore
sdk.datasources = {
  get: jest.fn(),
}

jest.mock("../row/ExternalRequest")
jest.mock("../view/exporters", () => ({
  csv: jest.fn(),
  Format: {
    CSV: "csv",
  },
}))
jest.mock("../../../utilities/fileSystem")

function getUserCtx() {
  return {
    params: {
      tableId: "datasource__tablename",
    },
    query: {
      format: "csv",
    },
    request: {
      body: {},
    },
    throw: jest.fn(() => {
      throw "Err"
    }),
    attachment: jest.fn(),
  }
}

describe("external row controller", () => {
  describe("exportRows", () => {
    beforeAll(() => {
      //@ts-ignore
      jest.spyOn(ExternalRequest.prototype, "run").mockImplementation(() => [])
    })

    afterEach(() => {
      jest.clearAllMocks()
    })

    it("should throw a 400 if no datasource entities are present", async () => {
      let userCtx = getUserCtx()
      try {
        //@ts-ignore
        await exportRows(userCtx)
      } catch (e) {
        expect(userCtx.throw).toHaveBeenCalledWith(
          400,
          "Datasource has not been configured for plus API."
        )
      }
    })

    it("should handle single quotes from a row ID", async () => {
      //@ts-ignore
      sdk.datasources.get.mockImplementation(() => ({
        entities: {
          tablename: {
            schema: {},
          },
        },
      }))
      let userCtx = getUserCtx()
      userCtx.request.body = {
        rows: ["['d001']"],
      }

      //@ts-ignore
      await exportRows(userCtx)

      expect(userCtx.request.body).toEqual({
        query: {
          oneOf: {
            _id: ["d001"],
          },
        },
      })
    })

    it("should throw a 400 if any composite keys are present", async () => {
      let userCtx = getUserCtx()
      userCtx.request.body = {
        rows: ["[123]", "['d001'%2C'10111']"],
      }
      try {
        //@ts-ignore
        await exportRows(userCtx)
      } catch (e) {
        expect(userCtx.throw).toHaveBeenCalledWith(
          400,
          "Export data does not support composite keys."
        )
      }
    })

    it("should throw a 400 if no table name was found", async () => {
      let userCtx = getUserCtx()
      userCtx.params.tableId = "datasource__"
      userCtx.request.body = {
        rows: ["[123]"],
      }
      try {
        //@ts-ignore
        await exportRows(userCtx)
      } catch (e) {
        expect(userCtx.throw).toHaveBeenCalledWith(
          400,
          "Could not find table name."
        )
      }
    })
  })
})
@@ -5,7 +5,7 @@ import { permissions } from "@budibase/backend-core"

const router: Router = new Router()

- router.get(
+ router.post(
    "/api/backups/export",
    authorized(permissions.BUILDER),
    controller.exportAppDump
@@ -1,7 +1,9 @@
+ import tk from "timekeeper"
  import * as setup from "./utilities"
  import { events } from "@budibase/backend-core"
  import sdk from "../../../sdk"
  import { checkBuilderEndpoint } from "./utilities/TestFunctions"
+ import { mocks } from "@budibase/backend-core/tests"

describe("/backups", () => {
  let request = setup.getRequest()
@@ -16,7 +18,7 @@ describe("/backups", () => {
  describe("exportAppDump", () => {
    it("should be able to export app", async () => {
      const res = await request
-       .get(`/api/backups/export?appId=${config.getAppId()}&appname=test`)
+       .post(`/api/backups/export?appId=${config.getAppId()}`)
        .set(config.defaultHeaders())
        .expect(200)
      expect(res.headers["content-type"]).toEqual("application/gzip")
@@ -26,10 +28,24 @@ describe("/backups", () => {
    it("should apply authorization to endpoint", async () => {
      await checkBuilderEndpoint({
        config,
-       method: "GET",
+       method: "POST",
        url: `/api/backups/export?appId=${config.getAppId()}`,
      })
    })

+   it("should infer the app name from the app", async () => {
+     tk.freeze(mocks.date.MOCK_DATE)
+
+     const res = await request
+       .post(`/api/backups/export?appId=${config.getAppId()}`)
+       .set(config.defaultHeaders())
+
+     expect(res.headers["content-disposition"]).toEqual(
+       `attachment; filename="${
+         config.getApp()!.name
+       }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
+     )
+   })
  })

  describe("calculateBackupStats", () => {
@@ -26,6 +26,10 @@ export default function process(updateCb?: UpdateCallback) {
    // if something not found - no changes to perform
    if (err?.status === 404) {
      return
    }
+   // The user has already been synced in another process
+   else if (err?.status === 409) {
+     return
+   } else {
      logging.logAlert("Failed to perform user/group app sync", err)
    }
@@ -21,7 +21,7 @@ import { buildExternalTableId, finaliseExternalTables } from "./utils"
  import { GoogleSpreadsheet, GoogleSpreadsheetRow } from "google-spreadsheet"
  import fetch from "node-fetch"
  import { cache, configs, context, HTTPError } from "@budibase/backend-core"
- import { dataFilters } from "@budibase/shared-core"
+ import { dataFilters, utils } from "@budibase/shared-core"
  import { GOOGLE_SHEETS_PRIMARY_KEY } from "../constants"
  import sdk from "../sdk"
@@ -150,7 +150,6 @@ class GoogleSheetsIntegration implements DatasourcePlus {

  async testConnection(): Promise<ConnectionInfo> {
    try {
-     await setupCreationAuth(this.config)
      await this.connect()
      return { connected: true }
    } catch (e: any) {
@@ -211,6 +210,8 @@ class GoogleSheetsIntegration implements DatasourcePlus {

  async connect() {
    try {
+     await setupCreationAuth(this.config)
+
      // Initialise oAuth client
      let googleConfig = await configs.getGoogleDatasourceConfig()
      if (!googleConfig) {
@@ -273,24 +274,24 @@ class GoogleSheetsIntegration implements DatasourcePlus {
  }

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    // not fully configured yet
    if (!this.config.auth) {
      return
    }
    await this.connect()
    const sheets = this.client.sheetsByIndex
    const tables: Record<string, Table> = {}
-   for (let sheet of sheets) {
-     // must fetch rows to determine schema
-     await sheet.getRows()
-     const id = buildExternalTableId(datasourceId, sheet.title)
-     tables[sheet.title] = this.getTableSchema(
-       sheet.title,
-       sheet.headerValues,
-       id
-     )
-   }
+   await utils.parallelForeach(
+     sheets,
+     async sheet => {
+       // must fetch rows to determine schema
+       await sheet.getRows({ limit: 0, offset: 0 })
+
+       const id = buildExternalTableId(datasourceId, sheet.title)
+       tables[sheet.title] = this.getTableSchema(
+         sheet.title,
+         sheet.headerValues,
+         id
+       )
+     },
+     10
+   )
    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
@@ -351,7 +351,7 @@ const SCHEMA: Integration = getSchema()

class MongoIntegration implements IntegrationBase {
  private config: MongoDBConfig
- private client: any
+ private client: MongoClient

  constructor(config: MongoDBConfig) {
    this.config = config
@@ -372,6 +372,8 @@ class MongoIntegration implements IntegrationBase {
      response.connected = true
    } catch (e: any) {
      response.error = e.message as string
+   } finally {
+     await this.client.close()
    }
    return response
  }
@@ -380,7 +382,7 @@ class MongoIntegration implements IntegrationBase {
    return this.client.connect()
  }

- createObjectIds(json: any): object {
+ createObjectIds(json: any) {
    const self = this
    function interpolateObjectIds(json: any) {
      for (let field of Object.keys(json)) {
@@ -322,7 +322,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
      await this.openConnection()
      const columnsResponse: { rows: PostgresColumn[] } =
        await this.client.query(this.COLUMNS_SQL)
-     return columnsResponse.rows.map(row => row.table_name)
+     const names = columnsResponse.rows.map(row => row.table_name)
+     return [...new Set(names)]
    } finally {
      await this.closeConnection()
    }
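The extra line above de-duplicates repeated table names (the column query returns one row per column, so a table name appears once per column); a tiny sketch of the idea with placeholder values:

const names = ["users", "users", "orders"]
const unique = [...new Set(names)] // ["users", "orders"]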
@@ -103,7 +103,7 @@ export default async (ctx: UserCtx, next: any) => {
      userId,
      globalId,
      roleId,
-     role: await roles.getRole(roleId),
+     role: await roles.getRole(roleId, { defaultPublic: true }),
    }
  }
@@ -1,4 +1,4 @@
- import { db as dbCore, objectStore } from "@budibase/backend-core"
+ import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
  import { budibaseTempDir } from "../../../utilities/budibaseDir"
  import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
  import { ObjectStoreBuckets } from "../../../constants"
@@ -18,7 +18,8 @@ import { join } from "path"
  import env from "../../../environment"

  const uuid = require("uuid/v4")
- const tar = require("tar")
+ import tar from "tar"

  const MemoryStream = require("memorystream")

  interface DBDumpOpts {
@@ -30,16 +31,18 @@ interface ExportOpts extends DBDumpOpts {
    tar?: boolean
    excludeRows?: boolean
    excludeLogs?: boolean
+   encryptPassword?: string
  }

  function tarFilesToTmp(tmpDir: string, files: string[]) {
-   const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
+   const fileName = `${uuid()}.tar.gz`
+   const exportFile = join(budibaseTempDir(), fileName)
    tar.create(
      {
        sync: true,
        gzip: true,
        file: exportFile,
        recursive: true,
        noDirRecurse: false,
        cwd: tmpDir,
      },
      files
@@ -124,6 +127,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
      )
    }
  }
+
  const downloadedPath = join(tmpPath, appPath)
  if (fs.existsSync(downloadedPath)) {
    const allFiles = fs.readdirSync(downloadedPath)
@@ -141,12 +145,27 @@ export async function exportApp(appId: string, config?: ExportOpts) {
    filter: defineFilter(config?.excludeRows, config?.excludeLogs),
    exportPath: dbPath,
  })

+ if (config?.encryptPassword) {
+   for (let file of fs.readdirSync(tmpPath)) {
+     const path = join(tmpPath, file)
+
+     await encryption.encryptFile(
+       { dir: tmpPath, filename: file },
+       config.encryptPassword
+     )
+
+     fs.rmSync(path)
+   }
+ }
+
  // if tar requested, return where the tarball is
  if (config?.tar) {
    // now the tmpPath contains both the DB export and attachments, tar this
    const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
    // cleanup the tmp export files as tarball returned
    fs.rmSync(tmpPath, { recursive: true, force: true })

    return tarPath
  }
  // tar not requested, return the directory where the export is
@@ -161,11 +180,20 @@ export async function exportApp(appId: string, config?: ExportOpts) {
 * @param {boolean} excludeRows Flag to state whether the export should include data.
 * @returns {*} a readable stream of the backup which is written in real time
 */
- export async function streamExportApp(appId: string, excludeRows: boolean) {
+ export async function streamExportApp({
+   appId,
+   excludeRows,
+   encryptPassword,
+ }: {
+   appId: string
+   excludeRows: boolean
+   encryptPassword?: string
+ }) {
    const tmpPath = await exportApp(appId, {
      excludeRows,
      excludeLogs: true,
      tar: true,
+     encryptPassword,
    })
    return streamFile(tmpPath)
  }
@@ -1,4 +1,4 @@
- import { db as dbCore, objectStore } from "@budibase/backend-core"
+ import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
  import { Database, Row } from "@budibase/types"
  import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
  import { budibaseTempDir } from "../../../utilities/budibaseDir"
@@ -20,6 +20,7 @@ type TemplateType = {
  file?: {
    type: string
    path: string
+   password?: string
  }
  key?: string
}
@@ -123,6 +124,22 @@ export function untarFile(file: { path: string }) {
  return tmpPath
}

+ async function decryptFiles(path: string, password: string) {
+   try {
+     for (let file of fs.readdirSync(path)) {
+       const inputPath = join(path, file)
+       const outputPath = inputPath.replace(/\.enc$/, "")
+       await encryption.decryptFile(inputPath, outputPath, password)
+       fs.rmSync(inputPath)
+     }
+   } catch (err: any) {
+     if (err.message === "incorrect header check") {
+       throw new Error("File cannot be imported")
+     }
+     throw err
+   }
+ }
+
export function getGlobalDBFile(tmpPath: string) {
  return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
}
@@ -143,6 +160,9 @@ export async function importApp(
    template.file && fs.lstatSync(template.file.path).isDirectory()
  if (template.file && (isTar || isDirectory)) {
    const tmpPath = isTar ? untarFile(template.file) : template.file.path
+   if (isTar && template.file.password) {
+     await decryptFiles(tmpPath, template.file.password)
+   }
    const contents = fs.readdirSync(tmpPath)
    // have to handle object import
    if (contents.length) {
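Taken together with the export changes earlier in this diff, the encrypted round trip might look like this sketch (the app ID and password are placeholders):

// Export: files inside the tarball are written as *.enc when a password is set
const tarPath = await exportApp(appId, {
  tar: true,
  excludeRows: false,
  encryptPassword: "placeholder-password",
})
// Import: when template.file.password is supplied, importApp calls decryptFiles(tmpPath, password),
// which writes each file without its .enc suffix and removes the encrypted original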
@@ -135,7 +135,7 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
  // specific to REST datasources, fix the auth configs again if required
  if (hasAuthConfigs(update)) {
    const configs = update.config.authConfigs as RestAuthConfig[]
-   const oldConfigs = old.config?.authConfigs as RestAuthConfig[]
+   const oldConfigs = (old.config?.authConfigs as RestAuthConfig[]) || []
    for (let config of configs) {
      if (config.type !== RestAuthType.BASIC) {
        continue
@@ -164,5 +164,6 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
      delete update.config[key]
    }
  }
+
  return update
}
@@ -4,3 +4,42 @@ export function unreachable(
) {
  throw new Error(message)
}

+ export async function parallelForeach<T>(
+   items: T[],
+   task: (item: T) => Promise<void>,
+   maxConcurrency: number
+ ): Promise<void> {
+   const promises: Promise<void>[] = []
+   let index = 0
+
+   const processItem = async (item: T) => {
+     try {
+       await task(item)
+     } finally {
+       processNext()
+     }
+   }
+
+   const processNext = () => {
+     if (index >= items.length) {
+       // No more items to process
+       return
+     }
+
+     const item = items[index]
+     index++
+
+     const promise = processItem(item)
+     promises.push(promise)
+
+     if (promises.length >= maxConcurrency) {
+       Promise.race(promises).then(processNext)
+     } else {
+       processNext()
+     }
+   }
+   processNext()
+
+   await Promise.all(promises)
+ }
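A usage sketch for the helper above, mirroring the Google Sheets change earlier in this diff (the sheet objects are assumed to exist):

await parallelForeach(
  sheets,
  async sheet => {
    // at most 10 of these run concurrently
    await sheet.getRows({ limit: 0, offset: 0 })
  },
  10
)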
@@ -12,6 +12,7 @@ export interface UpdateDatasourceResponse {
export interface CreateDatasourceRequest {
  datasource: Datasource
  fetchSchema?: boolean
+ tablesFilter: string[]
}

export interface VerifyDatasourceRequest {
@@ -10,7 +10,7 @@
  },
  "scripts": {
    "setup": "yarn && node scripts/createEnv.js",
-   "test": "jest --runInBand --json --outputFile=testResults.json",
+   "test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
    "test:watch": "yarn run test --watch",
    "test:debug": "DEBUG=1 yarn run test",
    "test:notify": "node scripts/testResultsWebhook",
@@ -15,6 +15,12 @@ async function generateReport() {
  return JSON.parse(report)
}

+ const env = process.argv.slice(2)[0]
+
+ if (!env) {
+   throw new Error("environment argument is required")
+ }
+
async function discordResultsNotification(report) {
  const {
    numTotalTestSuites,
@@ -39,8 +45,8 @@ async function discordResultsNotification(report) {
    content: `**Nightly Tests Status**: ${OUTCOME}`,
    embeds: [
      {
-       title: "Budi QA Bot",
-       description: `Nightly Tests`,
+       title: `Budi QA Bot - ${env}`,
+       description: `API Integration Tests`,
        url: GITHUB_ACTIONS_RUN_URL,
        color: OUTCOME === "success" ? 3066993 : 15548997,
        timestamp: new Date(),
@@ -60,8 +60,16 @@ export default class AccountAPI {
  }

  async delete(accountID: string) {
-   const [response, json] = await this.client.del(`/api/accounts/${accountID}`)
-   expect(response).toHaveStatusCode(200)
+   const [response, json] = await this.client.del(
+     `/api/accounts/${accountID}`,
+     {
+       internal: true,
+     }
+   )
+   // can't use expect here due to use in global teardown
+   if (response.status !== 204) {
+     throw new Error(`Could not delete accountId=${accountID}`)
+   }
    return response
  }
}
@@ -93,7 +93,7 @@ describe("datasource validators", () => {
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
-       error: "Error: getaddrinfo ENOTFOUND http",
+       error: "getaddrinfo ENOTFOUND http",
      })
    })
  })
@@ -1,4 +1,5 @@
  import { GenericContainer } from "testcontainers"
  import postgres from "../../../../packages/server/src/integrations/postgres"

+ jest.unmock("pg")
@@ -10,6 +10,7 @@ const API_OPTS: APIRequestOpts = { doExpect: false }
async function deleteAccount() {
  // @ts-ignore
  const accountID = global.qa.accountId
+ // can't run 'expect' blocks in teardown
  await accountsApi.accounts.delete(accountID)
}
yarn.lock (53 changed lines)
@@ -6197,13 +6197,13 @@
  dependencies:
    "@types/node" "*"

-"@types/tar@6.1.3":
-  version "6.1.3"
-  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
-  integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
+"@types/tar@6.1.5":
+  version "6.1.5"
+  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.5.tgz#90ccb3b6a35430e7427410d50eed564e85feaaff"
+  integrity sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==
  dependencies:
    "@types/node" "*"
-   minipass "^3.3.5"
+   minipass "^4.0.0"

"@types/tern@*":
  version "0.23.4"
@@ -7989,12 +7989,10 @@ bson@*:
  resolved "https://registry.yarnpkg.com/bson/-/bson-5.0.1.tgz#4cd3eeeabf6652ef0d6ab600f9a18212d39baac3"
  integrity sha512-y09gBGusgHtinMon/GVbv1J6FrXhnr/+6hqLlSmEFzkz6PodqF6TxjyvfvY3AfO+oG1mgUtbC86xSbOlwvM62Q==

-bson@^4.7.0:
-  version "4.7.2"
-  resolved "https://registry.yarnpkg.com/bson/-/bson-4.7.2.tgz#320f4ad0eaf5312dd9b45dc369cc48945e2a5f2e"
-  integrity sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==
-  dependencies:
-    buffer "^5.6.0"
+bson@^5.3.0:
+  version "5.3.0"
+  resolved "https://registry.yarnpkg.com/bson/-/bson-5.3.0.tgz#37b006df4cd91ed125cb686467c1dd6d4606b514"
+  integrity sha512-ukmCZMneMlaC5ebPHXIkP8YJzNl5DC41N5MAIvKDqLggdao342t4McltoJBQfQya/nHBWAcSsYRqlXPoQkTJag==

buffer-alloc-unsafe@^1.1.0:
  version "1.1.0"
@@ -18036,7 +18034,7 @@ minipass-sized@^1.0.3:
  dependencies:
    minipass "^3.0.0"

-minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6, minipass@^3.3.5:
+minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6:
  version "3.3.6"
  resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
  integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
@@ -18210,7 +18208,7 @@ moment@^2.29.4:
  resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
  integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==

-mongodb-connection-string-url@^2.5.3:
+mongodb-connection-string-url@^2.6.0:
  version "2.6.0"
  resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz#57901bf352372abdde812c81be47b75c6b2ec5cf"
  integrity sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==
@@ -18218,15 +18216,14 @@ mongodb-connection-string-url@^2.5.3:
    "@types/whatwg-url" "^8.2.1"
    whatwg-url "^11.0.0"

-mongodb@4.9:
-  version "4.9.1"
-  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-4.9.1.tgz#0c769448228bcf9a6aa7d16daa3625b48312479e"
-  integrity sha512-ZhgI/qBf84fD7sI4waZBoLBNJYPQN5IOC++SBCiPiyhzpNKOxN/fi0tBHvH2dEC42HXtNEbFB0zmNz4+oVtorQ==
+mongodb@5.6:
+  version "5.6.0"
+  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-5.6.0.tgz#caff5278341bfc0f1ef6f394bb403d207de03d1e"
+  integrity sha512-z8qVs9NfobHJm6uzK56XBZF8XwM9H294iRnB7wNjF0SnY93si5HPziIJn+qqvUR5QOff/4L0gCD6SShdR/GtVQ==
  dependencies:
-    bson "^4.7.0"
-    denque "^2.1.0"
-    mongodb-connection-string-url "^2.5.3"
-    socks "^2.7.0"
+    bson "^5.3.0"
+    mongodb-connection-string-url "^2.6.0"
+    socks "^2.7.1"
  optionalDependencies:
    saslprep "^1.0.3"
@@ -23100,7 +23097,7 @@ socks-proxy-agent@^7.0.0:
    debug "^4.3.3"
    socks "^2.6.2"

-socks@^2.3.3, socks@^2.6.2, socks@^2.7.0:
+socks@^2.3.3, socks@^2.6.2, socks@^2.7.1:
  version "2.7.1"
  resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55"
  integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==
@@ -24163,6 +24160,18 @@ tar@6.1.11:
    mkdirp "^1.0.3"
    yallist "^4.0.0"

+tar@6.1.15:
+  version "6.1.15"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
+  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
+  dependencies:
+    chownr "^2.0.0"
+    fs-minipass "^2.0.0"
+    minipass "^5.0.0"
+    minizlib "^2.1.1"
+    mkdirp "^1.0.3"
+    yallist "^4.0.0"
+
tar@^6.1.11, tar@^6.1.2:
  version "6.1.13"
  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"