Merge branch 'develop' of github.com:Budibase/budibase into grid-block

Andrew Kingston 2023-06-26 09:05:06 +01:00
commit aebc3b2bb8
87 changed files with 2947 additions and 7776 deletions

View File

@@ -1,5 +1,9 @@
 name: Budibase CI
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
 on:
   # Trigger the workflow on push or pull request,
   # but only for the master branch
@@ -23,6 +27,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
+        with:
+          submodules: true
+          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Use Node.js 14.x
        uses: actions/setup-node@v3
        with:
@@ -135,15 +142,39 @@ jobs:
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
+          fetch-depth: 0
-      - name: Check submodule
+      - name: Check pro commit
+        id: get_pro_commits
         run: |
           cd packages/pro
-          git fetch
-          if ! git merge-base --is-ancestor $(git log -n 1 --pretty=format:%H) origin/develop; then
-            echo "Current commit has not been merged to develop"
-            echo "Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md"
-            exit 1
+          pro_commit=$(git rev-parse HEAD)
+          branch=${{ github.base_ref || github.ref_name }}
+          echo "Running on branch `$branch` (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
+          if [[ "$branch" == "master" ]]; then
+            base_commit=$(git rev-parse origin/master)
           else
-            echo "All good, the submodule had been merged!"
+            base_commit=$(git rev-parse origin/develop)
           fi
+          echo "pro_commit=$pro_commit"
+          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
+          echo "base_commit=$base_commit"
+          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+      - name: Check submodule merged to develop
+        uses: actions/github-script@v4
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
+            const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
+            if (submoduleCommit !== baseCommit) {
+              console.error('Submodule commit does not match the latest commit on the develop branch.');
+              console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
+              process.exit(1);
+            } else {
+              console.log('All good, the submodule had been merged and setup correctly!')
+            }

View File

@@ -2,7 +2,9 @@ const fs = require("fs")
 const { execSync } = require("child_process")
 const path = require("path")

-const IMAGES = {
+const IS_SINGLE_IMAGE = process.env.SINGLE_IMAGE
+
+let IMAGES = {
   worker: "budibase/worker",
   apps: "budibase/apps",
   proxy: "budibase/proxy",
@@ -10,7 +12,13 @@ const IMAGES = {
   couch: "ibmcom/couchdb3",
   curl: "curlimages/curl",
   redis: "redis",
-  watchtower: "containrrr/watchtower"
+  watchtower: "containrrr/watchtower",
+}
+
+if (IS_SINGLE_IMAGE) {
+  IMAGES = {
+    budibase: "budibase/budibase"
+  }
 }

 const FILES = {
@@ -39,11 +47,10 @@ for (let image in IMAGES) {
 }

 // copy config files
-copyFile(FILES.COMPOSE)
+if (!IS_SINGLE_IMAGE) {
+  copyFile(FILES.COMPOSE)
+}
 copyFile(FILES.ENV)

 // compress
 execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
-
-// clean up
-fs.rmdirSync(OUTPUT_DIR, { recursive: true })
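Note: the SINGLE_IMAGE flag above is wired to a new root package.json script later in this diff, so the single-image airgapped bundle can be produced with:

    SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild
    # equivalently: yarn build:docker:airgap:single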

View File

@@ -37,6 +37,14 @@ COPY --from=build /worker /worker
 RUN apt-get update && \
     apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server

+# Install postgres client for pg_dump utils
+RUN apt install software-properties-common apt-transport-https gpg -y \
+    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
+    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
+    && apt update -y \
+    && apt install postgresql-client-15 -y \
+    && apt remove software-properties-common apt-transport-https gpg -y
+
 # install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
 WORKDIR /nodejs
 RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \

View File

@@ -1,22 +1,10 @@
 {
-  "version": "2.7.25-alpha.1",
+  "version": "2.7.34-alpha.3",
   "npmClient": "yarn",
   "packages": [
-    "packages/backend-core",
-    "packages/bbui",
-    "packages/builder",
-    "packages/cli",
-    "packages/client",
-    "packages/frontend-core",
-    "packages/sdk",
-    "packages/server",
-    "packages/shared-core",
-    "packages/string-templates",
-    "packages/types",
-    "packages/worker",
-    "packages/pro/packages/pro"
+    "packages/*"
   ],
-  "useWorkspaces": true,
+  "useNx": true,
   "command": {
     "publish": {
       "ignoreChanges": [

View File

@@ -2,23 +2,22 @@
   "name": "root",
   "private": true,
   "devDependencies": {
-    "@esbuild-plugins/node-resolve": "^0.2.2",
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
     "@nx/js": "16.2.1",
     "@rollup/plugin-json": "^4.0.2",
     "@typescript-eslint/parser": "5.45.0",
     "babel-eslint": "^10.0.3",
     "esbuild": "^0.17.18",
-    "esbuild-node-externals": "^1.7.0",
     "eslint": "^7.28.0",
     "eslint-plugin-cypress": "^2.11.3",
     "eslint-plugin-svelte3": "^3.2.0",
     "husky": "^8.0.3",
     "js-yaml": "^4.1.0",
     "kill-port": "^1.6.1",
-    "lerna": "7.0.0-alpha.0",
+    "lerna": "7.0.2",
     "madge": "^6.0.0",
     "minimist": "^1.2.8",
+    "nx": "^16.2.1",
     "prettier": "^2.3.1",
     "prettier-plugin-svelte": "^2.3.0",
     "rimraf": "^3.0.2",
@@ -48,9 +47,9 @@
     "kill-builder": "kill-port 3000",
     "kill-server": "kill-port 4001 4002",
     "kill-all": "yarn run kill-builder && yarn run kill-server",
     "dev": "yarn run kill-all && lerna run --stream --parallel dev:builder --stream",
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
-    "dev:server": "yarn run kill-server && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
+    "dev:server": "yarn run kill-server && yarn build --projects=@budibase/client && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream --parallel dev:built",
     "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
@@ -67,6 +66,7 @@
     "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
+    "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
     "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
@@ -95,19 +95,7 @@
   },
   "workspaces": {
     "packages": [
-      "packages/backend-core",
-      "packages/bbui",
-      "packages/builder",
-      "packages/cli",
-      "packages/client",
-      "packages/frontend-core",
-      "packages/sdk",
-      "packages/server",
-      "packages/shared-core",
-      "packages/string-templates",
-      "packages/types",
-      "packages/worker",
-      "packages/pro/packages/pro"
+      "packages/*"
     ]
   },
   "resolutions": {

View File

@@ -31,4 +31,6 @@ const config: Config.InitialOptions = {
   coverageReporters: ["lcov", "json", "clover"],
 }

+process.env.DISABLE_PINO_LOGGER = "1"
+
 export default config

View File

@@ -27,7 +27,7 @@
   "@techpass/passport-openidconnect": "0.3.2",
   "aws-cloudfront-sign": "2.2.0",
   "aws-sdk": "2.1030.0",
-  "bcrypt": "5.0.1",
+  "bcrypt": "5.1.0",
   "bcryptjs": "2.4.3",
   "bull": "4.10.1",
   "correlation-id": "4.0.0",

View File

@@ -87,7 +87,7 @@
     border-color: var(--spectrum-global-color-gray-400);
   }
   /* Toolbar button color */
-  :global(.EasyMDEContainer .editor-toolbar button i) {
+  :global(.EasyMDEContainer .editor-toolbar button) {
     color: var(--spectrum-global-color-gray-800);
   }
   /* Separator between toolbar buttons*/

View File

@@ -309,7 +309,7 @@
   }

   function canShowField(key, value) {
-    const dependsOn = value.dependsOn
+    const dependsOn = value?.dependsOn
     return !dependsOn || !!inputData[dependsOn]
   }

View File

@@ -1,5 +1,5 @@
 <script>
-  import { tables } from "stores/backend"
+  import { datasources, tables } from "stores/backend"
   import EditRolesButton from "./buttons/EditRolesButton.svelte"
   import { TableNames } from "constants"
   import { Grid } from "@budibase/frontend-core"
@@ -26,6 +26,18 @@
   $: id = $tables.selected?._id
   $: isUsersTable = id === TableNames.USERS
   $: isInternal = $tables.selected?.type !== "external"
+
+  const handleGridTableUpdate = async e => {
+    tables.replaceTable(id, e.detail)
+
+    // We need to refresh datasources when an external table changes.
+    // Type "external" may exist - sometimes type is "table" and sometimes it
+    // is "external" - it has different meanings in different endpoints.
+    // If we check both these then we hopefully catch all external tables.
+    if (e.detail?.type === "external" || e.detail?.sql) {
+      await datasources.fetch()
+    }
+  }
 </script>

 <div class="wrapper">
@@ -36,7 +48,7 @@
     allowDeleteRows={!isUsersTable}
     schemaOverrides={isUsersTable ? userSchemaOverrides : null}
     showAvatars={false}
-    on:updatetable={e => tables.replaceTable(id, e.detail)}
+    on:updatetable={handleGridTableUpdate}
   >
     <svelte:fragment slot="filter">
       <GridFilterButton />

View File

@@ -59,7 +59,6 @@
   $: valid = getErrorCount(errors) === 0 && allRequiredAttributesSet()
   $: isManyToMany = relationshipType === RelationshipTypes.MANY_TO_MANY
   $: isManyToOne = relationshipType === RelationshipTypes.MANY_TO_ONE
-  $: toRelationship.relationshipType = fromRelationship?.relationshipType

   function getTable(id) {
     return plusTables.find(table => table._id === id)
@@ -180,6 +179,16 @@
     return getErrorCount(errors) === 0
   }

+  function otherRelationshipType(type) {
+    if (type === RelationshipTypes.MANY_TO_ONE) {
+      return RelationshipTypes.ONE_TO_MANY
+    } else if (type === RelationshipTypes.ONE_TO_MANY) {
+      return RelationshipTypes.MANY_TO_ONE
+    } else if (type === RelationshipTypes.MANY_TO_MANY) {
+      return RelationshipTypes.MANY_TO_MANY
+    }
+  }
+
   function buildRelationships() {
     const id = Helpers.uuid()
     //Map temporary variables
@@ -200,6 +209,7 @@
       ...toRelationship,
       tableId: fromId,
       name: fromColumn,
+      relationshipType: otherRelationshipType(relationshipType),
       through: throughId,
       type: "link",
       _id: id,
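Note: the helper added above replaces the removed reactive statement that simply mirrored the from-side type onto the to-side. The inverse mapping it encodes, sketched:

    // Sketch of the inversion (names from this diff): the to-side of a
    // many-to-one link is one-to-many and vice versa; many-to-many is symmetric.
    otherRelationshipType(RelationshipTypes.MANY_TO_ONE)  // -> ONE_TO_MANY
    otherRelationshipType(RelationshipTypes.ONE_TO_MANY)  // -> MANY_TO_ONE
    otherRelationshipType(RelationshipTypes.MANY_TO_MANY) // -> MANY_TO_MANY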

View File

@@ -93,6 +93,7 @@
     try {
       await beforeSave()
       table = await tables.save(newTable)
+      await datasources.fetch()
       await afterSave(table)
     } catch (e) {
       notifications.error(e)

View File

@@ -65,6 +65,7 @@
     const updatedTable = cloneDeep(table)
     updatedTable.name = updatedName
     await tables.save(updatedTable)
+    await datasources.fetch()
     notifications.success("Table renamed successfully")
   }

View File

@@ -9,6 +9,18 @@
     faFileArrowUp,
     faChevronLeft,
     faCircleInfo,
+    faBold,
+    faItalic,
+    faHeading,
+    faQuoteLeft,
+    faListUl,
+    faListOl,
+    faLink,
+    faImage,
+    faEye,
+    faColumns,
+    faArrowsAlt,
+    faQuestionCircle,
   } from "@fortawesome/free-solid-svg-icons"
   import { faGithub, faDiscord } from "@fortawesome/free-brands-svg-icons"
@@ -22,7 +34,22 @@
     faEnvelope,
     faFileArrowUp,
     faChevronLeft,
-    faCircleInfo
+    faCircleInfo,
+    // -- Required for easyMDE use in the builder.
+    faBold,
+    faItalic,
+    faHeading,
+    faQuoteLeft,
+    faListUl,
+    faListOl,
+    faLink,
+    faImage,
+    faEye,
+    faColumns,
+    faArrowsAlt,
+    faQuestionCircle
+    // --
   )
   dom.watch()
 </script>

View File

@@ -21,7 +21,6 @@
   export let allowHelpers = true
   export let updateOnChange = true
   export let drawerLeft
-  export let key

   const dispatch = createEventDispatcher()
   let bindingDrawer

View File

@@ -0,0 +1,50 @@
+<script>
+  import { currentAsset, store } from "builderStore"
+  import { onMount } from "svelte"
+  import { Label, Combobox, Select } from "@budibase/bbui"
+  import {
+    getActionProviderComponents,
+    buildFormSchema,
+  } from "builderStore/dataBinding"
+  import { findComponent } from "builderStore/componentUtils"
+
+  export let parameters
+
+  onMount(() => {
+    if (!parameters.type) {
+      parameters.type = "top"
+    }
+  })
+
+  $: formComponent = findComponent($currentAsset.props, parameters.componentId)
+  $: formSchema = buildFormSchema(formComponent)
+  $: fieldOptions = Object.keys(formSchema || {})
+  $: actionProviders = getActionProviderComponents(
+    $currentAsset,
+    $store.selectedComponentId,
+    "ScrollTo"
+  )
+</script>
+
+<div class="root">
+  <Label small>Form</Label>
+  <Select
+    bind:value={parameters.componentId}
+    options={actionProviders}
+    getOptionLabel={x => x._instanceName}
+    getOptionValue={x => x._id}
+  />
+
+  <Label small>Field</Label>
+  <Combobox bind:value={parameters.field} options={fieldOptions} />
+</div>
+
+<style>
+  .root {
+    display: grid;
+    align-items: center;
+    gap: var(--spacing-m);
+    grid-template-columns: auto;
+    max-width: 400px;
+    margin: 0 auto;
+  }
+</style>

View File

@@ -16,6 +16,7 @@ export { default as S3Upload } from "./S3Upload.svelte"
 export { default as ExportData } from "./ExportData.svelte"
 export { default as ContinueIf } from "./ContinueIf.svelte"
 export { default as UpdateFieldValue } from "./UpdateFieldValue.svelte"
+export { default as ScrollTo } from "./ScrollTo.svelte"
 export { default as ShowNotification } from "./ShowNotification.svelte"
 export { default as PromptUser } from "./PromptUser.svelte"
 export { default as OpenSidePanel } from "./OpenSidePanel.svelte"

View File

@@ -70,6 +70,11 @@
     {
       "type": "form",
       "component": "UpdateFieldValue"
     },
+    {
+      "name": "Scroll To Field",
+      "type": "form",
+      "component": "ScrollTo"
+    },
     {
       "name": "Validate Form",
       "type": "form",

View File

@@ -11,6 +11,7 @@
   import TemplateCard from "components/common/TemplateCard.svelte"
   import createFromScratchScreen from "builderStore/store/screenTemplates/createFromScratchScreen"
   import { Roles } from "constants/backend"
+  import { lowercase } from "helpers"

   export let template

@@ -19,6 +20,7 @@
   const values = writable({ name: "", url: null })
   const validation = createValidationStore()
+  const encryptionValidation = createValidationStore()

   $: {
     const { url } = $values
@@ -27,8 +29,11 @@
       ...$values,
       url: url?.[0] === "/" ? url.substring(1, url.length) : url,
     })
+    encryptionValidation.check({ ...$values })
   }

+  $: encryptedFile = $values.file?.name?.endsWith(".enc.tar.gz")
+
   onMount(async () => {
     const lastChar = $auth.user?.firstName
       ? $auth.user?.firstName[$auth.user?.firstName.length - 1]
@@ -87,6 +92,9 @@
     appValidation.name(validation, { apps: applications })
     appValidation.url(validation, { apps: applications })
     appValidation.file(validation, { template })
+    encryptionValidation.addValidatorType("encryptionPassword", "text", true)
+
     // init validation
     const { url } = $values
     validation.check({
@@ -110,6 +118,9 @@
       data.append("templateName", template.name)
       data.append("templateKey", template.key)
       data.append("templateFile", $values.file)
+      if ($values.encryptionPassword?.trim()) {
+        data.append("encryptionPassword", $values.encryptionPassword.trim())
+      }
     }

     // Create App
@@ -143,67 +154,119 @@
       $goto(`/builder/app/${createdApp.instance._id}`)
     } catch (error) {
       creating = false
-      console.error(error)
-      notifications.error("Error creating app")
+      throw error
     }
   }
+
+  const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
+  let currentStep = Step.CONFIG
+
+  $: stepConfig = {
+    [Step.CONFIG]: {
+      title: "Create your app",
+      confirmText: template?.fromFile ? "Import app" : "Create app",
+      onConfirm: async () => {
+        if (encryptedFile) {
+          currentStep = Step.SET_PASSWORD
+          return false
+        } else {
+          try {
+            await createNewApp()
+          } catch (error) {
+            notifications.error("Error creating app")
+          }
+        }
+      },
+      isValid: $validation.valid,
+    },
+    [Step.SET_PASSWORD]: {
+      title: "Provide the export password",
+      confirmText: "Import app",
+      onConfirm: async () => {
+        try {
+          await createNewApp()
+        } catch (e) {
+          let message = "Error creating app"
+          if (e.message) {
+            message += `: ${lowercase(e.message)}`
+          }
+          notifications.error(message)
+          return false
+        }
+      },
+      isValid: $encryptionValidation.valid,
+    },
+  }
 </script>

 <ModalContent
-  title={"Create your app"}
-  confirmText={template?.fromFile ? "Import app" : "Create app"}
-  onConfirm={createNewApp}
-  disabled={!$validation.valid}
+  title={stepConfig[currentStep].title}
+  confirmText={stepConfig[currentStep].confirmText}
+  onConfirm={stepConfig[currentStep].onConfirm}
+  disabled={!stepConfig[currentStep].isValid}
 >
-  {#if template && !template?.fromFile}
-    <TemplateCard
-      name={template.name}
-      imageSrc={template.image}
-      backgroundColour={template.background}
-      overlayEnabled={false}
-      icon={template.icon}
-    />
-  {/if}
-  {#if template?.fromFile}
-    <Dropzone
-      error={$validation.touched.file && $validation.errors.file}
-      gallery={false}
-      label="File to import"
-      value={[$values.file]}
-      on:change={e => {
-        $values.file = e.detail?.[0]
-        $validation.touched.file = true
-      }}
-    />
-  {/if}
-  <Input
-    autofocus={true}
-    bind:value={$values.name}
-    disabled={creating}
-    error={$validation.touched.name && $validation.errors.name}
-    on:blur={() => ($validation.touched.name = true)}
-    on:change={nameToUrl($values.name)}
-    label="Name"
-    placeholder={defaultAppName}
-  />
-  <span>
-    <Input
-      bind:value={$values.url}
-      disabled={creating}
-      error={$validation.touched.url && $validation.errors.url}
-      on:blur={() => ($validation.touched.url = true)}
-      on:change={tidyUrl($values.url)}
-      label="URL"
-      placeholder={$values.url
-        ? $values.url
-        : `/${resolveAppUrl(template, $values.name)}`}
-    />
-    {#if $values.url && $values.url !== "" && !$validation.errors.url}
-      <div class="app-server" title={appUrl}>
-        {appUrl}
-      </div>
-    {/if}
-  </span>
+  {#if currentStep === Step.CONFIG}
+    {#if template && !template?.fromFile}
+      <TemplateCard
+        name={template.name}
+        imageSrc={template.image}
+        backgroundColour={template.background}
+        overlayEnabled={false}
+        icon={template.icon}
+      />
+    {/if}
+    {#if template?.fromFile}
+      <Dropzone
+        error={$validation.touched.file && $validation.errors.file}
+        gallery={false}
+        label="File to import"
+        value={[$values.file]}
+        on:change={e => {
+          $values.file = e.detail?.[0]
+          $validation.touched.file = true
+        }}
+      />
+    {/if}
+    <Input
+      autofocus={true}
+      bind:value={$values.name}
+      disabled={creating}
+      error={$validation.touched.name && $validation.errors.name}
+      on:blur={() => ($validation.touched.name = true)}
+      on:change={nameToUrl($values.name)}
+      label="Name"
+      placeholder={defaultAppName}
+    />
+    <span>
+      <Input
+        bind:value={$values.url}
+        disabled={creating}
+        error={$validation.touched.url && $validation.errors.url}
+        on:blur={() => ($validation.touched.url = true)}
+        on:change={tidyUrl($values.url)}
+        label="URL"
+        placeholder={$values.url
+          ? $values.url
+          : `/${resolveAppUrl(template, $values.name)}`}
+      />
+      {#if $values.url && $values.url !== "" && !$validation.errors.url}
+        <div class="app-server" title={appUrl}>
+          {appUrl}
+        </div>
+      {/if}
+    </span>
+  {/if}
+  {#if currentStep === Step.SET_PASSWORD}
+    <Input
+      autofocus={true}
+      label="Imported file password"
+      type="password"
+      bind:value={$values.encryptionPassword}
+      disabled={creating}
+      on:blur={() => ($encryptionValidation.touched.encryptionPassword = true)}
+      error={$encryptionValidation.touched.encryptionPassword &&
+        $encryptionValidation.errors.encryptionPassword}
+    />
+  {/if}
 </ModalContent>

 <style>
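Note (an inference about the bbui ModalContent contract, not stated in this diff): onConfirm returning false appears to keep the modal open; the CONFIG step relies on that to hand off to SET_PASSWORD when an encrypted export is detected, and the SET_PASSWORD step relies on it to keep the modal up after a failed import so the password can be retried.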

View File

@@ -16,7 +16,7 @@
   let password = null
   const validation = createValidationStore()
-  validation.addValidatorType("password", "password", true)
+  validation.addValidatorType("password", "password", true, { minLength: 8 })
   $: validation.observe("password", password)

   const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }

View File

@@ -21,7 +21,7 @@ export const createValidationStore = () => {
     validator[propertyName] = propertyValidator
   }

-  const addValidatorType = (propertyName, type, required) => {
+  const addValidatorType = (propertyName, type, required, options) => {
     if (!type || !propertyName) {
       return
     }
@@ -45,11 +45,8 @@ export const createValidationStore = () => {
       propertyValidator = propertyValidator.required()
     }

-    // We want to do this after the possible required validation, to prioritise the required error
-    switch (type) {
-      case "password":
-        propertyValidator = propertyValidator.min(8)
-        break
+    if (options?.minLength) {
+      propertyValidator = propertyValidator.min(options.minLength)
     }

     validator[propertyName] = propertyValidator
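Note: length rules are now opt-in per call site rather than hardcoded for the "password" type; the export modal change above is the only call in this diff using the new option:

    // Usage (from the export modal earlier in this diff):
    validation.addValidatorType("password", "password", true, { minLength: 8 })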

View File

@@ -32,7 +32,7 @@
   </header>
   <Body size="M">
     Budibase internal tables are part of your app, so the data will be
-    stored in your apps context.
+    stored in your app's context.
   </Body>
 </Layout>
 <Divider />

View File

@@ -1,6 +1,6 @@
 <script>
   import { notifications } from "@budibase/bbui"
-  import { apps, templates, licensing, groups } from "stores/portal"
+  import { admin, apps, templates, licensing, groups } from "stores/portal"
   import { onMount } from "svelte"
   import { redirect } from "@roxi/routify"

@@ -9,14 +9,18 @@
   onMount(async () => {
     try {
-      // Always load latest
-      await Promise.all([
-        licensing.init(),
-        templates.load(),
-        groups.actions.init(),
-      ])
-      if ($templates?.length === 0) {
+      const promises = [licensing.init()]
+
+      if (!$admin.offlineMode) {
+        promises.push(templates.load())
+      }
+      promises.push(groups.actions.init())
+
+      // Always load latest
+      await Promise.all(promises)
+
+      if (!$admin.offlineMode && $templates?.length === 0) {
         notifications.error("There was a problem loading quick start templates")
       }

View File

@@ -247,7 +247,7 @@
   >
     Create new app
   </Button>
-  {#if $apps?.length > 0}
+  {#if $apps?.length > 0 && !$admin.offlineMode}
     <Button
       size="M"
       secondary

View File

@@ -0,0 +1,14 @@
+<script>
+  export let value
+</script>
+
+<div style="display: flex; ">
+  {#if value === "Unavailable"}
+    Email already in use. Please use a different email.
+  {:else}
+    {value}
+  {/if}
+</div>
+
+<style>
+</style>

View File

@@ -1,6 +1,7 @@
 <script>
   import { Body, ModalContent, Table } from "@budibase/bbui"
   import { onMount } from "svelte"
+  import InviteResponseRenderer from "./InviteResponseRenderer.svelte"

   export let inviteUsersResponse

@@ -50,7 +51,7 @@
   }
 </script>

-<ModalContent size="M" showCancelButton={false} {title} confirmText="Done">
+<ModalContent size="L" showCancelButton={false} {title} confirmText="Done">
   {#if hasSuccess}
     <Body size="XS">
       Your users should now receive an email invite to get access to their
@@ -67,6 +68,9 @@
       allowEditColumns={false}
       allowEditRows={false}
       allowSelectRows={false}
+      customRenderers={[
+        { column: "reason", component: InviteResponseRenderer },
+      ]}
     />
   {/if}
 </ModalContent>

View File

@@ -117,6 +117,10 @@ export function createDatasourcesStore() {
       ...state,
       list: [...state.list, datasource],
     }))
+
+    // If this is a new datasource then we should refresh the tables list,
+    // because otherwise we'll never see the new tables
+    tables.fetch()
   }

   // Update existing datasource
// Update existing datasource // Update existing datasource

View File

@@ -1,5 +1,4 @@
 import { get, writable, derived } from "svelte/store"
-import { datasources } from "./"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
 import { SWITCHABLE_TYPES } from "constants/backend"
@@ -63,7 +62,6 @@ export function createTablesStore() {
     const savedTable = await API.saveTable(updatedTable)
     replaceTable(savedTable._id, savedTable)
-    await datasources.fetch()
     select(savedTable._id)
     return savedTable
   }

View File

@@ -46,6 +46,7 @@ export function createAdminStore() {
       store.accountPortalUrl = environment.accountPortalUrl
       store.isDev = environment.isDev
       store.baseUrl = environment.baseUrl
+      store.offlineMode = environment.offlineMode
       return store
     })
   }

View File

@@ -2221,7 +2221,8 @@
         "ValidateForm",
         "ClearForm",
         "ChangeFormStep",
-        "UpdateFieldValue"
+        "UpdateFieldValue",
+        "ScrollTo"
       ],
       "styles": ["size"],
       "size": {
@@ -3543,7 +3544,8 @@
         {
          "type": "field/sortable",
          "label": "Sort column",
-          "key": "sortColumn"
+          "key": "sortColumn",
+          "placeholder": "None"
         },
         {
           "type": "select",
@@ -4322,7 +4324,8 @@
         {
           "type": "field/sortable",
           "label": "Sort by",
-          "key": "sortColumn"
+          "key": "sortColumn",
+          "placeholder": "None"
         },
         {
           "type": "select",
@@ -4566,7 +4569,8 @@
         {
           "type": "field/sortable",
           "label": "Sort column",
-          "key": "sortColumn"
+          "key": "sortColumn",
+          "placeholder": "None"
         },
         {
           "type": "select",
@@ -4734,7 +4738,8 @@
         {
           "type": "field/sortable",
           "label": "Sort column",
-          "key": "sortColumn"
+          "key": "sortColumn",
+          "placeholder": "None"
         },
         {
           "type": "select",

View File

@@ -283,7 +283,7 @@
     // Skip if the value is the same
     if (!skipCheck && fieldState.value === value) {
-      return true
+      return false
     }

     // Update field state
@@ -295,7 +295,7 @@
       return state
     })

-    return !error
+    return true
   }

   // Clears the value of a certain field back to the default value
@@ -376,8 +376,9 @@
     deregister,
     validate: () => {
       // Validate the field by force setting the same value again
-      const { fieldState } = get(getField(field))
-      return setValue(fieldState.value, true)
+      const fieldInfo = getField(field)
+      setValue(get(fieldInfo).fieldState.value, true)
+      return !get(fieldInfo).fieldState.error
     },
   }
 }
@@ -404,12 +405,20 @@
     }
   }

+  const handleScrollToField = ({ field }) => {
+    const fieldId = get(getField(field)).fieldState.fieldId
+    const label = document.querySelector(`label[for="${fieldId}"]`)
+    document.getElementById(fieldId).focus({ preventScroll: true })
+    label.scrollIntoView({ behavior: "smooth" })
+  }
+
   // Action context to pass to children
   const actions = [
     { type: ActionTypes.ValidateForm, callback: formApi.validate },
     { type: ActionTypes.ClearForm, callback: formApi.reset },
     { type: ActionTypes.ChangeFormStep, callback: formApi.changeStep },
     { type: ActionTypes.UpdateFieldValue, callback: handleUpdateFieldValue },
+    { type: ActionTypes.ScrollTo, callback: handleScrollToField },
   ]
 </script>
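Note: the two return-value flips above change setValue's contract from "is the field valid?" to "did the value actually change?", while a field's validate() now forces re-validation and reads validity back from field state, as a sketch (names from this diff):

    const fieldInfo = getField(field)
    setValue(get(fieldInfo).fieldState.value, true) // skipCheck=true forces re-validation
    const isValid = !get(fieldInfo).fieldState.error // validity read from state, not from setValue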

View File

@@ -6,7 +6,7 @@
 <div class="notifications">
   {#if $notificationStore}
-    {#each $notificationStore as { type, icon, message, id, dismissable } (id)}
+    {#each $notificationStore as { type, icon, message, id, dismissable, count } (id)}
       <div
         in:fly={{
           duration: 300,
@@ -17,7 +17,7 @@
       >
         <Notification
           {type}
-          {message}
+          message={count > 1 ? `(${count}) ${message}` : message}
          {icon}
          {dismissable}
          on:dismiss={() => notificationStore.actions.dismiss(id)}

View File

@@ -29,6 +29,7 @@ export const ActionTypes = {
   SetDataProviderSorting: "SetDataProviderSorting",
   ClearForm: "ClearForm",
   ChangeFormStep: "ChangeFormStep",
+  ScrollTo: "ScrollTo",
 }

 export const DNDPlaceholderID = "dnd-placeholder"

View File

@@ -13,7 +13,13 @@ const createNotificationStore = () => {
     setTimeout(() => (block = false), timeout)
   }

-  const send = (message, type = "info", icon, autoDismiss = true) => {
+  const send = (
+    message,
+    type = "info",
+    icon,
+    autoDismiss = true,
+    count = 1
+  ) => {
     if (block) {
       return
     }
@@ -33,6 +39,11 @@ const createNotificationStore = () => {
     }
     const _id = id()
     store.update(state => {
+      const duplicateError = state.find(err => err.message === message)
+      if (duplicateError) {
+        duplicateError.count += 1
+        return [...state]
+      }
       return [
         ...state,
         {
@@ -42,6 +53,7 @@ const createNotificationStore = () => {
           icon,
           dismissable: !autoDismiss,
           delay: get(store) != null,
+          count,
         },
       ]
     })

View File

@@ -153,6 +153,17 @@ const navigationHandler = action => {
   routeStore.actions.navigate(url, peek, externalNewTab)
 }

+const scrollHandler = async (action, context) => {
+  return await executeActionHandler(
+    context,
+    action.parameters.componentId,
+    ActionTypes.ScrollTo,
+    {
+      field: action.parameters.field,
+    }
+  )
+}
+
 const queryExecutionHandler = async action => {
   const { datasourceId, queryId, queryParams, notificationOverride } =
     action.parameters
@@ -369,6 +380,7 @@ const handlerMap = {
   ["Duplicate Row"]: duplicateRowHandler,
   ["Delete Row"]: deleteRowHandler,
   ["Navigate To"]: navigationHandler,
+  ["Scroll To Field"]: scrollHandler,
   ["Execute Query"]: queryExecutionHandler,
   ["Trigger Automation"]: triggerAutomationHandler,
   ["Validate Form"]: validateFormHandler,

View File

@@ -107,14 +107,14 @@ export const deriveStores = context => {
     // Update local state
     table.set(newTable)

+    // Broadcast change to external state can be updated, as this change
+    // will not be received by the builder websocket because we caused it ourselves
+    dispatch("updatetable", newTable)
+
     // Update server
     if (get(config).allowSchemaChanges) {
       await API.saveTable(newTable)
     }
-
-    // Broadcast change to external state can be updated, as this change
-    // will not be received by the builder websocket because we caused it ourselves
-    dispatch("updatetable", newTable)
   }

   return {

@@ -1 +1 @@
-Subproject commit f4b8449aac9bd265214396afbdce7ff984a2ae34
+Subproject commit 2c9172685cdceef03172bea779e94cb52ff6d1de

View File

@@ -26,11 +26,21 @@ RUN apt-get install unzip libaio1
 COPY scripts/integrations/oracle/ scripts/integrations/oracle/
 RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh

+# Install postgres client for pg_dump utils
+RUN apt update && apt upgrade -y \
+    && apt install software-properties-common apt-transport-https curl gpg -y \
+    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
+    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
+    && apt update -y \
+    && apt install postgresql-client-15 -y \
+    && apt remove software-properties-common apt-transport-https curl gpg -y
+
 COPY package.json .
 COPY dist/yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true \
 # Remove unneeded data from file system to reduce image size
-RUN yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
+  && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
   && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

 COPY dist/ dist/

View File

@@ -21,9 +21,8 @@ const baseConfig: Config.InitialProjectOptions = {
 }

 // add pro sources if they exist
-if (fs.existsSync("../pro/packages")) {
-  baseConfig.moduleNameMapper!["@budibase/pro"] =
-    "<rootDir>/../pro/packages/pro/src"
+if (fs.existsSync("../pro/src")) {
+  baseConfig.moduleNameMapper!["@budibase/pro"] = "<rootDir>/../pro/src"
 }

 const config: Config.InitialOptions = {

View File

@@ -1,10 +1,10 @@
 {
-  "watch": ["src", "../backend-core", "../pro/packages/pro"],
+  "watch": ["src", "../backend-core", "../pro"],
   "ext": "js,ts,json",
   "ignore": [
     "src/**/*.spec.ts",
     "src/**/*.spec.js",
     "../backend-core/dist/**/*"
   ],
-  "exec": "node ./scripts/build.js && node ./dist/index.js"
+  "exec": "yarn build && node ./dist/index.js"
 }

View File

@@ -63,6 +63,7 @@
   "airtable": "0.10.1",
   "arangojs": "7.2.0",
   "aws-sdk": "2.1030.0",
+  "bcrypt": "5.1.0",
   "bcryptjs": "2.4.3",
   "bull": "4.10.1",
   "chmodr": "1.2.0",

View File

@@ -115,7 +115,18 @@ function checkAppName(
   }
 }

-async function createInstance(appId: string, template: any) {
+interface AppTemplate {
+  templateString: string
+  useTemplate: string
+  file?: {
+    type: string
+    path: string
+    password?: string
+  }
+  key?: string
+}
+
+async function createInstance(appId: string, template: AppTemplate) {
   const db = context.getAppDB()
   await db.put({
     _id: "_design/database",
@@ -240,19 +251,24 @@ export async function fetchAppPackage(ctx: UserCtx) {
 async function performAppCreate(ctx: UserCtx) {
   const apps = (await dbCore.getAllApps({ dev: true })) as App[]
   const name = ctx.request.body.name,
-    possibleUrl = ctx.request.body.url
+    possibleUrl = ctx.request.body.url,
+    encryptionPassword = ctx.request.body.encryptionPassword
   checkAppName(ctx, apps, name)
   const url = sdk.applications.getAppUrl({ name, url: possibleUrl })
   checkAppUrl(ctx, apps, url)

   const { useTemplate, templateKey, templateString } = ctx.request.body
-  const instanceConfig: any = {
+  const instanceConfig: AppTemplate = {
     useTemplate,
     key: templateKey,
     templateString,
   }
   if (ctx.request.files && ctx.request.files.templateFile) {
-    instanceConfig.file = ctx.request.files.templateFile
+    instanceConfig.file = {
+      ...(ctx.request.files.templateFile as any),
+      password: encryptionPassword,
+    }
   }
   const tenantId = tenancy.isMultiTenant() ? tenancy.getTenantId() : null
   const appId = generateDevAppID(generateAppID(tenantId))

View File

@@ -441,3 +441,18 @@ export async function query(ctx: UserCtx) {
     ctx.throw(400, err)
   }
 }
+
+export async function getExternalSchema(ctx: UserCtx) {
+  const { datasource } = ctx.request.body
+  const enrichedDatasource = await getAndMergeDatasource(datasource)
+  const connector = await getConnector(enrichedDatasource)
+
+  if (!connector.getExternalSchema) {
+    ctx.throw(400, "Datasource does not support exporting external schema")
+  }
+
+  const response = await connector.getExternalSchema()
+
+  ctx.body = {
+    schema: response,
+  }
+}
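Note: a datasource opts into this endpoint by advertising the EXPORT_SCHEMA feature and implementing getExternalSchema(), as the MSSQL and MySQL integrations do later in this diff. The connector contract this controller relies on, sketched:

    // Sketch (names from this diff):
    interface SchemaExportingDatasource {
      getExternalSchema?: () => Promise<string> // DDL dump; absent if unsupported
    }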

View File

@@ -3,10 +3,10 @@ import * as userController from "../user"
 import { FieldTypes } from "../../../constants"
 import { context } from "@budibase/backend-core"
 import { makeExternalQuery } from "../../../integrations/base/query"
-import { Row, Table } from "@budibase/types"
+import { FieldType, Row, Table, UserCtx } from "@budibase/types"
 import { Format } from "../view/exporters"
-import { UserCtx } from "@budibase/types"
 import sdk from "../../../sdk"

 const validateJs = require("validate.js")
 const { cloneDeep } = require("lodash/fp")
@@ -20,6 +20,13 @@ validateJs.extend(validateJs.validators.datetime, {
   },
 })

+function isForeignKey(key: string, table: Table) {
+  const relationships = Object.values(table.schema).filter(
+    column => column.type === FieldType.LINK
+  )
+  return relationships.some(relationship => relationship.foreignKey === key)
+}
+
 export async function getDatasourceAndQuery(json: any) {
   const datasourceId = json.endpoint.datasourceId
   const datasource = await sdk.datasources.get(datasourceId)
@@ -65,6 +72,10 @@ export async function validate({
     const column = fetchedTable.schema[fieldName]
     const constraints = cloneDeep(column.constraints)
     const type = column.type
+    // foreign keys are likely to be enriched
+    if (isForeignKey(fieldName, fetchedTable)) {
+      continue
+    }
     // formulas shouldn't validated, data will be deleted anyway
     if (type === FieldTypes.FORMULA || column.autocolumn) {
       continue

View File

@@ -26,6 +26,7 @@ import {
   RelationshipTypes,
 } from "@budibase/types"
 import sdk from "../../../sdk"
+import { builderSocket } from "../../../websockets"

 const { cloneDeep } = require("lodash/fp")

 async function makeTableRequest(
@@ -318,6 +319,11 @@ export async function save(ctx: UserCtx) {
   datasource.entities[tableToSave.name] = tableToSave
   await db.put(datasource)

+  // Since tables are stored inside datasources, we need to notify clients
+  // that the datasource definition changed
+  const updatedDatasource = await db.get(datasource._id)
+  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
+
   return tableToSave
 }

@@ -344,6 +350,11 @@ export async function destroy(ctx: UserCtx) {
   await db.put(datasource)

+  // Since tables are stored inside datasources, we need to notify clients
+  // that the datasource definition changed
+  const updatedDatasource = await db.get(datasource._id)
+  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
+
   return tableToDelete
 }

View File

@@ -66,5 +66,10 @@ router
     authorized(permissions.BUILDER),
     datasourceController.destroy
   )
+  .get(
+    "/api/datasources/:datasourceId/schema/external",
+    authorized(permissions.BUILDER),
+    datasourceController.getExternalSchema
+  )

 export default router

View File

@@ -15,7 +15,6 @@ import * as api from "./api"
 import * as automations from "./automations"
 import { Thread } from "./threads"
 import * as redis from "./utilities/redis"
-import { initialise as initialiseWebsockets } from "./websockets"
 import { events, logging, middleware, timers } from "@budibase/backend-core"
 import { startup } from "./startup"

 const Sentry = require("@sentry/node")

View File

@@ -81,6 +81,7 @@ const environment = {
   SELF_HOSTED: process.env.SELF_HOSTED,
   HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
   FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
+  OFFLINE_MODE: process.env.OFFLINE_MODE,
   // old
   CLIENT_ID: process.env.CLIENT_ID,
   _set(key: string, value: any) {

View File

@@ -13,7 +13,7 @@ import {
   Row,
   SearchFilters,
   SortJson,
-  Table,
+  ExternalTable,
   TableRequest,
 } from "@budibase/types"
 import { OAuth2Client } from "google-auth-library"
@@ -139,7 +139,7 @@ const SCHEMA: Integration = {
 class GoogleSheetsIntegration implements DatasourcePlus {
   private readonly config: GoogleSheetsConfig
   private client: GoogleSpreadsheet
-  public tables: Record<string, Table> = {}
+  public tables: Record<string, ExternalTable> = {}
   public schemaErrors: Record<string, string> = {}

   constructor(config: GoogleSheetsConfig) {
@@ -253,12 +253,18 @@ class GoogleSheetsIntegration implements DatasourcePlus {
     return sheets.map(s => s.title)
   }

-  getTableSchema(title: string, headerValues: string[], id?: string) {
+  getTableSchema(
+    title: string,
+    headerValues: string[],
+    datasourceId: string,
+    id?: string
+  ) {
     // base table
-    const table: Table = {
+    const table: ExternalTable = {
       name: title,
       primary: [GOOGLE_SHEETS_PRIMARY_KEY],
       schema: {},
+      sourceId: datasourceId,
     }
     if (id) {
       table._id = id
@@ -273,20 +279,28 @@ class GoogleSheetsIntegration implements DatasourcePlus {
     return table
   }

-  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
+  async buildSchema(
+    datasourceId: string,
+    entities: Record<string, ExternalTable>
+  ) {
+    // not fully configured yet
+    if (!this.config.auth) {
+      return
+    }
     await this.connect()
     const sheets = this.client.sheetsByIndex
-    const tables: Record<string, Table> = {}
+    const tables: Record<string, ExternalTable> = {}
     await utils.parallelForeach(
       sheets,
       async sheet => {
         // must fetch rows to determine schema
-        await sheet.getRows({ limit: 0, offset: 0 })
+        await sheet.getRows()
+
         const id = buildExternalTableId(datasourceId, sheet.title)
         tables[sheet.title] = this.getTableSchema(
           sheet.title,
           sheet.headerValues,
+          datasourceId,
           id
         )
       },

View File

@@ -2,7 +2,7 @@ import {
   DatasourceFieldType,
   Integration,
   Operation,
-  Table,
+  ExternalTable,
   TableSchema,
   QueryJson,
   QueryType,
@@ -43,6 +43,7 @@ const SCHEMA: Integration = {
   features: {
     [DatasourceFeature.CONNECTION_CHECKING]: true,
     [DatasourceFeature.FETCH_TABLE_NAMES]: true,
+    [DatasourceFeature.EXPORT_SCHEMA]: true,
   },
   datasource: {
     user: {
@@ -97,7 +98,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
   private index: number = 0
   private readonly pool: any
   private client: any
-  public tables: Record<string, Table> = {}
+  public tables: Record<string, ExternalTable> = {}
   public schemaErrors: Record<string, string> = {}

   MASTER_TABLES = [
@@ -220,7 +221,10 @@
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
-  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
+  async buildSchema(
+    datasourceId: string,
+    entities: Record<string, ExternalTable>
+  ) {
     await this.connect()
     let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
     if (tableInfo == null || !Array.isArray(tableInfo)) {
@@ -233,7 +237,7 @@
       .map((record: any) => record.TABLE_NAME)
       .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)

-    const tables: Record<string, Table> = {}
+    const tables: Record<string, ExternalTable> = {}
     for (let tableName of tableNames) {
       // get the column definition (type)
       const definition = await this.runSQL(this.getDefinitionSQL(tableName))
@@ -276,6 +280,7 @@
       }
       tables[tableName] = {
         _id: buildExternalTableId(datasourceId, tableName),
+        sourceId: datasourceId,
         primary: primaryKeys,
         name: tableName,
         schema,
@@ -336,6 +341,81 @@
       result.recordset ? result.recordset : [{ [operation]: true }]
     return this.queryWithReturning(json, queryFn, processFn)
   }
+
+  async getExternalSchema() {
+    // Query to retrieve table schema
+    const query = `
+      SELECT
+        t.name AS TableName,
+        c.name AS ColumnName,
+        ty.name AS DataType,
+        c.max_length AS MaxLength,
+        c.is_nullable AS IsNullable,
+        c.is_identity AS IsIdentity
+      FROM
+        sys.tables t
+        INNER JOIN sys.columns c ON t.object_id = c.object_id
+        INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
+      WHERE
+        t.is_ms_shipped = 0
+      ORDER BY
+        t.name, c.column_id
+    `
+    await this.connect()
+
+    const result = await this.internalQuery({
+      sql: query,
+    })
+
+    const scriptParts = []
+    const tables: any = {}
+    for (const row of result.recordset) {
+      const {
+        TableName,
+        ColumnName,
+        DataType,
+        MaxLength,
+        IsNullable,
+        IsIdentity,
+      } = row
+
+      if (!tables[TableName]) {
+        tables[TableName] = {
+          columns: [],
+        }
+      }
+
+      const columnDefinition = `${ColumnName} ${DataType}${
+        MaxLength ? `(${MaxLength})` : ""
+      }${IsNullable ? " NULL" : " NOT NULL"}`
+
+      tables[TableName].columns.push(columnDefinition)
+
+      if (IsIdentity) {
+        tables[TableName].identityColumn = ColumnName
+      }
+    }
+
+    // Generate SQL statements for table creation
+    for (const tableName in tables) {
+      const { columns, identityColumn } = tables[tableName]
+
+      let createTableStatement = `CREATE TABLE [${tableName}] (\n`
+      createTableStatement += columns.join(",\n")
+      if (identityColumn) {
+        createTableStatement += `,\n CONSTRAINT [PK_${tableName}] PRIMARY KEY (${identityColumn})`
+      }
+      createTableStatement += "\n);"
+
+      scriptParts.push(createTableStatement)
+    }
+
+    const schema = scriptParts.join("\n")
+    return schema
+  }
 }

 export default {
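Note: illustrative output only (hypothetical table; MaxLength comes from sys.columns, so it is a byte count and is appended to every type, including int):

    // What getExternalSchema() above would roughly emit for one table:
    const example = `CREATE TABLE [tasks] (
    id int(4) NOT NULL,
    name varchar(255) NULL,
     CONSTRAINT [PK_tasks] PRIMARY KEY (id)
    );`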
export default { export default {

View File

@ -385,7 +385,7 @@ class MongoIntegration implements IntegrationBase {
createObjectIds(json: any) { createObjectIds(json: any) {
const self = this const self = this
function interpolateObjectIds(json: any) { function interpolateObjectIds(json: any) {
for (let field of Object.keys(json)) { for (let field of Object.keys(json || {})) {
if (json[field] instanceof Object) { if (json[field] instanceof Object) {
json[field] = self.createObjectIds(json[field]) json[field] = self.createObjectIds(json[field])
} }
@ -489,7 +489,11 @@ class MongoIntegration implements IntegrationBase {
switch (query.extra.actionType) { switch (query.extra.actionType) {
case "find": { case "find": {
return await collection.find(json).toArray() if (json) {
return await collection.find(json).toArray()
} else {
return await collection.find().toArray()
}
} }
case "findOne": { case "findOne": {
return await collection.findOne(json) return await collection.findOne(json)
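Both hunks above are null-safety fixes; a one-line illustration of the guard they add (values hypothetical):

const json: any = null
Object.keys(json || {}) // [] (previously Object.keys(null) threw a TypeError)
// likewise, collection.find() is now called instead of collection.find(null)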


@ -4,7 +4,7 @@ import {
QueryType, QueryType,
QueryJson, QueryJson,
SqlQuery, SqlQuery,
Table, ExternalTable,
TableSchema, TableSchema,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
@ -39,6 +39,7 @@ const SCHEMA: Integration = {
features: { features: {
[DatasourceFeature.CONNECTION_CHECKING]: true, [DatasourceFeature.CONNECTION_CHECKING]: true,
[DatasourceFeature.FETCH_TABLE_NAMES]: true, [DatasourceFeature.FETCH_TABLE_NAMES]: true,
[DatasourceFeature.EXPORT_SCHEMA]: true,
}, },
datasource: { datasource: {
host: { host: {
@ -123,7 +124,7 @@ export function bindingTypeCoerce(bindings: any[]) {
class MySQLIntegration extends Sql implements DatasourcePlus { class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig private config: MySQLConfig
private client?: mysql.Connection private client?: mysql.Connection
public tables: Record<string, Table> = {} public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {} public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) { constructor(config: MySQLConfig) {
@ -220,8 +221,11 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
} }
} }
async buildSchema(datasourceId: string, entities: Record<string, Table>) { async buildSchema(
const tables: { [key: string]: Table } = {} datasourceId: string,
entities: Record<string, ExternalTable>
) {
const tables: { [key: string]: ExternalTable } = {}
await this.connect() await this.connect()
try { try {
@ -259,6 +263,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
if (!tables[tableName]) { if (!tables[tableName]) {
tables[tableName] = { tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName), _id: buildExternalTableId(datasourceId, tableName),
sourceId: datasourceId,
primary: primaryKeys, primary: primaryKeys,
name: tableName, name: tableName,
schema, schema,
@ -324,6 +329,36 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
await this.disconnect() await this.disconnect()
} }
} }
async getExternalSchema() {
try {
const [databaseResult] = await this.internalQuery({
sql: `SHOW CREATE DATABASE \`${this.config.database}\``,
})
let dumpContent = [databaseResult["Create Database"]]
const tablesResult = await this.internalQuery({
sql: `SHOW TABLES`,
})
for (const row of tablesResult) {
const tableName = row[`Tables_in_${this.config.database}`]
const createTableResults = await this.internalQuery({
sql: `SHOW CREATE TABLE \`${tableName}\``,
})
const createTableStatement = createTableResults[0]["Create Table"]
dumpContent.push(createTableStatement)
}
const schema = dumpContent.join("\n")
return schema
} finally {
await this.disconnect()
}
}
} }
export default { export default {
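The same hook for MySQL returns the SHOW CREATE DATABASE statement followed by one SHOW CREATE TABLE statement per table; a hedged usage sketch (import path and config shape assumed):

import mysql from "./mysql"

async function exportMysqlSchema(config: any): Promise<string> {
  const integration = new mysql.integration(config)
  return integration.getExternalSchema()
}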


@ -5,7 +5,7 @@ import {
QueryJson, QueryJson,
QueryType, QueryType,
SqlQuery, SqlQuery,
Table, ExternalTable,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
ConnectionInfo, ConnectionInfo,
@ -108,7 +108,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig private readonly config: OracleConfig
private index: number = 1 private index: number = 1
public tables: Record<string, Table> = {} public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {} public schemaErrors: Record<string, string> = {}
private readonly COLUMNS_SQL = ` private readonly COLUMNS_SQL = `
@ -262,13 +262,16 @@ class OracleIntegration extends Sql implements DatasourcePlus {
* @param {*} datasourceId - datasourceId to fetch * @param {*} datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built * @param entities - the tables that are to be built
*/ */
async buildSchema(datasourceId: string, entities: Record<string, Table>) { async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({ const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL, sql: this.COLUMNS_SQL,
}) })
const oracleTables = this.mapColumns(columnsResponse) const oracleTables = this.mapColumns(columnsResponse)
const tables: { [key: string]: Table } = {} const tables: { [key: string]: ExternalTable } = {}
// iterate each table // iterate each table
Object.values(oracleTables).forEach(oracleTable => { Object.values(oracleTables).forEach(oracleTable => {
@ -279,6 +282,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
primary: [], primary: [],
name: oracleTable.name, name: oracleTable.name,
schema: {}, schema: {},
sourceId: datasourceId,
} }
tables[oracleTable.name] = table tables[oracleTable.name] = table
} }


@ -1,10 +1,11 @@
import fs from "fs"
import { import {
Integration, Integration,
DatasourceFieldType, DatasourceFieldType,
QueryType, QueryType,
QueryJson, QueryJson,
SqlQuery, SqlQuery,
Table, ExternalTable,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
ConnectionInfo, ConnectionInfo,
@ -21,6 +22,8 @@ import { PostgresColumn } from "./base/types"
import { escapeDangerousCharacters } from "../utilities" import { escapeDangerousCharacters } from "../utilities"
import { Client, ClientConfig, types } from "pg" import { Client, ClientConfig, types } from "pg"
import { exec } from "child_process"
import { storeTempFile } from "../utilities/fileSystem"
// Return "date" and "timestamp" types as plain strings. // Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone. // This lets us reference the original stored timezone.
@ -57,6 +60,7 @@ const SCHEMA: Integration = {
features: { features: {
[DatasourceFeature.CONNECTION_CHECKING]: true, [DatasourceFeature.CONNECTION_CHECKING]: true,
[DatasourceFeature.FETCH_TABLE_NAMES]: true, [DatasourceFeature.FETCH_TABLE_NAMES]: true,
[DatasourceFeature.EXPORT_SCHEMA]: true,
}, },
datasource: { datasource: {
host: { host: {
@ -139,7 +143,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
private readonly config: PostgresConfig private readonly config: PostgresConfig
private index: number = 1 private index: number = 1
private open: boolean private open: boolean
public tables: Record<string, Table> = {} public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {} public schemaErrors: Record<string, string> = {}
COLUMNS_SQL!: string COLUMNS_SQL!: string
@ -178,6 +182,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const response: ConnectionInfo = { const response: ConnectionInfo = {
connected: false, connected: false,
} }
try { try {
await this.openConnection() await this.openConnection()
response.connected = true response.connected = true
@ -256,7 +261,10 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
* @param {*} datasourceId - datasourceId to fetch * @param {*} datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built * @param entities - the tables that are to be built
*/ */
async buildSchema(datasourceId: string, entities: Record<string, Table>) { async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
let tableKeys: { [key: string]: string[] } = {} let tableKeys: { [key: string]: string[] } = {}
await this.openConnection() await this.openConnection()
try { try {
@ -282,7 +290,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const columnsResponse: { rows: PostgresColumn[] } = const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL) await this.client.query(this.COLUMNS_SQL)
const tables: { [key: string]: Table } = {} const tables: { [key: string]: ExternalTable } = {}
for (let column of columnsResponse.rows) { for (let column of columnsResponse.rows) {
const tableName: string = column.table_name const tableName: string = column.table_name
@ -295,6 +303,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
primary: tableKeys[tableName] || [], primary: tableKeys[tableName] || [],
name: tableName, name: tableName,
schema: {}, schema: {},
sourceId: datasourceId,
} }
} }
@ -381,6 +390,59 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
return response.rows.length ? response.rows : [{ [operation]: true }] return response.rows.length ? response.rows : [{ [operation]: true }]
} }
} }
async getExternalSchema() {
const dumpCommandParts = [
`user=${this.config.user}`,
`host=${this.config.host}`,
`port=${this.config.port}`,
`dbname=${this.config.database}`,
]
if (this.config.ssl) {
dumpCommandParts.push("sslmode=verify-ca")
if (this.config.ca) {
const caFilePath = storeTempFile(this.config.ca)
fs.chmodSync(caFilePath, "0600")
dumpCommandParts.push(`sslrootcert=${caFilePath}`)
}
if (this.config.clientCert) {
const clientCertFilePath = storeTempFile(this.config.clientCert)
fs.chmodSync(clientCertFilePath, "0600")
dumpCommandParts.push(`sslcert=${clientCertFilePath}`)
}
if (this.config.clientKey) {
const clientKeyFilePath = storeTempFile(this.config.clientKey)
fs.chmodSync(clientKeyFilePath, "0600")
dumpCommandParts.push(`sslkey=${clientKeyFilePath}`)
}
}
const dumpCommand = `PGPASSWORD="${
this.config.password
}" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`
return new Promise<string>((res, rej) => {
exec(dumpCommand, (error, stdout, stderr) => {
if (error) {
console.error(`Error generating dump: ${error.message}`)
rej(error.message)
return
}
if (stderr) {
console.error(`pg_dump error: ${stderr}`)
rej(stderr)
return
}
res(stdout)
console.log("SQL dump generated successfully!")
})
})
}
} }
export default { export default {
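For clarity, the command string this builds ends up looking like the following (illustrative values; the certificate paths are temp files created by storeTempFile):

PGPASSWORD="<password>" pg_dump --schema-only "user=postgres host=localhost port=5432 dbname=postgres sslmode=verify-ca sslrootcert=/tmp/<uuid>"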


@ -36,11 +36,14 @@ export function checkDatasourceTypes(schema: Integration, config: any) {
async function enrichDatasourceWithValues(datasource: Datasource) { async function enrichDatasourceWithValues(datasource: Datasource) {
const cloned = cloneDeep(datasource) const cloned = cloneDeep(datasource)
const env = await getEnvironmentVariables() const env = await getEnvironmentVariables()
// Do not process entities, as we do not want to process formulas
const { entities, ...clonedWithoutEntities } = cloned
const processed = processObjectSync( const processed = processObjectSync(
cloned, clonedWithoutEntities,
{ env }, { env },
{ onlyFound: true } { onlyFound: true }
) as Datasource ) as Datasource
processed.entities = entities
const definition = await getDefinition(processed.source) const definition = await getDefinition(processed.source)
processed.config = checkDatasourceTypes(definition!, processed.config) processed.config = checkDatasourceTypes(definition!, processed.config)
return { return {


@ -81,7 +81,9 @@ export const streamFile = (path: string) => {
* @param {string} fileContents contents which will be written to a temp file. * @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file. * @return {string} the path to the temp file.
*/ */
export const storeTempFile = (fileContents: any) => { export const storeTempFile = (
fileContents: string | NodeJS.ArrayBufferView
) => {
const path = join(budibaseTempDir(), uuid()) const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents) fs.writeFileSync(path, fileContents)
return path return path


@ -9,7 +9,7 @@ import {
import env from "../environment" import env from "../environment"
import { groups } from "@budibase/pro" import { groups } from "@budibase/pro"
import { UserCtx, ContextUser, User, UserGroup } from "@budibase/types" import { UserCtx, ContextUser, User, UserGroup } from "@budibase/types"
import { global } from "yargs" import { cloneDeep } from "lodash"
export function updateAppRole( export function updateAppRole(
user: ContextUser, user: ContextUser,
@ -65,16 +65,20 @@ export async function processUser(
user: ContextUser, user: ContextUser,
opts: { appId?: string; groups?: UserGroup[] } = {} opts: { appId?: string; groups?: UserGroup[] } = {}
) { ) {
if (user) { let clonedUser = cloneDeep(user)
delete user.password if (clonedUser) {
delete clonedUser.password
} }
const appId = opts.appId || context.getAppId() const appId = opts.appId || context.getAppId()
user = updateAppRole(user, { appId }) clonedUser = updateAppRole(clonedUser, { appId })
if (!user.roleId && user?.userGroups?.length) { if (!clonedUser.roleId && clonedUser?.userGroups?.length) {
user = await checkGroupRoles(user, { appId, groups: opts?.groups }) clonedUser = await checkGroupRoles(clonedUser, {
appId,
groups: opts?.groups,
})
} }
return user return clonedUser
} }
export async function getCachedSelf(ctx: UserCtx, appId: string) { export async function getCachedSelf(ctx: UserCtx, appId: string) {
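The switch to cloneDeep is about ownership: processUser previously sanitized the object it was handed, mutating cached users in place. A sketch of the hazard this removes (data hypothetical):

const cached = { _id: "us_1", password: "hash" } as ContextUser
await processUser(cached)
// before: cached.password was deleted from the shared, cached object
// after: only the internal clone is sanitized and returned; `cached` is untouched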


@ -1,6 +1,5 @@
// @ts-nocheck // @ts-nocheck
import { FieldTypes } from "../../constants" import { FieldTypes } from "../../constants"
import { logging } from "@budibase/backend-core"
const parseArrayString = value => { const parseArrayString = value => {
if (typeof value === "string") { if (typeof value === "string") {
@ -12,7 +11,7 @@ const parseArrayString = value => {
result = JSON.parse(value.replace(/'/g, '"')) result = JSON.parse(value.replace(/'/g, '"'))
return result return result
} catch (e) { } catch (e) {
logging.logWarn("Could not parse row value", e) return value
} }
} }
return value return value


@ -17,7 +17,7 @@
"@budibase/backend-core": ["../backend-core/src"], "@budibase/backend-core": ["../backend-core/src"],
"@budibase/backend-core/*": ["../backend-core/*"], "@budibase/backend-core/*": ["../backend-core/*"],
"@budibase/shared-core": ["../shared-core/src"], "@budibase/shared-core": ["../shared-core/src"],
"@budibase/pro": ["../pro/packages/pro/src"] "@budibase/pro": ["../pro/src"]
} }
}, },
"include": ["src/**/*"], "include": ["src/**/*"],


@ -82,6 +82,10 @@ export interface Table extends Document {
rowHeight?: number rowHeight?: number
} }
export interface ExternalTable extends Table {
sourceId: string
}
export interface TableRequest extends Table { export interface TableRequest extends Table {
_rename?: RenameColumn _rename?: RenameColumn
created?: boolean created?: boolean
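A sketch of the table objects the integrations above now emit (field values illustrative; the exact _id format is an implementation detail of buildExternalTableId):

const table: ExternalTable = {
  _id: buildExternalTableId(datasourceId, "users"),
  sourceId: datasourceId, // the new field: ties the table back to its datasource
  name: "users",
  primary: ["id"],
  schema: {},
}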


@ -76,6 +76,7 @@ export enum FilterType {
export enum DatasourceFeature { export enum DatasourceFeature {
CONNECTION_CHECKING = "connection", CONNECTION_CHECKING = "connection",
FETCH_TABLE_NAMES = "fetch_table_names", FETCH_TABLE_NAMES = "fetch_table_names",
EXPORT_SCHEMA = "export_schema",
} }
export interface StepDefinition { export interface StepDefinition {
@ -140,6 +141,7 @@ export interface IntegrationBase {
update?(query: any): Promise<any[] | any> update?(query: any): Promise<any[] | any>
delete?(query: any): Promise<any[] | any> delete?(query: any): Promise<any[] | any>
testConnection?(): Promise<ConnectionInfo> testConnection?(): Promise<ConnectionInfo>
getExternalSchema?(): Promise<string>
} }
export interface DatasourcePlus extends IntegrationBase { export interface DatasourcePlus extends IntegrationBase {
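Callers are expected to gate on the new capability before invoking the optional method; a hedged sketch (the `definition` and `integration` lookups are assumed):

if (definition.features?.[DatasourceFeature.EXPORT_SCHEMA] && integration.getExternalSchema) {
  const ddl = await integration.getExternalSchema()
  // hand `ddl` to the schema export endpoint / UI
}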


@ -21,11 +21,9 @@ const config: Config.InitialOptions = {
} }
// add pro sources if they exist // add pro sources if they exist
if (fs.existsSync("../pro/packages")) { if (fs.existsSync("../pro/src")) {
config.moduleNameMapper!["@budibase/pro/(.*)"] = config.moduleNameMapper!["@budibase/pro/(.*)"] = "<rootDir>/../pro/$1"
"<rootDir>/../pro/packages/pro/$1" config.moduleNameMapper!["@budibase/pro"] = "<rootDir>/../pro/src"
config.moduleNameMapper!["@budibase/pro"] =
"<rootDir>/../pro/packages/pro/src"
} }
export default config export default config


@ -1,5 +1,5 @@
{ {
"watch": ["src", "../backend-core", "../pro/packages/pro"], "watch": ["src", "../backend-core", "../pro"],
"ext": "js,ts,json", "ext": "js,ts,json",
"ignore": [ "ignore": [
"src/**/*.spec.ts", "src/**/*.spec.ts",


@ -47,6 +47,7 @@
"@techpass/passport-openidconnect": "0.3.2", "@techpass/passport-openidconnect": "0.3.2",
"@types/global-agent": "2.1.1", "@types/global-agent": "2.1.1",
"aws-sdk": "2.1030.0", "aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"dd-trace": "3.13.2", "dd-trace": "3.13.2",
"dotenv": "8.6.0", "dotenv": "8.6.0",


@ -38,7 +38,7 @@ const MAX_USERS_UPLOAD_LIMIT = 1000
export const save = async (ctx: UserCtx<User, SaveUserResponse>) => { export const save = async (ctx: UserCtx<User, SaveUserResponse>) => {
try { try {
const currentUserId = ctx.user._id const currentUserId = ctx.user?._id
const requestUser = ctx.request.body const requestUser = ctx.request.body
const user = await userSdk.save(requestUser, { currentUserId }) const user = await userSdk.save(requestUser, { currentUserId })


@ -1,9 +1,10 @@
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
import env from "../../../environment" import env from "../../../environment"
export const fetch = async (ctx: BBContext) => { export const fetch = async (ctx: Ctx) => {
ctx.body = { ctx.body = {
multiTenancy: !!env.MULTI_TENANCY, multiTenancy: !!env.MULTI_TENANCY,
offlineMode: !!env.OFFLINE_MODE,
cloud: !env.SELF_HOSTED, cloud: !env.SELF_HOSTED,
accountPortalUrl: env.ACCOUNT_PORTAL_URL, accountPortalUrl: env.ACCOUNT_PORTAL_URL,
disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL, disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL,


@ -24,6 +24,7 @@ describe("/api/system/environment", () => {
isDev: false, isDev: false,
multiTenancy: true, multiTenancy: true,
baseUrl: "http://localhost:10000", baseUrl: "http://localhost:10000",
offlineMode: false,
}) })
}) })
}) })


@ -61,6 +61,7 @@ const environment = {
CHECKLIST_CACHE_TTL: parseIntSafe(process.env.CHECKLIST_CACHE_TTL) || 3600, CHECKLIST_CACHE_TTL: parseIntSafe(process.env.CHECKLIST_CACHE_TTL) || 3600,
SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD, SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
ENCRYPTED_TEST_PUBLIC_API_KEY: process.env.ENCRYPTED_TEST_PUBLIC_API_KEY, ENCRYPTED_TEST_PUBLIC_API_KEY: process.env.ENCRYPTED_TEST_PUBLIC_API_KEY,
OFFLINE_MODE: process.env.OFFLINE_MODE,
/** /**
* Mock the email service in use - links to ethereal hosted emails are logged instead. * Mock the email service in use - links to ethereal hosted emails are logged instead.
*/ */


@ -16,7 +16,7 @@
"@budibase/backend-core": ["../backend-core/src"], "@budibase/backend-core": ["../backend-core/src"],
"@budibase/backend-core/*": ["../backend-core/*"], "@budibase/backend-core/*": ["../backend-core/*"],
"@budibase/shared-core": ["../shared-core/src"], "@budibase/shared-core": ["../shared-core/src"],
"@budibase/pro": ["../pro/packages/pro/src"] "@budibase/pro": ["../pro/src"]
} }
}, },
"include": ["src/**/*"], "include": ["src/**/*"],

qa-core/package-lock.json (generated, 5669 lines): diff suppressed because it is too large

@ -10,6 +10,7 @@
}, },
"scripts": { "scripts": {
"setup": "yarn && node scripts/createEnv.js", "setup": "yarn && node scripts/createEnv.js",
"user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
"test": "jest --runInBand --json --outputFile=testResults.json --forceExit", "test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
"test:watch": "yarn run test --watch", "test:watch": "yarn run test --watch",
"test:debug": "DEBUG=1 yarn run test", "test:debug": "DEBUG=1 yarn run test",


@ -0,0 +1,49 @@
const dotenv = require("dotenv")
const { join } = require("path")
const fs = require("fs")
const fetch = require("node-fetch")
function getVarFromDotEnv(path, varName) {
const parsed = dotenv.parse(fs.readFileSync(path))
return parsed[varName]
}
async function createUser() {
const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
const qaCorePath = join(__dirname, "..", ".env")
const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
const resp = await fetch(`${url}/api/public/v1/users`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-budibase-api-key": apiKey,
},
body: JSON.stringify({
email: username,
password,
builder: {
global: true,
},
admin: {
global: true,
},
roles: {},
}),
})
if (resp.status !== 200) {
throw new Error(await resp.text())
} else {
return await resp.json()
}
}
createUser()
.then(() => {
console.log("User created - ready to use")
})
.catch(err => {
console.error("Failed to create user - ", err)
})
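This script is wired up as the new "user" script in qa-core's package.json above, so the expected invocation is:

cd qa-core && yarn user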


@ -67,11 +67,12 @@ export default class AccountInternalAPIClient {
} }
const message = `${method} ${url} - ${response.status}` const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) { if (response.status > 499) {
console.error(message, data) console.error(message, data)
} else if (response.status >= 400) { } else if (response.status >= 400) {
console.warn(message, data) console.warn(message, data)
} else { } else if (isDebug) {
console.debug(message, data) console.debug(message, data)
} }
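The same guard is added to the internal and public API clients below. With it, per-request debug logging only appears when the suite is run with the debug log level, e.g.:

LOG_LEVEL=debug yarn test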


@ -0,0 +1,112 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"
jest.unmock("mssql")
describe("getExternalSchema", () => {
describe("postgres", () => {
let config: any
beforeAll(async () => {
const password = "Str0Ng_p@ssW0rd!"
const container = await new GenericContainer(
"mcr.microsoft.com/mssql/server"
)
.withExposedPorts(1433)
.withEnv("ACCEPT_EULA", "Y")
.withEnv("MSSQL_SA_PASSWORD", password)
.withEnv("MSSQL_PID", "Developer")
.withWaitStrategy(Wait.forHealthCheck())
.withHealthCheck({
test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
interval: new Duration(1000, TemporalUnit.MILLISECONDS),
timeout: new Duration(3, TemporalUnit.SECONDS),
retries: 20,
startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
})
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(1433)
config = {
user: "sa",
password,
server: host,
port: port,
database: "master",
schema: "dbo",
}
})
it("can export an empty database", async () => {
const integration = new mssql.integration(config)
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`""`)
})
it("can export a database with tables", async () => {
const integration = new mssql.integration(config)
await integration.connect()
await integration.internalQuery({
sql: `
CREATE TABLE users (
id INT IDENTITY(1,1) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
role VARCHAR(15) NOT NULL
);
CREATE TABLE products (
id INT IDENTITY(1,1) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
price DECIMAL(10, 2) NOT NULL
);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE TABLE [products] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
price decimal(9) NOT NULL,
CONSTRAINT [PK_products] PRIMARY KEY (id)
);
CREATE TABLE [users] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
role varchar(15) NOT NULL,
CONSTRAINT [PK_users] PRIMARY KEY (id)
);"
`)
})
it("does not export a data", async () => {
const integration = new mssql.integration(config)
await integration.connect()
await integration.internalQuery({
sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE TABLE [products] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
price decimal(9) NOT NULL,
CONSTRAINT [PK_products] PRIMARY KEY (id)
);
CREATE TABLE [users] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
role varchar(15) NOT NULL,
CONSTRAINT [PK_users] PRIMARY KEY (id)
);"
`)
})
})
})


@ -0,0 +1,108 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"
jest.unmock("mysql2/promise")
describe("datasource validators", () => {
describe("mysql", () => {
let config: any
beforeAll(async () => {
const container = await new GenericContainer("mysql")
.withExposedPorts(3306)
.withEnv("MYSQL_ROOT_PASSWORD", "admin")
.withEnv("MYSQL_DATABASE", "db")
.withEnv("MYSQL_USER", "user")
.withEnv("MYSQL_PASSWORD", "password")
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(3306)
config = {
host,
port,
user: "user",
database: "db",
password: "password",
rejectUnauthorized: true,
}
})
it("can export an empty database", async () => {
const integration = new mysql.integration(config)
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(
`"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
)
})
it("can export a database with tables", async () => {
const integration = new mysql.integration(config)
await integration.internalQuery({
sql: `
CREATE TABLE users (
id INT AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
role VARCHAR(15) NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE products (
id INT AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
price DECIMAL,
PRIMARY KEY (id)
);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
CREATE TABLE \`products\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`price\` decimal(10,0) DEFAULT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
CREATE TABLE \`users\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`role\` varchar(15) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
`)
})
it("does not export a data", async () => {
const integration = new mysql.integration(config)
await integration.internalQuery({
sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
})
await integration.internalQuery({
sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
CREATE TABLE \`products\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`price\` decimal(10,0) DEFAULT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
CREATE TABLE \`users\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`role\` varchar(15) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
`)
})
})
})


@ -0,0 +1,377 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"
jest.unmock("pg")
describe("getExternalSchema", () => {
describe("postgres", () => {
let config: any
// Remove versioning from the outputs to prevent failures when running different pg_dump versions
function stripResultsVersions(sql: string) {
const result = sql
.replace(/\n[^\n]+Dumped from database version[^\n]+\n/, "")
.replace(/\n[^\n]+Dumped by pg_dump version[^\n]+\n/, "")
.toString()
return result
}
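// For reference, the stripped lines are the version headers pg_dump writes at
// the top of every dump (version numbers vary by environment):
//   -- Dumped from database version 15.3
//   -- Dumped by pg_dump version 15.3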
beforeAll(async () => {
// This is left untagged on purpose, so that we will be notified if a new version introduces a breaking change
const container = await new GenericContainer("postgres")
.withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password")
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(5432)
config = {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
}
})
it("can export an empty database", async () => {
const integration = new postgres.integration(config)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- PostgreSQL database dump complete
--
"
`)
})
it("can export a database with tables", async () => {
const integration = new postgres.integration(config)
await integration.internalQuery(
{
sql: `
CREATE TABLE "users" (
"id" SERIAL,
"name" VARCHAR(100) NOT NULL,
"role" VARCHAR(15) NOT NULL,
PRIMARY KEY ("id")
);
CREATE TABLE "products" (
"id" SERIAL,
"name" VARCHAR(100) NOT NULL,
"price" DECIMAL NOT NULL,
"owner" INTEGER NULL,
PRIMARY KEY ("id")
);
ALTER TABLE "products" ADD CONSTRAINT "fk_owner" FOREIGN KEY ("owner") REFERENCES "users" ("id");`,
},
false
)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: products; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.products (
id integer NOT NULL,
name character varying(100) NOT NULL,
price numeric NOT NULL,
owner integer
);
ALTER TABLE public.products OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.products_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.products_id_seq OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;
--
-- Name: users; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.users (
id integer NOT NULL,
name character varying(100) NOT NULL,
role character varying(15) NOT NULL
);
ALTER TABLE public.users OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.users_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.users_id_seq OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;
--
-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);
--
-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);
--
-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT products_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
--
-- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);
--
-- PostgreSQL database dump complete
--
"
`)
})
it("does not export a data", async () => {
const integration = new postgres.integration(config)
await integration.internalQuery(
{
sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
},
false
)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: products; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.products (
id integer NOT NULL,
name character varying(100) NOT NULL,
price numeric NOT NULL,
owner integer
);
ALTER TABLE public.products OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.products_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.products_id_seq OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;
--
-- Name: users; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.users (
id integer NOT NULL,
name character varying(100) NOT NULL,
role character varying(15) NOT NULL
);
ALTER TABLE public.users OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.users_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.users_id_seq OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;
--
-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);
--
-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);
--
-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT products_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
--
-- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);
--
-- PostgreSQL database dump complete
--
"
`)
})
})
})


@ -58,11 +58,12 @@ class BudibaseInternalAPIClient {
} }
const message = `${method} ${url} - ${response.status}` const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) { if (response.status > 499) {
console.error(message, data) console.error(message, data)
} else if (response.status >= 400) { } else if (response.status >= 400) {
console.warn(message, data) console.warn(message, data)
} else { } else if (isDebug) {
console.debug(message, data) console.debug(message, data)
} }


@ -1,19 +1,23 @@
import { generator } from "../../shared" import { generator } from "../../shared"
import { CreateAppRequest } from "../../types" import { CreateAppRequest } from "../../types"
function uniqueWord() {
return generator.word() + generator.hash()
}
export const generateApp = ( export const generateApp = (
overrides: Partial<CreateAppRequest> = {} overrides: Partial<CreateAppRequest> = {}
): CreateAppRequest => ({ ): CreateAppRequest => ({
name: generator.word() + generator.hash(), name: uniqueWord(),
url: `/${generator.word() + generator.hash()}`, url: `/${uniqueWord()}`,
...overrides, ...overrides,
}) })
// Applications type doesn't work here, safe to add useTemplate parameter? // Applications type doesn't work here, safe to add useTemplate parameter?
export const appFromTemplate = (): CreateAppRequest => { export const appFromTemplate = (): CreateAppRequest => {
return { return {
name: generator.word(), name: uniqueWord(),
url: `/${generator.word()}`, url: `/${uniqueWord()}`,
// @ts-ignore // @ts-ignore
useTemplate: "true", useTemplate: "true",
templateName: "Near Miss Register", templateName: "Near Miss Register",


@ -2,7 +2,7 @@ import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures" import * as fixtures from "../../fixtures"
import { Query } from "@budibase/types" import { Query } from "@budibase/types"
describe("Internal API - Data Sources: MongoDB", () => { xdescribe("Internal API - Data Sources: MongoDB", () => {
const config = new TestConfiguration() const config = new TestConfiguration()
beforeAll(async () => { beforeAll(async () => {


@ -1,3 +1,4 @@
process.env.DISABLE_PINO_LOGGER = "1"
import { DEFAULT_TENANT_ID, logging } from "@budibase/backend-core" import { DEFAULT_TENANT_ID, logging } from "@budibase/backend-core"
import { AccountInternalAPI } from "../account-api" import { AccountInternalAPI } from "../account-api"
import * as fixtures from "../internal-api/fixtures" import * as fixtures from "../internal-api/fixtures"


@ -57,11 +57,12 @@ class BudibasePublicAPIClient {
} }
const message = `${method} ${url} - ${response.status}` const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) { if (response.status > 499) {
console.error(message, data) console.error(message, data)
} else if (response.status >= 400) { } else if (response.status >= 400) {
console.warn(message, data) console.warn(message, data)
} else { } else if (isDebug) {
console.debug(message, data) console.debug(message, data)
} }

File diff suppressed because it is too large


@ -1,4 +1,4 @@
if [ -d "packages/pro/packages" ]; then if [ -d "packages/pro/src" ]; then
cd packages/pro cd packages/pro
yarn yarn


@ -8,10 +8,10 @@ const path = require("path")
const { build } = require("esbuild") const { build } = require("esbuild")
const { default: NodeResolve } = require("@esbuild-plugins/node-resolve")
const { const {
default: TsconfigPathsPlugin, default: TsconfigPathsPlugin,
} = require("@esbuild-plugins/tsconfig-paths") } = require("@esbuild-plugins/tsconfig-paths")
const { nodeExternalsPlugin } = require("esbuild-node-externals")
var argv = require("minimist")(process.argv.slice(2)) var argv = require("minimist")(process.argv.slice(2))
@ -25,32 +25,28 @@ function runBuild(entry, outfile) {
minify: !isDev, minify: !isDev,
sourcemap: isDev, sourcemap: isDev,
tsconfig, tsconfig,
plugins: [ plugins: [TsconfigPathsPlugin({ tsconfig }), nodeExternalsPlugin()],
TsconfigPathsPlugin({ tsconfig }),
NodeResolve({
extensions: [".ts", ".js"],
onResolved: resolved => {
if (resolved.includes("node_modules") && !resolved.includes("/@budibase/pro/")) {
return {
external: true,
}
}
return resolved
},
}),
],
target: "node14", target: "node14",
preserveSymlinks: true, preserveSymlinks: true,
loader: { loader: {
".svelte": "copy", ".svelte": "copy",
}, },
metafile: true,
external: [
"deasync",
"mock-aws-s3",
"nock",
"pino",
"koa-pino-logger",
"bull",
],
} }
build({ build({
...sharedConfig, ...sharedConfig,
platform: "node", platform: "node",
outfile, outfile,
}).then(() => { }).then(result => {
glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => { glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => {
for (const file of files) { for (const file of files) {
fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`) fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
@ -61,6 +57,11 @@ function runBuild(entry, outfile) {
`Build successfully in ${(Date.now() - start) / 1000} seconds` `Build successfully in ${(Date.now() - start) / 1000} seconds`
) )
}) })
fs.writeFileSync(
`dist/${path.basename(outfile)}.meta.json`,
JSON.stringify(result.metafile)
)
}) })
} }
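The metafile now written next to each bundle can be fed to esbuild's own analyzer to audit bundle contents; a minimal sketch (output path assumed):

import fs from "fs"
import { analyzeMetafile } from "esbuild"

async function report(metaPath: string) {
  // analyzeMetafile renders a human-readable size breakdown of the bundle
  const metafile = JSON.parse(fs.readFileSync(metaPath, "utf8"))
  console.log(await analyzeMetafile(metafile))
}

report("dist/index.js.meta.json").catch(console.error)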


@ -34,7 +34,7 @@ yarn unlink
yarn link yarn link
cd - cd -
if [ -d packages/pro/packages ]; then if [ -d packages/pro/src ]; then
pro_loaded_locally=true pro_loaded_locally=true
else else
pro_loaded_locally=false pro_loaded_locally=false
@ -42,7 +42,7 @@ fi
if [ $pro_loaded_locally = true ]; then if [ $pro_loaded_locally = true ]; then
echo "Linking pro" echo "Linking pro"
cd packages/pro/packages/pro cd packages/pro
yarn unlink yarn unlink
yarn link yarn link
cd - cd -


@ -3,7 +3,7 @@ const path = require("path")
const { execSync } = require("child_process") const { execSync } = require("child_process")
let version = "0.0.0" let version = "0.0.0"
const localPro = fs.existsSync("packages/pro/packages") const localPro = fs.existsSync("packages/pro/src")
if (!localPro) { if (!localPro) {
const branchName = execSync("git rev-parse --abbrev-ref HEAD") const branchName = execSync("git rev-parse --abbrev-ref HEAD")
.toString() .toString()

yarn.lock (1326 lines): diff suppressed because it is too large