Merge branch 'develop' of github.com:Budibase/budibase into grid-block
This commit is contained in: commit aebc3b2bb8
@@ -1,5 +1,9 @@
name: Budibase CI

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  # Trigger the workflow on push or pull request,
  # but only for the master branch

@@ -23,6 +27,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Use Node.js 14.x
        uses: actions/setup-node@v3
        with:

@@ -135,15 +142,39 @@ jobs:
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
          fetch-depth: 0

      - name: Check submodule
      - name: Check pro commit
        id: get_pro_commits
        run: |
          cd packages/pro
          git fetch
          if ! git merge-base --is-ancestor $(git log -n 1 --pretty=format:%H) origin/develop; then
            echo "Current commit has not been merged to develop"
            echo "Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md"
            exit 1
          fi
          pro_commit=$(git rev-parse HEAD)

          branch=${{ github.base_ref || github.ref_name }}
          echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"

          if [[ "$branch" == "master" ]]; then
            base_commit=$(git rev-parse origin/master)
          else
            echo "All good, the submodule had been merged!"
            base_commit=$(git rev-parse origin/develop)
          fi

          echo "pro_commit=$pro_commit"
          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
          echo "base_commit=$base_commit"
          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

      - name: Check submodule merged to develop
        uses: actions/github-script@v4
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
            const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';

            if (submoduleCommit !== baseCommit) {
              console.error('Submodule commit does not match the latest commit on the develop branch.');
              console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
              process.exit(1);
            } else {
              console.log('All good, the submodule had been merged and setup correctly!')
            }
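The core of the pro-commit guard above is `git merge-base --is-ancestor`: a submodule commit counts as merged once it is an ancestor of origin/develop. A minimal standalone sketch of the same check, assuming the packages/pro submodule layout from the diff (TypeScript; not part of the commit):

// Sketch only: reproduces the workflow's ancestry check via child_process.
import { execSync } from "child_process"

function run(cmd: string, cwd: string): string {
  return execSync(cmd, { cwd }).toString().trim()
}

function isMergedToDevelop(submodulePath: string): boolean {
  run("git fetch", submodulePath)
  const head = run("git rev-parse HEAD", submodulePath)
  try {
    // merge-base exits non-zero when HEAD is not an ancestor of origin/develop
    run(`git merge-base --is-ancestor ${head} origin/develop`, submodulePath)
    return true
  } catch {
    return false
  }
}

if (!isMergedToDevelop("packages/pro")) {
  console.error("Current commit has not been merged to develop")
  process.exit(1)
}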
@@ -2,7 +2,9 @@ const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")

const IMAGES = {
const IS_SINGLE_IMAGE = process.env.SINGLE_IMAGE

let IMAGES = {
  worker: "budibase/worker",
  apps: "budibase/apps",
  proxy: "budibase/proxy",

@@ -10,7 +12,13 @@ const IMAGES = {
  couch: "ibmcom/couchdb3",
  curl: "curlimages/curl",
  redis: "redis",
  watchtower: "containrrr/watchtower"
  watchtower: "containrrr/watchtower",
}

if (IS_SINGLE_IMAGE) {
  IMAGES = {
    budibase: "budibase/budibase"
  }
}

const FILES = {

@@ -39,11 +47,10 @@ for (let image in IMAGES) {
}

// copy config files
copyFile(FILES.COMPOSE)
if (!IS_SINGLE_IMAGE) {
  copyFile(FILES.COMPOSE)
}
copyFile(FILES.ENV)

// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)

// clean up
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
@@ -37,6 +37,14 @@ COPY --from=build /worker /worker
RUN apt-get update && \
  apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server

# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
  && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
  && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
  && apt update -y \
  && apt install postgresql-client-15 -y \
  && apt remove software-properties-common apt-transport-https gpg -y

# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \
lerna.json
@@ -1,22 +1,10 @@
{
  "version": "2.7.25-alpha.1",
  "version": "2.7.34-alpha.3",
  "npmClient": "yarn",
  "packages": [
    "packages/backend-core",
    "packages/bbui",
    "packages/builder",
    "packages/cli",
    "packages/client",
    "packages/frontend-core",
    "packages/sdk",
    "packages/server",
    "packages/shared-core",
    "packages/string-templates",
    "packages/types",
    "packages/worker",
    "packages/pro/packages/pro"
    "packages/*"
  ],
  "useWorkspaces": true,
  "useNx": true,
  "command": {
    "publish": {
      "ignoreChanges": [
package.json
@@ -2,23 +2,22 @@
  "name": "root",
  "private": true,
  "devDependencies": {
    "@esbuild-plugins/node-resolve": "^0.2.2",
    "@esbuild-plugins/tsconfig-paths": "^0.1.2",
    "@nx/js": "16.2.1",
    "@rollup/plugin-json": "^4.0.2",
    "@typescript-eslint/parser": "5.45.0",
    "babel-eslint": "^10.0.3",
    "esbuild": "^0.17.18",
    "esbuild-node-externals": "^1.7.0",
    "eslint": "^7.28.0",
    "eslint-plugin-cypress": "^2.11.3",
    "eslint-plugin-svelte3": "^3.2.0",
    "husky": "^8.0.3",
    "js-yaml": "^4.1.0",
    "kill-port": "^1.6.1",
    "lerna": "7.0.0-alpha.0",
    "lerna": "7.0.2",
    "madge": "^6.0.0",
    "minimist": "^1.2.8",
    "nx": "^16.2.1",
    "prettier": "^2.3.1",
    "prettier-plugin-svelte": "^2.3.0",
    "rimraf": "^3.0.2",

@@ -48,9 +47,9 @@
    "kill-builder": "kill-port 3000",
    "kill-server": "kill-port 4001 4002",
    "kill-all": "yarn run kill-builder && yarn run kill-server",
    "dev": "yarn run kill-all && lerna run --stream --parallel dev:builder --stream",
    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
    "dev:server": "yarn run kill-server && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
    "dev": "yarn run kill-all && lerna run --stream --parallel dev:builder --stream",
    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
    "dev:server": "yarn run kill-server && yarn build --projects=@budibase/client && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
    "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream --parallel dev:built",
    "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
    "test": "lerna run --stream test --stream",

@@ -67,6 +66,7 @@
    "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
    "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
    "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
    "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
    "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
    "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
    "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",

@@ -95,19 +95,7 @@
  },
  "workspaces": {
    "packages": [
      "packages/backend-core",
      "packages/bbui",
      "packages/builder",
      "packages/cli",
      "packages/client",
      "packages/frontend-core",
      "packages/sdk",
      "packages/server",
      "packages/shared-core",
      "packages/string-templates",
      "packages/types",
      "packages/worker",
      "packages/pro/packages/pro"
      "packages/*"
    ]
  },
  "resolutions": {
@@ -31,4 +31,6 @@ const config: Config.InitialOptions = {
  coverageReporters: ["lcov", "json", "clover"],
}

process.env.DISABLE_PINO_LOGGER = "1"

export default config
@@ -27,7 +27,7 @@
    "@techpass/passport-openidconnect": "0.3.2",
    "aws-cloudfront-sign": "2.2.0",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.0.1",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "correlation-id": "4.0.0",
@@ -87,7 +87,7 @@
    border-color: var(--spectrum-global-color-gray-400);
  }
  /* Toolbar button color */
  :global(.EasyMDEContainer .editor-toolbar button i) {
  :global(.EasyMDEContainer .editor-toolbar button) {
    color: var(--spectrum-global-color-gray-800);
  }
  /* Separator between toolbar buttons */
@@ -309,7 +309,7 @@
  }

  function canShowField(key, value) {
    const dependsOn = value.dependsOn
    const dependsOn = value?.dependsOn
    return !dependsOn || !!inputData[dependsOn]
  }
@@ -1,5 +1,5 @@
<script>
  import { tables } from "stores/backend"
  import { datasources, tables } from "stores/backend"
  import EditRolesButton from "./buttons/EditRolesButton.svelte"
  import { TableNames } from "constants"
  import { Grid } from "@budibase/frontend-core"

@@ -26,6 +26,18 @@
  $: id = $tables.selected?._id
  $: isUsersTable = id === TableNames.USERS
  $: isInternal = $tables.selected?.type !== "external"

  const handleGridTableUpdate = async e => {
    tables.replaceTable(id, e.detail)

    // We need to refresh datasources when an external table changes.
    // Type "external" may exist - sometimes type is "table" and sometimes it
    // is "external" - it has different meanings in different endpoints.
    // If we check both of these then we hopefully catch all external tables.
    if (e.detail?.type === "external" || e.detail?.sql) {
      await datasources.fetch()
    }
  }
</script>

<div class="wrapper">

@@ -36,7 +48,7 @@
    allowDeleteRows={!isUsersTable}
    schemaOverrides={isUsersTable ? userSchemaOverrides : null}
    showAvatars={false}
    on:updatetable={e => tables.replaceTable(id, e.detail)}
    on:updatetable={handleGridTableUpdate}
  >
    <svelte:fragment slot="filter">
      <GridFilterButton />
|
@ -59,7 +59,6 @@
|
|||
$: valid = getErrorCount(errors) === 0 && allRequiredAttributesSet()
|
||||
$: isManyToMany = relationshipType === RelationshipTypes.MANY_TO_MANY
|
||||
$: isManyToOne = relationshipType === RelationshipTypes.MANY_TO_ONE
|
||||
$: toRelationship.relationshipType = fromRelationship?.relationshipType
|
||||
|
||||
function getTable(id) {
|
||||
return plusTables.find(table => table._id === id)
|
||||
|
@ -180,6 +179,16 @@
|
|||
return getErrorCount(errors) === 0
|
||||
}
|
||||
|
||||
function otherRelationshipType(type) {
|
||||
if (type === RelationshipTypes.MANY_TO_ONE) {
|
||||
return RelationshipTypes.ONE_TO_MANY
|
||||
} else if (type === RelationshipTypes.ONE_TO_MANY) {
|
||||
return RelationshipTypes.MANY_TO_ONE
|
||||
} else if (type === RelationshipTypes.MANY_TO_MANY) {
|
||||
return RelationshipTypes.MANY_TO_MANY
|
||||
}
|
||||
}
|
||||
|
||||
function buildRelationships() {
|
||||
const id = Helpers.uuid()
|
||||
//Map temporary variables
|
||||
|
@ -200,6 +209,7 @@
|
|||
...toRelationship,
|
||||
tableId: fromId,
|
||||
name: fromColumn,
|
||||
relationshipType: otherRelationshipType(relationshipType),
|
||||
through: throughId,
|
||||
type: "link",
|
||||
_id: id,
|
||||
|
|
|
@@ -93,6 +93,7 @@
    try {
      await beforeSave()
      table = await tables.save(newTable)
      await datasources.fetch()
      await afterSave(table)
    } catch (e) {
      notifications.error(e)
@@ -65,6 +65,7 @@
    const updatedTable = cloneDeep(table)
    updatedTable.name = updatedName
    await tables.save(updatedTable)
    await datasources.fetch()
    notifications.success("Table renamed successfully")
  }
@@ -9,6 +9,18 @@
  faFileArrowUp,
  faChevronLeft,
  faCircleInfo,
  faBold,
  faItalic,
  faHeading,
  faQuoteLeft,
  faListUl,
  faListOl,
  faLink,
  faImage,
  faEye,
  faColumns,
  faArrowsAlt,
  faQuestionCircle,
} from "@fortawesome/free-solid-svg-icons"
import { faGithub, faDiscord } from "@fortawesome/free-brands-svg-icons"

@@ -22,7 +34,22 @@
    faEnvelope,
    faFileArrowUp,
    faChevronLeft,
    faCircleInfo
    faCircleInfo,

    // -- Required for easyMDE use in the builder.
    faBold,
    faItalic,
    faHeading,
    faQuoteLeft,
    faListUl,
    faListOl,
    faLink,
    faImage,
    faEye,
    faColumns,
    faArrowsAlt,
    faQuestionCircle
    // --
  )
  dom.watch()
</script>
@@ -21,7 +21,6 @@
  export let allowHelpers = true
  export let updateOnChange = true
  export let drawerLeft
  export let key

  const dispatch = createEventDispatcher()
  let bindingDrawer
@@ -0,0 +1,50 @@
<script>
  import { currentAsset, store } from "builderStore"
  import { onMount } from "svelte"
  import { Label, Combobox, Select } from "@budibase/bbui"
  import {
    getActionProviderComponents,
    buildFormSchema,
  } from "builderStore/dataBinding"
  import { findComponent } from "builderStore/componentUtils"

  export let parameters

  onMount(() => {
    if (!parameters.type) {
      parameters.type = "top"
    }
  })

  $: formComponent = findComponent($currentAsset.props, parameters.componentId)
  $: formSchema = buildFormSchema(formComponent)
  $: fieldOptions = Object.keys(formSchema || {})
  $: actionProviders = getActionProviderComponents(
    $currentAsset,
    $store.selectedComponentId,
    "ScrollTo"
  )
</script>

<div class="root">
  <Label small>Form</Label>
  <Select
    bind:value={parameters.componentId}
    options={actionProviders}
    getOptionLabel={x => x._instanceName}
    getOptionValue={x => x._id}
  />
  <Label small>Field</Label>
  <Combobox bind:value={parameters.field} options={fieldOptions} />
</div>

<style>
  .root {
    display: grid;
    align-items: center;
    gap: var(--spacing-m);
    grid-template-columns: auto;
    max-width: 400px;
    margin: 0 auto;
  }
</style>
@@ -16,6 +16,7 @@ export { default as S3Upload } from "./S3Upload.svelte"
export { default as ExportData } from "./ExportData.svelte"
export { default as ContinueIf } from "./ContinueIf.svelte"
export { default as UpdateFieldValue } from "./UpdateFieldValue.svelte"
export { default as ScrollTo } from "./ScrollTo.svelte"
export { default as ShowNotification } from "./ShowNotification.svelte"
export { default as PromptUser } from "./PromptUser.svelte"
export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
@@ -70,6 +70,11 @@
  "type": "form",
  "component": "UpdateFieldValue"
},
{
  "name": "Scroll To Field",
  "type": "form",
  "component": "ScrollTo"
},
{
  "name": "Validate Form",
  "type": "form",
@@ -11,6 +11,7 @@
import TemplateCard from "components/common/TemplateCard.svelte"
import createFromScratchScreen from "builderStore/store/screenTemplates/createFromScratchScreen"
import { Roles } from "constants/backend"
import { lowercase } from "helpers"

export let template

@@ -19,6 +20,7 @@

const values = writable({ name: "", url: null })
const validation = createValidationStore()
const encryptionValidation = createValidationStore()

$: {
  const { url } = $values

@@ -27,8 +29,11 @@
    ...$values,
    url: url?.[0] === "/" ? url.substring(1, url.length) : url,
  })
  encryptionValidation.check({ ...$values })
}

$: encryptedFile = $values.file?.name?.endsWith(".enc.tar.gz")

onMount(async () => {
  const lastChar = $auth.user?.firstName
    ? $auth.user?.firstName[$auth.user?.firstName.length - 1]

@@ -87,6 +92,9 @@
  appValidation.name(validation, { apps: applications })
  appValidation.url(validation, { apps: applications })
  appValidation.file(validation, { template })

  encryptionValidation.addValidatorType("encryptionPassword", "text", true)

  // init validation
  const { url } = $values
  validation.check({

@@ -110,6 +118,9 @@
    data.append("templateName", template.name)
    data.append("templateKey", template.key)
    data.append("templateFile", $values.file)
    if ($values.encryptionPassword?.trim()) {
      data.append("encryptionPassword", $values.encryptionPassword.trim())
    }
  }

  // Create App

@@ -143,67 +154,119 @@
    $goto(`/builder/app/${createdApp.instance._id}`)
  } catch (error) {
    creating = false
    console.error(error)
    notifications.error("Error creating app")
    throw error
  }
}

const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
let currentStep = Step.CONFIG
$: stepConfig = {
  [Step.CONFIG]: {
    title: "Create your app",
    confirmText: template?.fromFile ? "Import app" : "Create app",
    onConfirm: async () => {
      if (encryptedFile) {
        currentStep = Step.SET_PASSWORD
        return false
      } else {
        try {
          await createNewApp()
        } catch (error) {
          notifications.error("Error creating app")
        }
      }
    },
    isValid: $validation.valid,
  },
  [Step.SET_PASSWORD]: {
    title: "Provide the export password",
    confirmText: "Import app",
    onConfirm: async () => {
      try {
        await createNewApp()
      } catch (e) {
        let message = "Error creating app"
        if (e.message) {
          message += `: ${lowercase(e.message)}`
        }
        notifications.error(message)
        return false
      }
    },
    isValid: $encryptionValidation.valid,
  },
}
</script>

<ModalContent
  title={"Create your app"}
  confirmText={template?.fromFile ? "Import app" : "Create app"}
  onConfirm={createNewApp}
  disabled={!$validation.valid}
  title={stepConfig[currentStep].title}
  confirmText={stepConfig[currentStep].confirmText}
  onConfirm={stepConfig[currentStep].onConfirm}
  disabled={!stepConfig[currentStep].isValid}
>
  {#if template && !template?.fromFile}
    <TemplateCard
      name={template.name}
      imageSrc={template.image}
      backgroundColour={template.background}
      overlayEnabled={false}
      icon={template.icon}
    />
  {/if}
  {#if template?.fromFile}
    <Dropzone
      error={$validation.touched.file && $validation.errors.file}
      gallery={false}
      label="File to import"
      value={[$values.file]}
      on:change={e => {
        $values.file = e.detail?.[0]
        $validation.touched.file = true
      }}
    />
  {/if}
  <Input
    autofocus={true}
    bind:value={$values.name}
    disabled={creating}
    error={$validation.touched.name && $validation.errors.name}
    on:blur={() => ($validation.touched.name = true)}
    on:change={nameToUrl($values.name)}
    label="Name"
    placeholder={defaultAppName}
  />
  <span>
    <Input
      bind:value={$values.url}
      disabled={creating}
      error={$validation.touched.url && $validation.errors.url}
      on:blur={() => ($validation.touched.url = true)}
      on:change={tidyUrl($values.url)}
      label="URL"
      placeholder={$values.url
        ? $values.url
        : `/${resolveAppUrl(template, $values.name)}`}
    />
    {#if $values.url && $values.url !== "" && !$validation.errors.url}
      <div class="app-server" title={appUrl}>
        {appUrl}
      </div>
  {#if currentStep === Step.CONFIG}
    {#if template && !template?.fromFile}
      <TemplateCard
        name={template.name}
        imageSrc={template.image}
        backgroundColour={template.background}
        overlayEnabled={false}
        icon={template.icon}
      />
    {/if}
  </span>
    {#if template?.fromFile}
      <Dropzone
        error={$validation.touched.file && $validation.errors.file}
        gallery={false}
        label="File to import"
        value={[$values.file]}
        on:change={e => {
          $values.file = e.detail?.[0]
          $validation.touched.file = true
        }}
      />
    {/if}
    <Input
      autofocus={true}
      bind:value={$values.name}
      disabled={creating}
      error={$validation.touched.name && $validation.errors.name}
      on:blur={() => ($validation.touched.name = true)}
      on:change={nameToUrl($values.name)}
      label="Name"
      placeholder={defaultAppName}
    />
    <span>
      <Input
        bind:value={$values.url}
        disabled={creating}
        error={$validation.touched.url && $validation.errors.url}
        on:blur={() => ($validation.touched.url = true)}
        on:change={tidyUrl($values.url)}
        label="URL"
        placeholder={$values.url
          ? $values.url
          : `/${resolveAppUrl(template, $values.name)}`}
      />
      {#if $values.url && $values.url !== "" && !$validation.errors.url}
        <div class="app-server" title={appUrl}>
          {appUrl}
        </div>
      {/if}
    </span>
  {/if}
  {#if currentStep === Step.SET_PASSWORD}
    <Input
      autofocus={true}
      label="Imported file password"
      type="password"
      bind:value={$values.encryptionPassword}
      disabled={creating}
      on:blur={() => ($encryptionValidation.touched.encryptionPassword = true)}
      error={$encryptionValidation.touched.encryptionPassword &&
        $encryptionValidation.errors.encryptionPassword}
    />
  {/if}
</ModalContent>

<style>
@@ -16,7 +16,7 @@

let password = null
const validation = createValidationStore()
validation.addValidatorType("password", "password", true)
validation.addValidatorType("password", "password", true, { minLength: 8 })
$: validation.observe("password", password)

const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
@@ -21,7 +21,7 @@ export const createValidationStore = () => {
    validator[propertyName] = propertyValidator
  }

  const addValidatorType = (propertyName, type, required) => {
  const addValidatorType = (propertyName, type, required, options) => {
    if (!type || !propertyName) {
      return
    }

@@ -45,11 +45,8 @@ export const createValidationStore = () => {
      propertyValidator = propertyValidator.required()
    }

    // We want to do this after the possible required validation, to prioritise the required error
    switch (type) {
      case "password":
        propertyValidator = propertyValidator.min(8)
        break
    if (options?.minLength) {
      propertyValidator = propertyValidator.min(options.minLength)
    }

    validator[propertyName] = propertyValidator
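The two hunks above replace a hard-coded per-type switch with an options object passed by the caller. A minimal TypeScript sketch of that pattern, not the actual store implementation — the rule ordering (required first, then minLength) mirrors the diff, while the validator shape is an assumption:

// Sketch only: options-driven validator construction.
type ValidatorOptions = { minLength?: number }

function buildStringValidator(required: boolean, options?: ValidatorOptions) {
  const rules: Array<(value: string | null) => string | null> = []
  if (required) {
    rules.push(v => (v == null || v === "" ? "Required" : null))
  }
  // Applied after the required rule so the required error takes priority
  if (options?.minLength != null) {
    const min = options.minLength
    rules.push(v =>
      v != null && v.length < min ? `Minimum length is ${min}` : null
    )
  }
  return (value: string | null) =>
    rules.map(rule => rule(value)).find(error => error != null) ?? null
}

// Usage, matching the call site in the diff:
const passwordValidator = buildStringValidator(true, { minLength: 8 })
console.log(passwordValidator("abc")) // "Minimum length is 8"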
@@ -32,7 +32,7 @@
  </header>
  <Body size="M">
    Budibase internal tables are part of your app, so the data will be
    stored in your apps context.
    stored in your app's context.
  </Body>
</Layout>
<Divider />
@@ -1,6 +1,6 @@
<script>
  import { notifications } from "@budibase/bbui"
  import { apps, templates, licensing, groups } from "stores/portal"
  import { admin, apps, templates, licensing, groups } from "stores/portal"
  import { onMount } from "svelte"
  import { redirect } from "@roxi/routify"

@@ -9,14 +9,18 @@

  onMount(async () => {
    try {
      // Always load latest
      await Promise.all([
        licensing.init(),
        templates.load(),
        groups.actions.init(),
      ])
      const promises = [licensing.init()]

      if ($templates?.length === 0) {
      if (!$admin.offlineMode) {
        promises.push(templates.load())
      }

      promises.push(groups.actions.init())

      // Always load latest
      await Promise.all(promises)

      if (!$admin.offlineMode && $templates?.length === 0) {
        notifications.error("There was a problem loading quick start templates")
      }
@@ -247,7 +247,7 @@
  >
    Create new app
  </Button>
  {#if $apps?.length > 0}
  {#if $apps?.length > 0 && !$admin.offlineMode}
    <Button
      size="M"
      secondary
@@ -0,0 +1,14 @@
<script>
  export let value
</script>

<div style="display: flex; ">
  {#if value === "Unavailable"}
    Email already in use. Please use a different email.
  {:else}
    {value}
  {/if}
</div>

<style>
</style>
@@ -1,6 +1,7 @@
<script>
  import { Body, ModalContent, Table } from "@budibase/bbui"
  import { onMount } from "svelte"
  import InviteResponseRenderer from "./InviteResponseRenderer.svelte"

  export let inviteUsersResponse

@@ -50,7 +51,7 @@
  }
</script>

<ModalContent size="M" showCancelButton={false} {title} confirmText="Done">
<ModalContent size="L" showCancelButton={false} {title} confirmText="Done">
  {#if hasSuccess}
    <Body size="XS">
      Your users should now receive an email invite to get access to their

@@ -67,6 +68,9 @@
      allowEditColumns={false}
      allowEditRows={false}
      allowSelectRows={false}
      customRenderers={[
        { column: "reason", component: InviteResponseRenderer },
      ]}
    />
  {/if}
</ModalContent>
@@ -117,6 +117,10 @@ export function createDatasourcesStore() {
      ...state,
      list: [...state.list, datasource],
    }))

    // If this is a new datasource then we should refresh the tables list,
    // because otherwise we'll never see the new tables
    tables.fetch()
  }

  // Update existing datasource
@@ -1,5 +1,4 @@
import { get, writable, derived } from "svelte/store"
import { datasources } from "./"
import { cloneDeep } from "lodash/fp"
import { API } from "api"
import { SWITCHABLE_TYPES } from "constants/backend"

@@ -63,7 +62,6 @@ export function createTablesStore() {

    const savedTable = await API.saveTable(updatedTable)
    replaceTable(savedTable._id, savedTable)
    await datasources.fetch()
    select(savedTable._id)
    return savedTable
  }
@@ -46,6 +46,7 @@ export function createAdminStore() {
    store.accountPortalUrl = environment.accountPortalUrl
    store.isDev = environment.isDev
    store.baseUrl = environment.baseUrl
    store.offlineMode = environment.offlineMode
    return store
  })
}
@@ -2221,7 +2221,8 @@
  "ValidateForm",
  "ClearForm",
  "ChangeFormStep",
  "UpdateFieldValue"
  "UpdateFieldValue",
  "ScrollTo"
],
"styles": ["size"],
"size": {

@@ -3543,7 +3544,8 @@
{
  "type": "field/sortable",
  "label": "Sort column",
  "key": "sortColumn"
  "key": "sortColumn",
  "placeholder": "None"
},
{
  "type": "select",

@@ -4322,7 +4324,8 @@
{
  "type": "field/sortable",
  "label": "Sort by",
  "key": "sortColumn"
  "key": "sortColumn",
  "placeholder": "None"
},
{
  "type": "select",

@@ -4566,7 +4569,8 @@
{
  "type": "field/sortable",
  "label": "Sort column",
  "key": "sortColumn"
  "key": "sortColumn",
  "placeholder": "None"
},
{
  "type": "select",

@@ -4734,7 +4738,8 @@
{
  "type": "field/sortable",
  "label": "Sort column",
  "key": "sortColumn"
  "key": "sortColumn",
  "placeholder": "None"
},
{
  "type": "select",
@@ -283,7 +283,7 @@

    // Skip if the value is the same
    if (!skipCheck && fieldState.value === value) {
      return true
      return false
    }

    // Update field state

@@ -295,7 +295,7 @@
      return state
    })

    return !error
    return true
  }

  // Clears the value of a certain field back to the default value

@@ -376,8 +376,9 @@
    deregister,
    validate: () => {
      // Validate the field by force setting the same value again
      const { fieldState } = get(getField(field))
      return setValue(fieldState.value, true)
      const fieldInfo = getField(field)
      setValue(get(fieldInfo).fieldState.value, true)
      return !get(fieldInfo).fieldState.error
    },
  }
}

@@ -404,12 +405,20 @@
  }
}

const handleScrollToField = ({ field }) => {
  const fieldId = get(getField(field)).fieldState.fieldId
  const label = document.querySelector(`label[for="${fieldId}"]`)
  document.getElementById(fieldId).focus({ preventScroll: true })
  label.scrollIntoView({ behavior: "smooth" })
}

// Action context to pass to children
const actions = [
  { type: ActionTypes.ValidateForm, callback: formApi.validate },
  { type: ActionTypes.ClearForm, callback: formApi.reset },
  { type: ActionTypes.ChangeFormStep, callback: formApi.changeStep },
  { type: ActionTypes.UpdateFieldValue, callback: handleUpdateFieldValue },
  { type: ActionTypes.ScrollTo, callback: handleScrollToField },
]
</script>
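The first three hunks above change what setValue reports: it now returns whether an update happened, not whether the field is valid, so validate() must re-run the setter and then read the error state. A simplified TypeScript sketch of that semantics (the store shape is an assumption; the control flow mirrors the diff):

// Sketch only: setValue reports "did anything change?"; validate re-checks.
interface FieldState {
  value: string
  error: string | null
}

function makeField(initial: string, check: (v: string) => string | null) {
  let state: FieldState = { value: initial, error: check(initial) }

  // Returns whether an update happened, not whether the value is valid
  const setValue = (value: string, skipCheck = false): boolean => {
    if (!skipCheck && state.value === value) {
      return false // unchanged: nothing to do
    }
    state = { value, error: check(value) }
    return true
  }

  // Force re-validation of the current value, then report validity
  const validate = (): boolean => {
    setValue(state.value, true)
    return !state.error
  }

  return { setValue, validate, get state() { return state } }
}

const field = makeField("", v => (v ? null : "Required"))
console.log(field.validate()) // false: empty value fails the required check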
@@ -6,7 +6,7 @@

<div class="notifications">
  {#if $notificationStore}
    {#each $notificationStore as { type, icon, message, id, dismissable } (id)}
    {#each $notificationStore as { type, icon, message, id, dismissable, count } (id)}
      <div
        in:fly={{
          duration: 300,

@@ -17,7 +17,7 @@
      >
        <Notification
          {type}
          {message}
          message={count > 1 ? `(${count}) ${message}` : message}
          {icon}
          {dismissable}
          on:dismiss={() => notificationStore.actions.dismiss(id)}
@@ -29,6 +29,7 @@ export const ActionTypes = {
  SetDataProviderSorting: "SetDataProviderSorting",
  ClearForm: "ClearForm",
  ChangeFormStep: "ChangeFormStep",
  ScrollTo: "ScrollTo",
}

export const DNDPlaceholderID = "dnd-placeholder"
@@ -13,7 +13,13 @@ const createNotificationStore = () => {
    setTimeout(() => (block = false), timeout)
  }

  const send = (message, type = "info", icon, autoDismiss = true) => {
  const send = (
    message,
    type = "info",
    icon,
    autoDismiss = true,
    count = 1
  ) => {
    if (block) {
      return
    }

@@ -33,6 +39,11 @@ const createNotificationStore = () => {
    }
    const _id = id()
    store.update(state => {
      const duplicateError = state.find(err => err.message === message)
      if (duplicateError) {
        duplicateError.count += 1
        return [...state]
      }
      return [
        ...state,
        {

@@ -42,6 +53,7 @@ const createNotificationStore = () => {
          icon,
          dismissable: !autoDismiss,
          delay: get(store) != null,
          count,
        },
      ]
    })
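Together with the component hunk above, this de-duplicates repeated notifications: an identical message bumps a counter instead of stacking another toast, and the component renders it as "(n) message". A stripped-down TypeScript sketch of the same behaviour, outside of Svelte stores (the Toast shape is simplified):

// Sketch only: de-duplication by message, as added in the store above.
interface Toast {
  id: number
  message: string
  count: number
}

let nextId = 0
const toasts: Toast[] = []

function send(message: string) {
  const duplicate = toasts.find(t => t.message === message)
  if (duplicate) {
    duplicate.count += 1 // rendered as "(n) message" in the component
    return
  }
  toasts.push({ id: nextId++, message, count: 1 })
}

send("Error fetching rows")
send("Error fetching rows")
console.log(toasts) // [{ id: 0, message: "Error fetching rows", count: 2 }]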
@@ -153,6 +153,17 @@ const navigationHandler = action => {
  routeStore.actions.navigate(url, peek, externalNewTab)
}

const scrollHandler = async (action, context) => {
  return await executeActionHandler(
    context,
    action.parameters.componentId,
    ActionTypes.ScrollTo,
    {
      field: action.parameters.field,
    }
  )
}

const queryExecutionHandler = async action => {
  const { datasourceId, queryId, queryParams, notificationOverride } =
    action.parameters

@@ -369,6 +380,7 @@ const handlerMap = {
  ["Duplicate Row"]: duplicateRowHandler,
  ["Delete Row"]: deleteRowHandler,
  ["Navigate To"]: navigationHandler,
  ["Scroll To Field"]: scrollHandler,
  ["Execute Query"]: queryExecutionHandler,
  ["Trigger Automation"]: triggerAutomationHandler,
  ["Validate Form"]: validateFormHandler,
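Pulling the pieces together: the "Scroll To Field" button action dispatches ActionTypes.ScrollTo to the target form, whose handleScrollToField (shown earlier) focuses the input and smooth-scrolls its label into view. A hedged TypeScript sketch of the callback-registry pattern this relies on — the Map-based registry here is an assumption, since Budibase actually resolves callbacks through component context:

// Sketch only: components register callbacks keyed by action type and
// provider id; handlers look them up and invoke them.
type ActionCallback = (params: { field: string }) => void | Promise<void>

const registry = new Map<string, ActionCallback>()

function register(componentId: string, type: string, cb: ActionCallback) {
  registry.set(`${componentId}:${type}`, cb)
}

async function executeActionHandler(
  componentId: string,
  type: string,
  params: { field: string }
) {
  const cb = registry.get(`${componentId}:${type}`)
  if (cb) {
    await cb(params)
  }
}

// A form registers its ScrollTo callback...
register("form-1", "ScrollTo", ({ field }) => {
  console.log(`scrolling label for "${field}" into view`)
})

// ...and the "Scroll To Field" button action invokes it.
void executeActionHandler("form-1", "ScrollTo", { field: "email" })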
@@ -107,14 +107,14 @@ export const deriveStores = context => {
    // Update local state
    table.set(newTable)

    // Broadcast change so external state can be updated, as this change
    // will not be received by the builder websocket because we caused it ourselves
    dispatch("updatetable", newTable)

    // Update server
    if (get(config).allowSchemaChanges) {
      await API.saveTable(newTable)
    }

    // Broadcast change so external state can be updated, as this change
    // will not be received by the builder websocket because we caused it ourselves
    dispatch("updatetable", newTable)
  }

  return {
@@ -1 +1 @@
Subproject commit f4b8449aac9bd265214396afbdce7ff984a2ae34
Subproject commit 2c9172685cdceef03172bea779e94cb52ff6d1de
@@ -26,11 +26,21 @@ RUN apt-get install unzip libaio1
COPY scripts/integrations/oracle/ scripts/integrations/oracle/
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh

# Install postgres client for pg_dump utils
RUN apt update && apt upgrade -y \
  && apt install software-properties-common apt-transport-https curl gpg -y \
  && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
  && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
  && apt update -y \
  && apt install postgresql-client-15 -y \
  && apt remove software-properties-common apt-transport-https curl gpg -y

COPY package.json .
COPY dist/yarn.lock .
RUN yarn install --production=true
# Remove unneeded data from file system to reduce image size
RUN yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
RUN yarn install --production=true \
  # Remove unneeded data from file system to reduce image size
  && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
  && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

COPY dist/ dist/
@@ -21,9 +21,8 @@ const baseConfig: Config.InitialProjectOptions = {
}

// add pro sources if they exist
if (fs.existsSync("../pro/packages")) {
  baseConfig.moduleNameMapper!["@budibase/pro"] =
    "<rootDir>/../pro/packages/pro/src"
if (fs.existsSync("../pro/src")) {
  baseConfig.moduleNameMapper!["@budibase/pro"] = "<rootDir>/../pro/src"
}

const config: Config.InitialOptions = {
@@ -1,10 +1,10 @@
{
  "watch": ["src", "../backend-core", "../pro/packages/pro"],
  "watch": ["src", "../backend-core", "../pro"],
  "ext": "js,ts,json",
  "ignore": [
    "src/**/*.spec.ts",
    "src/**/*.spec.js",
    "../backend-core/dist/**/*"
  ],
  "exec": "node ./scripts/build.js && node ./dist/index.js"
  "exec": "yarn build && node ./dist/index.js"
}
@@ -63,6 +63,7 @@
    "airtable": "0.10.1",
    "arangojs": "7.2.0",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "chmodr": "1.2.0",
@@ -115,7 +115,18 @@ function checkAppName(
  }
}

async function createInstance(appId: string, template: any) {
interface AppTemplate {
  templateString: string
  useTemplate: string
  file?: {
    type: string
    path: string
    password?: string
  }
  key?: string
}

async function createInstance(appId: string, template: AppTemplate) {
  const db = context.getAppDB()
  await db.put({
    _id: "_design/database",

@@ -240,19 +251,24 @@ export async function fetchAppPackage(ctx: UserCtx) {
async function performAppCreate(ctx: UserCtx) {
  const apps = (await dbCore.getAllApps({ dev: true })) as App[]
  const name = ctx.request.body.name,
    possibleUrl = ctx.request.body.url
    possibleUrl = ctx.request.body.url,
    encryptionPassword = ctx.request.body.encryptionPassword

  checkAppName(ctx, apps, name)
  const url = sdk.applications.getAppUrl({ name, url: possibleUrl })
  checkAppUrl(ctx, apps, url)

  const { useTemplate, templateKey, templateString } = ctx.request.body
  const instanceConfig: any = {
  const instanceConfig: AppTemplate = {
    useTemplate,
    key: templateKey,
    templateString,
  }
  if (ctx.request.files && ctx.request.files.templateFile) {
    instanceConfig.file = ctx.request.files.templateFile
    instanceConfig.file = {
      ...(ctx.request.files.templateFile as any),
      password: encryptionPassword,
    }
  }
  const tenantId = tenancy.isMultiTenant() ? tenancy.getTenantId() : null
  const appId = generateDevAppID(generateAppID(tenantId))
@@ -441,3 +441,18 @@ export async function query(ctx: UserCtx) {
    ctx.throw(400, err)
  }
}

export async function getExternalSchema(ctx: UserCtx) {
  const { datasource } = ctx.request.body
  const enrichedDatasource = await getAndMergeDatasource(datasource)
  const connector = await getConnector(enrichedDatasource)

  if (!connector.getExternalSchema) {
    ctx.throw(400, "Datasource does not support exporting external schema")
  }
  const response = await connector.getExternalSchema()

  ctx.body = {
    schema: response,
  }
}
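This controller pairs with the route added further down (GET /api/datasources/:datasourceId/schema/external) and returns { schema } containing connector-generated DDL. A hedged TypeScript sketch of a client call — the auth handling and exact request contract are assumptions, and the response shape is taken from the controller above:

// Sketch only: consuming the new schema-export endpoint.
async function fetchExternalSchema(datasourceId: string): Promise<string> {
  const res = await fetch(`/api/datasources/${datasourceId}/schema/external`)
  if (!res.ok) {
    // The controller throws a 400 when the connector lacks getExternalSchema
    throw new Error(`Schema export failed: ${res.status}`)
  }
  const body = (await res.json()) as { schema: string }
  return body.schema
}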
@@ -3,10 +3,10 @@ import * as userController from "../user"
import { FieldTypes } from "../../../constants"
import { context } from "@budibase/backend-core"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Row, Table } from "@budibase/types"
import { FieldType, Row, Table, UserCtx } from "@budibase/types"
import { Format } from "../view/exporters"
import { UserCtx } from "@budibase/types"
import sdk from "../../../sdk"

const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")

@@ -20,6 +20,13 @@ validateJs.extend(validateJs.validators.datetime, {
  },
})

function isForeignKey(key: string, table: Table) {
  const relationships = Object.values(table.schema).filter(
    column => column.type === FieldType.LINK
  )
  return relationships.some(relationship => relationship.foreignKey === key)
}

export async function getDatasourceAndQuery(json: any) {
  const datasourceId = json.endpoint.datasourceId
  const datasource = await sdk.datasources.get(datasourceId)

@@ -65,6 +72,10 @@ export async function validate({
  const column = fetchedTable.schema[fieldName]
  const constraints = cloneDeep(column.constraints)
  const type = column.type
  // foreign keys are likely to be enriched
  if (isForeignKey(fieldName, fetchedTable)) {
    continue
  }
  // formulas shouldn't be validated, data will be deleted anyway
  if (type === FieldTypes.FORMULA || column.autocolumn) {
    continue
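A worked example of the guard added above, with simplified types: a field counts as a foreign key when any LINK column in the table's schema declares it as its foreignKey. Such values arrive enriched (as related-row data rather than raw keys), so validating them against the raw key type would produce false errors. The schema below is hypothetical:

// Sketch only: the FK check, using "link" as the FieldType.LINK value.
type Column = { type: string; foreignKey?: string }
type Table = { schema: Record<string, Column> }

function isForeignKey(key: string, table: Table): boolean {
  const relationships = Object.values(table.schema).filter(
    column => column.type === "link"
  )
  return relationships.some(rel => rel.foreignKey === key)
}

const table: Table = {
  schema: {
    company: { type: "link", foreignKey: "companyId" },
    name: { type: "string" },
  },
}
console.log(isForeignKey("companyId", table)) // true: skip validation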
@@ -26,6 +26,7 @@ import {
  RelationshipTypes,
} from "@budibase/types"
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"
const { cloneDeep } = require("lodash/fp")

async function makeTableRequest(

@@ -318,6 +319,11 @@ export async function save(ctx: UserCtx) {
  datasource.entities[tableToSave.name] = tableToSave
  await db.put(datasource)

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await db.get(datasource._id)
  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)

  return tableToSave
}

@@ -344,6 +350,11 @@ export async function destroy(ctx: UserCtx) {

  await db.put(datasource)

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await db.get(datasource._id)
  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)

  return tableToDelete
}
@@ -66,5 +66,10 @@ router
    authorized(permissions.BUILDER),
    datasourceController.destroy
  )
  .get(
    "/api/datasources/:datasourceId/schema/external",
    authorized(permissions.BUILDER),
    datasourceController.getExternalSchema
  )

export default router
@@ -15,7 +15,6 @@ import * as api from "./api"
import * as automations from "./automations"
import { Thread } from "./threads"
import * as redis from "./utilities/redis"
import { initialise as initialiseWebsockets } from "./websockets"
import { events, logging, middleware, timers } from "@budibase/backend-core"
import { startup } from "./startup"
const Sentry = require("@sentry/node")
@@ -81,6 +81,7 @@ const environment = {
  SELF_HOSTED: process.env.SELF_HOSTED,
  HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
  FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
  OFFLINE_MODE: process.env.OFFLINE_MODE,
  // old
  CLIENT_ID: process.env.CLIENT_ID,
  _set(key: string, value: any) {
@@ -13,7 +13,7 @@ import {
  Row,
  SearchFilters,
  SortJson,
  Table,
  ExternalTable,
  TableRequest,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"

@@ -139,7 +139,7 @@ const SCHEMA: Integration = {
class GoogleSheetsIntegration implements DatasourcePlus {
  private readonly config: GoogleSheetsConfig
  private client: GoogleSpreadsheet
  public tables: Record<string, Table> = {}
  public tables: Record<string, ExternalTable> = {}
  public schemaErrors: Record<string, string> = {}

  constructor(config: GoogleSheetsConfig) {

@@ -253,12 +253,18 @@ class GoogleSheetsIntegration implements DatasourcePlus {
    return sheets.map(s => s.title)
  }

  getTableSchema(title: string, headerValues: string[], id?: string) {
  getTableSchema(
    title: string,
    headerValues: string[],
    datasourceId: string,
    id?: string
  ) {
    // base table
    const table: Table = {
    const table: ExternalTable = {
      name: title,
      primary: [GOOGLE_SHEETS_PRIMARY_KEY],
      schema: {},
      sourceId: datasourceId,
    }
    if (id) {
      table._id = id

@@ -273,20 +279,28 @@ class GoogleSheetsIntegration implements DatasourcePlus {
    return table
  }

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
  ) {
    // not fully configured yet
    if (!this.config.auth) {
      return
    }
    await this.connect()
    const sheets = this.client.sheetsByIndex
    const tables: Record<string, Table> = {}
    const tables: Record<string, ExternalTable> = {}
    await utils.parallelForeach(
      sheets,
      async sheet => {
        // must fetch rows to determine schema
        await sheet.getRows({ limit: 0, offset: 0 })
        await sheet.getRows()

        const id = buildExternalTableId(datasourceId, sheet.title)
        tables[sheet.title] = this.getTableSchema(
          sheet.title,
          sheet.headerValues,
          datasourceId,
          id
        )
      },
@@ -2,7 +2,7 @@ import {
  DatasourceFieldType,
  Integration,
  Operation,
  Table,
  ExternalTable,
  TableSchema,
  QueryJson,
  QueryType,

@@ -43,6 +43,7 @@ const SCHEMA: Integration = {
  features: {
    [DatasourceFeature.CONNECTION_CHECKING]: true,
    [DatasourceFeature.FETCH_TABLE_NAMES]: true,
    [DatasourceFeature.EXPORT_SCHEMA]: true,
  },
  datasource: {
    user: {

@@ -97,7 +98,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
  private index: number = 0
  private readonly pool: any
  private client: any
  public tables: Record<string, Table> = {}
  public tables: Record<string, ExternalTable> = {}
  public schemaErrors: Record<string, string> = {}

  MASTER_TABLES = [

@@ -220,7 +221,10 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
  ) {
    await this.connect()
    let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
    if (tableInfo == null || !Array.isArray(tableInfo)) {

@@ -233,7 +237,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
      .map((record: any) => record.TABLE_NAME)
      .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)

    const tables: Record<string, Table> = {}
    const tables: Record<string, ExternalTable> = {}
    for (let tableName of tableNames) {
      // get the column definition (type)
      const definition = await this.runSQL(this.getDefinitionSQL(tableName))

@@ -276,6 +280,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
      }
      tables[tableName] = {
        _id: buildExternalTableId(datasourceId, tableName),
        sourceId: datasourceId,
        primary: primaryKeys,
        name: tableName,
        schema,

@@ -336,6 +341,81 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
      result.recordset ? result.recordset : [{ [operation]: true }]
    return this.queryWithReturning(json, queryFn, processFn)
  }

  async getExternalSchema() {
    // Query to retrieve table schema
    const query = `
      SELECT
        t.name AS TableName,
        c.name AS ColumnName,
        ty.name AS DataType,
        c.max_length AS MaxLength,
        c.is_nullable AS IsNullable,
        c.is_identity AS IsIdentity
      FROM
        sys.tables t
        INNER JOIN sys.columns c ON t.object_id = c.object_id
        INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
      WHERE
        t.is_ms_shipped = 0
      ORDER BY
        t.name, c.column_id
    `

    await this.connect()

    const result = await this.internalQuery({
      sql: query,
    })

    const scriptParts = []
    const tables: any = {}
    for (const row of result.recordset) {
      const {
        TableName,
        ColumnName,
        DataType,
        MaxLength,
        IsNullable,
        IsIdentity,
      } = row

      if (!tables[TableName]) {
        tables[TableName] = {
          columns: [],
        }
      }

      const columnDefinition = `${ColumnName} ${DataType}${
        MaxLength ? `(${MaxLength})` : ""
      }${IsNullable ? " NULL" : " NOT NULL"}`

      tables[TableName].columns.push(columnDefinition)

      if (IsIdentity) {
        tables[TableName].identityColumn = ColumnName
      }
    }

    // Generate SQL statements for table creation
    for (const tableName in tables) {
      const { columns, identityColumn } = tables[tableName]

      let createTableStatement = `CREATE TABLE [${tableName}] (\n`
      createTableStatement += columns.join(",\n")

      if (identityColumn) {
        createTableStatement += `,\n CONSTRAINT [PK_${tableName}] PRIMARY KEY (${identityColumn})`
      }

      createTableStatement += "\n);"

      scriptParts.push(createTableStatement)
    }

    const schema = scriptParts.join("\n")
    return schema
  }
}

export default {
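For orientation, the MSSQL exporter above stitches sys.tables/sys.columns metadata into one CREATE TABLE statement per table. A purely illustrative example of the output shape for a hypothetical users table — not captured from a real database, and note sys.columns reports max_length in bytes, so nvarchar lengths can appear doubled:

// Illustrative only: output shape of getExternalSchema() above.
const exampleSchema = `CREATE TABLE [users] (
id int NOT NULL,
email nvarchar(510) NULL,
 CONSTRAINT [PK_users] PRIMARY KEY (id)
);`
console.log(exampleSchema)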
@@ -385,7 +385,7 @@ class MongoIntegration implements IntegrationBase {
  createObjectIds(json: any) {
    const self = this
    function interpolateObjectIds(json: any) {
      for (let field of Object.keys(json)) {
      for (let field of Object.keys(json || {})) {
        if (json[field] instanceof Object) {
          json[field] = self.createObjectIds(json[field])
        }

@@ -489,7 +489,11 @@ class MongoIntegration implements IntegrationBase {

    switch (query.extra.actionType) {
      case "find": {
        return await collection.find(json).toArray()
        if (json) {
          return await collection.find(json).toArray()
        } else {
          return await collection.find().toArray()
        }
      }
      case "findOne": {
        return await collection.findOne(json)
@@ -4,7 +4,7 @@ import {
  QueryType,
  QueryJson,
  SqlQuery,
  Table,
  ExternalTable,
  TableSchema,
  DatasourcePlus,
  DatasourceFeature,

@@ -39,6 +39,7 @@ const SCHEMA: Integration = {
  features: {
    [DatasourceFeature.CONNECTION_CHECKING]: true,
    [DatasourceFeature.FETCH_TABLE_NAMES]: true,
    [DatasourceFeature.EXPORT_SCHEMA]: true,
  },
  datasource: {
    host: {

@@ -123,7 +124,7 @@ export function bindingTypeCoerce(bindings: any[]) {
class MySQLIntegration extends Sql implements DatasourcePlus {
  private config: MySQLConfig
  private client?: mysql.Connection
  public tables: Record<string, Table> = {}
  public tables: Record<string, ExternalTable> = {}
  public schemaErrors: Record<string, string> = {}

  constructor(config: MySQLConfig) {

@@ -220,8 +221,11 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
    }
  }

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    const tables: { [key: string]: Table } = {}
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
  ) {
    const tables: { [key: string]: ExternalTable } = {}
    await this.connect()

    try {

@@ -259,6 +263,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
      if (!tables[tableName]) {
        tables[tableName] = {
          _id: buildExternalTableId(datasourceId, tableName),
          sourceId: datasourceId,
          primary: primaryKeys,
          name: tableName,
          schema,

@@ -324,6 +329,36 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
      await this.disconnect()
    }
  }

  async getExternalSchema() {
    try {
      const [databaseResult] = await this.internalQuery({
        sql: `SHOW CREATE DATABASE ${this.config.database}`,
      })
      let dumpContent = [databaseResult["Create Database"]]

      const tablesResult = await this.internalQuery({
        sql: `SHOW TABLES`,
      })

      for (const row of tablesResult) {
        const tableName = row[`Tables_in_${this.config.database}`]

        const createTableResults = await this.internalQuery({
          sql: `SHOW CREATE TABLE \`${tableName}\``,
        })

        const createTableStatement = createTableResults[0]["Create Table"]

        dumpContent.push(createTableStatement)
      }

      const schema = dumpContent.join("\n")
      return schema
    } finally {
      this.disconnect()
    }
  }
}

export default {
@@ -5,7 +5,7 @@ import {
  QueryJson,
  QueryType,
  SqlQuery,
  Table,
  ExternalTable,
  DatasourcePlus,
  DatasourceFeature,
  ConnectionInfo,

@@ -108,7 +108,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
  private readonly config: OracleConfig
  private index: number = 1

  public tables: Record<string, Table> = {}
  public tables: Record<string, ExternalTable> = {}
  public schemaErrors: Record<string, string> = {}

  private readonly COLUMNS_SQL = `

@@ -262,13 +262,16 @@ class OracleIntegration extends Sql implements DatasourcePlus {
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
  ) {
    const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
      sql: this.COLUMNS_SQL,
    })
    const oracleTables = this.mapColumns(columnsResponse)

    const tables: { [key: string]: Table } = {}
    const tables: { [key: string]: ExternalTable } = {}

    // iterate each table
    Object.values(oracleTables).forEach(oracleTable => {

@@ -279,6 +282,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
        primary: [],
        name: oracleTable.name,
        schema: {},
        sourceId: datasourceId,
      }
      tables[oracleTable.name] = table
    }
@ -1,10 +1,11 @@
import fs from "fs"
import {
  Integration,
  DatasourceFieldType,
  QueryType,
  QueryJson,
  SqlQuery,
  Table,
  ExternalTable,
  DatasourcePlus,
  DatasourceFeature,
  ConnectionInfo,
@ -21,6 +22,8 @@ import { PostgresColumn } from "./base/types"
import { escapeDangerousCharacters } from "../utilities"

import { Client, ClientConfig, types } from "pg"
import { exec } from "child_process"
import { storeTempFile } from "../utilities/fileSystem"

// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
@ -57,6 +60,7 @@ const SCHEMA: Integration = {
  features: {
    [DatasourceFeature.CONNECTION_CHECKING]: true,
    [DatasourceFeature.FETCH_TABLE_NAMES]: true,
    [DatasourceFeature.EXPORT_SCHEMA]: true,
  },
  datasource: {
    host: {
@ -139,7 +143,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
  private readonly config: PostgresConfig
  private index: number = 1
  private open: boolean
  public tables: Record<string, Table> = {}
  public tables: Record<string, ExternalTable> = {}
  public schemaErrors: Record<string, string> = {}

  COLUMNS_SQL!: string
@ -178,6 +182,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
    const response: ConnectionInfo = {
      connected: false,
    }

    try {
      await this.openConnection()
      response.connected = true
@ -256,7 +261,10 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
  ) {
    let tableKeys: { [key: string]: string[] } = {}
    await this.openConnection()
    try {
@ -282,7 +290,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
      const columnsResponse: { rows: PostgresColumn[] } =
        await this.client.query(this.COLUMNS_SQL)

      const tables: { [key: string]: Table } = {}
      const tables: { [key: string]: ExternalTable } = {}

      for (let column of columnsResponse.rows) {
        const tableName: string = column.table_name
@ -295,6 +303,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
            primary: tableKeys[tableName] || [],
            name: tableName,
            schema: {},
            sourceId: datasourceId,
          }
        }

@ -381,6 +390,59 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
      return response.rows.length ? response.rows : [{ [operation]: true }]
    }
  }

  async getExternalSchema() {
    const dumpCommandParts = [
      `user=${this.config.user}`,
      `host=${this.config.host}`,
      `port=${this.config.port}`,
      `dbname=${this.config.database}`,
    ]

    if (this.config.ssl) {
      dumpCommandParts.push("sslmode=verify-ca")
      if (this.config.ca) {
        const caFilePath = storeTempFile(this.config.ca)
        fs.chmodSync(caFilePath, "0600")
        dumpCommandParts.push(`sslrootcert=${caFilePath}`)
      }

      if (this.config.clientCert) {
        const clientCertFilePath = storeTempFile(this.config.clientCert)
        fs.chmodSync(clientCertFilePath, "0600")
        dumpCommandParts.push(`sslcert=${clientCertFilePath}`)
      }

      if (this.config.clientKey) {
        const clientKeyFilePath = storeTempFile(this.config.clientKey)
        fs.chmodSync(clientKeyFilePath, "0600")
        dumpCommandParts.push(`sslkey=${clientKeyFilePath}`)
      }
    }

    const dumpCommand = `PGPASSWORD="${
      this.config.password
    }" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`

    return new Promise<string>((res, rej) => {
      exec(dumpCommand, (error, stdout, stderr) => {
        if (error) {
          console.error(`Error generating dump: ${error.message}`)
          rej(error.message)
          return
        }

        if (stderr) {
          console.error(`pg_dump error: ${stderr}`)
          rej(stderr)
          return
        }

        res(stdout)
        console.log("SQL dump generated successfully!")
      })
    })
  }
}

export default {
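The Postgres exporter shells out to pg_dump --schema-only, first writing any SSL material to 0600 temp files and joining the connection parameters into a conninfo string. One detail worth flagging: interpolating PGPASSWORD into the command string makes the password visible in the local process list. A hedged alternative sketch (not the committed implementation) passes it through the child environment instead, using Node's standard util.promisify and exec env option; the config shape here is an assumption:

import { exec } from "child_process"
import { promisify } from "util"

const execAsync = promisify(exec)

// Sketch: hand the password to pg_dump via the child process environment
// rather than interpolating it into the shell command string.
async function dumpPostgresSchema(config: {
  user: string
  host: string
  port: number
  database: string
  password: string
}): Promise<string> {
  const conninfo = [
    `user=${config.user}`,
    `host=${config.host}`,
    `port=${config.port}`,
    `dbname=${config.database}`,
  ].join(" ")
  const { stdout } = await execAsync(`pg_dump --schema-only "${conninfo}"`, {
    env: { ...process.env, PGPASSWORD: config.password },
  })
  return stdout
}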
@ -36,11 +36,14 @@ export function checkDatasourceTypes(schema: Integration, config: any) {
async function enrichDatasourceWithValues(datasource: Datasource) {
  const cloned = cloneDeep(datasource)
  const env = await getEnvironmentVariables()
  // Do not process entities, as we do not want to process formulas
  const { entities, ...clonedWithoutEntities } = cloned
  const processed = processObjectSync(
    cloned,
    clonedWithoutEntities,
    { env },
    { onlyFound: true }
  ) as Datasource
  processed.entities = entities
  const definition = await getDefinition(processed.source)
  processed.config = checkDatasourceTypes(definition!, processed.config)
  return {
@ -81,7 +81,9 @@ export const streamFile = (path: string) => {
 * @param {string} fileContents contents which will be written to a temp file.
 * @return {string} the path to the temp file.
 */
export const storeTempFile = (fileContents: any) => {
export const storeTempFile = (
  fileContents: string | NodeJS.ArrayBufferView
) => {
  const path = join(budibaseTempDir(), uuid())
  fs.writeFileSync(path, fileContents)
  return path
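storeTempFile's widened signature mirrors what fs.writeFileSync actually accepts. Since every caller in the Postgres exporter chmods the file to 0600 immediately after writing, a variant worth considering (assumed, not part of this commit) creates the file with that mode in a single step:

import fs from "fs"
import os from "os"
import { join } from "path"
import { v4 as uuid } from "uuid"

// Sketch: create the temp file with 0600 permissions at write time instead of
// writing and then chmod-ing; os.tmpdir() stands in for budibaseTempDir().
export const storeTempFileRestricted = (
  fileContents: string | NodeJS.ArrayBufferView
) => {
  const path = join(os.tmpdir(), uuid())
  fs.writeFileSync(path, fileContents, { mode: 0o600 })
  return path
}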
@ -9,7 +9,7 @@ import {
import env from "../environment"
import { groups } from "@budibase/pro"
import { UserCtx, ContextUser, User, UserGroup } from "@budibase/types"
import { global } from "yargs"
import { cloneDeep } from "lodash"

export function updateAppRole(
  user: ContextUser,
@ -65,16 +65,20 @@ export async function processUser(
  user: ContextUser,
  opts: { appId?: string; groups?: UserGroup[] } = {}
) {
  if (user) {
    delete user.password
  let clonedUser = cloneDeep(user)
  if (clonedUser) {
    delete clonedUser.password
  }
  const appId = opts.appId || context.getAppId()
  user = updateAppRole(user, { appId })
  if (!user.roleId && user?.userGroups?.length) {
    user = await checkGroupRoles(user, { appId, groups: opts?.groups })
  clonedUser = updateAppRole(clonedUser, { appId })
  if (!clonedUser.roleId && clonedUser?.userGroups?.length) {
    clonedUser = await checkGroupRoles(clonedUser, {
      appId,
      groups: opts?.groups,
    })
  }

  return user
  return clonedUser
}

export async function getCachedSelf(ctx: UserCtx, appId: string) {
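The cloneDeep rewrite in processUser matters because the incoming ContextUser may be a shared, cached object: deleting the password on the original reference would strip it from the cache entry itself, not just from this response. A small illustration of the hazard, using the same lodash cloneDeep:

import { cloneDeep } from "lodash"

// Mutating the reference corrupts the shared object.
const cached = { email: "a@b.com", password: "secret" }
const direct = cached
delete (direct as any).password
console.log(cached.password) // undefined - the "cache" lost the field

// Cloning first leaves the shared object intact.
const cached2 = { email: "a@b.com", password: "secret" }
const clone = cloneDeep(cached2)
delete (clone as any).password
console.log(cached2.password) // "secret"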
@ -1,6 +1,5 @@
// @ts-nocheck
import { FieldTypes } from "../../constants"
import { logging } from "@budibase/backend-core"

const parseArrayString = value => {
  if (typeof value === "string") {
@ -12,7 +11,7 @@ const parseArrayString = value => {
      result = JSON.parse(value.replace(/'/g, '"'))
      return result
    } catch (e) {
      logging.logWarn("Could not parse row value", e)
      return value
    }
  }
  return value
@ -17,7 +17,7 @@
      "@budibase/backend-core": ["../backend-core/src"],
      "@budibase/backend-core/*": ["../backend-core/*"],
      "@budibase/shared-core": ["../shared-core/src"],
      "@budibase/pro": ["../pro/packages/pro/src"]
      "@budibase/pro": ["../pro/src"]
    }
  },
  "include": ["src/**/*"],
@ -82,6 +82,10 @@ export interface Table extends Document {
  rowHeight?: number
}

export interface ExternalTable extends Table {
  sourceId: string
}

export interface TableRequest extends Table {
  _rename?: RenameColumn
  created?: boolean
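Because ExternalTable only adds a required sourceId on top of Table, code that handles a mix of internal and external tables can narrow with a one-line guard. A hypothetical helper, not part of this commit:

function isExternalTable(table: Table): table is ExternalTable {
  // sourceId is required on ExternalTable; on a plain Table it may be absent
  return typeof (table as ExternalTable).sourceId === "string"
}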
@ -76,6 +76,7 @@ export enum FilterType {
export enum DatasourceFeature {
  CONNECTION_CHECKING = "connection",
  FETCH_TABLE_NAMES = "fetch_table_names",
  EXPORT_SCHEMA = "export_schema",
}

export interface StepDefinition {
@ -140,6 +141,7 @@ export interface IntegrationBase {
  update?(query: any): Promise<any[] | any>
  delete?(query: any): Promise<any[] | any>
  testConnection?(): Promise<ConnectionInfo>
  getExternalSchema?(): Promise<string>
}

export interface DatasourcePlus extends IntegrationBase {
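The new EXPORT_SCHEMA feature flag and the optional getExternalSchema method are meant to be used together: a caller should consult the integration definition's features map before invoking the method. A hedged sketch of that gate (the function name and wiring are assumptions):

async function exportSchemaIfSupported(
  definition: Integration,
  integration: IntegrationBase
): Promise<string | undefined> {
  // Only call the optional method when the datasource declares support.
  if (definition.features?.[DatasourceFeature.EXPORT_SCHEMA]) {
    return integration.getExternalSchema?.()
  }
  return undefined
}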
@ -21,11 +21,9 @@ const config: Config.InitialOptions = {
}

// add pro sources if they exist
if (fs.existsSync("../pro/packages")) {
  config.moduleNameMapper!["@budibase/pro/(.*)"] =
    "<rootDir>/../pro/packages/pro/$1"
  config.moduleNameMapper!["@budibase/pro"] =
    "<rootDir>/../pro/packages/pro/src"
if (fs.existsSync("../pro/src")) {
  config.moduleNameMapper!["@budibase/pro/(.*)"] = "<rootDir>/../pro/$1"
  config.moduleNameMapper!["@budibase/pro"] = "<rootDir>/../pro/src"
}

export default config
@ -1,5 +1,5 @@
{
  "watch": ["src", "../backend-core", "../pro/packages/pro"],
  "watch": ["src", "../backend-core", "../pro"],
  "ext": "js,ts,json",
  "ignore": [
    "src/**/*.spec.ts",
@ -47,6 +47,7 @@
    "@techpass/passport-openidconnect": "0.3.2",
    "@types/global-agent": "2.1.1",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "dd-trace": "3.13.2",
    "dotenv": "8.6.0",
@ -38,7 +38,7 @@ const MAX_USERS_UPLOAD_LIMIT = 1000

export const save = async (ctx: UserCtx<User, SaveUserResponse>) => {
  try {
    const currentUserId = ctx.user._id
    const currentUserId = ctx.user?._id
    const requestUser = ctx.request.body

    const user = await userSdk.save(requestUser, { currentUserId })
@ -1,9 +1,10 @@
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
import env from "../../../environment"

export const fetch = async (ctx: BBContext) => {
export const fetch = async (ctx: Ctx) => {
  ctx.body = {
    multiTenancy: !!env.MULTI_TENANCY,
    offlineMode: !!env.OFFLINE_MODE,
    cloud: !env.SELF_HOSTED,
    accountPortalUrl: env.ACCOUNT_PORTAL_URL,
    disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL,
@ -24,6 +24,7 @@ describe("/api/system/environment", () => {
      isDev: false,
      multiTenancy: true,
      baseUrl: "http://localhost:10000",
      offlineMode: false,
    })
  })
})
@ -61,6 +61,7 @@ const environment = {
  CHECKLIST_CACHE_TTL: parseIntSafe(process.env.CHECKLIST_CACHE_TTL) || 3600,
  SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
  ENCRYPTED_TEST_PUBLIC_API_KEY: process.env.ENCRYPTED_TEST_PUBLIC_API_KEY,
  OFFLINE_MODE: process.env.OFFLINE_MODE,
  /**
   * Mock the email service in use - links to ethereal hosted emails are logged instead.
   */
@ -16,7 +16,7 @@
      "@budibase/backend-core": ["../backend-core/src"],
      "@budibase/backend-core/*": ["../backend-core/*"],
      "@budibase/shared-core": ["../shared-core/src"],
      "@budibase/pro": ["../pro/packages/pro/src"]
      "@budibase/pro": ["../pro/src"]
    }
  },
  "include": ["src/**/*"],
File diff suppressed because it is too large
@ -10,6 +10,7 @@
  },
  "scripts": {
    "setup": "yarn && node scripts/createEnv.js",
    "user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
    "test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
    "test:watch": "yarn run test --watch",
    "test:debug": "DEBUG=1 yarn run test",
@ -0,0 +1,49 @@
const dotenv = require("dotenv")
const { join } = require("path")
const fs = require("fs")
const fetch = require("node-fetch")

function getVarFromDotEnv(path, varName) {
  const parsed = dotenv.parse(fs.readFileSync(path))
  return parsed[varName]
}

async function createUser() {
  const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
  const qaCorePath = join(__dirname, "..", ".env")
  const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
  const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
  const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
  const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
  const resp = await fetch(`${url}/api/public/v1/users`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-budibase-api-key": apiKey,
    },
    body: JSON.stringify({
      email: username,
      password,
      builder: {
        global: true,
      },
      admin: {
        global: true,
      },
      roles: {},
    }),
  })
  if (resp.status !== 200) {
    throw new Error(await resp.text())
  } else {
    return await resp.json()
  }
}

createUser()
  .then(() => {
    console.log("User created - ready to use")
  })
  .catch(err => {
    console.error("Failed to create user - ", err)
  })
@ -67,11 +67,12 @@ export default class AccountInternalAPIClient {
    }
    const message = `${method} ${url} - ${response.status}`

    const isDebug = process.env.LOG_LEVEL === "debug"
    if (response.status > 499) {
      console.error(message, data)
    } else if (response.status >= 400) {
      console.warn(message, data)
    } else {
    } else if (isDebug) {
      console.debug(message, data)
    }
@ -0,0 +1,112 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("getExternalSchema", () => {
  describe("mssql", () => {
    let config: any

    beforeAll(async () => {
      const password = "Str0Ng_p@ssW0rd!"
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(1433)
      config = {
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      }
    })

    it("can export an empty database", async () => {
      const integration = new mssql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`""`)
    })

    it("can export a database with tables", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL
          );

          CREATE TABLE products (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            price DECIMAL(10, 2) NOT NULL
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })

    it("does not export data", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
          INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })
  })
})
@ -0,0 +1,108 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"

jest.unmock("mysql2/promise")

describe("getExternalSchema", () => {
  describe("mysql", () => {
    let config: any

    beforeAll(async () => {
      const container = await new GenericContainer("mysql")
        .withExposedPorts(3306)
        .withEnv("MYSQL_ROOT_PASSWORD", "admin")
        .withEnv("MYSQL_DATABASE", "db")
        .withEnv("MYSQL_USER", "user")
        .withEnv("MYSQL_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(3306)
      config = {
        host,
        port,
        user: "user",
        database: "db",
        password: "password",
        rejectUnauthorized: true,
      }
    })

    it("can export an empty database", async () => {
      const integration = new mysql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(
        `"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
      )
    })

    it("can export a database with tables", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL,
            PRIMARY KEY (id)
          );

          CREATE TABLE products (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            price DECIMAL,
            PRIMARY KEY (id)
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })

    it("does not export data", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
      })

      await integration.internalQuery({
        sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })
  })
})
@ -0,0 +1,377 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"

jest.unmock("pg")

describe("getExternalSchema", () => {
  describe("postgres", () => {
    let config: any

    // Remove versioning from the outputs to prevent failures when running different pg_dump versions
    function stripResultsVersions(sql: string) {
      const result = sql
        .replace(/\n[^\n]+Dumped from database version[^\n]+\n/, "")
        .replace(/\n[^\n]+Dumped by pg_dump version[^\n]+\n/, "")
        .toString()
      return result
    }

    beforeAll(async () => {
      // This is left on purpose without a tag, so if a new version introduces a breaking change we will be notified
      const container = await new GenericContainer("postgres")
        .withExposedPorts(5432)
        .withEnv("POSTGRES_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(5432)

      config = {
        host,
        port,
        database: "postgres",
        user: "postgres",
        password: "password",
        schema: "public",
        ssl: false,
        rejectUnauthorized: false,
      }
    })

    it("can export an empty database", async () => {
      const integration = new postgres.integration(config)
      const result = await integration.getExternalSchema()

      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --
        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("can export a database with tables", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery(
        {
          sql: `
            CREATE TABLE "users" (
              "id" SERIAL,
              "name" VARCHAR(100) NOT NULL,
              "role" VARCHAR(15) NOT NULL,
              PRIMARY KEY ("id")
            );
            CREATE TABLE "products" (
              "id" SERIAL,
              "name" VARCHAR(100) NOT NULL,
              "price" DECIMAL NOT NULL,
              "owner" INTEGER NULL,
              PRIMARY KEY ("id")
            );
            ALTER TABLE "products" ADD CONSTRAINT "fk_owner" FOREIGN KEY ("owner") REFERENCES "users" ("id");`,
        },
        false
      )

      const result = await integration.getExternalSchema()
      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --
        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL,
            owner integer
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("does not export data", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery(
        {
          sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
            INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
        },
        false
      )

      const result = await integration.getExternalSchema()
      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --
        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL,
            owner integer
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })
  })
})
@ -58,11 +58,12 @@ class BudibaseInternalAPIClient {
    }
    const message = `${method} ${url} - ${response.status}`

    const isDebug = process.env.LOG_LEVEL === "debug"
    if (response.status > 499) {
      console.error(message, data)
    } else if (response.status >= 400) {
      console.warn(message, data)
    } else {
    } else if (isDebug) {
      console.debug(message, data)
    }
@ -1,19 +1,23 @@
import { generator } from "../../shared"
import { CreateAppRequest } from "../../types"

function uniqueWord() {
  return generator.word() + generator.hash()
}

export const generateApp = (
  overrides: Partial<CreateAppRequest> = {}
): CreateAppRequest => ({
  name: generator.word() + generator.hash(),
  url: `/${generator.word() + generator.hash()}`,
  name: uniqueWord(),
  url: `/${uniqueWord()}`,
  ...overrides,
})

// Applications type doesn't work here, safe to add useTemplate parameter?
export const appFromTemplate = (): CreateAppRequest => {
  return {
    name: generator.word(),
    url: `/${generator.word()}`,
    name: uniqueWord(),
    url: `/${uniqueWord()}`,
    // @ts-ignore
    useTemplate: "true",
    templateName: "Near Miss Register",
@ -2,7 +2,7 @@ import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Query } from "@budibase/types"

describe("Internal API - Data Sources: MongoDB", () => {
xdescribe("Internal API - Data Sources: MongoDB", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
@ -1,3 +1,4 @@
process.env.DISABLE_PINO_LOGGER = "1"
import { DEFAULT_TENANT_ID, logging } from "@budibase/backend-core"
import { AccountInternalAPI } from "../account-api"
import * as fixtures from "../internal-api/fixtures"
@ -57,11 +57,12 @@ class BudibasePublicAPIClient {
    }
    const message = `${method} ${url} - ${response.status}`

    const isDebug = process.env.LOG_LEVEL === "debug"
    if (response.status > 499) {
      console.error(message, data)
    } else if (response.status >= 400) {
      console.warn(message, data)
    } else {
    } else if (isDebug) {
      console.debug(message, data)
    }
qa-core/yarn.lock: 2059 changes (file diff suppressed because it is too large)
@ -1,4 +1,4 @@
if [ -d "packages/pro/packages" ]; then
if [ -d "packages/pro/src" ]; then
  cd packages/pro

  yarn
@ -8,10 +8,10 @@ const path = require("path")

const { build } = require("esbuild")

const { default: NodeResolve } = require("@esbuild-plugins/node-resolve")
const {
  default: TsconfigPathsPlugin,
} = require("@esbuild-plugins/tsconfig-paths")
const { nodeExternalsPlugin } = require("esbuild-node-externals")

var argv = require("minimist")(process.argv.slice(2))

@ -25,32 +25,28 @@ function runBuild(entry, outfile) {
    minify: !isDev,
    sourcemap: isDev,
    tsconfig,
    plugins: [
      TsconfigPathsPlugin({ tsconfig }),
      NodeResolve({
        extensions: [".ts", ".js"],
        onResolved: resolved => {
          if (resolved.includes("node_modules") && !resolved.includes("/@budibase/pro/")) {
            return {
              external: true,
            }
          }
          return resolved
        },
      }),
    ],
    plugins: [TsconfigPathsPlugin({ tsconfig }), nodeExternalsPlugin()],
    target: "node14",
    preserveSymlinks: true,
    loader: {
      ".svelte": "copy",
    },
    metafile: true,
    external: [
      "deasync",
      "mock-aws-s3",
      "nock",
      "pino",
      "koa-pino-logger",
      "bull",
    ],
  }

  build({
    ...sharedConfig,
    platform: "node",
    outfile,
  }).then(() => {
  }).then(result => {
    glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => {
      for (const file of files) {
        fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
@ -61,6 +57,11 @@ function runBuild(entry, outfile) {
      `Build successfully in ${(Date.now() - start) / 1000} seconds`
    )
  })

  fs.writeFileSync(
    `dist/${path.basename(outfile)}.meta.json`,
    JSON.stringify(result.metafile)
  )
})
@ -34,7 +34,7 @@ yarn unlink
yarn link
cd -

if [ -d packages/pro/packages ]; then
if [ -d packages/pro/src ]; then
  pro_loaded_locally=true
else
  pro_loaded_locally=false
@ -42,7 +42,7 @@ fi

if [ $pro_loaded_locally = true ]; then
  echo "Linking pro"
  cd packages/pro/packages/pro
  cd packages/pro
  yarn unlink
  yarn link
  cd -
@ -3,7 +3,7 @@ const path = require("path")
const { execSync } = require("child_process")

let version = "0.0.0"
const localPro = fs.existsSync("packages/pro/packages")
const localPro = fs.existsSync("packages/pro/src")
if (!localPro) {
  const branchName = execSync("git rev-parse --abbrev-ref HEAD")
    .toString()