Merge branch 'master' into feature/postgresql-multiple-schema

Michael Drury 2024-01-08 15:39:12 +00:00 committed by GitHub
commit fa41b15ded
12 changed files with 158 additions and 82 deletions


@@ -37,7 +37,6 @@
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"leaflet": "^1.7.1",
- "regexparam": "^1.3.0",
"sanitize-html": "^2.7.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",


@@ -77,4 +77,10 @@ export const API = createAPIClient({
// Log all errors to console
console.warn(`[Client] HTTP ${status} on ${method}:${url}\n\t${message}`)
},
+ onMigrationDetected: _appId => {
+ if (!window.MIGRATING_APP) {
+ // Force a reload, which will display the updating screen while the migration runs
+ window.location.reload()
+ }
+ },
})
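For context, a minimal sketch of how an API client wrapper could invoke the onMigrationDetected hook registered above. The fetch wrapper and the header name are assumptions for illustration only; the real detection mechanism lives in @budibase/frontend-core and is not part of this diff.

// Hypothetical wrapper: not frontend-core's real implementation.
type APIClientHooks = {
  onMigrationDetected?: (appId: string) => void
}

async function fetchWithMigrationCheck(
  url: string,
  hooks: APIClientHooks
): Promise<Response> {
  const response = await fetch(url)
  // Assumed header: the server could tag responses from apps with a pending
  // migration, letting the client force a reload into the updating screen.
  const migratingAppId = response.headers.get("x-budibase-app-migrating")
  if (migratingAppId && hooks.onMigrationDetected) {
    hooks.onMigrationDetected(migratingAppId)
  }
  return response
}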


@@ -0,0 +1,23 @@
<script>
import { Updating } from "@budibase/frontend-core"
import { API } from "../api"
async function isMigrationDone() {
const response = await API.getMigrationStatus()
return response.migrated
}
async function onMigrationDone() {
window.location.reload()
}
</script>
<div class="updating">
<Updating {isMigrationDone} {onMigrationDone} />
</div>
<style>
.updating {
font-family: var(--font-sans);
}
</style>
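The Updating component itself comes from @budibase/frontend-core and is not shown in this diff. The sketch below only illustrates the assumed contract of the two props passed to it: poll isMigrationDone until it resolves true, then call onMigrationDone.

// Assumed polling contract for the Updating component; the interval is illustrative.
async function pollUntilMigrated(
  isMigrationDone: () => Promise<boolean>,
  onMigrationDone: () => void | Promise<void>,
  intervalMs = 1000
): Promise<void> {
  while (!(await isMigrationDone())) {
    // Wait between checks so the status endpoint isn't hammered
    await new Promise(resolve => setTimeout(resolve, intervalMs))
  }
  await onMigrationDone()
}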


@@ -1,4 +1,5 @@
import ClientApp from "./components/ClientApp.svelte"
+ import UpdatingApp from "./components/UpdatingApp.svelte"
import {
builderStore,
appStore,
@@ -52,6 +53,13 @@ const loadBudibase = async () => {
window["##BUDIBASE_APP_EMBEDDED##"] === "true"
)
+ if (window.MIGRATING_APP) {
+ new UpdatingApp({
+ target: window.document.body,
+ })
+ return
+ }
// Fetch environment info
if (!get(environmentStore)?.loaded) {
await environmentStore.actions.fetchEnvironment()


@@ -7,7 +7,7 @@
"../shared-core",
"../string-templates"
],
- "ext": "js,ts,json",
+ "ext": "js,ts,json,svelte",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js", "../*/dist/**/*"],
"exec": "yarn build && node ./dist/index.js"
}


@@ -14,10 +14,7 @@ import {
DatasourcePlus,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
- IntegrationBase,
- Schema,
SourceName,
- Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@@ -28,65 +25,6 @@ import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
import { isEqual } from "lodash"
- async function getConnector(
- datasource: Datasource
- ): Promise<IntegrationBase | DatasourcePlus> {
- const Connector = await getIntegration(datasource.source)
- // can't enrich if it doesn't have an ID yet
- if (datasource._id) {
- datasource = await sdk.datasources.enrich(datasource)
- }
- // Connect to the DB and build the schema
- return new Connector(datasource.config)
- }
- async function getAndMergeDatasource(datasource: Datasource) {
- let existingDatasource: undefined | Datasource
- if (datasource._id) {
- existingDatasource = await sdk.datasources.get(datasource._id)
- }
- let enrichedDatasource = datasource
- if (existingDatasource) {
- enrichedDatasource = sdk.datasources.mergeConfigs(
- datasource,
- existingDatasource
- )
- }
- return await sdk.datasources.enrich(enrichedDatasource)
- }
- async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
- const connector = (await getConnector(datasource)) as DatasourcePlus
- return await connector.buildSchema(
- datasource._id!,
- datasource.entities! as Record<string, Table>
- )
- }
- async function buildFilteredSchema(
- datasource: Datasource,
- filter?: string[]
- ): Promise<Schema> {
- let schema = await buildSchemaHelper(datasource)
- if (!filter) {
- return schema
- }
- let filteredSchema: Schema = { tables: {}, errors: {} }
- for (let key in schema.tables) {
- if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
- filteredSchema.tables[key] = schema.tables[key]
- }
- }
- for (let key in schema.errors) {
- if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
- filteredSchema.errors[key] = schema.errors[key]
- }
- }
- return filteredSchema
- }
export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.datasources.fetch()
}
@@ -95,8 +33,10 @@ export async function verify(
ctx: UserCtx<VerifyDatasourceRequest, VerifyDatasourceResponse>
) {
const { datasource } = ctx.request.body
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = await getConnector(enrichedDatasource)
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.testConnection) {
ctx.throw(400, "Connection information verification not supported")
}
@@ -112,8 +52,12 @@ export async function information(
ctx: UserCtx<FetchDatasourceInfoRequest, FetchDatasourceInfoResponse>
) {
const { datasource } = ctx.request.body
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = (await getConnector(enrichedDatasource)) as DatasourcePlus
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = (await sdk.datasources.getConnector(
+ enrichedDatasource
+ )) as DatasourcePlus
if (!connector.getTableNames) {
ctx.throw(400, "Table name fetching not supported by datasource")
}
@@ -128,7 +72,10 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
- const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
+ const { tables, errors } = await sdk.datasources.buildFilteredSchema(
+ datasource,
+ tablesFilter
+ )
datasource.entities = tables
setDefaultDisplayColumns(datasource)
@@ -280,7 +227,10 @@ export async function save(
let errors: Record<string, string> = {}
if (fetchSchema) {
- const schema = await buildFilteredSchema(datasource, tablesFilter)
+ const schema = await sdk.datasources.buildFilteredSchema(
+ datasource,
+ tablesFilter
+ )
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
@@ -384,8 +334,10 @@ export async function query(ctx: UserCtx) {
export async function getExternalSchema(ctx: UserCtx) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = await getConnector(enrichedDatasource)
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")


@@ -25,8 +25,12 @@ import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
import { App, Ctx, ProcessAttachmentResponse } from "@budibase/types"
+ import {
+ getAppMigrationVersion,
+ getLatestMigrationId,
+ } from "../../../appMigrations"
- const send = require("koa-send")
+ import send from "koa-send"
export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}`
@@ -125,7 +129,26 @@ export const deleteObjects = async function (ctx: Ctx) {
)
}
+ const requiresMigration = async (ctx: Ctx) => {
+ const appId = context.getAppId()
+ if (!appId) {
+ ctx.throw("AppId could not be found")
+ }
+ const latestMigration = getLatestMigrationId()
+ if (!latestMigration) {
+ return false
+ }
+ const latestMigrationApplied = await getAppMigrationVersion(appId)
+ const requiresMigrations = latestMigrationApplied !== latestMigration
+ return requiresMigrations
+ }
export const serveApp = async function (ctx: Ctx) {
+ const needMigrations = await requiresMigration(ctx)
const bbHeaderEmbed =
ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
@@ -145,8 +168,8 @@ export const serveApp = async function (ctx: Ctx) {
let appId = context.getAppId()
if (!env.isJest()) {
- const App = require("./templates/BudibaseApp.svelte").default
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
+ const App = require("./templates/BudibaseApp.svelte").default
const { head, html, css } = App.render({
metaImage:
branding?.metaImageUrl ||
@@ -167,6 +190,7 @@ export const serveApp = async function (ctx: Ctx) {
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
+ appMigrating: needMigrations,
})
const appHbs = loadHandlebarsFile(appHbsPath)
ctx.body = await processString(appHbs, {
@@ -273,7 +297,6 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
const { bucket, key } = ctx.request.body || {}
if (!bucket || !key) {
ctx.throw(400, "bucket and key values are required")
- return
}
try {
const s3 = new AWS.S3({
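The status endpoint that UpdatingApp polls via API.getMigrationStatus() is not part of this diff. Below is a hedged sketch of what such a handler could look like, reusing the same appMigrations helpers as requiresMigration above; the handler name and the { migrated: boolean } response shape are inferred from the client code, not taken from the source.

// Illustrative only: mirrors requiresMigration, exposed as a status response.
// `context` is assumed to come from @budibase/backend-core, as used elsewhere in this file.
export const getMigrationStatus = async function (ctx: Ctx) {
  const appId = context.getAppId()
  if (!appId) {
    ctx.throw(400, "AppId could not be found")
  }
  const latestAvailable = getLatestMigrationId()
  const latestApplied = await getAppMigrationVersion(appId)
  ctx.body = {
    // Migrated when nothing newer than the applied migration is available
    migrated: !latestAvailable || latestApplied === latestAvailable,
  }
}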


@@ -8,6 +8,7 @@
export let clientLibPath
export let usedPlugins
+ export let appMigrating
</script>
<svelte:head>
@@ -110,6 +111,11 @@
<script type="application/javascript">
window.INIT_TIME = Date.now()
</script>
+ {#if appMigrating}
+ <script type="application/javascript">
+ window.MIGRATING_APP = true
+ </script>
+ {/if}
<script type="application/javascript" src={clientLibPath}>
</script>
<!-- Custom components need inserted after the core client library -->


@@ -367,8 +367,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
})
}
- let finalizedTables = finaliseExternalTables(tables, entities)
- let errors = checkExternalTables(finalizedTables)
+ const finalizedTables = finaliseExternalTables(tables, entities)
+ const errors = checkExternalTables(finalizedTables)
return { tables: finalizedTables, errors }
} catch (err) {
// @ts-ignore


@@ -1,5 +1,7 @@
import * as datasources from "./datasources"
+ import * as plus from "./plus"
export default {
...datasources,
+ ...plus,
}


@@ -0,0 +1,62 @@
import {
Datasource,
DatasourcePlus,
IntegrationBase,
Schema,
} from "@budibase/types"
import * as datasources from "./datasources"
import { getIntegration } from "../../../integrations"
export async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<Schema> {
const schema = await buildSchemaHelper(datasource)
if (!filter) {
return schema
}
let filteredSchema: Schema = { tables: {}, errors: {} }
for (let key in schema.tables) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.tables[key] = schema.tables[key]
}
}
for (let key in schema.errors) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.errors[key] = schema.errors[key]
}
}
return filteredSchema
}
async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
const externalSchema = await connector.buildSchema(
datasource._id!,
datasource.entities!
)
return externalSchema
}
export async function getConnector(
datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
const Connector = await getIntegration(datasource.source)
// can't enrich if it doesn't have an ID yet
if (datasource._id) {
datasource = await datasources.enrich(datasource)
}
// Connect to the DB and build the schema
return new Connector(datasource.config)
}
export async function getAndMergeDatasource(datasource: Datasource) {
if (datasource._id) {
const existingDatasource = await datasources.get(datasource._id)
datasource = datasources.mergeConfigs(datasource, existingDatasource)
}
return await datasources.enrich(datasource)
}
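An illustrative call into the relocated helpers, matching how the datasource controller now uses them; the import path follows the controller, and the datasource ID and filter values are placeholders.

// Placeholder usage sketch, not part of the commit.
import sdk from "../../../sdk"

async function refreshFilteredSchema(datasourceId: string) {
  const datasource = await sdk.datasources.get(datasourceId)
  // Table names in the filter are matched case-insensitively (see buildFilteredSchema)
  const { tables, errors } = await sdk.datasources.buildFilteredSchema(
    datasource,
    ["users", "orders"]
  )
  return { tables, errors }
}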


@@ -18312,11 +18312,6 @@ regexparam@2.0.1:
resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.1.tgz#c912f5dae371e3798100b3c9ce22b7414d0889fa"
integrity sha512-zRgSaYemnNYxUv+/5SeoHI0eJIgTL/A2pUtXUPLHQxUldagouJ9p+K6IbIZ/JiQuCEv2E2B1O11SjVQy3aMCkw==
- regexparam@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-1.3.0.tgz#2fe42c93e32a40eff6235d635e0ffa344b92965f"
- integrity sha512-6IQpFBv6e5vz1QAqI+V4k8P2e/3gRrqfCJ9FI+O1FLQTO+Uz6RXZEZOPmTJ6hlGj7gkERzY5BRCv09whKP96/g==
regexpu-core@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.1.tgz#66900860f88def39a5cb79ebd9490e84f17bcdfb"