diff --git a/hosting/couchdb/runner.sh b/hosting/couchdb/runner.sh
index e56b8e0e7f..9f6a853ca7 100644
--- a/hosting/couchdb/runner.sh
+++ b/hosting/couchdb/runner.sh
@@ -76,6 +76,6 @@ done
# CouchDB needs the `_users` and `_replicator` databases to exist before it will
# function correctly, so we create them here.
-curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
-curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
+curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
+curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity
\ No newline at end of file
diff --git a/lerna.json b/lerna.json
index d6fa262685..386fbe425a 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.14.3",
+ "version": "2.14.4",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/packages/account-portal b/packages/account-portal
index b11e6b4737..bcd86d9034 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit b11e6b47370d9b77c63648b45929c86bfed6360c
+Subproject commit bcd86d9034ba954f013da4c10171bf495ab88189
diff --git a/packages/backend-core/src/middleware/authenticated.ts b/packages/backend-core/src/middleware/authenticated.ts
index e8e16589de..d357dbdbdc 100644
--- a/packages/backend-core/src/middleware/authenticated.ts
+++ b/packages/backend-core/src/middleware/authenticated.ts
@@ -172,11 +172,8 @@ export default function (
tracer.setUser({
id: user?._id,
tenantId: user?.tenantId,
- admin: user?.admin,
- builder: user?.builder,
budibaseAccess: user?.budibaseAccess,
status: user?.status,
- roles: user?.roles,
})
}
diff --git a/packages/builder/src/api.js b/packages/builder/src/api.js
index 37894d9bbc..ac878bf82f 100644
--- a/packages/builder/src/api.js
+++ b/packages/builder/src/api.js
@@ -5,7 +5,7 @@ import {
} from "@budibase/frontend-core"
import { store } from "./builderStore"
import { get } from "svelte/store"
-import { auth } from "./stores/portal"
+import { auth, navigation } from "./stores/portal"
export const API = createAPIClient({
attachHeaders: headers => {
@@ -45,4 +45,15 @@ export const API = createAPIClient({
}
}
},
+ onMigrationDetected: appId => {
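+    // Send the builder to the "updating" screen for this app, passing the
+    // current path as a returnUrl query parameter so the user can be brought
+    // back once the migration has finished.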
+ const updatingUrl = `/builder/app/updating/${appId}`
+
+ if (window.location.pathname === updatingUrl) {
+ return
+ }
+
+ get(navigation).goto(
+ `${updatingUrl}?returnUrl=${encodeURIComponent(window.location.pathname)}`
+ )
+ },
})
diff --git a/packages/builder/src/pages/builder/_layout.svelte b/packages/builder/src/pages/builder/_layout.svelte
index b216958045..62d3951fb5 100644
--- a/packages/builder/src/pages/builder/_layout.svelte
+++ b/packages/builder/src/pages/builder/_layout.svelte
@@ -1,6 +1,6 @@
+
+
diff --git a/packages/builder/src/stores/portal/index.js b/packages/builder/src/stores/portal/index.js
index e70df5c3ee..7f1b9e10f0 100644
--- a/packages/builder/src/stores/portal/index.js
+++ b/packages/builder/src/stores/portal/index.js
@@ -16,5 +16,6 @@ export { environment } from "./environment"
export { menu } from "./menu"
export { auditLogs } from "./auditLogs"
export { features } from "./features"
+export { navigation } from "./navigation"
export const sideBarCollapsed = writable(false)
diff --git a/packages/builder/src/stores/portal/navigation.js b/packages/builder/src/stores/portal/navigation.js
new file mode 100644
index 0000000000..67a06eff53
--- /dev/null
+++ b/packages/builder/src/stores/portal/navigation.js
@@ -0,0 +1,31 @@
+import { writable } from "svelte/store"
+
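+// Holds the router's `goto` function so that modules outside Svelte components
+// (such as the API client in api.js) can trigger navigation after the store
+// has been initialised with init().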
+export function createNavigationStore() {
+ const store = writable({
+ initialisated: false,
+ goto: undefined,
+ })
+ const { set, subscribe } = store
+
+ const init = gotoFunc => {
+ if (typeof gotoFunc !== "function") {
+ throw new Error(
+ `gotoFunc must be a function, found a "${typeof gotoFunc}" instead`
+ )
+ }
+
+ set({
+ initialisated: true,
+ goto: gotoFunc,
+ })
+ }
+
+ return {
+ subscribe,
+ actions: {
+ init,
+ },
+ }
+}
+
+export const navigation = createNavigationStore()
diff --git a/packages/client/package.json b/packages/client/package.json
index 39ddb4bd49..227c7b25d4 100644
--- a/packages/client/package.json
+++ b/packages/client/package.json
@@ -37,7 +37,6 @@
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"leaflet": "^1.7.1",
- "regexparam": "^1.3.0",
"sanitize-html": "^2.7.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
diff --git a/packages/client/src/api/api.js b/packages/client/src/api/api.js
index 8488b702b6..d4c8faa4d2 100644
--- a/packages/client/src/api/api.js
+++ b/packages/client/src/api/api.js
@@ -77,4 +77,10 @@ export const API = createAPIClient({
// Log all errors to console
console.warn(`[Client] HTTP ${status} on ${method}:${url}\n\t${message}`)
},
+ onMigrationDetected: _appId => {
+ if (!window.MIGRATING_APP) {
+      // Force a reload, which will display the updating screen while the migration is running
+ window.location.reload()
+ }
+ },
})
diff --git a/packages/client/src/components/UpdatingApp.svelte b/packages/client/src/components/UpdatingApp.svelte
new file mode 100644
index 0000000000..74e5500715
--- /dev/null
+++ b/packages/client/src/components/UpdatingApp.svelte
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
diff --git a/packages/client/src/components/app/blocks/MultiStepFormblock.svelte b/packages/client/src/components/app/blocks/MultiStepFormblock.svelte
index 0573ba15b5..b90d0d4c7b 100644
--- a/packages/client/src/components/app/blocks/MultiStepFormblock.svelte
+++ b/packages/client/src/components/app/blocks/MultiStepFormblock.svelte
@@ -14,6 +14,7 @@
const { fetchDatasourceSchema } = getContext("sdk")
const component = getContext("component")
+ const context = getContext("context")
// Set current step context to force child form to use it
const currentStep = writable(1)
@@ -157,18 +158,23 @@
-
- {#each step.fields as field, fieldIdx (`${field.field || field.name}_${stepIdx}_${fieldIdx}`)}
- {#if getComponentForField(field)}
-
- {/if}
- {/each}
+
+
+ {#each step.fields as field, fieldIdx (`${field.field || field.name}_${stepIdx}_${fieldIdx}`)}
+ {#if getComponentForField(field)}
+
+ {/if}
+ {/each}
+
+
+
diff --git a/packages/client/src/index.js b/packages/client/src/index.js
index a3cb4206c3..f6ed23b2a9 100644
--- a/packages/client/src/index.js
+++ b/packages/client/src/index.js
@@ -1,4 +1,5 @@
import ClientApp from "./components/ClientApp.svelte"
+import UpdatingApp from "./components/UpdatingApp.svelte"
import {
builderStore,
appStore,
@@ -52,6 +53,13 @@ const loadBudibase = async () => {
window["##BUDIBASE_APP_EMBEDDED##"] === "true"
)
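+  // window.MIGRATING_APP is set by the server-rendered template while an app
+  // migration is pending; in that case mount the lightweight updating screen
+  // instead of the full client app.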
+ if (window.MIGRATING_APP) {
+ new UpdatingApp({
+ target: window.document.body,
+ })
+ return
+ }
+
// Fetch environment info
if (!get(environmentStore)?.loaded) {
await environmentStore.actions.fetchEnvironment()
diff --git a/packages/frontend-core/src/api/index.js b/packages/frontend-core/src/api/index.js
index d4b4f3636e..066ab16f6e 100644
--- a/packages/frontend-core/src/api/index.js
+++ b/packages/frontend-core/src/api/index.js
@@ -33,6 +33,7 @@ import { buildEnvironmentVariableEndpoints } from "./environmentVariables"
import { buildEventEndpoints } from "./events"
import { buildAuditLogsEndpoints } from "./auditLogs"
import { buildLogsEndpoints } from "./logs"
+import { buildMigrationEndpoints } from "./migrations"
/**
* Random identifier to uniquely identify a session in a tab. This is
@@ -298,6 +299,7 @@ export const createAPIClient = config => {
...buildEventEndpoints(API),
...buildAuditLogsEndpoints(API),
...buildLogsEndpoints(API),
+ ...buildMigrationEndpoints(API),
viewV2: buildViewV2Endpoints(API),
}
}
diff --git a/packages/frontend-core/src/api/migrations.js b/packages/frontend-core/src/api/migrations.js
new file mode 100644
index 0000000000..2da70d6fcb
--- /dev/null
+++ b/packages/frontend-core/src/api/migrations.js
@@ -0,0 +1,10 @@
+export const buildMigrationEndpoints = API => ({
+ /**
+ * Gets the info about the current app migration
+ */
+ getMigrationStatus: async () => {
+ return await API.get({
+ url: "/api/migrations/status",
+ })
+ },
+})
diff --git a/packages/frontend-core/src/components/Updating.svelte b/packages/frontend-core/src/components/Updating.svelte
new file mode 100644
index 0000000000..7d4a101fee
--- /dev/null
+++ b/packages/frontend-core/src/components/Updating.svelte
@@ -0,0 +1,79 @@
+
+
+
+
+
+ {#if !timedOut}
+ Please wait and we will be back in a second!
+ {:else}
+ An error occurred, please try again later.
+
+      Contact support if the issue persists.
+ {/if}
+
+
+
diff --git a/packages/frontend-core/src/components/index.js b/packages/frontend-core/src/components/index.js
index 01a7c78cb8..f724e1e4d9 100644
--- a/packages/frontend-core/src/components/index.js
+++ b/packages/frontend-core/src/components/index.js
@@ -3,4 +3,5 @@ export { default as TestimonialPage } from "./TestimonialPage.svelte"
export { default as Testimonial } from "./Testimonial.svelte"
export { default as UserAvatar } from "./UserAvatar.svelte"
export { default as UserAvatars } from "./UserAvatars.svelte"
+export { default as Updating } from "./Updating.svelte"
export { Grid } from "./grid"
diff --git a/packages/server/nodemon.json b/packages/server/nodemon.json
index 33d277dd64..5535e0772e 100644
--- a/packages/server/nodemon.json
+++ b/packages/server/nodemon.json
@@ -7,7 +7,7 @@
"../shared-core",
"../string-templates"
],
- "ext": "js,ts,json",
+ "ext": "js,ts,json,svelte",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js", "../*/dist/**/*"],
"exec": "yarn build && node ./dist/index.js"
}
diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts
index 0a7430aa94..8c177f1704 100644
--- a/packages/server/src/api/controllers/datasource.ts
+++ b/packages/server/src/api/controllers/datasource.ts
@@ -14,10 +14,7 @@ import {
DatasourcePlus,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
- IntegrationBase,
- Schema,
SourceName,
- Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@@ -28,65 +25,6 @@ import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
import { isEqual } from "lodash"
-async function getConnector(
- datasource: Datasource
-): Promise<IntegrationBase> {
- const Connector = await getIntegration(datasource.source)
- // can't enrich if it doesn't have an ID yet
- if (datasource._id) {
- datasource = await sdk.datasources.enrich(datasource)
- }
- // Connect to the DB and build the schema
- return new Connector(datasource.config)
-}
-
-async function getAndMergeDatasource(datasource: Datasource) {
- let existingDatasource: undefined | Datasource
- if (datasource._id) {
- existingDatasource = await sdk.datasources.get(datasource._id)
- }
- let enrichedDatasource = datasource
- if (existingDatasource) {
- enrichedDatasource = sdk.datasources.mergeConfigs(
- datasource,
- existingDatasource
- )
- }
- return await sdk.datasources.enrich(enrichedDatasource)
-}
-
-async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
- const connector = (await getConnector(datasource)) as DatasourcePlus
- return await connector.buildSchema(
- datasource._id!,
-    datasource.entities! as Record<string, Table>
- )
-}
-
-async function buildFilteredSchema(
- datasource: Datasource,
- filter?: string[]
-): Promise<Schema> {
- let schema = await buildSchemaHelper(datasource)
- if (!filter) {
- return schema
- }
-
- let filteredSchema: Schema = { tables: {}, errors: {} }
- for (let key in schema.tables) {
- if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
- filteredSchema.tables[key] = schema.tables[key]
- }
- }
-
- for (let key in schema.errors) {
- if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
- filteredSchema.errors[key] = schema.errors[key]
- }
- }
- return filteredSchema
-}
-
export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.datasources.fetch()
}
@@ -95,8 +33,10 @@ export async function verify(
   ctx: UserCtx<VerifyDatasourceRequest, VerifyDatasourceResponse>
) {
const { datasource } = ctx.request.body
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = await getConnector(enrichedDatasource)
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.testConnection) {
ctx.throw(400, "Connection information verification not supported")
}
@@ -112,8 +52,12 @@ export async function information(
   ctx: UserCtx<FetchDatasourceInfoRequest, FetchDatasourceInfoResponse>
) {
const { datasource } = ctx.request.body
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = (await getConnector(enrichedDatasource)) as DatasourcePlus
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = (await sdk.datasources.getConnector(
+ enrichedDatasource
+ )) as DatasourcePlus
if (!connector.getTableNames) {
ctx.throw(400, "Table name fetching not supported by datasource")
}
@@ -128,7 +72,10 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
- const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
+ const { tables, errors } = await sdk.datasources.buildFilteredSchema(
+ datasource,
+ tablesFilter
+ )
datasource.entities = tables
setDefaultDisplayColumns(datasource)
@@ -280,7 +227,10 @@ export async function save(
   let errors: Record<string, string> = {}
if (fetchSchema) {
- const schema = await buildFilteredSchema(datasource, tablesFilter)
+ const schema = await sdk.datasources.buildFilteredSchema(
+ datasource,
+ tablesFilter
+ )
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
@@ -384,8 +334,10 @@ export async function query(ctx: UserCtx) {
export async function getExternalSchema(ctx: UserCtx) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
- const enrichedDatasource = await getAndMergeDatasource(datasource)
- const connector = await getConnector(enrichedDatasource)
+ const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
+ datasource
+ )
+ const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index 2963546e7f..5f383e837d 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -25,8 +25,12 @@ import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
import { App, Ctx, ProcessAttachmentResponse } from "@budibase/types"
+import {
+ getAppMigrationVersion,
+ getLatestMigrationId,
+} from "../../../appMigrations"
-const send = require("koa-send")
+import send from "koa-send"
export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}`
@@ -125,7 +129,26 @@ export const deleteObjects = async function (ctx: Ctx) {
)
}
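+// An app requires a migration when the latest known migration ID differs from
+// the migration version recorded for the current app.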
+const requiresMigration = async (ctx: Ctx) => {
+ const appId = context.getAppId()
+ if (!appId) {
+ ctx.throw("AppId could not be found")
+ }
+
+ const latestMigration = getLatestMigrationId()
+ if (!latestMigration) {
+ return false
+ }
+
+ const latestMigrationApplied = await getAppMigrationVersion(appId)
+
+ const requiresMigrations = latestMigrationApplied !== latestMigration
+ return requiresMigrations
+}
+
export const serveApp = async function (ctx: Ctx) {
+ const needMigrations = await requiresMigration(ctx)
+
const bbHeaderEmbed =
ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
@@ -145,8 +168,8 @@ export const serveApp = async function (ctx: Ctx) {
let appId = context.getAppId()
if (!env.isJest()) {
- const App = require("./templates/BudibaseApp.svelte").default
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
+ const App = require("./templates/BudibaseApp.svelte").default
const { head, html, css } = App.render({
metaImage:
branding?.metaImageUrl ||
@@ -167,6 +190,7 @@ export const serveApp = async function (ctx: Ctx) {
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
+ appMigrating: needMigrations,
})
const appHbs = loadHandlebarsFile(appHbsPath)
ctx.body = await processString(appHbs, {
@@ -273,7 +297,6 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
const { bucket, key } = ctx.request.body || {}
if (!bucket || !key) {
ctx.throw(400, "bucket and key values are required")
- return
}
try {
const s3 = new AWS.S3({
diff --git a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
index 32edb6dc7b..7819368fc0 100644
--- a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
+++ b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
@@ -8,6 +8,7 @@
export let clientLibPath
export let usedPlugins
+ export let appMigrating
@@ -110,6 +111,11 @@
+ {#if appMigrating}
+      <script>
+        window.MIGRATING_APP = true
+      </script>
+ {/if}
diff --git a/packages/server/src/appMigrations/index.ts b/packages/server/src/appMigrations/index.ts
index b382d8b533..0758b9f324 100644
--- a/packages/server/src/appMigrations/index.ts
+++ b/packages/server/src/appMigrations/index.ts
@@ -17,7 +17,7 @@ export const getLatestMigrationId = () =>
.sort()
.reverse()[0]
-const getTimestamp = (versionId: string) => versionId?.split("_")[0]
+const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
export async function checkMissingMigrations(
ctx: UserCtx,
diff --git a/packages/server/src/ddApm.ts b/packages/server/src/ddApm.ts
index 6c9b8aa289..f0f3ec6055 100644
--- a/packages/server/src/ddApm.ts
+++ b/packages/server/src/ddApm.ts
@@ -3,5 +3,9 @@ import apm from "dd-trace"
// enable APM if configured
if (process.env.DD_APM_ENABLED) {
console.log("Starting dd-trace")
- apm.init()
+ apm.init({
+    // @ts-ignore for some reason the dd-trace types don't include this option,
+    // even though it is described in the docs.
+ debug: process.env.DD_ENV === "qa",
+ })
}
diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts
index 67e4fee81c..600566c813 100644
--- a/packages/server/src/integration-test/postgres.spec.ts
+++ b/packages/server/src/integration-test/postgres.spec.ts
@@ -1118,4 +1118,76 @@ describe("postgres integrations", () => {
})
})
})
+
+ describe("Integration compatibility with postgres search_path", () => {
+ let client: Client, pathDatasource: Datasource
+ const schema1 = "test1",
+ schema2 = "test-2"
+
+ beforeAll(async () => {
+ const dsConfig = await databaseTestProviders.postgres.getDsConfig()
+ const dbConfig = dsConfig.config!
+
+ client = new Client(dbConfig)
+ await client.connect()
+ await client.query(`CREATE SCHEMA "${schema1}";`)
+ await client.query(`CREATE SCHEMA "${schema2}";`)
+
+ const pathConfig: any = {
+ ...dsConfig,
+ config: {
+ ...dbConfig,
+ schema: `${schema1}, ${schema2}`,
+ },
+ }
+ pathDatasource = await config.api.datasource.create(pathConfig)
+ })
+
+ afterAll(async () => {
+ await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
+ await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
+ await client.end()
+ })
+
+ it("discovers tables from any schema in search path", async () => {
+ await client.query(
+ `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
+ )
+ await client.query(
+ `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
+ )
+ const response = await makeRequest("post", "/api/datasources/info", {
+ datasource: pathDatasource,
+ })
+ expect(response.status).toBe(200)
+ expect(response.body.tableNames).toBeDefined()
+ expect(response.body.tableNames).toEqual(
+ expect.arrayContaining(["table1", "table2"])
+ )
+ })
+
+ it("does not mix columns from different tables", async () => {
+ const repeated_table_name = "table_same_name"
+ await client.query(
+ `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
+ )
+ await client.query(
+ `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
+ )
+ const response = await makeRequest(
+ "post",
+ `/api/datasources/${pathDatasource._id}/schema`,
+ {
+ tablesFilter: [repeated_table_name],
+ }
+ )
+ expect(response.status).toBe(200)
+ expect(
+ response.body.datasource.entities[repeated_table_name].schema
+ ).toBeDefined()
+ const schema =
+ response.body.datasource.entities[repeated_table_name].schema
+ expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
+ })
+ })
})
diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts
index de3bf0e59e..ddc6e82f6d 100644
--- a/packages/server/src/integrations/postgres.ts
+++ b/packages/server/src/integrations/postgres.ts
@@ -159,7 +159,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
JOIN pg_index ON pg_class.oid = pg_index.indrelid AND pg_index.indisprimary
JOIN pg_attribute ON pg_attribute.attrelid = pg_class.oid AND pg_attribute.attnum = ANY(pg_index.indkey)
JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
- WHERE pg_namespace.nspname = '${this.config.schema}';
+ WHERE pg_namespace.nspname = ANY(current_schemas(false))
+ AND pg_table_is_visible(pg_class.oid);
`
ENUM_VALUES = () => `
@@ -219,8 +220,12 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
if (!this.config.schema) {
this.config.schema = "public"
}
- await this.client.query(`SET search_path TO "${this.config.schema}"`)
- this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
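+    // The schema setting may contain a comma-separated list of schemas; each
+    // entry is quoted and included in the connection's search_path so columns
+    // are read from every visible schema.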
+ const search_path = this.config.schema
+ .split(",")
+ .map(item => `"${item.trim()}"`)
+ await this.client.query(`SET search_path TO ${search_path.join(",")};`)
+ this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = ANY(current_schemas(false))
+ AND pg_table_is_visible(to_regclass(table_schema || '.' || table_name));`
this.open = true
}
@@ -362,8 +367,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
})
}
- let finalizedTables = finaliseExternalTables(tables, entities)
- let errors = checkExternalTables(finalizedTables)
+ const finalizedTables = finaliseExternalTables(tables, entities)
+ const errors = checkExternalTables(finalizedTables)
return { tables: finalizedTables, errors }
} catch (err) {
// @ts-ignore
diff --git a/packages/server/src/jsRunner.ts b/packages/server/src/jsRunner.ts
index a9dcd506d7..6fde3ab82a 100644
--- a/packages/server/src/jsRunner.ts
+++ b/packages/server/src/jsRunner.ts
@@ -12,36 +12,51 @@ export function init() {
const perRequestLimit = env.JS_PER_REQUEST_TIME_LIMIT_MS
let track: TrackerFn = f => f()
if (perRequestLimit) {
- const bbCtx = context.getCurrentContext()
- if (bbCtx) {
- if (!bbCtx.jsExecutionTracker) {
- bbCtx.jsExecutionTracker =
- timers.ExecutionTimeTracker.withLimit(perRequestLimit)
+ tracer.trace("runJS.setupTracker", {}, span => {
+ const bbCtx = context.getCurrentContext()
+ if (bbCtx) {
+ if (!bbCtx.jsExecutionTracker) {
+ span?.addTags({
+ createdExecutionTracker: true,
+ })
+ bbCtx.jsExecutionTracker =
+ timers.ExecutionTimeTracker.withLimit(perRequestLimit)
+ }
+ span?.addTags({
+ js: {
+ limitMS: bbCtx.jsExecutionTracker.limitMs,
+ elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
+ },
+ })
+ // We call checkLimit() here to prevent paying the cost of creating
+ // a new VM context below when we don't need to.
+ bbCtx.jsExecutionTracker.checkLimit()
+ track = bbCtx.jsExecutionTracker.track.bind(
+ bbCtx.jsExecutionTracker
+ )
}
- span?.addTags({
- js: {
- limitMS: bbCtx.jsExecutionTracker.limitMs,
- elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
- },
- })
- // We call checkLimit() here to prevent paying the cost of creating
- // a new VM context below when we don't need to.
- bbCtx.jsExecutionTracker.checkLimit()
- track = bbCtx.jsExecutionTracker.track.bind(bbCtx.jsExecutionTracker)
- }
+ })
}
- ctx = {
- ...ctx,
- alert: undefined,
- setInterval: undefined,
- setTimeout: undefined,
- }
- vm.createContext(ctx)
+ ctx = tracer.trace("runJS.ctxClone", {}, span => {
+ return {
+ ...ctx,
+ alert: undefined,
+ setInterval: undefined,
+ setTimeout: undefined,
+ }
+ })
+
+ tracer.trace("runJS.vm.createContext", {}, span => {
+ vm.createContext(ctx)
+ })
+
return track(() =>
- vm.runInNewContext(js, ctx, {
- timeout: env.JS_PER_EXECUTION_TIME_LIMIT_MS,
- })
+ tracer.trace("runJS.vm.runInNewContext", {}, span =>
+ vm.runInNewContext(js, ctx, {
+ timeout: env.JS_PER_EXECUTION_TIME_LIMIT_MS,
+ })
+ )
)
})
})
diff --git a/packages/server/src/sdk/app/datasources/index.ts b/packages/server/src/sdk/app/datasources/index.ts
index 1ce6b0e689..8f06e989d3 100644
--- a/packages/server/src/sdk/app/datasources/index.ts
+++ b/packages/server/src/sdk/app/datasources/index.ts
@@ -1,5 +1,7 @@
import * as datasources from "./datasources"
+import * as plus from "./plus"
export default {
...datasources,
+ ...plus,
}
diff --git a/packages/server/src/sdk/app/datasources/plus.ts b/packages/server/src/sdk/app/datasources/plus.ts
new file mode 100644
index 0000000000..117d19a6a7
--- /dev/null
+++ b/packages/server/src/sdk/app/datasources/plus.ts
@@ -0,0 +1,62 @@
+import {
+ Datasource,
+ DatasourcePlus,
+ IntegrationBase,
+ Schema,
+} from "@budibase/types"
+import * as datasources from "./datasources"
+import { getIntegration } from "../../../integrations"
+
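+// Builds the schema for a datasource and, when a table filter is supplied,
+// keeps only the tables (and associated errors) whose names match the filter,
+// compared case-insensitively.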
+export async function buildFilteredSchema(
+ datasource: Datasource,
+ filter?: string[]
+): Promise<Schema> {
+ const schema = await buildSchemaHelper(datasource)
+ if (!filter) {
+ return schema
+ }
+
+ let filteredSchema: Schema = { tables: {}, errors: {} }
+ for (let key in schema.tables) {
+ if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+ filteredSchema.tables[key] = schema.tables[key]
+ }
+ }
+
+ for (let key in schema.errors) {
+ if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+ filteredSchema.errors[key] = schema.errors[key]
+ }
+ }
+ return filteredSchema
+}
+
+async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
+ const connector = (await getConnector(datasource)) as DatasourcePlus
+ const externalSchema = await connector.buildSchema(
+ datasource._id!,
+ datasource.entities!
+ )
+ return externalSchema
+}
+
+export async function getConnector(
+ datasource: Datasource
+): Promise<IntegrationBase> {
+ const Connector = await getIntegration(datasource.source)
+ // can't enrich if it doesn't have an ID yet
+ if (datasource._id) {
+ datasource = await datasources.enrich(datasource)
+ }
+ // Connect to the DB and build the schema
+ return new Connector(datasource.config)
+}
+
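+// Merges an incoming datasource payload with its stored counterpart (when one
+// already exists) and returns the enriched result.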
+export async function getAndMergeDatasource(datasource: Datasource) {
+ if (datasource._id) {
+ const existingDatasource = await datasources.get(datasource._id)
+
+ datasource = datasources.mergeConfigs(datasource, existingDatasource)
+ }
+ return await datasources.enrich(datasource)
+}
diff --git a/yarn.lock b/yarn.lock
index 38a09daf19..7359a2b226 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -18312,11 +18312,6 @@ regexparam@2.0.1:
resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.1.tgz#c912f5dae371e3798100b3c9ce22b7414d0889fa"
integrity sha512-zRgSaYemnNYxUv+/5SeoHI0eJIgTL/A2pUtXUPLHQxUldagouJ9p+K6IbIZ/JiQuCEv2E2B1O11SjVQy3aMCkw==
-regexparam@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-1.3.0.tgz#2fe42c93e32a40eff6235d635e0ffa344b92965f"
- integrity sha512-6IQpFBv6e5vz1QAqI+V4k8P2e/3gRrqfCJ9FI+O1FLQTO+Uz6RXZEZOPmTJ6hlGj7gkERzY5BRCv09whKP96/g==
-
regexpu-core@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.1.tgz#66900860f88def39a5cb79ebd9490e84f17bcdfb"