Merge branch 'develop' into chore/refactor_pro_as_package

Adria Navarro committed 2023-06-23 10:43:57 +01:00
commit 2f548d834f
32 changed files with 1886 additions and 6833 deletions

View File

@@ -2,7 +2,9 @@ const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")
const IMAGES = {
const IS_SINGLE_IMAGE = process.env.SINGLE_IMAGE
let IMAGES = {
worker: "budibase/worker",
apps: "budibase/apps",
proxy: "budibase/proxy",
@@ -10,7 +12,13 @@ const IMAGES = {
couch: "ibmcom/couchdb3",
curl: "curlimages/curl",
redis: "redis",
watchtower: "containrrr/watchtower"
watchtower: "containrrr/watchtower",
}
if (IS_SINGLE_IMAGE) {
IMAGES = {
budibase: "budibase/budibase"
}
}
const FILES = {
@@ -39,11 +47,10 @@ for (let image in IMAGES) {
}
// copy config files
copyFile(FILES.COMPOSE)
if (!IS_SINGLE_IMAGE) {
copyFile(FILES.COMPOSE)
}
copyFile(FILES.ENV)
// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
// clean up
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
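
Note: the whole change above hangs off the new SINGLE_IMAGE environment variable: when it is set, only budibase/budibase is pulled and the compose file is skipped, since a single self-contained image needs no orchestration config. The two entry points, as wired up in package.json later in this commit:

node hosting/scripts/airgapped/airgappedDockerBuild # multi-image bundle, includes the compose file
SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild # single-image bundle, compose file skipped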

View File

@@ -37,6 +37,14 @@ COPY --from=build /worker /worker
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
&& curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \

View File

@@ -1,5 +1,5 @@
{
"version": "2.7.25-alpha.8",
"version": "2.7.26-alpha.2",
"npmClient": "yarn",
"packages": ["packages/*"],
"useNx": true,

View File

@@ -66,6 +66,7 @@
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",

View File

@@ -16,7 +16,7 @@
let password = null
const validation = createValidationStore()
validation.addValidatorType("password", "password", true)
validation.addValidatorType("password", "password", true, { minLength: 8 })
$: validation.observe("password", password)
const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }

View File

@@ -21,7 +21,7 @@ export const createValidationStore = () => {
validator[propertyName] = propertyValidator
}
const addValidatorType = (propertyName, type, required) => {
const addValidatorType = (propertyName, type, required, options) => {
if (!type || !propertyName) {
return
}
@@ -45,11 +45,8 @@ export const createValidationStore = () => {
propertyValidator = propertyValidator.required()
}
// We want to do this after the possible required validation, to prioritise the required error
switch (type) {
case "password":
propertyValidator = propertyValidator.min(8)
break
if (options?.minLength) {
propertyValidator = propertyValidator.min(options.minLength)
}
validator[propertyName] = propertyValidator
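
Note: the options argument generalises the previously hard-coded 8-character password rule, so each caller now declares its own minimum length. A usage sketch against the same store API (the second field name is hypothetical):

const validation = createValidationStore()
validation.addValidatorType("password", "password", true, { minLength: 8 })
validation.addValidatorType("adminPassword", "password", true, { minLength: 12 }) // hypothetical field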

View File

@@ -1,6 +1,6 @@
<script>
import { notifications } from "@budibase/bbui"
import { apps, templates, licensing, groups } from "stores/portal"
import { admin, apps, templates, licensing, groups } from "stores/portal"
import { onMount } from "svelte"
import { redirect } from "@roxi/routify"
@@ -9,14 +9,18 @@
onMount(async () => {
try {
// Always load latest
await Promise.all([
licensing.init(),
templates.load(),
groups.actions.init(),
])
const promises = [licensing.init()]
if ($templates?.length === 0) {
if (!$admin.offlineMode) {
promises.push(templates.load())
}
promises.push(groups.actions.init())
// Always load latest
await Promise.all(promises)
if (!$admin.offlineMode && $templates?.length === 0) {
notifications.error("There was a problem loading quick start templates")
}

View File

@@ -247,7 +247,7 @@
>
Create new app
</Button>
{#if $apps?.length > 0}
{#if $apps?.length > 0 && !$admin.offlineMode}
<Button
size="M"
secondary

View File

@@ -46,6 +46,7 @@ export function createAdminStore() {
store.accountPortalUrl = environment.accountPortalUrl
store.isDev = environment.isDev
store.baseUrl = environment.baseUrl
store.offlineMode = environment.offlineMode
return store
})
}

View File

@@ -26,11 +26,21 @@ RUN apt-get install unzip libaio1
COPY scripts/integrations/oracle/ scripts/integrations/oracle/
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh
# Install postgres client for pg_dump utils
RUN apt update && apt upgrade -y \
&& apt install software-properties-common apt-transport-https curl gpg -y \
&& curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https curl gpg -y
COPY package.json .
COPY dist/yarn.lock .
RUN yarn install --production=true
# Remove unneeded data from file system to reduce image size
RUN yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
RUN yarn install --production=true \
# Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
COPY dist/ dist/

View File

@@ -441,3 +441,18 @@ export async function query(ctx: UserCtx) {
ctx.throw(400, err)
}
}
export async function getExternalSchema(ctx: UserCtx) {
const { datasource } = ctx.request.body
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = await getConnector(enrichedDatasource)
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")
}
const response = await connector.getExternalSchema()
ctx.body = {
schema: response,
}
}

View File

@@ -66,5 +66,10 @@ router
authorized(permissions.BUILDER),
datasourceController.destroy
)
.get(
"/api/datasources/:datasourceId/schema/external",
authorized(permissions.BUILDER),
datasourceController.getExternalSchema
)
export default router
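
Note: a usage sketch for the new endpoint, which requires builder authorization; the datasource id is hypothetical and session handling is omitted, while the response shape comes from the controller above:

const res = await fetch("/api/datasources/datasource_abc123/schema/external")
const { schema } = await res.json() // the controller responds with { schema: string }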

View File

@@ -81,6 +81,7 @@ const environment = {
SELF_HOSTED: process.env.SELF_HOSTED,
HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
OFFLINE_MODE: process.env.OFFLINE_MODE,
// old
CLIENT_ID: process.env.CLIENT_ID,
_set(key: string, value: any) {

View File

@@ -13,7 +13,7 @@ import {
Row,
SearchFilters,
SortJson,
Table,
ExternalTable,
TableRequest,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
@@ -139,7 +139,7 @@ const SCHEMA: Integration = {
class GoogleSheetsIntegration implements DatasourcePlus {
private readonly config: GoogleSheetsConfig
private client: GoogleSpreadsheet
public tables: Record<string, Table> = {}
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: GoogleSheetsConfig) {
@@ -253,12 +253,18 @@ class GoogleSheetsIntegration implements DatasourcePlus {
return sheets.map(s => s.title)
}
getTableSchema(title: string, headerValues: string[], id?: string) {
getTableSchema(
title: string,
headerValues: string[],
datasourceId: string,
id?: string
) {
// base table
const table: Table = {
const table: ExternalTable = {
name: title,
primary: [GOOGLE_SHEETS_PRIMARY_KEY],
schema: {},
sourceId: datasourceId,
}
if (id) {
table._id = id
@@ -273,20 +279,28 @@ class GoogleSheetsIntegration implements DatasourcePlus {
return table
}
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
// not fully configured yet
if (!this.config.auth) {
return
}
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, Table> = {}
const tables: Record<string, ExternalTable> = {}
await utils.parallelForeach(
sheets,
async sheet => {
// must fetch rows to determine schema
await sheet.getRows({ limit: 0, offset: 0 })
await sheet.getRows()
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
sheet.title,
sheet.headerValues,
datasourceId,
id
)
},

View File

@@ -2,7 +2,7 @@ import {
DatasourceFieldType,
Integration,
Operation,
Table,
ExternalTable,
TableSchema,
QueryJson,
QueryType,
@@ -43,6 +43,7 @@ const SCHEMA: Integration = {
features: {
[DatasourceFeature.CONNECTION_CHECKING]: true,
[DatasourceFeature.FETCH_TABLE_NAMES]: true,
[DatasourceFeature.EXPORT_SCHEMA]: true,
},
datasource: {
user: {
@@ -97,7 +98,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
private index: number = 0
private readonly pool: any
private client: any
public tables: Record<string, Table> = {}
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
MASTER_TABLES = [
@@ -220,7 +221,10 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
* @param {*} datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
await this.connect()
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
if (tableInfo == null || !Array.isArray(tableInfo)) {
@@ -233,7 +237,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, Table> = {}
const tables: Record<string, ExternalTable> = {}
for (let tableName of tableNames) {
// get the column definition (type)
const definition = await this.runSQL(this.getDefinitionSQL(tableName))
@@ -276,6 +280,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
}
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
sourceId: datasourceId,
primary: primaryKeys,
name: tableName,
schema,
@@ -336,6 +341,81 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
result.recordset ? result.recordset : [{ [operation]: true }]
return this.queryWithReturning(json, queryFn, processFn)
}
async getExternalSchema() {
// Query to retrieve table schema
const query = `
SELECT
t.name AS TableName,
c.name AS ColumnName,
ty.name AS DataType,
c.max_length AS MaxLength,
c.is_nullable AS IsNullable,
c.is_identity AS IsIdentity
FROM
sys.tables t
INNER JOIN sys.columns c ON t.object_id = c.object_id
INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
WHERE
t.is_ms_shipped = 0
ORDER BY
t.name, c.column_id
`
await this.connect()
const result = await this.internalQuery({
sql: query,
})
const scriptParts = []
const tables: any = {}
for (const row of result.recordset) {
const {
TableName,
ColumnName,
DataType,
MaxLength,
IsNullable,
IsIdentity,
} = row
if (!tables[TableName]) {
tables[TableName] = {
columns: [],
}
}
const columnDefinition = `${ColumnName} ${DataType}${
MaxLength ? `(${MaxLength})` : ""
}${IsNullable ? " NULL" : " NOT NULL"}`
tables[TableName].columns.push(columnDefinition)
if (IsIdentity) {
tables[TableName].identityColumn = ColumnName
}
}
// Generate SQL statements for table creation
for (const tableName in tables) {
const { columns, identityColumn } = tables[tableName]
let createTableStatement = `CREATE TABLE [${tableName}] (\n`
createTableStatement += columns.join(",\n")
if (identityColumn) {
createTableStatement += `,\n CONSTRAINT [PK_${tableName}] PRIMARY KEY (${identityColumn})`
}
createTableStatement += "\n);"
scriptParts.push(createTableStatement)
}
const schema = scriptParts.join("\n")
return schema
}
}
export default {
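
Note: unlike the MySQL and PostgreSQL exporters below, this integration reconstructs the DDL by hand from sys.tables/sys.columns instead of shelling out to a dump tool. Because sys.columns.max_length is a byte count rather than the declared width, the emitted types are approximate; the qa-core snapshots further down show int(4) and decimal(9) for columns declared INT and DECIMAL(10, 2).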

View File

@@ -4,7 +4,7 @@ import {
QueryType,
QueryJson,
SqlQuery,
Table,
ExternalTable,
TableSchema,
DatasourcePlus,
DatasourceFeature,
@@ -39,6 +39,7 @@ const SCHEMA: Integration = {
features: {
[DatasourceFeature.CONNECTION_CHECKING]: true,
[DatasourceFeature.FETCH_TABLE_NAMES]: true,
[DatasourceFeature.EXPORT_SCHEMA]: true,
},
datasource: {
host: {
@@ -123,7 +124,7 @@ export function bindingTypeCoerce(bindings: any[]) {
class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig
private client?: mysql.Connection
public tables: Record<string, Table> = {}
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) {
@@ -220,8 +221,11 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
}
}
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
const tables: { [key: string]: Table } = {}
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
const tables: { [key: string]: ExternalTable } = {}
await this.connect()
try {
@@ -259,6 +263,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
sourceId: datasourceId,
primary: primaryKeys,
name: tableName,
schema,
@@ -324,6 +329,36 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
await this.disconnect()
}
}
async getExternalSchema() {
try {
const [databaseResult] = await this.internalQuery({
sql: `SHOW CREATE DATABASE ${this.config.database}`,
})
let dumpContent = [databaseResult["Create Database"]]
const tablesResult = await this.internalQuery({
sql: `SHOW TABLES`,
})
for (const row of tablesResult) {
const tableName = row[`Tables_in_${this.config.database}`]
const createTableResults = await this.internalQuery({
sql: `SHOW CREATE TABLE \`${tableName}\``,
})
const createTableStatement = createTableResults[0]["Create Table"]
dumpContent.push(createTableStatement)
}
const schema = dumpContent.join("\n")
return schema
} finally {
await this.disconnect()
}
}
}
export default {
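
Note: the MySQL export is assembled from SHOW CREATE DATABASE plus one SHOW CREATE TABLE per table, joined with newlines. The statements carry no trailing semicolons, so the result documents the schema rather than being directly replayable as a script; the new qa-core tests below snapshot exactly this output.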

View File

@@ -5,7 +5,7 @@ import {
QueryJson,
QueryType,
SqlQuery,
Table,
ExternalTable,
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
@@ -108,7 +108,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig
private index: number = 1
public tables: Record<string, Table> = {}
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
private readonly COLUMNS_SQL = `
@@ -262,13 +262,16 @@ class OracleIntegration extends Sql implements DatasourcePlus {
* @param {*} datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
const oracleTables = this.mapColumns(columnsResponse)
const tables: { [key: string]: Table } = {}
const tables: { [key: string]: ExternalTable } = {}
// iterate each table
Object.values(oracleTables).forEach(oracleTable => {
@@ -279,6 +282,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
primary: [],
name: oracleTable.name,
schema: {},
sourceId: datasourceId,
}
tables[oracleTable.name] = table
}

View File

@@ -1,10 +1,11 @@
import fs from "fs"
import {
Integration,
DatasourceFieldType,
QueryType,
QueryJson,
SqlQuery,
Table,
ExternalTable,
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
@@ -21,6 +22,8 @@ import { PostgresColumn } from "./base/types"
import { escapeDangerousCharacters } from "../utilities"
import { Client, ClientConfig, types } from "pg"
import { exec } from "child_process"
import { storeTempFile } from "../utilities/fileSystem"
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
@@ -57,6 +60,7 @@ const SCHEMA: Integration = {
features: {
[DatasourceFeature.CONNECTION_CHECKING]: true,
[DatasourceFeature.FETCH_TABLE_NAMES]: true,
[DatasourceFeature.EXPORT_SCHEMA]: true,
},
datasource: {
host: {
@@ -139,7 +143,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
private readonly config: PostgresConfig
private index: number = 1
private open: boolean
public tables: Record<string, Table> = {}
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
COLUMNS_SQL!: string
@@ -178,6 +182,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const response: ConnectionInfo = {
connected: false,
}
try {
await this.openConnection()
response.connected = true
@@ -256,7 +261,10 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
* @param {*} datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
try {
@@ -282,7 +290,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
const tables: { [key: string]: Table } = {}
const tables: { [key: string]: ExternalTable } = {}
for (let column of columnsResponse.rows) {
const tableName: string = column.table_name
@@ -295,6 +303,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
primary: tableKeys[tableName] || [],
name: tableName,
schema: {},
sourceId: datasourceId,
}
}
@@ -381,6 +390,59 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
async getExternalSchema() {
const dumpCommandParts = [
`user=${this.config.user}`,
`host=${this.config.host}`,
`port=${this.config.port}`,
`dbname=${this.config.database}`,
]
if (this.config.ssl) {
dumpCommandParts.push("sslmode=verify-ca")
if (this.config.ca) {
const caFilePath = storeTempFile(this.config.ca)
fs.chmodSync(caFilePath, "0600")
dumpCommandParts.push(`sslrootcert=${caFilePath}`)
}
if (this.config.clientCert) {
const clientCertFilePath = storeTempFile(this.config.clientCert)
fs.chmodSync(clientCertFilePath, "0600")
dumpCommandParts.push(`sslcert=${clientCertFilePath}`)
}
if (this.config.clientKey) {
const clientKeyFilePath = storeTempFile(this.config.clientKey)
fs.chmodSync(clientKeyFilePath, "0600")
dumpCommandParts.push(`sslkey=${clientKeyFilePath}`)
}
}
const dumpCommand = `PGPASSWORD="${
this.config.password
}" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`
return new Promise<string>((res, rej) => {
exec(dumpCommand, (error, stdout, stderr) => {
if (error) {
console.error(`Error generating dump: ${error.message}`)
rej(error.message)
return
}
if (stderr) {
console.error(`pg_dump error: ${stderr}`)
rej(stderr)
return
}
res(stdout)
console.log("SQL dump generated successfully!")
})
})
}
}
export default {
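
Note: the keyword=value parts joined above form a libpq conninfo string, which is why the SSL material can be appended as extra keywords once written to temp files. With SSL disabled, the assembled command is roughly (credentials hypothetical):

PGPASSWORD="secret" pg_dump --schema-only "user=postgres host=localhost port=5432 dbname=postgres"

This relies on the pg_dump binary being on the image, which is what the postgresql-client-15 install steps added to the two Dockerfiles earlier in this commit provide.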

View File

@@ -81,7 +81,9 @@ export const streamFile = (path: string) => {
* @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file.
*/
export const storeTempFile = (fileContents: any) => {
export const storeTempFile = (
fileContents: string | NodeJS.ArrayBufferView
) => {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents)
return path

View File

@@ -9,7 +9,7 @@ import {
import env from "../environment"
import { groups } from "@budibase/pro"
import { UserCtx, ContextUser, User, UserGroup } from "@budibase/types"
import { global } from "yargs"
import { cloneDeep } from "lodash"
export function updateAppRole(
user: ContextUser,
@@ -65,16 +65,20 @@ export async function processUser(
user: ContextUser,
opts: { appId?: string; groups?: UserGroup[] } = {}
) {
if (user) {
delete user.password
let clonedUser = cloneDeep(user)
if (clonedUser) {
delete clonedUser.password
}
const appId = opts.appId || context.getAppId()
user = updateAppRole(user, { appId })
if (!user.roleId && user?.userGroups?.length) {
user = await checkGroupRoles(user, { appId, groups: opts?.groups })
clonedUser = updateAppRole(clonedUser, { appId })
if (!clonedUser.roleId && clonedUser?.userGroups?.length) {
clonedUser = await checkGroupRoles(clonedUser, {
appId,
groups: opts?.groups,
})
}
return user
return clonedUser
}
export async function getCachedSelf(ctx: UserCtx, appId: string) {
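
Note: the cloneDeep fixes an aliasing bug: processUser can be handed a user object that is shared (for example one held in a cache), and deleting the password mutated that shared object for every later reader. A minimal sketch of the hazard, with hypothetical names:

const cached = await userCache.get(userId) // the same object is returned to every caller
delete cached.password // before this change: strips the password from the cached object itself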

View File

@@ -1,6 +1,5 @@
// @ts-nocheck
import { FieldTypes } from "../../constants"
import { logging } from "@budibase/backend-core"
const parseArrayString = value => {
if (typeof value === "string") {
@@ -12,7 +11,7 @@ const parseArrayString = value => {
result = JSON.parse(value.replace(/'/g, '"'))
return result
} catch (e) {
logging.logWarn("Could not parse row value", e)
return value
}
}
return value

View File

@@ -82,6 +82,10 @@ export interface Table extends Document {
rowHeight?: number
}
export interface ExternalTable extends Table {
sourceId: string
}
export interface TableRequest extends Table {
_rename?: RenameColumn
created?: boolean
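
Note: ExternalTable simply tightens Table so that sourceId is mandatory, which is what the integrations above now populate while building schemas. A construction sketch using only the fields those integrations set:

const table: ExternalTable = {
  _id: buildExternalTableId(datasourceId, "users"), // helper already used by the integrations
  name: "users",
  primary: ["id"],
  schema: {},
  sourceId: datasourceId, // required here, optional on a plain Table
}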

View File

@@ -76,6 +76,7 @@ export enum FilterType {
export enum DatasourceFeature {
CONNECTION_CHECKING = "connection",
FETCH_TABLE_NAMES = "fetch_table_names",
EXPORT_SCHEMA = "export_schema",
}
export interface StepDefinition {
@@ -140,6 +141,7 @@ export interface IntegrationBase {
update?(query: any): Promise<any[] | any>
delete?(query: any): Promise<any[] | any>
testConnection?(): Promise<ConnectionInfo>
getExternalSchema?(): Promise<string>
}
export interface DatasourcePlus extends IntegrationBase {

View File

@@ -1,9 +1,10 @@
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
import env from "../../../environment"
export const fetch = async (ctx: BBContext) => {
export const fetch = async (ctx: Ctx) => {
ctx.body = {
multiTenancy: !!env.MULTI_TENANCY,
offlineMode: !!env.OFFLINE_MODE,
cloud: !env.SELF_HOSTED,
accountPortalUrl: env.ACCOUNT_PORTAL_URL,
disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL,
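
Note: this closes the loop on the new flag: OFFLINE_MODE is read from the process environment (added to both the server and worker environment files in this commit), surfaced to the frontend through this /api/system/environment response, stored on the admin portal store, and finally used by the portal pages above to hide templates and template-driven UI.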

View File

@@ -24,6 +24,7 @@ describe("/api/system/environment", () => {
isDev: false,
multiTenancy: true,
baseUrl: "http://localhost:10000",
offlineMode: false,
})
})
})

View File

@@ -61,6 +61,7 @@ const environment = {
CHECKLIST_CACHE_TTL: parseIntSafe(process.env.CHECKLIST_CACHE_TTL) || 3600,
SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
ENCRYPTED_TEST_PUBLIC_API_KEY: process.env.ENCRYPTED_TEST_PUBLIC_API_KEY,
OFFLINE_MODE: process.env.OFFLINE_MODE,
/**
* Mock the email service in use - links to ethereal hosted emails are logged instead.
*/

qa-core/package-lock.json (generated, 5669 changes)

File diff suppressed because it is too large

View File

@@ -0,0 +1,112 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"
jest.unmock("mssql")
describe("getExternalSchema", () => {
describe("postgres", () => {
let config: any
beforeAll(async () => {
const password = "Str0Ng_p@ssW0rd!"
const container = await new GenericContainer(
"mcr.microsoft.com/mssql/server"
)
.withExposedPorts(1433)
.withEnv("ACCEPT_EULA", "Y")
.withEnv("MSSQL_SA_PASSWORD", password)
.withEnv("MSSQL_PID", "Developer")
.withWaitStrategy(Wait.forHealthCheck())
.withHealthCheck({
test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
interval: new Duration(1000, TemporalUnit.MILLISECONDS),
timeout: new Duration(3, TemporalUnit.SECONDS),
retries: 20,
startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
})
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(1433)
config = {
user: "sa",
password,
server: host,
port: port,
database: "master",
schema: "dbo",
}
})
it("can export an empty database", async () => {
const integration = new mssql.integration(config)
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`""`)
})
it("can export a database with tables", async () => {
const integration = new mssql.integration(config)
await integration.connect()
await integration.internalQuery({
sql: `
CREATE TABLE users (
id INT IDENTITY(1,1) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
role VARCHAR(15) NOT NULL
);
CREATE TABLE products (
id INT IDENTITY(1,1) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
price DECIMAL(10, 2) NOT NULL
);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE TABLE [products] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
price decimal(9) NOT NULL,
CONSTRAINT [PK_products] PRIMARY KEY (id)
);
CREATE TABLE [users] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
role varchar(15) NOT NULL,
CONSTRAINT [PK_users] PRIMARY KEY (id)
);"
`)
})
it("does not export a data", async () => {
const integration = new mssql.integration(config)
await integration.connect()
await integration.internalQuery({
sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE TABLE [products] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
price decimal(9) NOT NULL,
CONSTRAINT [PK_products] PRIMARY KEY (id)
);
CREATE TABLE [users] (
id int(4) NOT NULL,
name varchar(100) NOT NULL,
role varchar(15) NOT NULL,
CONSTRAINT [PK_users] PRIMARY KEY (id)
);"
`)
})
})
})

View File

@@ -0,0 +1,108 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"
jest.unmock("mysql2/promise")
describe("datasource validators", () => {
describe("mysql", () => {
let config: any
beforeAll(async () => {
const container = await new GenericContainer("mysql")
.withExposedPorts(3306)
.withEnv("MYSQL_ROOT_PASSWORD", "admin")
.withEnv("MYSQL_DATABASE", "db")
.withEnv("MYSQL_USER", "user")
.withEnv("MYSQL_PASSWORD", "password")
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(3306)
config = {
host,
port,
user: "user",
database: "db",
password: "password",
rejectUnauthorized: true,
}
})
it("can export an empty database", async () => {
const integration = new mysql.integration(config)
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(
`"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
)
})
it("can export a database with tables", async () => {
const integration = new mysql.integration(config)
await integration.internalQuery({
sql: `
CREATE TABLE users (
id INT AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
role VARCHAR(15) NOT NULL,
PRIMARY KEY (id)
);
CREATE TABLE products (
id INT AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
price DECIMAL,
PRIMARY KEY (id)
);
`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
CREATE TABLE \`products\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`price\` decimal(10,0) DEFAULT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
CREATE TABLE \`users\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`role\` varchar(15) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
`)
})
it("does not export a data", async () => {
const integration = new mysql.integration(config)
await integration.internalQuery({
sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
})
await integration.internalQuery({
sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
})
const result = await integration.getExternalSchema()
expect(result).toMatchInlineSnapshot(`
"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
CREATE TABLE \`products\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`price\` decimal(10,0) DEFAULT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
CREATE TABLE \`users\` (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(100) NOT NULL,
\`role\` varchar(15) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
`)
})
})
})

View File

@@ -0,0 +1,377 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"
jest.unmock("pg")
describe("getExternalSchema", () => {
describe("postgres", () => {
let config: any
// Remove versioning from the outputs to prevent failures when running different pg_dump versions
function stripResultsVersions(sql: string) {
const result = sql
.replace(/\n[^\n]+Dumped from database version[^\n]+\n/, "")
.replace(/\n[^\n]+Dumped by pg_dump version[^\n]+\n/, "")
.toString()
return result
}
beforeAll(async () => {
// This is left without a tag on purpose, so that we are notified if a new version introduces a breaking change
const container = await new GenericContainer("postgres")
.withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password")
.start()
const host = container.getContainerIpAddress()
const port = container.getMappedPort(5432)
config = {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
}
})
it("can export an empty database", async () => {
const integration = new postgres.integration(config)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- PostgreSQL database dump complete
--
"
`)
})
it("can export a database with tables", async () => {
const integration = new postgres.integration(config)
await integration.internalQuery(
{
sql: `
CREATE TABLE "users" (
"id" SERIAL,
"name" VARCHAR(100) NOT NULL,
"role" VARCHAR(15) NOT NULL,
PRIMARY KEY ("id")
);
CREATE TABLE "products" (
"id" SERIAL,
"name" VARCHAR(100) NOT NULL,
"price" DECIMAL NOT NULL,
"owner" INTEGER NULL,
PRIMARY KEY ("id")
);
ALTER TABLE "products" ADD CONSTRAINT "fk_owner" FOREIGN KEY ("owner") REFERENCES "users" ("id");`,
},
false
)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: products; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.products (
id integer NOT NULL,
name character varying(100) NOT NULL,
price numeric NOT NULL,
owner integer
);
ALTER TABLE public.products OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.products_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.products_id_seq OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;
--
-- Name: users; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.users (
id integer NOT NULL,
name character varying(100) NOT NULL,
role character varying(15) NOT NULL
);
ALTER TABLE public.users OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.users_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.users_id_seq OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;
--
-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);
--
-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);
--
-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT products_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
--
-- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);
--
-- PostgreSQL database dump complete
--
"
`)
})
it("does not export a data", async () => {
const integration = new postgres.integration(config)
await integration.internalQuery(
{
sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
},
false
)
const result = await integration.getExternalSchema()
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
"--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: products; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.products (
id integer NOT NULL,
name character varying(100) NOT NULL,
price numeric NOT NULL,
owner integer
);
ALTER TABLE public.products OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.products_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.products_id_seq OWNER TO postgres;
--
-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;
--
-- Name: users; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.users (
id integer NOT NULL,
name character varying(100) NOT NULL,
role character varying(15) NOT NULL
);
ALTER TABLE public.users OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.users_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.users_id_seq OWNER TO postgres;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--
ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;
--
-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);
--
-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);
--
-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT products_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
--
-- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY public.products
ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);
--
-- PostgreSQL database dump complete
--
"
`)
})
})
})

View File

@@ -1,19 +1,23 @@
import { generator } from "../../shared"
import { CreateAppRequest } from "../../types"
function uniqueWord() {
return generator.word() + generator.hash()
}
export const generateApp = (
overrides: Partial<CreateAppRequest> = {}
): CreateAppRequest => ({
name: generator.word() + generator.hash(),
url: `/${generator.word() + generator.hash()}`,
name: uniqueWord(),
url: `/${uniqueWord()}`,
...overrides,
})
// Applications type doesn't work here, safe to add useTemplate parameter?
export const appFromTemplate = (): CreateAppRequest => {
return {
name: generator.word(),
url: `/${generator.word()}`,
name: uniqueWord(),
url: `/${uniqueWord()}`,
// @ts-ignore
useTemplate: "true",
templateName: "Near Miss Register",

File diff suppressed because it is too large