Refactor DatasourcePlus's buildSchema to return a value, rather than rely on member variables.

Sam Rose 2023-10-12 16:27:18 +01:00
parent 6e1962e6ea
commit 85b3af2971
9 changed files with 56 additions and 138 deletions
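To see the shape of the refactor at a glance, here is a minimal sketch (not part of the commit; the Before/After interface names are only for the sketch, and unrelated members plus the imports of Table and IntegrationBase from @budibase/types are omitted) of the DatasourcePlus contract before and after:

// Before: callers triggered the build, then read the results back off the instance.
interface DatasourcePlusBefore extends IntegrationBase {
  tables: Record<string, Table>
  schemaErrors: Record<string, string>
  buildSchema(datasourceId: string, entities: Record<string, Table>): any
}

// After: buildSchema resolves to the schema it built; no shared mutable state.
interface DatasourcePlusAfter extends IntegrationBase {
  buildSchema(
    datasourceId: string,
    entities: Record<string, Table>
  ): Promise<Record<string, Table>>
}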

View File

@@ -25,7 +25,6 @@ export function createDatasourcesStore() {
const store = writable({
list: [],
selectedDatasourceId: null,
schemaError: null,
})
const derivedStore = derived([store, tables], ([$store, $tables]) => {
@@ -75,18 +74,13 @@ export function createDatasourcesStore() {
store.update(state => ({
...state,
selectedDatasourceId: id,
// Remove any possible schema error
schemaError: null,
}))
}
const updateDatasource = response => {
const { datasource, error } = response
if (error) {
store.update(state => ({
...state,
schemaError: error,
}))
store.update(state => ({ ...state }))
}
replaceDatasource(datasource._id, datasource)
select(datasource._id)
@@ -171,12 +165,6 @@ export function createDatasourcesStore() {
replaceDatasource(datasource._id, null)
}
const removeSchemaError = () => {
store.update(state => {
return { ...state, schemaError: null }
})
}
const replaceDatasource = (datasourceId, datasource) => {
if (!datasourceId) {
return
@@ -229,7 +217,6 @@ export function createDatasourcesStore() {
create,
update,
delete: deleteDatasource,
removeSchemaError,
replaceDatasource,
getTableNames,
}

View File

@@ -18,6 +18,7 @@ import {
FetchDatasourceInfoResponse,
IntegrationBase,
SourceName,
Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@@ -71,46 +72,18 @@ async function getAndMergeDatasource(datasource: Datasource) {
return await sdk.datasources.enrich(enrichedDatasource)
}
async function buildSchemaHelper(datasource: Datasource) {
async function buildSchemaHelper(
datasource: Datasource
): Promise<Record<string, Table>> {
const connector = (await getConnector(datasource)) as DatasourcePlus
await connector.buildSchema(datasource._id!, datasource.entities!)
const errors = connector.schemaErrors
let error = null
if (errors && Object.keys(errors).length > 0) {
const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY)
if (noKey.length) {
error = updateError(
error,
"No primary key constraint found for the following:",
noKey
)
}
const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN)
if (invalidCol.length) {
const invalidCols = Object.values(InvalidColumns).join(", ")
error = updateError(
error,
`Cannot use columns ${invalidCols} found in following:`,
invalidCol
)
}
const noHeader = getErrorTables(errors, BuildSchemaErrors.NO_HEADER_ROW)
if (noHeader.length) {
error = updateError(
error,
`No header row found in the following:`,
noHeader
)
}
}
return { tables: connector.tables, error }
return await connector.buildSchema(datasource._id!, datasource.entities!)
}
async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
let { tables, error } = await buildSchemaHelper(datasource)
async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<{ tables: Record<string, Table> }> {
let tables = await buildSchemaHelper(datasource)
let finalTables = tables
if (filter) {
finalTables = {}
@@ -122,7 +95,7 @@ async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
}
}
}
return { tables: finalTables, error }
return { tables: finalTables }
}
export async function fetch(ctx: UserCtx) {
@@ -166,7 +139,7 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
const { tables } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
@@ -177,9 +150,6 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
const res: any = { datasource: cleanedDatasource }
if (error) {
res.error = error
}
ctx.body = res
}
@@ -308,13 +278,8 @@ export async function save(
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}
let schemaError = null
if (fetchSchema) {
const { tables, error } = await buildFilteredSchema(
datasource,
tablesFilter
)
schemaError = error
const { tables } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
}
@@ -340,9 +305,6 @@ export async function save(
const response: CreateDatasourceResponse = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
}
if (schemaError) {
response.error = schemaError
}
ctx.body = response
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
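Restated in isolation as a sketch (the real code is in buildSchemaHelper above, and the surrounding getConnector/datasource context is assumed), the controller now awaits the schema instead of reading it back off the connector:

const connector = (await getConnector(datasource)) as DatasourcePlus

// Before: the call populated state on the connector, which was read afterwards:
//   await connector.buildSchema(datasource._id!, datasource.entities!)
//   const tables = connector.tables
//   const errors = connector.schemaErrors
// After: the built schema is simply the resolved value.
const tables: Record<string, Table> = await connector.buildSchema(
  datasource._id!,
  datasource.entities!
)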

View File

@@ -14,6 +14,7 @@ import {
SortJson,
ExternalTable,
TableRequest,
Table,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { buildExternalTableId, finaliseExternalTables } from "./utils"
@@ -138,8 +139,6 @@ const SCHEMA: Integration = {
class GoogleSheetsIntegration implements DatasourcePlus {
private readonly config: GoogleSheetsConfig
private client: GoogleSpreadsheet
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: GoogleSheetsConfig) {
this.config = config
@@ -280,11 +279,11 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
entities: Record<string, Table>
): Promise<Record<string, Table>> {
// not fully configured yet
if (!this.config.auth) {
return
return {}
}
await this.connect()
const sheets = this.client.sheetsByIndex
@@ -300,7 +299,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
// We expect this to always be an Error so if it's not, rethrow it to
// make sure we don't fail quietly.
if (!(err instanceof Error)) {
throw err;
throw err
}
if (err.message.startsWith("No values in the header row")) {
@@ -308,7 +307,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
} else {
// If we get an error we don't have a BuildSchemaErrors enum variant
// for, rethrow to avoid failing quietly.
throw err;
throw err
}
return
}
@@ -323,9 +322,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
},
10
)
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = { ...final.errors, ...errors }
return finaliseExternalTables(tables, entities)
}
async query(json: QueryJson) {

View File

@@ -11,6 +11,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Table,
} from "@budibase/types"
import {
getSqlQuery,
@@ -380,8 +381,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
entities: Record<string, Table>
): Promise<Record<string, Table>> {
await this.connect()
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
if (tableInfo == null || !Array.isArray(tableInfo)) {
@@ -394,7 +395,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, ExternalTable> = {}
const tables: Record<string, Table> = {}
for (let tableName of tableNames) {
// get the column definition (type)
const definition = await this.runSQL(
@@ -445,9 +446,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
schema,
}
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
return finaliseExternalTables(tables, entities)
}
async queryTableNames() {

View File

@@ -10,6 +10,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Table,
} from "@budibase/types"
import {
getSqlQuery,
@@ -278,9 +279,9 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
const tables: { [key: string]: ExternalTable } = {}
entities: Record<string, Table>
): Promise<Record<string, Table>> {
const tables: { [key: string]: Table } = {}
await this.connect()
try {
@@ -328,9 +329,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
} finally {
await this.disconnect()
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
return finaliseExternalTables(tables, entities)
}
async queryTableNames() {

View File

@@ -9,6 +9,7 @@ import {
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
Table,
} from "@budibase/types"
import {
buildExternalTableId,
@@ -264,8 +265,8 @@ class OracleIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
entities: Record<string, Table>
): Promise<Record<string, Table>> {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
@@ -326,9 +327,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
})
})
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
return finaliseExternalTables(tables, entities)
}
async getTableNames() {

View File

@@ -10,6 +10,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Table,
} from "@budibase/types"
import {
getSqlQuery,
@@ -145,8 +146,6 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
private readonly config: PostgresConfig
private index: number = 1
private open: boolean
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
COLUMNS_SQL!: string
@@ -273,8 +272,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
entities: Record<string, Table>
): Promise<Record<string, Table>> {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
try {
@@ -342,9 +341,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
}
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
return finaliseExternalTables(tables, entities)
} catch (err) {
// @ts-ignore
throw new Error(err)

View File

@@ -4,13 +4,10 @@ import {
SearchFilters,
Datasource,
FieldType,
ExternalTable,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import {
BuildSchemaErrors,
InvalidColumns,
NoEmptyFilterStrings,
} from "../constants"
import { NoEmptyFilterStrings } from "../constants"
import { helpers } from "@budibase/shared-core"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@@ -266,9 +263,9 @@ export function shouldCopySpecialColumn(
function copyExistingPropsOver(
tableName: string,
table: Table,
entities: { [key: string]: any },
tableIds: [string]
) {
entities: Record<string, Table>,
tableIds: string[]
): Table {
if (entities && entities[tableName]) {
if (entities[tableName]?.primaryDisplay) {
table.primaryDisplay = entities[tableName].primaryDisplay
@@ -295,42 +292,23 @@ function copyExistingPropsOver(
/**
* Look through the final table definitions to see if anything needs to be
* copied over from the old and if any errors have occurred mark them so
* that the user can be made aware.
* copied over from the old.
* @param tables The list of tables that have been retrieved from the external database.
* @param entities The old list of tables, if there was any to look for definitions in.
*/
export function finaliseExternalTables(
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
const invalidColumns = Object.values(InvalidColumns)
let finalTables: { [key: string]: any } = {}
const errors: { [key: string]: string } = {}
// @ts-ignore
const tableIds: [string] = Object.values(tables).map(table => table._id)
tables: Record<string, Table>,
entities: Record<string, Table>
): Record<string, Table> {
let finalTables: Record<string, Table> = {}
const tableIds = Object.values(tables).map(table => table._id!)
for (let [name, table] of Object.entries(tables)) {
const schemaFields = Object.keys(table.schema)
// make sure every table has a key
if (table.primary == null || table.primary.length === 0) {
errors[name] = BuildSchemaErrors.NO_KEY
continue
} else if (
schemaFields.find(field =>
invalidColumns.includes(field as InvalidColumns)
)
) {
errors[name] = BuildSchemaErrors.INVALID_COLUMN
continue
}
// make sure all previous props have been added back
finalTables[name] = copyExistingPropsOver(name, table, entities, tableIds)
}
// sort the tables by name
finalTables = Object.entries(finalTables)
// sort the tables by name (TODO(samwho): why?)
return Object.entries(finalTables)
.sort(([a], [b]) => a.localeCompare(b))
.reduce((r, [k, v]) => ({ ...r, [k]: v }), {})
return { tables: finalTables, errors }
}
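As a rough usage sketch of the new finaliseExternalTables contract (the stand-in tables, their ids, and the import path are assumptions for the sketch, and most Table fields are elided via casts; this is not code from the repository):

import { Table } from "@budibase/types"
import { finaliseExternalTables } from "./utils" // path assumed for the sketch

// Minimal stand-in tables; real Table objects carry full column metadata.
const users = { _id: "tbl_users", name: "users", schema: {}, primary: ["id"] } as unknown as Table
const accounts = { _id: "tbl_accounts", name: "accounts", schema: {} } as unknown as Table

const discovered: Record<string, Table> = { users, accounts }
// A previously saved definition whose user-chosen props should survive a re-fetch.
const previous: Record<string, Table> = { users: { ...users, primaryDisplay: "email" } }

const finalised = finaliseExternalTables(discovered, previous)
console.log(Object.keys(finalised)) // ["accounts", "users"] - keys sorted by name
console.log(finalised.users.primaryDisplay) // "email" - copied over from `previous`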
/**

View File

@@ -175,13 +175,13 @@ export interface IntegrationBase {
}
export interface DatasourcePlus extends IntegrationBase {
tables: Record<string, Table>
schemaErrors: Record<string, string>
// if the datasource supports the use of bindings directly (to protect against SQL injection)
// this returns the format of the identifier
getBindingIdentifier(): string
getStringConcat(parts: string[]): string
buildSchema(datasourceId: string, entities: Record<string, Table>): any
buildSchema(
datasourceId: string,
entities: Record<string, Table>
): Promise<Record<string, Table>>
getTableNames(): Promise<string[]>
}