Updating datasource controller types. This branched out a little into removing as many anys as possible and allowing RowValue to be correctly returned from the allDocs function of the DB.

mike12345567 2024-02-29 16:28:00 +00:00
parent 78359808bf
commit 50bbbb2e06
15 changed files with 84 additions and 43 deletions
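For context, a minimal sketch (not part of the diff) of how the widened allDocs generic reads from the caller's side. The SomeDatabase interface and listTablesAndDeletions function below are hypothetical stand-ins, and the params shape is simplified from DatabaseQueryOpts; only the Document | RowValue constraint and the AllDocsResponse, RowValue, and Table types come from the code touched in this commit.

import { AllDocsResponse, Document, RowValue, Table } from "@budibase/types"

// Hypothetical stand-in for the Database interface updated in this commit.
interface SomeDatabase {
  allDocs<T extends Document | RowValue>(params: {
    include_docs?: boolean
  }): Promise<AllDocsResponse<T>>
}

async function listTablesAndDeletions(db: SomeDatabase) {
  // With include_docs the rows carry full Table documents in row.doc.
  const tables = await db.allDocs<Table>({ include_docs: true })
  const tableDocs = tables.rows.flatMap(row => (row.doc ? [row.doc] : []))

  // Without include_docs only id/key/value come back; typing the call with
  // RowValue is enough to build _deleted stubs, as the destroy controller does.
  const queries = await db.allDocs<RowValue>({})
  const deletions = queries.rows.map(row => ({
    _id: row.id,
    _rev: row.value.rev,
    _deleted: true,
  }))

  return { tableDocs, deletions }
}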

View File

@@ -11,6 +11,7 @@ import {
   Document,
   isDocument,
   RowResponse,
+  RowValue,
 } from "@budibase/types"
 import { getCouchInfo } from "./connections"
 import { directCouchUrlCall } from "./utils"
@@ -221,7 +222,7 @@
     })
   }

-  async allDocs<T extends Document>(
+  async allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>> {
     return this.performCall(db => {

View File

@@ -1,5 +1,4 @@
 import {
-  DocumentScope,
   DocumentDestroyResponse,
   DocumentInsertResponse,
   DocumentBulkResponse,
@@ -13,6 +12,7 @@ import {
   DatabasePutOpts,
   DatabaseQueryOpts,
   Document,
+  RowValue,
 } from "@budibase/types"
 import tracer from "dd-trace"
 import { Writable } from "stream"
@@ -79,7 +79,7 @@
     })
   }

-  allDocs<T extends Document>(
+  allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>> {
     return tracer.trace("db.allDocs", span => {

View File

@@ -15,10 +15,14 @@ import {
   FieldType,
   RelationshipFieldMetadata,
   SourceName,
+  UpdateDatasourceRequest,
   UpdateDatasourceResponse,
   UserCtx,
   VerifyDatasourceRequest,
   VerifyDatasourceResponse,
+  Table,
+  RowValue,
+  DynamicVariable,
 } from "@budibase/types"
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
@@ -90,8 +94,10 @@ async function invalidateVariables(
   existingDatasource: Datasource,
   updatedDatasource: Datasource
 ) {
-  const existingVariables: any = existingDatasource.config?.dynamicVariables
-  const updatedVariables: any = updatedDatasource.config?.dynamicVariables
+  const existingVariables: DynamicVariable[] =
+    existingDatasource.config?.dynamicVariables
+  const updatedVariables: DynamicVariable[] =
+    updatedDatasource.config?.dynamicVariables
   const toInvalidate = []

   if (!existingVariables) {
@@ -103,9 +109,9 @@
     toInvalidate.push(...existingVariables)
   } else {
     // invaldate changed / removed
-    existingVariables.forEach((existing: any) => {
+    existingVariables.forEach(existing => {
       const unchanged = updatedVariables.find(
-        (updated: any) =>
+        updated =>
           existing.name === updated.name &&
           existing.queryId === updated.queryId &&
           existing.value === updated.value
@@ -118,24 +124,32 @@ async function invalidateVariables(
   await invalidateDynamicVariables(toInvalidate)
 }

-export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
+export async function update(
+  ctx: UserCtx<UpdateDatasourceRequest, UpdateDatasourceResponse>
+) {
   const db = context.getAppDB()
   const datasourceId = ctx.params.datasourceId
   const baseDatasource = await sdk.datasources.get(datasourceId)
-  const auth = baseDatasource.config?.auth
   await invalidateVariables(baseDatasource, ctx.request.body)

   const isBudibaseSource =
     baseDatasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE

-  const dataSourceBody = isBudibaseSource
-    ? { name: ctx.request.body?.name }
+  const dataSourceBody: Datasource = isBudibaseSource
+    ? {
+        name: ctx.request.body?.name,
+        type: dbCore.BUDIBASE_DATASOURCE_TYPE,
+        source: SourceName.BUDIBASE,
+      }
     : ctx.request.body

   let datasource: Datasource = {
     ...baseDatasource,
     ...sdk.datasources.mergeConfigs(dataSourceBody, baseDatasource),
   }
+
+  // this block is specific to GSheets, if no auth set, set it back
+  const auth = baseDatasource.config?.auth
   if (auth && !ctx.request.body.auth) {
     // don't strip auth config from DB
     datasource.config!.auth = auth
@@ -204,7 +218,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
   const db = context.getAppDB()

   // Get all internal tables
-  const internalTables = await db.allDocs(
+  const internalTables = await db.allDocs<Table>(
     getTableParams(null, {
       include_docs: true,
     })
@@ -212,8 +226,8 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {

   // Filter by datasource and return the docs.
   const datasourceTableDocs = internalTables.rows.reduce(
-    (acc: any, table: any) => {
-      if (table.doc.sourceId == datasourceId) {
+    (acc: Table[], table) => {
+      if (table.doc?.sourceId == datasourceId) {
         acc.push(table.doc)
       }
       return acc
@@ -254,9 +268,9 @@ export async function destroy(ctx: UserCtx) {
   if (datasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE) {
     await destroyInternalTablesBySourceId(datasourceId)
   } else {
-    const queries = await db.allDocs(getQueryParams(datasourceId))
+    const queries = await db.allDocs<RowValue>(getQueryParams(datasourceId))
     await db.bulkDocs(
-      queries.rows.map((row: any) => ({
+      queries.rows.map(row => ({
         _id: row.id,
         _rev: row.value.rev,
         _deleted: true,

View File

@@ -1,7 +1,10 @@
 import { getDefinition, getDefinitions } from "../../integrations"
 import { SourceName, UserCtx } from "@budibase/types"

-const DISABLED_EXTERNAL_INTEGRATIONS = [SourceName.AIRTABLE]
+const DISABLED_EXTERNAL_INTEGRATIONS = [
+  SourceName.AIRTABLE,
+  SourceName.BUDIBASE,
+]

 export async function fetch(ctx: UserCtx) {
   const definitions = await getDefinitions()

View File

@@ -1,8 +1,8 @@
 import {
   DEFAULT_BB_DATASOURCE_ID,
-  DEFAULT_INVENTORY_TABLE_ID,
   DEFAULT_EMPLOYEE_TABLE_ID,
   DEFAULT_EXPENSES_TABLE_ID,
+  DEFAULT_INVENTORY_TABLE_ID,
   DEFAULT_JOBS_TABLE_ID,
 } from "../../constants"
 import { importToRows } from "../../api/controllers/table/utils"
@@ -15,19 +15,21 @@ import { expensesImport } from "./expensesImport"
 import { db as dbCore } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
+  Datasource,
   FieldType,
   RelationshipType,
   Row,
+  SourceName,
   Table,
   TableSchema,
   TableSourceType,
 } from "@budibase/types"

-const defaultDatasource = {
+const defaultDatasource: Datasource = {
   _id: DEFAULT_BB_DATASOURCE_ID,
   type: dbCore.BUDIBASE_DATASOURCE_TYPE,
   name: "Sample Data",
-  source: "BUDIBASE",
+  source: SourceName.BUDIBASE,
   config: {},
 }

View File

@@ -1,13 +1,15 @@
 import newid from "./newid"
 import { db as dbCore } from "@budibase/backend-core"
 import {
-  FieldType,
+  DatabaseQueryOpts,
+  Datasource,
   DocumentType,
   FieldSchema,
-  RelationshipFieldMetadata,
-  VirtualDocumentType,
+  FieldType,
   INTERNAL_TABLE_SOURCE_ID,
-  DatabaseQueryOpts,
+  RelationshipFieldMetadata,
+  SourceName,
+  VirtualDocumentType,
 } from "@budibase/types"

 export { DocumentType, VirtualDocumentType } from "@budibase/types"
@@ -20,11 +22,11 @@ export const enum AppStatus {
   DEPLOYED = "published",
 }

-export const BudibaseInternalDB = {
+export const BudibaseInternalDB: Datasource = {
   _id: INTERNAL_TABLE_SOURCE_ID,
   type: dbCore.BUDIBASE_DATASOURCE_TYPE,
   name: "Budibase DB",
-  source: "BUDIBASE",
+  source: SourceName.BUDIBASE,
   config: {},
 }

View File

@@ -37,6 +37,7 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
   [SourceName.REDIS]: redis.schema,
   [SourceName.SNOWFLAKE]: snowflake.schema,
   [SourceName.ORACLE]: undefined,
+  [SourceName.BUDIBASE]: undefined,
 }

 const INTEGRATIONS: Record<SourceName, any> = {
@@ -56,6 +57,7 @@ const INTEGRATIONS: Record<SourceName, any> = {
   [SourceName.REDIS]: redis.integration,
   [SourceName.SNOWFLAKE]: snowflake.integration,
   [SourceName.ORACLE]: undefined,
+  [SourceName.BUDIBASE]: undefined,
 }

 // optionally add oracle integration if the oracle binary can be installed

View File

@@ -99,7 +99,14 @@ export class IsolatedVM implements VM {
   }

   withContext<T>(context: Record<string, any>, executeWithContext: () => T) {
-    this.addToContext(context)
+    this.addToContext({
+      ...context,
+      Snippets: {
+        specialFunction: function (special: string) {
+          return "hello world! " + special
+        },
+      },
+    })

     try {
       return executeWithContext()

View File

@@ -85,7 +85,9 @@ async function getImportableDocuments(db: Database) {
   const docPromises = []
   for (let docType of DocumentTypesToImport) {
     docPromises.push(
-      db.allDocs(dbCore.getDocParams(docType, null, { include_docs: true }))
+      db.allDocs<Document>(
+        dbCore.getDocParams(docType, null, { include_docs: true })
+      )
     )
   }
   // map the responses to the document itself

View File

@@ -229,7 +229,7 @@ export async function removeSecretSingle(datasource: Datasource) {
 }

 export function mergeConfigs(update: Datasource, old: Datasource) {
-  if (!update.config) {
+  if (!update.config || !old.config) {
     return update
   }
   // specific to REST datasources, fix the auth configs again if required

View File

@@ -32,9 +32,7 @@ export interface FetchDatasourceInfoResponse {
   tableNames: string[]
 }

-export interface UpdateDatasourceRequest extends Datasource {
-  datasource: Datasource
-}
+export interface UpdateDatasourceRequest extends Datasource {}

 export interface BuildSchemaFromSourceRequest {
   tablesFilter?: string[]

View File

@@ -6,6 +6,9 @@ export interface Datasource extends Document {
   type: string
   name?: string
   source: SourceName
+  // this is a googlesheets specific property which
+  // can be found in the GSheets schema - pertains to SSO creds
+  auth?: { type: string }
   // the config is defined by the schema
   config?: Record<string, any>
   plus?: boolean
@@ -36,6 +39,12 @@ export interface RestAuthConfig {
   config: RestBasicAuthConfig | RestBearerAuthConfig
 }

+export interface DynamicVariable {
+  name: string
+  queryId: string
+  value: string
+}
+
 export interface RestConfig {
   url: string
   rejectUnauthorized: boolean
@@ -47,11 +56,5 @@ export interface RestConfig {
   staticVariables: {
     [key: string]: string
   }
-  dynamicVariables: [
-    {
-      name: string
-      queryId: string
-      value: string
-    }
-  ]
+  dynamicVariables: DynamicVariable[]
 }

View File

@@ -5,7 +5,7 @@ export interface RowValue {
   deleted: boolean
 }

-export interface RowResponse<T extends Document> {
+export interface RowResponse<T extends Document | RowValue> {
   id: string
   key: string
   error: string
@@ -13,7 +13,7 @@ export interface RowResponse<T extends Document> {
   doc?: T
 }

-export interface AllDocsResponse<T extends Document> {
+export interface AllDocsResponse<T extends Document | RowValue> {
   offset: number
   total_rows: number
   rows: RowResponse<T>[]

View File

@@ -56,6 +56,7 @@ export enum SourceName {
   FIRESTORE = "FIRESTORE",
   REDIS = "REDIS",
   SNOWFLAKE = "SNOWFLAKE",
+  BUDIBASE = "BUDIBASE",
 }

 export enum IncludeRelationship {
export enum IncludeRelationship { export enum IncludeRelationship {

View File

@@ -1,5 +1,11 @@
 import type Nano from "@budibase/nano"
-import { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from "../"
+import {
+  AllDocsResponse,
+  AnyDocument,
+  Document,
+  RowValue,
+  ViewTemplateOpts,
+} from "../"
 import { Writable } from "stream"

 export enum SearchIndex {
@@ -135,7 +141,7 @@ export interface Database {
     opts?: DatabasePutOpts
   ): Promise<Nano.DocumentInsertResponse>
   bulkDocs(documents: AnyDocument[]): Promise<Nano.DocumentBulkResponse[]>
-  allDocs<T extends Document>(
+  allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>>
   query<T extends Document>(