Making built-in integrations look more like custom integrations (to simplify the integration code).

mike12345567 2022-08-12 17:03:06 +01:00
parent 6fc70fa0ab
commit 970e7ee3e7
21 changed files with 3297 additions and 3336 deletions
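
The commit converts every built-in integration from a TypeScript `module XxxModule { ... }` namespace ending in `module.exports = { schema, integration }` into a flat file with a single `export default { schema, integration }`, the same shape custom datasource plugins use. Below is a minimal sketch of that target shape for a hypothetical "Example" datasource; none of the names or field values here come from the commit, and the exact `Integration` schema fields may differ from what the real type requires.

// Illustrative only - a hypothetical integration in the post-commit shape.
import {
  DatasourceFieldType,
  Integration,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

interface ExampleConfig {
  url: string
}

const SCHEMA: Integration = {
  docs: "https://example.com/docs", // hypothetical URL
  friendlyName: "Example",
  type: "Non-relational",
  description: "Minimal example of the shared integration module shape.",
  datasource: {
    url: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
  },
  query: {
    read: {
      type: QueryType.JSON,
    },
  },
}

class ExampleIntegration implements IntegrationBase {
  private readonly config: ExampleConfig

  constructor(config: ExampleConfig) {
    this.config = config
  }

  async read(query: any) {
    // A real integration would call out to the datasource here.
    return [{ url: this.config.url, query }]
  }
}

// The shape this commit standardises on: no module wrapper, no module.exports,
// just a default export of { schema, integration }.
export default {
  schema: SCHEMA,
  integration: ExampleIntegration,
}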

View File

@@ -1,12 +1,11 @@
-const { cloneDeep } = require("lodash")
-const { definitions } = require("../../integrations")
+const { getDefinitions } = require("../../integrations")
 const { SourceName } = require("@budibase/types")
 const googlesheets = require("../../integrations/googlesheets")
 const { featureFlags } = require("@budibase/backend-core")

 exports.fetch = async function (ctx) {
   ctx.status = 200
-  const defs = cloneDeep(definitions)
+  const defs = await getDefinitions()
   // for google sheets integration google verification
   if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
@@ -17,6 +16,7 @@ exports.fetch = async function (ctx) {
 }

 exports.find = async function (ctx) {
+  const defs = await getDefinitions()
   ctx.status = 200
-  ctx.body = definitions[ctx.params.type]
+  ctx.body = defs[ctx.params.type]
 }

View File

@@ -3,6 +3,22 @@ import { extractPluginTarball } from "../../utilities/fileSystem"
 import { getGlobalDB } from "@budibase/backend-core/tenancy"
 import { generatePluginID, getPluginParams } from "../../db/utils"
 import { uploadDirectory } from "@budibase/backend-core/objectStore"
+import { PluginType } from "@budibase/types"
+
+export async function getPlugins(type?: PluginType) {
+  const db = getGlobalDB()
+  const response = await db.allDocs(
+    getPluginParams(null, {
+      include_docs: true,
+    })
+  )
+  const plugins = response.rows.map((row: any) => row.doc)
+  if (type) {
+    return plugins.filter((plugin: any) => plugin.schema?.type === type)
+  } else {
+    return plugins
+  }
+}

 export async function upload(ctx: any) {
   const plugins =
@@ -68,13 +84,7 @@ export async function upload(ctx: any) {
 }

 export async function fetch(ctx: any) {
-  const db = getGlobalDB()
-  const response = await db.allDocs(
-    getPluginParams(null, {
-      include_docs: true,
-    })
-  )
-  ctx.body = response.rows.map((row: any) => row.doc)
+  ctx.body = await getPlugins()
 }

 export async function destroy(ctx: any) {}
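
The new `getPlugins` helper is what later lets the integrations index pull custom datasources in alongside the built-ins; it only relies on a plugin document's `schema.type`. A rough sketch of the kind of document it filters on follows; every field name and value here is illustrative rather than taken from the commit.

// Hypothetical plugin document - only schema.type is what getPlugins keys off.
const examplePluginDoc = {
  _id: "plg_example-datasource", // illustrative ID, not a confirmed format
  name: "example-datasource",    // illustrative
  schema: {
    type: "datasource",          // matches PluginType.DATASOURCE
    // ...the plugin's Integration schema would sit alongside this
  },
}

// Filtering as the controller and the integrations index do it:
// const datasourcePlugins = await getPlugins(PluginType.DATASOURCE)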

View File

@@ -5,15 +5,14 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module AirtableModule {
-  const Airtable = require("airtable")
+const Airtable = require("airtable")

 interface AirtableConfig {
   apiKey: string
   base: string
 }

 const SCHEMA: Integration = {
   docs: "https://airtable.com/api",
   description:
     "Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
@@ -78,9 +77,9 @@ module AirtableModule {
       type: QueryType.JSON,
     },
   },
 }

 class AirtableIntegration implements IntegrationBase {
   private config: AirtableConfig
   private client: any
@@ -141,10 +140,9 @@ module AirtableModule {
       throw err
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: AirtableIntegration,
 }
-}

View File

@@ -5,18 +5,17 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module ArangoModule {
-  const { Database, aql } = require("arangojs")
+const { Database, aql } = require("arangojs")

 interface ArangodbConfig {
   url: string
   username: string
   password: string
   databaseName: string
   collection: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/arangodb/arangojs",
   friendlyName: "ArangoDB",
   type: "Non-relational",
@@ -55,9 +54,9 @@ module ArangoModule {
       type: QueryType.JSON,
     },
   },
 }

 class ArangoDBIntegration implements IntegrationBase {
   private config: ArangodbConfig
   private client: any
@@ -101,10 +100,9 @@ module ArangoModule {
       this.client.close()
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: ArangoDBIntegration,
 }
-}

View File

@@ -5,15 +5,14 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module CouchDBModule {
-  const PouchDB = require("pouchdb")
+const PouchDB = require("pouchdb")

 interface CouchDBConfig {
   url: string
   database: string
 }

 const SCHEMA: Integration = {
   docs: "https://docs.couchdb.org/en/stable/",
   friendlyName: "CouchDB",
   type: "Non-relational",
@@ -50,9 +49,9 @@ module CouchDBModule {
       },
     },
   },
 }

 class CouchDBIntegration implements IntegrationBase {
   private config: CouchDBConfig
   private readonly client: any
@@ -95,19 +94,14 @@ module CouchDBModule {
   }

   async delete(query: { id: string }) {
-    const doc = await this.query(
-      "get",
-      "Cannot find doc to be deleted",
-      query
-    )
+    const doc = await this.query("get", "Cannot find doc to be deleted", query)
     return this.query("remove", "Error deleting couchDB document", {
       json: doc,
     })
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: CouchDBIntegration,
 }
-}

View File

@@ -5,18 +5,17 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module DynamoModule {
-  const AWS = require("aws-sdk")
-  const { AWS_REGION } = require("../db/dynamoClient")
+const AWS = require("aws-sdk")
+const { AWS_REGION } = require("../db/dynamoClient")

 interface DynamoDBConfig {
   region: string
   accessKeyId: string
   secretAccessKey: string
   endpoint: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
   description:
     "Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
@@ -124,9 +123,9 @@ module DynamoModule {
       },
     },
   },
 }

 class DynamoDBIntegration implements IntegrationBase {
   private config: DynamoDBConfig
   private client: any
@@ -223,10 +222,9 @@ module DynamoModule {
     }
     return this.client.delete(params).promise()
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: DynamoDBIntegration,
 }
-}

View File

@@ -5,14 +5,13 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module ElasticsearchModule {
-  const { Client } = require("@elastic/elasticsearch")
+const { Client } = require("@elastic/elasticsearch")

 interface ElasticsearchConfig {
   url: string
 }

 const SCHEMA: Integration = {
   docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   description:
     "Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
@@ -74,9 +73,9 @@ module ElasticsearchModule {
       },
     },
   },
 }

 class ElasticSearchIntegration implements IntegrationBase {
   private config: ElasticsearchConfig
   private client: any
@@ -146,10 +145,9 @@ module ElasticsearchModule {
       await this.client.close()
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: ElasticSearchIntegration,
 }
-}

View File

@@ -6,14 +6,13 @@ import {
 } from "@budibase/types"
 import { Firestore, WhereFilterOp } from "@google-cloud/firestore"

-module Firebase {
-  interface FirebaseConfig {
+interface FirebaseConfig {
   email: string
   privateKey: string
   projectId: string
 }

 const SCHEMA: Integration = {
   docs: "https://firebase.google.com/docs/firestore/quickstart",
   friendlyName: "Firestore",
   type: "Non-relational",
@@ -83,9 +82,9 @@ module Firebase {
       required: false,
     },
   },
 }

 class FirebaseIntegration implements IntegrationBase {
   private config: FirebaseConfig
   private client: Firestore
@@ -180,10 +179,9 @@ module Firebase {
       throw err
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: FirebaseIntegration,
 }
-}

View File

@@ -13,34 +13,33 @@ import { DataSourceOperation, FieldTypes } from "../constants"
 import { GoogleSpreadsheet } from "google-spreadsheet"
 import env from "../environment"

-module GoogleSheetsModule {
-  const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-  const { getScopedConfig } = require("@budibase/backend-core/db")
-  const { Configs } = require("@budibase/backend-core/constants")
-  const fetch = require("node-fetch")
+const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { getScopedConfig } = require("@budibase/backend-core/db")
+const { Configs } = require("@budibase/backend-core/constants")
+const fetch = require("node-fetch")

 interface GoogleSheetsConfig {
   spreadsheetId: string
   auth: OAuthClientConfig
 }

 interface OAuthClientConfig {
   appId: string
   accessToken: string
   refreshToken: string
 }

 interface AuthTokenRequest {
   client_id: string
   client_secret: string
   refresh_token: string
 }

 interface AuthTokenResponse {
   access_token: string
 }

 const SCHEMA: Integration = {
   plus: true,
   auth: {
     type: "google",
@@ -112,9 +111,9 @@ module GoogleSheetsModule {
       },
     },
   },
 }

 class GoogleSheetsIntegration implements DatasourcePlus {
   private readonly config: GoogleSheetsConfig
   private client: any
   public tables: Record<string, Table> = {}
@@ -152,9 +151,7 @@ module GoogleSheetsModule {
   async fetchAccessToken(
     payload: AuthTokenRequest
   ): Promise<AuthTokenResponse> {
-    const response = await fetch(
-      "https://www.googleapis.com/oauth2/v4/token",
-      {
+    const response = await fetch("https://www.googleapis.com/oauth2/v4/token", {
       method: "POST",
       body: JSON.stringify({
         ...payload,
@@ -163,8 +160,7 @@
       headers: {
         "Content-Type": "application/json",
       },
-      }
-    )
+    })

     const json = await response.json()
@@ -385,11 +381,7 @@
       }
     }
     await row.save()
     return [
-      this.buildRowObject(
-        sheet.headerValues,
-        row._rawData,
-        row._rowNumber
-      ),
+      this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
     ]
   } else {
     throw new Error("Row does not exist.")
@@ -412,10 +404,9 @@
       throw new Error("Row does not exist.")
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: GoogleSheetsIntegration,
 }
-}

View File

@@ -1,22 +1,24 @@
-const postgres = require("./postgres")
-const dynamodb = require("./dynamodb")
-const mongodb = require("./mongodb")
-const elasticsearch = require("./elasticsearch")
-const couchdb = require("./couchdb")
-const sqlServer = require("./microsoftSqlServer")
-const s3 = require("./s3")
-const airtable = require("./airtable")
-const mysql = require("./mysql")
-const arangodb = require("./arangodb")
-const rest = require("./rest")
-const googlesheets = require("./googlesheets")
-const firebase = require("./firebase")
-const redis = require("./redis")
-const snowflake = require("./snowflake")
-const { SourceName } = require("@budibase/types")
+import postgres from "./postgres"
+import dynamodb from "./dynamodb"
+import mongodb from "./mongodb"
+import elasticsearch from "./elasticsearch"
+import couchdb from "./couchdb"
+import sqlServer from "./microsoftSqlServer"
+import s3 from "./s3"
+import airtable from "./airtable"
+import mysql from "./mysql"
+import arangodb from "./arangodb"
+import rest from "./rest"
+import googlesheets from "./googlesheets"
+import firebase from "./firebase"
+import redis from "./redis"
+import snowflake from "./snowflake"
+import { getPlugins } from "../api/controllers/plugin"
+import { SourceName, Integration, PluginType } from "@budibase/types"
 const environment = require("../environment")
+const { cloneDeep } = require("lodash")

-const DEFINITIONS = {
+const DEFINITIONS: { [key: string]: Integration } = {
   [SourceName.POSTGRES]: postgres.schema,
   [SourceName.DYNAMODB]: dynamodb.schema,
   [SourceName.MONGODB]: mongodb.schema,
@@ -33,7 +35,7 @@ const DEFINITIONS = {
   [SourceName.SNOWFLAKE]: snowflake.schema,
 }

-const INTEGRATIONS = {
+const INTEGRATIONS: { [key: string]: any } = {
   [SourceName.POSTGRES]: postgres.integration,
   [SourceName.DYNAMODB]: dynamodb.integration,
   [SourceName.MONGODB]: mongodb.integration,
@@ -48,7 +50,7 @@ const INTEGRATIONS = {
   [SourceName.FIRESTORE]: firebase.integration,
   [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
   [SourceName.REDIS]: redis.integration,
-  [SourceName.FIREBASE]: firebase.integration,
+  [SourceName.FIRESTORE]: firebase.integration,
   [SourceName.SNOWFLAKE]: snowflake.integration,
 }
@@ -64,6 +66,9 @@ if (environment.SELF_HOSTED) {
 }

 module.exports = {
-  definitions: DEFINITIONS,
+  getDefinitions: async () => {
+    const custom = await getPlugins(PluginType.DATASOURCE)
+    return cloneDeep(DEFINITIONS)
+  },
   integrations: INTEGRATIONS,
 }
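
Worth noting: in this commit `getDefinitions` fetches the custom datasource plugins but still returns only a clone of the built-in `DEFINITIONS`; the `custom` variable is not merged in yet. A hedged sketch of how such a merge might eventually look, assuming each plugin document exposes its `Integration` schema under `schema` and has a unique `name` field (neither is confirmed by this diff):

// Sketch only - not code from this commit; field names are assumptions.
getDefinitions: async () => {
  const custom = await getPlugins(PluginType.DATASOURCE)
  const defs = cloneDeep(DEFINITIONS)
  for (const plugin of custom) {
    // `plugin.name` and `plugin.schema` are hypothetical shapes here.
    defs[plugin.name] = plugin.schema
  }
  return defs
},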

View File

@@ -18,11 +18,10 @@ import {
 } from "./utils"
 import Sql from "./base/sql"

-module MSSQLModule {
-  const sqlServer = require("mssql")
-  const DEFAULT_SCHEMA = "dbo"
+const sqlServer = require("mssql")
+const DEFAULT_SCHEMA = "dbo"

 interface MSSQLConfig {
   user: string
   password: string
   server: string
@@ -30,16 +29,16 @@ module MSSQLModule {
   database: string
   schema: string
   encrypt?: boolean
 }

 interface TablesResponse {
   TABLE_CATALOG: string
   TABLE_SCHEMA: string
   TABLE_NAME: string
   TABLE_TYPE: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/tediousjs/node-mssql",
   plus: true,
   description:
@@ -92,9 +91,9 @@ module MSSQLModule {
       type: QueryType.SQL,
     },
   },
 }

 class SqlServerIntegration extends Sql implements DatasourcePlus {
   private readonly config: MSSQLConfig
   private index: number = 0
   private readonly pool: any
@@ -300,10 +299,9 @@ module MSSQLModule {
       result.recordset ? result.recordset : [{ [operation]: true }]
     return this.queryWithReturning(json, queryFn, processFn)
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: SqlServerIntegration,
 }
-}

View File

@@ -15,13 +15,12 @@ import {
   CommonOptions,
 } from "mongodb"

-module MongoDBModule {
-  interface MongoDBConfig {
+interface MongoDBConfig {
   connectionString: string
   db: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/mongodb/node-mongodb-native",
   friendlyName: "MongoDB",
   type: "Non-relational",
@@ -70,9 +69,9 @@ module MongoDBModule {
       },
     },
   },
 }

 class MongoIntegration implements IntegrationBase {
   private config: MongoDBConfig
   private client: any
@@ -96,9 +95,7 @@ module MongoDBModule {
         (field === "_id" || field?.startsWith("$")) &&
         typeof json[field] === "string"
       ) {
-        const id = json[field].match(
-          /(?<=objectid\(['"]).*(?=['"]\))/gi
-        )?.[0]
+        const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
         if (id) {
           json[field] = ObjectID.createFromHexString(id)
         }
@@ -318,10 +315,9 @@ module MongoDBModule {
       await this.client.close()
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: MongoIntegration,
 }
-}

View File

@@ -19,10 +19,9 @@ import dayjs from "dayjs"
 const { NUMBER_REGEX } = require("../utilities")
 import Sql from "./base/sql"

-module MySQLModule {
-  const mysql = require("mysql2/promise")
+const mysql = require("mysql2/promise")

 interface MySQLConfig {
   host: string
   port: number
   user: string
@@ -31,9 +30,9 @@ module MySQLModule {
   ssl?: { [key: string]: any }
   rejectUnauthorized: boolean
   typeCast: Function
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/sidorares/node-mysql2",
   plus: true,
   friendlyName: "MySQL",
@@ -89,11 +88,11 @@ module MySQLModule {
       type: QueryType.SQL,
     },
   },
 }

 const TimezoneAwareDateTypes = ["timestamp"]

 function bindingTypeCoerce(bindings: any[]) {
   for (let i = 0; i < bindings.length; i++) {
     const binding = bindings[i]
     if (typeof binding !== "string") {
@@ -111,9 +110,9 @@ module MySQLModule {
     }
   }
   return bindings
 }

 class MySQLIntegration extends Sql implements DatasourcePlus {
   private config: MySQLConfig
   private client: any
   public tables: Record<string, Table> = {}
@@ -216,10 +215,7 @@ module MySQLModule {
     )
     for (let column of descResp) {
       const columnName = column.Field
-      if (
-        column.Key === "PRI" &&
-        primaryKeys.indexOf(column.Key) === -1
-      ) {
+      if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
         primaryKeys.push(columnName)
       }
       const constraints = {
@@ -283,10 +279,9 @@ module MySQLModule {
       await this.disconnect()
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: MySQLIntegration,
 }
-}

View File

@@ -25,18 +25,17 @@ import oracledb, {
 import Sql from "./base/sql"
 import { FieldTypes } from "../constants"

-module OracleModule {
-  oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT
+oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT

 interface OracleConfig {
   host: string
   port: number
   database: string
   user: string
   password: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/oracle/node-oracledb",
   plus: true,
   friendlyName: "Oracle",
@@ -81,14 +80,14 @@ module OracleModule {
       type: QueryType.SQL,
     },
   },
 }

 const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]

 /**
  * Raw query response
  */
 interface ColumnsResponse {
   TABLE_NAME: string
   COLUMN_NAME: string
   DATA_TYPE: string
@@ -98,45 +97,45 @@ module OracleModule {
   CONSTRAINT_TYPE: string | null
   R_CONSTRAINT_NAME: string | null
   SEARCH_CONDITION: string | null
 }

 /**
  * An oracle constraint
  */
 interface OracleConstraint {
   name: string
   type: string
   relatedConstraintName: string | null
   searchCondition: string | null
 }

 /**
  * An oracle column and it's related constraints
  */
 interface OracleColumn {
   name: string
   type: string
   default: string | null
   id: number
   constraints: { [key: string]: OracleConstraint }
 }

 /**
  * An oracle table and it's related columns
  */
 interface OracleTable {
   name: string
   columns: { [key: string]: OracleColumn }
 }

 const OracleContraintTypes = {
   PRIMARY: "P",
   NOT_NULL_OR_CHECK: "C",
   FOREIGN_KEY: "R",
   UNIQUE: "U",
 }

 class OracleIntegration extends Sql implements DatasourcePlus {
   private readonly config: OracleConfig
   private index: number = 1
@@ -268,10 +267,7 @@ module OracleModule {
         const condition = c.searchCondition
           .replace(/\s/g, "") // remove spaces
           .replace(/[']+/g, "") // remove quotes
-        if (
-          condition.includes("in(0,1)") ||
-          condition.includes("in(1,0)")
-        ) {
+        if (condition.includes("in(0,1)") || condition.includes("in(1,0)")) {
           return true
         }
       }
@@ -335,13 +331,11 @@ module OracleModule {
       }

       // iterate each constraint on the column
-      Object.values(oracleColumn.constraints).forEach(
-        oracleConstraint => {
+      Object.values(oracleColumn.constraints).forEach(oracleConstraint => {
         if (oracleConstraint.type === OracleContraintTypes.PRIMARY) {
           table.primary!.push(columnName)
         }
-        }
-      )
+      })
     })
   })
@@ -452,10 +446,9 @@ module OracleModule {
       }
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: OracleIntegration,
 }
-}

View File

@@ -16,22 +16,21 @@ import {
 } from "./utils"
 import Sql from "./base/sql"

-module PostgresModule {
-  const { Client, types } = require("pg")
-  const { escapeDangerousCharacters } = require("../utilities")
+const { Client, types } = require("pg")
+const { escapeDangerousCharacters } = require("../utilities")

 // Return "date" and "timestamp" types as plain strings.
 // This lets us reference the original stored timezone.
 // types is undefined when running in a test env for some reason.
 if (types) {
   types.setTypeParser(1114, (val: any) => val) // timestamp
   types.setTypeParser(1082, (val: any) => val) // date
   types.setTypeParser(1184, (val: any) => val) // timestampz
 }

 const JSON_REGEX = /'{.*}'::json/s

 interface PostgresConfig {
   host: string
   port: number
   database: string
@@ -41,9 +40,9 @@ module PostgresModule {
   ssl?: boolean
   ca?: string
   rejectUnauthorized?: boolean
 }

 const SCHEMA: Integration = {
   docs: "https://node-postgres.com",
   plus: true,
   friendlyName: "PostgreSQL",
@@ -111,9 +110,9 @@ module PostgresModule {
       type: QueryType.SQL,
     },
   },
 }

 class PostgresIntegration extends Sql implements DatasourcePlus {
   private readonly client: any
   private readonly config: PostgresConfig
   private index: number = 1
@@ -221,9 +220,7 @@ module PostgresModule {
     let tableKeys: { [key: string]: string[] } = {}
     await this.openConnection()
     try {
-      const primaryKeysResponse = await this.client.query(
-        this.PRIMARY_KEYS_SQL
-      )
+      const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
       for (let table of primaryKeysResponse.rows) {
         const tableName = table.table_name
         if (!tableKeys[tableName]) {
@@ -323,10 +320,9 @@ module PostgresModule {
       return response.rows.length ? response.rows : [{ [operation]: true }]
     }
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: PostgresIntegration,
 }
-}

View File

@@ -1,15 +1,14 @@
 import { DatasourceFieldType, Integration, QueryType } from "@budibase/types"
 import Redis from "ioredis"

-module RedisModule {
-  interface RedisConfig {
+interface RedisConfig {
   host: string
   port: number
   username: string
   password?: string
 }

 const SCHEMA: Integration = {
   docs: "https://redis.io/docs/",
   description: "",
   friendlyName: "Redis",
@@ -76,9 +75,9 @@ module RedisModule {
       type: QueryType.JSON,
     },
   },
 }

 class RedisIntegration {
   private readonly config: RedisConfig
   private client: any
@@ -140,10 +139,9 @@ module RedisModule {
       }
     })
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: RedisIntegration,
 }
-}

View File

@@ -14,6 +14,11 @@ import {
   BearerAuthConfig,
 } from "../definitions/datasource"
 import { get } from "lodash"
+const fetch = require("node-fetch")
+const { formatBytes } = require("../utilities")
+const { performance } = require("perf_hooks")
+const FormData = require("form-data")
+const { URLSearchParams } = require("url")

 const BodyTypes = {
   NONE: "none",
@@ -50,18 +55,9 @@ const coreFields = {
   },
 }

-module RestModule {
-  const fetch = require("node-fetch")
-  const { formatBytes } = require("../utilities")
-  const { performance } = require("perf_hooks")
-  const FormData = require("form-data")
-  const { URLSearchParams } = require("url")
-  const {
-    parseStringPromise: xmlParser,
-    Builder: XmlBuilder,
-  } = require("xml2js")
+const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")

 const SCHEMA: Integration = {
   docs: "https://github.com/node-fetch/node-fetch",
   description:
     "With the REST API datasource, you can connect, query and pull data from multiple REST APIs. You can then use the retrieved data to build apps.",
@@ -111,9 +107,9 @@ module RestModule {
       fields: coreFields,
     },
   },
 }

 class RestIntegration implements IntegrationBase {
   private config: RestConfig
   private headers: {
     [key: string]: string
@@ -402,11 +398,10 @@ module RestModule {
   async delete(opts: RestQuery) {
     return this._req({ ...opts, method: "DELETE" })
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: RestIntegration,
   AuthType,
 }
-}

View File

@@ -1,17 +1,15 @@
 import { Integration, QueryType, IntegrationBase } from "@budibase/types"
+const AWS = require("aws-sdk")

-module S3Module {
-  const AWS = require("aws-sdk")
-  interface S3Config {
+interface S3Config {
   region: string
   accessKeyId: string
   secretAccessKey: string
   s3ForcePathStyle: boolean
   endpoint?: string
 }

 const SCHEMA: Integration = {
   docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
   description:
     "Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
@@ -52,9 +50,9 @@ module S3Module {
       },
     },
   },
 }

 class S3Integration implements IntegrationBase {
   private readonly config: S3Config
   private client: any
@@ -77,10 +75,9 @@ module S3Module {
       .promise()
     return response.Contents
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: S3Integration,
 }
-}

View File

@@ -1,17 +1,16 @@
 import { Integration, QueryType, SqlQuery } from "@budibase/types"
 import { Snowflake } from "snowflake-promise"

-module SnowflakeModule {
-  interface SnowflakeConfig {
+interface SnowflakeConfig {
   account: string
   username: string
   password: string
   warehouse: string
   database: string
   schema: string
 }

 const SCHEMA: Integration = {
   docs: "https://developers.snowflake.com/",
   description:
     "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.",
@@ -57,9 +56,9 @@ module SnowflakeModule {
       type: QueryType.SQL,
     },
   },
 }

 class SnowflakeIntegration {
   private client: Snowflake

   constructor(config: SnowflakeConfig) {
@@ -90,10 +89,9 @@ module SnowflakeModule {
   async delete(query: SqlQuery) {
     return this.internalQuery(query)
   }
 }

-module.exports = {
+export default {
   schema: SCHEMA,
   integration: SnowflakeIntegration,
 }
-}

View File

@@ -1,5 +1,6 @@
 export * from "./account"
 export * from "./app"
 export * from "./global"
+export * from "./plugin"
 export * from "./platform"
 export * from "./document"

View File

@@ -0,0 +1,4 @@
+export enum PluginType {
+  DATASOURCE = "datasource",
+  COMPONENT = "component",
+}