Restructure the built-in integrations to match the shape of custom (plugin) integrations, simplifying how integrations are loaded and extended.

This commit is contained in:
mike12345567 2022-08-12 17:03:06 +01:00
parent 67298ff44d
commit 97466f183b
21 changed files with 3297 additions and 3336 deletions

View File

@ -1,12 +1,11 @@
const { cloneDeep } = require("lodash")
const { definitions } = require("../../integrations")
const { getDefinitions } = require("../../integrations")
const { SourceName } = require("@budibase/types")
const googlesheets = require("../../integrations/googlesheets")
const { featureFlags } = require("@budibase/backend-core")
exports.fetch = async function (ctx) {
ctx.status = 200
const defs = cloneDeep(definitions)
const defs = await getDefinitions()
// for google sheets integration google verification
if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
@ -17,6 +16,7 @@ exports.fetch = async function (ctx) {
}
exports.find = async function (ctx) {
const defs = await getDefinitions()
ctx.status = 200
ctx.body = definitions[ctx.params.type]
ctx.body = defs[ctx.params.type]
}

View File

@ -3,6 +3,22 @@ import { extractPluginTarball } from "../../utilities/fileSystem"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { generatePluginID, getPluginParams } from "../../db/utils"
import { uploadDirectory } from "@budibase/backend-core/objectStore"
import { PluginType } from "@budibase/types"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function upload(ctx: any) {
const plugins =
@ -68,13 +84,7 @@ export async function upload(ctx: any) {
}
export async function fetch(ctx: any) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
ctx.body = response.rows.map((row: any) => row.doc)
ctx.body = await getPlugins()
}
export async function destroy(ctx: any) {}

View File

@ -5,15 +5,14 @@ import {
IntegrationBase,
} from "@budibase/types"
module AirtableModule {
const Airtable = require("airtable")
const Airtable = require("airtable")
interface AirtableConfig {
interface AirtableConfig {
apiKey: string
base: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://airtable.com/api",
description:
"Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
@ -78,9 +77,9 @@ module AirtableModule {
type: QueryType.JSON,
},
},
}
}
class AirtableIntegration implements IntegrationBase {
class AirtableIntegration implements IntegrationBase {
private config: AirtableConfig
private client: any
@ -141,10 +140,9 @@ module AirtableModule {
throw err
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: AirtableIntegration,
}
}

View File

@ -5,18 +5,17 @@ import {
IntegrationBase,
} from "@budibase/types"
module ArangoModule {
const { Database, aql } = require("arangojs")
const { Database, aql } = require("arangojs")
interface ArangodbConfig {
interface ArangodbConfig {
url: string
username: string
password: string
databaseName: string
collection: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/arangodb/arangojs",
friendlyName: "ArangoDB",
type: "Non-relational",
@ -55,9 +54,9 @@ module ArangoModule {
type: QueryType.JSON,
},
},
}
}
class ArangoDBIntegration implements IntegrationBase {
class ArangoDBIntegration implements IntegrationBase {
private config: ArangodbConfig
private client: any
@ -101,10 +100,9 @@ module ArangoModule {
this.client.close()
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: ArangoDBIntegration,
}
}

View File

@ -5,15 +5,14 @@ import {
IntegrationBase,
} from "@budibase/types"
module CouchDBModule {
const PouchDB = require("pouchdb")
const PouchDB = require("pouchdb")
interface CouchDBConfig {
interface CouchDBConfig {
url: string
database: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://docs.couchdb.org/en/stable/",
friendlyName: "CouchDB",
type: "Non-relational",
@ -50,9 +49,9 @@ module CouchDBModule {
},
},
},
}
}
class CouchDBIntegration implements IntegrationBase {
class CouchDBIntegration implements IntegrationBase {
private config: CouchDBConfig
private readonly client: any
@ -95,19 +94,14 @@ module CouchDBModule {
}
async delete(query: { id: string }) {
const doc = await this.query(
"get",
"Cannot find doc to be deleted",
query
)
const doc = await this.query("get", "Cannot find doc to be deleted", query)
return this.query("remove", "Error deleting couchDB document", {
json: doc,
})
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: CouchDBIntegration,
}
}

View File

@ -5,18 +5,17 @@ import {
IntegrationBase,
} from "@budibase/types"
module DynamoModule {
const AWS = require("aws-sdk")
const { AWS_REGION } = require("../db/dynamoClient")
const AWS = require("aws-sdk")
const { AWS_REGION } = require("../db/dynamoClient")
interface DynamoDBConfig {
interface DynamoDBConfig {
region: string
accessKeyId: string
secretAccessKey: string
endpoint: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
description:
"Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
@ -124,9 +123,9 @@ module DynamoModule {
},
},
},
}
}
class DynamoDBIntegration implements IntegrationBase {
class DynamoDBIntegration implements IntegrationBase {
private config: DynamoDBConfig
private client: any
@ -223,10 +222,9 @@ module DynamoModule {
}
return this.client.delete(params).promise()
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: DynamoDBIntegration,
}
}

View File

@ -5,14 +5,13 @@ import {
IntegrationBase,
} from "@budibase/types"
module ElasticsearchModule {
const { Client } = require("@elastic/elasticsearch")
const { Client } = require("@elastic/elasticsearch")
interface ElasticsearchConfig {
interface ElasticsearchConfig {
url: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
description:
"Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
@ -74,9 +73,9 @@ module ElasticsearchModule {
},
},
},
}
}
class ElasticSearchIntegration implements IntegrationBase {
class ElasticSearchIntegration implements IntegrationBase {
private config: ElasticsearchConfig
private client: any
@ -146,10 +145,9 @@ module ElasticsearchModule {
await this.client.close()
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: ElasticSearchIntegration,
}
}

View File

@ -6,14 +6,13 @@ import {
} from "@budibase/types"
import { Firestore, WhereFilterOp } from "@google-cloud/firestore"
module Firebase {
interface FirebaseConfig {
interface FirebaseConfig {
email: string
privateKey: string
projectId: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://firebase.google.com/docs/firestore/quickstart",
friendlyName: "Firestore",
type: "Non-relational",
@ -83,9 +82,9 @@ module Firebase {
required: false,
},
},
}
}
class FirebaseIntegration implements IntegrationBase {
class FirebaseIntegration implements IntegrationBase {
private config: FirebaseConfig
private client: Firestore
@ -180,10 +179,9 @@ module Firebase {
throw err
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: FirebaseIntegration,
}
}

View File

@ -13,34 +13,33 @@ import { DataSourceOperation, FieldTypes } from "../constants"
import { GoogleSpreadsheet } from "google-spreadsheet"
import env from "../environment"
module GoogleSheetsModule {
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { getScopedConfig } = require("@budibase/backend-core/db")
const { Configs } = require("@budibase/backend-core/constants")
const fetch = require("node-fetch")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { getScopedConfig } = require("@budibase/backend-core/db")
const { Configs } = require("@budibase/backend-core/constants")
const fetch = require("node-fetch")
interface GoogleSheetsConfig {
interface GoogleSheetsConfig {
spreadsheetId: string
auth: OAuthClientConfig
}
}
interface OAuthClientConfig {
interface OAuthClientConfig {
appId: string
accessToken: string
refreshToken: string
}
}
interface AuthTokenRequest {
interface AuthTokenRequest {
client_id: string
client_secret: string
refresh_token: string
}
}
interface AuthTokenResponse {
interface AuthTokenResponse {
access_token: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
plus: true,
auth: {
type: "google",
@ -112,9 +111,9 @@ module GoogleSheetsModule {
},
},
},
}
}
class GoogleSheetsIntegration implements DatasourcePlus {
class GoogleSheetsIntegration implements DatasourcePlus {
private readonly config: GoogleSheetsConfig
private client: any
public tables: Record<string, Table> = {}
@ -152,9 +151,7 @@ module GoogleSheetsModule {
async fetchAccessToken(
payload: AuthTokenRequest
): Promise<AuthTokenResponse> {
const response = await fetch(
"https://www.googleapis.com/oauth2/v4/token",
{
const response = await fetch("https://www.googleapis.com/oauth2/v4/token", {
method: "POST",
body: JSON.stringify({
...payload,
@ -163,8 +160,7 @@ module GoogleSheetsModule {
headers: {
"Content-Type": "application/json",
},
}
)
})
const json = await response.json()
@ -385,11 +381,7 @@ module GoogleSheetsModule {
}
await row.save()
return [
this.buildRowObject(
sheet.headerValues,
row._rawData,
row._rowNumber
),
this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
]
} else {
throw new Error("Row does not exist.")
@ -412,10 +404,9 @@ module GoogleSheetsModule {
throw new Error("Row does not exist.")
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: GoogleSheetsIntegration,
}
}

View File

@ -1,22 +1,24 @@
const postgres = require("./postgres")
const dynamodb = require("./dynamodb")
const mongodb = require("./mongodb")
const elasticsearch = require("./elasticsearch")
const couchdb = require("./couchdb")
const sqlServer = require("./microsoftSqlServer")
const s3 = require("./s3")
const airtable = require("./airtable")
const mysql = require("./mysql")
const arangodb = require("./arangodb")
const rest = require("./rest")
const googlesheets = require("./googlesheets")
const firebase = require("./firebase")
const redis = require("./redis")
const snowflake = require("./snowflake")
const { SourceName } = require("@budibase/types")
import postgres from "./postgres"
import dynamodb from "./dynamodb"
import mongodb from "./mongodb"
import elasticsearch from "./elasticsearch"
import couchdb from "./couchdb"
import sqlServer from "./microsoftSqlServer"
import s3 from "./s3"
import airtable from "./airtable"
import mysql from "./mysql"
import arangodb from "./arangodb"
import rest from "./rest"
import googlesheets from "./googlesheets"
import firebase from "./firebase"
import redis from "./redis"
import snowflake from "./snowflake"
import { getPlugins } from "../api/controllers/plugin"
import { SourceName, Integration, PluginType } from "@budibase/types"
const environment = require("../environment")
const { cloneDeep } = require("lodash")
const DEFINITIONS = {
const DEFINITIONS: { [key: string]: Integration } = {
[SourceName.POSTGRES]: postgres.schema,
[SourceName.DYNAMODB]: dynamodb.schema,
[SourceName.MONGODB]: mongodb.schema,
@ -33,7 +35,7 @@ const DEFINITIONS = {
[SourceName.SNOWFLAKE]: snowflake.schema,
}
const INTEGRATIONS = {
const INTEGRATIONS: { [key: string]: any } = {
[SourceName.POSTGRES]: postgres.integration,
[SourceName.DYNAMODB]: dynamodb.integration,
[SourceName.MONGODB]: mongodb.integration,
@ -48,7 +50,7 @@ const INTEGRATIONS = {
[SourceName.FIRESTORE]: firebase.integration,
[SourceName.GOOGLE_SHEETS]: googlesheets.integration,
[SourceName.REDIS]: redis.integration,
[SourceName.FIREBASE]: firebase.integration,
[SourceName.FIRESTORE]: firebase.integration,
[SourceName.SNOWFLAKE]: snowflake.integration,
}
@ -64,6 +66,9 @@ if (environment.SELF_HOSTED) {
}
module.exports = {
  // Static map of the built-in integration definitions, keyed by SourceName.
  definitions: DEFINITIONS,
  /**
   * Returns the full set of datasource definitions: the built-in ones plus
   * any datasource plugins installed for the current tenant.
   *
   * BUG FIX: previously the custom plugins were fetched into `custom` and
   * then discarded, so installed datasource plugins never appeared in the
   * returned definitions. They are now merged into the cloned map.
   */
  getDefinitions: async () => {
    const definitions = cloneDeep(DEFINITIONS)
    const custom = await getPlugins(PluginType.DATASOURCE)
    for (let plugin of custom) {
      // NOTE(review): assumes a plugin doc carries its Integration schema
      // under `schema.schema` and is keyed by its `name` — confirm against
      // the plugin upload format before relying on this shape.
      if (plugin?.schema?.schema) {
        definitions[plugin.name] = plugin.schema.schema
      }
    }
    return definitions
  },
  // Constructor map used to instantiate an integration client at runtime.
  integrations: INTEGRATIONS,
}

View File

@ -18,11 +18,10 @@ import {
} from "./utils"
import Sql from "./base/sql"
module MSSQLModule {
const sqlServer = require("mssql")
const DEFAULT_SCHEMA = "dbo"
const sqlServer = require("mssql")
const DEFAULT_SCHEMA = "dbo"
interface MSSQLConfig {
interface MSSQLConfig {
user: string
password: string
server: string
@ -30,16 +29,16 @@ module MSSQLModule {
database: string
schema: string
encrypt?: boolean
}
}
interface TablesResponse {
interface TablesResponse {
TABLE_CATALOG: string
TABLE_SCHEMA: string
TABLE_NAME: string
TABLE_TYPE: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/tediousjs/node-mssql",
plus: true,
description:
@ -92,9 +91,9 @@ module MSSQLModule {
type: QueryType.SQL,
},
},
}
}
class SqlServerIntegration extends Sql implements DatasourcePlus {
class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
private index: number = 0
private readonly pool: any
@ -300,10 +299,9 @@ module MSSQLModule {
result.recordset ? result.recordset : [{ [operation]: true }]
return this.queryWithReturning(json, queryFn, processFn)
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: SqlServerIntegration,
}
}

View File

@ -15,13 +15,12 @@ import {
CommonOptions,
} from "mongodb"
module MongoDBModule {
interface MongoDBConfig {
interface MongoDBConfig {
connectionString: string
db: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/mongodb/node-mongodb-native",
friendlyName: "MongoDB",
type: "Non-relational",
@ -70,9 +69,9 @@ module MongoDBModule {
},
},
},
}
}
class MongoIntegration implements IntegrationBase {
class MongoIntegration implements IntegrationBase {
private config: MongoDBConfig
private client: any
@ -96,9 +95,7 @@ module MongoDBModule {
(field === "_id" || field?.startsWith("$")) &&
typeof json[field] === "string"
) {
const id = json[field].match(
/(?<=objectid\(['"]).*(?=['"]\))/gi
)?.[0]
const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
if (id) {
json[field] = ObjectID.createFromHexString(id)
}
@ -318,10 +315,9 @@ module MongoDBModule {
await this.client.close()
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: MongoIntegration,
}
}

View File

@ -19,10 +19,9 @@ import dayjs from "dayjs"
const { NUMBER_REGEX } = require("../utilities")
import Sql from "./base/sql"
module MySQLModule {
const mysql = require("mysql2/promise")
const mysql = require("mysql2/promise")
interface MySQLConfig {
interface MySQLConfig {
host: string
port: number
user: string
@ -31,9 +30,9 @@ module MySQLModule {
ssl?: { [key: string]: any }
rejectUnauthorized: boolean
typeCast: Function
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/sidorares/node-mysql2",
plus: true,
friendlyName: "MySQL",
@ -89,11 +88,11 @@ module MySQLModule {
type: QueryType.SQL,
},
},
}
}
const TimezoneAwareDateTypes = ["timestamp"]
const TimezoneAwareDateTypes = ["timestamp"]
function bindingTypeCoerce(bindings: any[]) {
function bindingTypeCoerce(bindings: any[]) {
for (let i = 0; i < bindings.length; i++) {
const binding = bindings[i]
if (typeof binding !== "string") {
@ -111,9 +110,9 @@ module MySQLModule {
}
}
return bindings
}
}
class MySQLIntegration extends Sql implements DatasourcePlus {
class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig
private client: any
public tables: Record<string, Table> = {}
@ -216,10 +215,7 @@ module MySQLModule {
)
for (let column of descResp) {
const columnName = column.Field
if (
column.Key === "PRI" &&
primaryKeys.indexOf(column.Key) === -1
) {
if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
primaryKeys.push(columnName)
}
const constraints = {
@ -283,10 +279,9 @@ module MySQLModule {
await this.disconnect()
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: MySQLIntegration,
}
}

View File

@ -25,18 +25,17 @@ import oracledb, {
import Sql from "./base/sql"
import { FieldTypes } from "../constants"
module OracleModule {
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT
interface OracleConfig {
interface OracleConfig {
host: string
port: number
database: string
user: string
password: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/oracle/node-oracledb",
plus: true,
friendlyName: "Oracle",
@ -81,14 +80,14 @@ module OracleModule {
type: QueryType.SQL,
},
},
}
}
const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]
const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]
/**
/**
* Raw query response
*/
interface ColumnsResponse {
interface ColumnsResponse {
TABLE_NAME: string
COLUMN_NAME: string
DATA_TYPE: string
@ -98,45 +97,45 @@ module OracleModule {
CONSTRAINT_TYPE: string | null
R_CONSTRAINT_NAME: string | null
SEARCH_CONDITION: string | null
}
}
/**
/**
* An oracle constraint
*/
interface OracleConstraint {
interface OracleConstraint {
name: string
type: string
relatedConstraintName: string | null
searchCondition: string | null
}
}
/**
/**
* An oracle column and it's related constraints
*/
interface OracleColumn {
interface OracleColumn {
name: string
type: string
default: string | null
id: number
constraints: { [key: string]: OracleConstraint }
}
}
/**
/**
* An oracle table and it's related columns
*/
interface OracleTable {
interface OracleTable {
name: string
columns: { [key: string]: OracleColumn }
}
}
const OracleContraintTypes = {
const OracleContraintTypes = {
PRIMARY: "P",
NOT_NULL_OR_CHECK: "C",
FOREIGN_KEY: "R",
UNIQUE: "U",
}
}
class OracleIntegration extends Sql implements DatasourcePlus {
class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig
private index: number = 1
@ -268,10 +267,7 @@ module OracleModule {
const condition = c.searchCondition
.replace(/\s/g, "") // remove spaces
.replace(/[']+/g, "") // remove quotes
if (
condition.includes("in(0,1)") ||
condition.includes("in(1,0)")
) {
if (condition.includes("in(0,1)") || condition.includes("in(1,0)")) {
return true
}
}
@ -335,13 +331,11 @@ module OracleModule {
}
// iterate each constraint on the column
Object.values(oracleColumn.constraints).forEach(
oracleConstraint => {
Object.values(oracleColumn.constraints).forEach(oracleConstraint => {
if (oracleConstraint.type === OracleContraintTypes.PRIMARY) {
table.primary!.push(columnName)
}
}
)
})
})
})
@ -452,10 +446,9 @@ module OracleModule {
}
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: OracleIntegration,
}
}

View File

@ -16,22 +16,21 @@ import {
} from "./utils"
import Sql from "./base/sql"
module PostgresModule {
const { Client, types } = require("pg")
const { escapeDangerousCharacters } = require("../utilities")
const { Client, types } = require("pg")
const { escapeDangerousCharacters } = require("../utilities")
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
// types is undefined when running in a test env for some reason.
if (types) {
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
// types is undefined when running in a test env for some reason.
if (types) {
types.setTypeParser(1114, (val: any) => val) // timestamp
types.setTypeParser(1082, (val: any) => val) // date
types.setTypeParser(1184, (val: any) => val) // timestampz
}
}
const JSON_REGEX = /'{.*}'::json/s
const JSON_REGEX = /'{.*}'::json/s
interface PostgresConfig {
interface PostgresConfig {
host: string
port: number
database: string
@ -41,9 +40,9 @@ module PostgresModule {
ssl?: boolean
ca?: string
rejectUnauthorized?: boolean
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://node-postgres.com",
plus: true,
friendlyName: "PostgreSQL",
@ -111,9 +110,9 @@ module PostgresModule {
type: QueryType.SQL,
},
},
}
}
class PostgresIntegration extends Sql implements DatasourcePlus {
class PostgresIntegration extends Sql implements DatasourcePlus {
private readonly client: any
private readonly config: PostgresConfig
private index: number = 1
@ -221,9 +220,7 @@ module PostgresModule {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
try {
const primaryKeysResponse = await this.client.query(
this.PRIMARY_KEYS_SQL
)
const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
for (let table of primaryKeysResponse.rows) {
const tableName = table.table_name
if (!tableKeys[tableName]) {
@ -323,10 +320,9 @@ module PostgresModule {
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: PostgresIntegration,
}
}

View File

@ -1,15 +1,14 @@
import { DatasourceFieldType, Integration, QueryType } from "@budibase/types"
import Redis from "ioredis"
module RedisModule {
interface RedisConfig {
interface RedisConfig {
host: string
port: number
username: string
password?: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://redis.io/docs/",
description: "",
friendlyName: "Redis",
@ -76,9 +75,9 @@ module RedisModule {
type: QueryType.JSON,
},
},
}
}
class RedisIntegration {
class RedisIntegration {
private readonly config: RedisConfig
private client: any
@ -140,10 +139,9 @@ module RedisModule {
}
})
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: RedisIntegration,
}
}

View File

@ -14,6 +14,11 @@ import {
BearerAuthConfig,
} from "../definitions/datasource"
import { get } from "lodash"
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
const FormData = require("form-data")
const { URLSearchParams } = require("url")
const BodyTypes = {
NONE: "none",
@ -50,18 +55,9 @@ const coreFields = {
},
}
module RestModule {
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
const FormData = require("form-data")
const { URLSearchParams } = require("url")
const {
parseStringPromise: xmlParser,
Builder: XmlBuilder,
} = require("xml2js")
const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://github.com/node-fetch/node-fetch",
description:
"With the REST API datasource, you can connect, query and pull data from multiple REST APIs. You can then use the retrieved data to build apps.",
@ -111,9 +107,9 @@ module RestModule {
fields: coreFields,
},
},
}
}
class RestIntegration implements IntegrationBase {
class RestIntegration implements IntegrationBase {
private config: RestConfig
private headers: {
[key: string]: string
@ -402,11 +398,10 @@ module RestModule {
async delete(opts: RestQuery) {
return this._req({ ...opts, method: "DELETE" })
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: RestIntegration,
AuthType,
}
}

View File

@ -1,17 +1,15 @@
import { Integration, QueryType, IntegrationBase } from "@budibase/types"
const AWS = require("aws-sdk")
module S3Module {
const AWS = require("aws-sdk")
interface S3Config {
interface S3Config {
region: string
accessKeyId: string
secretAccessKey: string
s3ForcePathStyle: boolean
endpoint?: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
description:
"Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
@ -52,9 +50,9 @@ module S3Module {
},
},
},
}
}
class S3Integration implements IntegrationBase {
class S3Integration implements IntegrationBase {
private readonly config: S3Config
private client: any
@ -77,10 +75,9 @@ module S3Module {
.promise()
return response.Contents
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: S3Integration,
}
}

View File

@ -1,17 +1,16 @@
import { Integration, QueryType, SqlQuery } from "@budibase/types"
import { Snowflake } from "snowflake-promise"
module SnowflakeModule {
interface SnowflakeConfig {
interface SnowflakeConfig {
account: string
username: string
password: string
warehouse: string
database: string
schema: string
}
}
const SCHEMA: Integration = {
const SCHEMA: Integration = {
docs: "https://developers.snowflake.com/",
description:
"Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.",
@ -57,9 +56,9 @@ module SnowflakeModule {
type: QueryType.SQL,
},
},
}
}
class SnowflakeIntegration {
class SnowflakeIntegration {
private client: Snowflake
constructor(config: SnowflakeConfig) {
@ -90,10 +89,9 @@ module SnowflakeModule {
async delete(query: SqlQuery) {
return this.internalQuery(query)
}
}
}
module.exports = {
export default {
schema: SCHEMA,
integration: SnowflakeIntegration,
}
}

View File

@ -1,5 +1,6 @@
export * from "./account"
export * from "./app"
export * from "./global"
export * from "./plugin"
export * from "./platform"
export * from "./document"

View File

@ -0,0 +1,4 @@
/**
 * Discriminates the kinds of installable plugin. DATASOURCE plugins are
 * surfaced as additional integration definitions (filtered on via
 * `schema.type` when fetching plugins); COMPONENT is presumably for
 * client-side components — not exercised in this changeset, confirm usage.
 */
export enum PluginType {
  DATASOURCE = "datasource",
  COMPONENT = "component",
}