Making built-in integrations look more like custom integrations (to simplify the integration layer).
commit 970e7ee3e7 (parent 6fc70fa0ab)
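
The diff below converts each built-in integration from a `module ... {}` wrapper with `module.exports` to the flat shape a custom integration uses: a `SCHEMA` constant, an integration class, and a single default export. A minimal sketch of that target shape (field values here are illustrative, not taken from this commit):

import {
  DatasourceFieldType,
  Integration,
  IntegrationBase,
  QueryType,
} from "@budibase/types"

const SCHEMA: Integration = {
  docs: "https://example.com/docs", // placeholder docs link
  friendlyName: "Example",
  type: "Non-relational",
  description: "A minimal integration definition.",
  datasource: {
    url: { type: DatasourceFieldType.STRING, required: true },
  },
  query: {
    read: { type: QueryType.JSON },
  },
}

class ExampleIntegration implements IntegrationBase {
  async read(query: { json: object }) {
    // a real integration would talk to the datasource here
    return [query.json]
  }
}

// the shape shared by custom integrations and, after this commit, built-in ones
export default {
  schema: SCHEMA,
  integration: ExampleIntegration,
}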
@@ -1,12 +1,11 @@
-const { cloneDeep } = require("lodash")
-const { definitions } = require("../../integrations")
+const { getDefinitions } = require("../../integrations")
 const { SourceName } = require("@budibase/types")
 const googlesheets = require("../../integrations/googlesheets")
 const { featureFlags } = require("@budibase/backend-core")

 exports.fetch = async function (ctx) {
   ctx.status = 200
-  const defs = cloneDeep(definitions)
+  const defs = await getDefinitions()

   // for google sheets integration google verification
   if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
@@ -17,6 +16,7 @@ exports.fetch = async function (ctx) {
 }

 exports.find = async function (ctx) {
+  const defs = await getDefinitions()
   ctx.status = 200
-  ctx.body = definitions[ctx.params.type]
+  ctx.body = defs[ctx.params.type]
 }
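
The controller previously served a `cloneDeep` of a static `definitions` object; it now awaits `getDefinitions()`, which suggests the definitions are assembled on demand, presumably so plugin-supplied datasources can be merged with the built-ins. A hedged sketch of what such a function could look like (this body is an assumption, not code from this commit; `BUILTIN_DEFINITIONS` and `getPlugins` are hypothetical names):

import { cloneDeep } from "lodash"
import { PluginType } from "@budibase/types"

// hypothetical sketch: merge built-in definitions with plugin-provided ones
async function getDefinitions() {
  const defs: Record<string, any> = cloneDeep(BUILTIN_DEFINITIONS) // assumed built-in map
  for (const plugin of await getPlugins(PluginType.DATASOURCE)) {
    defs[plugin.name] = plugin.schema // assumption: plugin definitions keyed by name
  }
  return defs
}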
@@ -3,6 +3,22 @@ import { extractPluginTarball } from "../../utilities/fileSystem"
 import { getGlobalDB } from "@budibase/backend-core/tenancy"
 import { generatePluginID, getPluginParams } from "../../db/utils"
 import { uploadDirectory } from "@budibase/backend-core/objectStore"
+import { PluginType } from "@budibase/types"
+
+export async function getPlugins(type?: PluginType) {
+  const db = getGlobalDB()
+  const response = await db.allDocs(
+    getPluginParams(null, {
+      include_docs: true,
+    })
+  )
+  const plugins = response.rows.map((row: any) => row.doc)
+  if (type) {
+    return plugins.filter((plugin: any) => plugin.schema?.type === type)
+  } else {
+    return plugins
+  }
+}

 export async function upload(ctx: any) {
   const plugins =
@@ -68,13 +84,7 @@ export async function upload(ctx: any) {
 }

 export async function fetch(ctx: any) {
-  const db = getGlobalDB()
-  const response = await db.allDocs(
-    getPluginParams(null, {
-      include_docs: true,
-    })
-  )
-  ctx.body = response.rows.map((row: any) => row.doc)
+  ctx.body = await getPlugins()
 }

 export async function destroy(ctx: any) {}
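
Extracting `getPlugins` out of `fetch` gives other server code a way to list plugins, optionally filtered by `plugin.schema?.type`. A usage sketch (the import path and the `DATASOURCE` enum member are assumptions):

import { PluginType } from "@budibase/types"
import { getPlugins } from "./plugin" // hypothetical path to the controller above

async function listPlugins() {
  const all = await getPlugins() // every plugin document
  // only plugins whose schema declares the given type (member name assumed)
  const datasources = await getPlugins(PluginType.DATASOURCE)
  return { all, datasources }
}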
@@ -5,146 +5,144 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module AirtableModule {
 const Airtable = require("airtable")

 interface AirtableConfig {
   apiKey: string
   base: string
 }

 const SCHEMA: Integration = {
   docs: "https://airtable.com/api",
   description:
     "Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
   friendlyName: "Airtable",
   type: "Spreadsheet",
   datasource: {
     apiKey: {
       type: DatasourceFieldType.PASSWORD,
       default: "enter api key",
       required: true,
     },
     base: {
       type: DatasourceFieldType.STRING,
       default: "mybase",
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         view: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         numRecords: {
           type: DatasourceFieldType.NUMBER,
           default: 10,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         id: {
           display: "Record ID",
           type: DatasourceFieldType.STRING,
           required: true,
         },
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.JSON,
     },
   },
 }

 class AirtableIntegration implements IntegrationBase {
   private config: AirtableConfig
   private client: any

   constructor(config: AirtableConfig) {
     this.config = config
     this.client = new Airtable(config).base(config.base)
   }

   async create(query: { table: any; json: any }) {
     const { table, json } = query

     try {
       return await this.client(table).create([
         {
           fields: json,
         },
       ])
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }

   async read(query: { table: any; numRecords: any; view: any }) {
     try {
       const records = await this.client(query.table)
         .select({ maxRecords: query.numRecords || 10, view: query.view })
         .firstPage()
       // @ts-ignore
       return records.map(({ fields }) => fields)
     } catch (err) {
       console.error("Error writing to airtable", err)
       return []
     }
   }

   async update(query: { table: any; id: any; json: any }) {
     const { table, id, json } = query

     try {
       return await this.client(table).update([
         {
           id,
           fields: json,
         },
       ])
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }

   async delete(query: { table: any; ids: any }) {
     try {
       return await this.client(query.table).destroy(query.ids)
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: AirtableIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: AirtableIntegration,
+}
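
With `export default` replacing `module.exports`, ESM/TypeScript consumers default-import the schema/class pair directly. A consumption sketch (credentials and table names are placeholders):

import airtable from "./airtable"

async function example() {
  // airtable.integration is the AirtableIntegration class from above
  const instance = new airtable.integration({
    apiKey: "keyXXXX", // placeholder credential
    base: "appXXXX", // placeholder base ID
  })
  return instance.read({ table: "Table 1", numRecords: 10, view: "Grid view" })
}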
@@ -5,106 +5,104 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module ArangoModule {
 const { Database, aql } = require("arangojs")

 interface ArangodbConfig {
   url: string
   username: string
   password: string
   databaseName: string
   collection: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/arangodb/arangojs",
   friendlyName: "ArangoDB",
   type: "Non-relational",
   description:
     "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       default: "http://localhost:8529",
       required: true,
     },
     username: {
       type: DatasourceFieldType.STRING,
       default: "root",
       required: true,
     },
     password: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     databaseName: {
       type: DatasourceFieldType.STRING,
       default: "_system",
       required: true,
     },
     collection: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     read: {
       type: QueryType.SQL,
     },
     create: {
       type: QueryType.JSON,
     },
   },
 }

 class ArangoDBIntegration implements IntegrationBase {
   private config: ArangodbConfig
   private client: any

   constructor(config: ArangodbConfig) {
     const newConfig = {
       auth: {
         username: config.username,
         password: config.password,
       },
     }

     this.config = config
     this.client = new Database(newConfig)
   }

   async read(query: { sql: any }) {
     try {
       const result = await this.client.query(query.sql)
       return result.all()
     } catch (err) {
       // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {
       this.client.close()
     }
   }

   async create(query: { json: any }) {
     const clc = this.client.collection(this.config.collection)
     try {
       const result = await this.client.query(
         aql`INSERT ${query.json} INTO ${clc} RETURN NEW`
       )
       return result.all()
     } catch (err) {
       // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {
       this.client.close()
     }
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: ArangoDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: ArangoDBIntegration,
+}
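
Worth noting for the `create` query above: the `aql` tagged template from arangojs converts interpolated values into bind parameters (and collection handles into collection bind parameters), so interpolating `query.json` is injection-safe rather than string concatenation. A sketch of the same call outside the class (collection name is a placeholder):

import { Database, aql } from "arangojs"

async function example() {
  const db = new Database({ url: "http://localhost:8529" })
  const clc = db.collection("mycollection") // placeholder collection name
  const doc = { name: "example" }
  // aql compiles to roughly { query: "INSERT @value0 INTO @@value1 RETURN NEW", bindVars: { ... } }
  const cursor = await db.query(aql`INSERT ${doc} INTO ${clc} RETURN NEW`)
  return cursor.all()
}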
@@ -5,109 +5,103 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module CouchDBModule {
 const PouchDB = require("pouchdb")

 interface CouchDBConfig {
   url: string
   database: string
 }

 const SCHEMA: Integration = {
   docs: "https://docs.couchdb.org/en/stable/",
   friendlyName: "CouchDB",
   type: "Non-relational",
   description:
     "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "http://localhost:5984",
     },
     database: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.JSON,
     },
     read: {
       type: QueryType.JSON,
     },
     update: {
       type: QueryType.JSON,
     },
     delete: {
       type: QueryType.FIELDS,
       fields: {
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }

 class CouchDBIntegration implements IntegrationBase {
   private config: CouchDBConfig
   private readonly client: any

   constructor(config: CouchDBConfig) {
     this.config = config
     this.client = new PouchDB(`${config.url}/${config.database}`)
   }

   async query(
     command: string,
     errorMsg: string,
     query: { json?: object; id?: string }
   ) {
     try {
       const response = await this.client[command](query.id || query.json)
       await this.client.close()
       return response
     } catch (err) {
       console.error(errorMsg, err)
       throw err
     }
   }

   async create(query: { json: object }) {
     return this.query("post", "Error writing to couchDB", query)
   }

   async read(query: { json: object }) {
     const result = await this.query("allDocs", "Error querying couchDB", {
       json: {
         include_docs: true,
         ...query.json,
       },
     })
     return result.rows.map((row: { doc: object }) => row.doc)
   }

   async update(query: { json: object }) {
     return this.query("put", "Error updating couchDB document", query)
   }

   async delete(query: { id: string }) {
     const doc = await this.query("get", "Cannot find doc to be deleted", query)
     return this.query("remove", "Error deleting couchDB document", {
       json: doc,
     })
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: CouchDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: CouchDBIntegration,
+}
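
All four CRUD methods above funnel through the single `query(command, errorMsg, query)` helper, which indexes into the PouchDB client by method name and passes either the id or the JSON body. For example, `create` resolves to roughly the following (a sketch, database URL is a placeholder):

import PouchDB from "pouchdb"

async function example() {
  const client = new PouchDB("http://localhost:5984/mydb") // placeholder database URL
  const doc = { name: "example" }
  // this.query("post", "Error writing to couchDB", { json: doc }) is effectively:
  return client.post(doc) // post() stores the doc under a generated _id
}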
@@ -5,228 +5,226 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module DynamoModule {
 const AWS = require("aws-sdk")
 const { AWS_REGION } = require("../db/dynamoClient")

 interface DynamoDBConfig {
   region: string
   accessKeyId: string
   secretAccessKey: string
   endpoint: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
   description:
     "Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
   friendlyName: "DynamoDB",
   type: "Non-relational",
   datasource: {
     region: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "us-east-1",
     },
     accessKeyId: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     secretAccessKey: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     endpoint: {
       type: DatasourceFieldType.STRING,
       required: false,
       default: "https://dynamodb.us-east-1.amazonaws.com",
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
         },
       },
     },
     scan: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
         },
       },
     },
     describe: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     get: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }

 class DynamoDBIntegration implements IntegrationBase {
   private config: DynamoDBConfig
   private client: any

   constructor(config: DynamoDBConfig) {
     this.config = config
     if (this.config.endpoint && !this.config.endpoint.includes("localhost")) {
       this.connect()
     }
     let options = {
       correctClockSkew: true,
       region: this.config.region || AWS_REGION,
       endpoint: config.endpoint ? config.endpoint : undefined,
     }
     this.client = new AWS.DynamoDB.DocumentClient(options)
   }

   end() {
     this.disconnect()
   }

   connect() {
     AWS.config.update(this.config)
   }

   disconnect() {
     AWS.config.update({
       secretAccessKey: undefined,
       accessKeyId: undefined,
       region: AWS_REGION,
     })
   }

   async create(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.put(params).promise()
   }

   async read(query: { table: string; json: object; index: null | string }) {
     const params = {
       TableName: query.table,
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
     const response = await this.client.query(params).promise()
     if (response.Items) {
       return response.Items
     }
     return response
   }

   async scan(query: { table: string; json: object; index: null | string }) {
     const params = {
       TableName: query.table,
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
     const response = await this.client.scan(params).promise()
     if (response.Items) {
       return response.Items
     }
     return response
   }

   async describe(query: { table: string }) {
     const params = {
       TableName: query.table,
     }
     return new AWS.DynamoDB().describeTable(params).promise()
   }

   async get(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.get(params).promise()
   }

   async update(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.update(params).promise()
   }

   async delete(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.delete(params).promise()
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: DynamoDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: DynamoDBIntegration,
+}
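
Because `read` and `scan` spread `query.json` straight into the DocumentClient params, callers supply native DynamoDB expressions themselves. A usage sketch (table, key, and value names are placeholders):

async function exampleRead(ddb: DynamoDBIntegration) {
  return ddb.read({
    table: "users", // placeholder table name
    index: null,
    json: {
      KeyConditionExpression: "#pk = :pk",
      ExpressionAttributeNames: { "#pk": "id" },
      ExpressionAttributeValues: { ":pk": "user-1" },
    },
  })
}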
@@ -5,151 +5,149 @@ import {
   IntegrationBase,
 } from "@budibase/types"

-module ElasticsearchModule {
 const { Client } = require("@elastic/elasticsearch")

 interface ElasticsearchConfig {
   url: string
 }

 const SCHEMA: Integration = {
   docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   description:
     "Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
   friendlyName: "ElasticSearch",
   type: "Non-relational",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "http://localhost:9200",
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.FIELDS,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }

 class ElasticSearchIntegration implements IntegrationBase {
   private config: ElasticsearchConfig
   private client: any

   constructor(config: ElasticsearchConfig) {
     this.config = config
     this.client = new Client({ node: config.url })
   }

   async create(query: { index: string; json: object }) {
     const { index, json } = query

     try {
       const result = await this.client.index({
         index,
         body: json,
       })
       return result.body
     } catch (err) {
       console.error("Error writing to elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }

   async read(query: { index: string; json: object }) {
     const { index, json } = query
     try {
       const result = await this.client.search({
         index: index,
         body: json,
       })
       return result.body.hits.hits.map(({ _source }: any) => _source)
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }

   async update(query: { id: string; index: string; json: object }) {
     const { id, index, json } = query
     try {
       const result = await this.client.update({
         id,
         index,
         body: json,
       })
       return result.body
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }

   async delete(query: object) {
     try {
       const result = await this.client.delete(query)
       return result.body
     } catch (err) {
       console.error("Error deleting from elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: ElasticSearchIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: ElasticSearchIntegration,
+}
@@ -6,184 +6,182 @@ import {
 } from "@budibase/types"
 import { Firestore, WhereFilterOp } from "@google-cloud/firestore"

-module Firebase {
 interface FirebaseConfig {
   email: string
   privateKey: string
   projectId: string
 }

 const SCHEMA: Integration = {
   docs: "https://firebase.google.com/docs/firestore/quickstart",
   friendlyName: "Firestore",
   type: "Non-relational",
   description:
     "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.",
   datasource: {
     email: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
     privateKey: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
     projectId: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.JSON,
     },
     read: {
       type: QueryType.JSON,
     },
     update: {
       type: QueryType.JSON,
     },
     delete: {
       type: QueryType.JSON,
     },
   },
   extra: {
     collection: {
       displayName: "Collection",
       type: DatasourceFieldType.STRING,
       required: true,
     },
     filterField: {
       displayName: "Filter field",
       type: DatasourceFieldType.STRING,
       required: false,
     },
     filter: {
       displayName: "Filter comparison",
       type: DatasourceFieldType.LIST,
       required: false,
       data: {
         read: [
           "==",
           "<",
           "<=",
           "!=",
           ">=",
           ">",
           "array-contains",
           "in",
           "not-in",
           "array-contains-any",
         ],
       },
     },
     filterValue: {
       displayName: "Filter value",
       type: DatasourceFieldType.STRING,
       required: false,
     },
   },
 }

 class FirebaseIntegration implements IntegrationBase {
   private config: FirebaseConfig
   private client: Firestore

   constructor(config: FirebaseConfig) {
     this.config = config
     this.client = new Firestore({
       projectId: config.projectId,
       credentials: {
         client_email: config.email,
         private_key: config.privateKey?.replace(/\\n/g, "\n"),
       },
     })
   }

   async create(query: { json: object; extra: { [key: string]: string } }) {
     try {
       const documentReference = this.client
         .collection(query.extra.collection)
         .doc()
       await documentReference.set({ ...query.json, id: documentReference.id })
       const snapshot = await documentReference.get()
       return snapshot.data()
     } catch (err) {
       console.error("Error writing to Firestore", err)
       throw err
     }
   }

   async read(query: { json: object; extra: { [key: string]: string } }) {
     try {
       let snapshot
       const collectionRef = this.client.collection(query.extra.collection)
       if (
         query.extra.filterField &&
         query.extra.filter &&
         query.extra.filterValue
       ) {
         snapshot = await collectionRef
           .where(
             query.extra.filterField,
             query.extra.filter as WhereFilterOp,
             query.extra.filterValue
           )
           .get()
       } else {
         snapshot = await collectionRef.get()
       }
       const result: any[] = []
       snapshot.forEach(doc => result.push(doc.data()))

       return result
     } catch (err) {
       console.error("Error querying Firestore", err)
       throw err
     }
   }

   async update(query: {
     json: Record<string, any>
     extra: { [key: string]: string }
   }) {
     try {
       await this.client
         .collection(query.extra.collection)
         .doc(query.json.id)
         .update(query.json)

       return (
         await this.client
           .collection(query.extra.collection)
           .doc(query.json.id)
           .get()
       ).data()
     } catch (err) {
       console.error("Error writing to Firestore", err)
       throw err
     }
   }

   async delete(query: {
     json: { id: string }
     extra: { [key: string]: string }
   }) {
     try {
       await this.client
         .collection(query.extra.collection)
         .doc(query.json.id)
         .delete()
       return true
     } catch (err) {
       console.error("Error deleting from Firestore", err)
       throw err
     }
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: FirebaseIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: FirebaseIntegration,
+}
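
The Firestore `read` above only applies its `where()` clause when all three of `filterField`, `filter`, and `filterValue` are present, and `filterValue` arrives as a string. A usage sketch (collection and field names are placeholders):

async function exampleRead(firestore: FirebaseIntegration) {
  return firestore.read({
    json: {},
    extra: {
      collection: "users", // placeholder collection
      filterField: "country",
      filter: "==", // must be a valid WhereFilterOp
      filterValue: "IE",
    },
  })
}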
@ -13,409 +13,400 @@ import { DataSourceOperation, FieldTypes } from "../constants"
|
|||
import { GoogleSpreadsheet } from "google-spreadsheet"
|
||||
import env from "../environment"
|
||||
|
||||
module GoogleSheetsModule {
|
||||
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
|
||||
const { getScopedConfig } = require("@budibase/backend-core/db")
|
||||
const { Configs } = require("@budibase/backend-core/constants")
|
||||
const fetch = require("node-fetch")
|
||||
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
|
||||
const { getScopedConfig } = require("@budibase/backend-core/db")
|
||||
const { Configs } = require("@budibase/backend-core/constants")
|
||||
const fetch = require("node-fetch")
|
||||
|
||||
interface GoogleSheetsConfig {
|
||||
spreadsheetId: string
|
||||
auth: OAuthClientConfig
|
||||
}
|
||||
interface GoogleSheetsConfig {
|
||||
spreadsheetId: string
|
||||
auth: OAuthClientConfig
|
||||
}
|
||||
|
||||
interface OAuthClientConfig {
|
||||
appId: string
|
||||
accessToken: string
|
||||
refreshToken: string
|
||||
}
|
||||
interface OAuthClientConfig {
|
||||
appId: string
|
||||
accessToken: string
|
||||
refreshToken: string
|
||||
}
|
||||
|
||||
interface AuthTokenRequest {
|
||||
client_id: string
|
||||
client_secret: string
|
||||
refresh_token: string
|
||||
}
|
||||
interface AuthTokenRequest {
|
||||
client_id: string
|
||||
client_secret: string
|
||||
refresh_token: string
|
||||
}
|
||||
|
||||
interface AuthTokenResponse {
|
||||
access_token: string
|
||||
}
|
||||
interface AuthTokenResponse {
|
||||
access_token: string
|
||||
}
|
||||
|
||||
const SCHEMA: Integration = {
|
||||
plus: true,
|
||||
auth: {
|
||||
type: "google",
|
||||
const SCHEMA: Integration = {
|
||||
plus: true,
|
||||
auth: {
|
||||
type: "google",
|
||||
},
|
||||
relationships: false,
|
||||
docs: "https://developers.google.com/sheets/api/quickstart/nodejs",
|
||||
description:
|
||||
"Create and collaborate on online spreadsheets in real-time and from any device. ",
|
||||
friendlyName: "Google Sheets",
|
||||
type: "Spreadsheet",
|
||||
datasource: {
|
||||
spreadsheetId: {
|
||||
display: "Google Sheet URL",
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
relationships: false,
|
||||
docs: "https://developers.google.com/sheets/api/quickstart/nodejs",
|
||||
description:
|
||||
"Create and collaborate on online spreadsheets in real-time and from any device. ",
|
||||
friendlyName: "Google Sheets",
|
||||
type: "Spreadsheet",
|
||||
datasource: {
|
||||
spreadsheetId: {
|
||||
display: "Google Sheet URL",
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
query: {
|
||||
create: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
row: {
|
||||
type: QueryType.JSON,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
query: {
|
||||
create: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
read: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
update: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
rowIndex: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
row: {
|
||||
type: QueryType.JSON,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
delete: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
rowIndex: {
|
||||
type: DatasourceFieldType.NUMBER,
|
||||
required: true,
|
||||
},
|
||||
row: {
|
||||
type: QueryType.JSON,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
read: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
update: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
rowIndex: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
row: {
|
||||
type: QueryType.JSON,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
delete: {
|
||||
type: QueryType.FIELDS,
|
||||
fields: {
|
||||
sheet: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
rowIndex: {
|
||||
type: DatasourceFieldType.NUMBER,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
class GoogleSheetsIntegration implements DatasourcePlus {
|
||||
private readonly config: GoogleSheetsConfig
|
||||
private client: any
|
||||
public tables: Record<string, Table> = {}
|
||||
public schemaErrors: Record<string, string> = {}
|
||||
|
||||
constructor(config: GoogleSheetsConfig) {
|
||||
this.config = config
|
||||
const spreadsheetId = this.cleanSpreadsheetUrl(this.config.spreadsheetId)
|
||||
this.client = new GoogleSpreadsheet(spreadsheetId)
|
||||
}
|
||||
|
||||
class GoogleSheetsIntegration implements DatasourcePlus {
|
||||
private readonly config: GoogleSheetsConfig
|
||||
private client: any
|
||||
public tables: Record<string, Table> = {}
|
||||
public schemaErrors: Record<string, string> = {}
|
||||
getBindingIdentifier() {
|
||||
return ""
|
||||
}
|
||||
|
||||
constructor(config: GoogleSheetsConfig) {
|
||||
this.config = config
|
||||
const spreadsheetId = this.cleanSpreadsheetUrl(this.config.spreadsheetId)
|
||||
this.client = new GoogleSpreadsheet(spreadsheetId)
|
||||
}
|
||||
getStringConcat(parts: string[]) {
|
||||
return ""
|
||||
}
|
||||
|
||||
getBindingIdentifier() {
|
||||
return ""
|
||||
}
|
||||
|
||||
getStringConcat(parts: string[]) {
|
||||
return ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Pull the spreadsheet ID out from a valid google sheets URL
|
||||
* @param spreadsheetId - the URL or standard spreadsheetId of the google sheet
|
||||
* @returns spreadsheet Id of the google sheet
|
||||
*/
|
||||
cleanSpreadsheetUrl(spreadsheetId: string) {
|
||||
if (!spreadsheetId) {
|
||||
throw new Error(
|
||||
"You must set a spreadsheet ID in your configuration to fetch tables."
|
||||
)
|
||||
}
|
||||
const parts = spreadsheetId.split("/")
|
||||
return parts.length > 5 ? parts[5] : spreadsheetId
|
||||
}

  async fetchAccessToken(
    payload: AuthTokenRequest
  ): Promise<AuthTokenResponse> {
    const response = await fetch("https://www.googleapis.com/oauth2/v4/token", {
      method: "POST",
      body: JSON.stringify({
        ...payload,
        grant_type: "refresh_token",
      }),
      headers: {
        "Content-Type": "application/json",
      },
    })

    const json = await response.json()

    if (response.status !== 200) {
      throw new Error(
        `Error authenticating with google sheets. ${json.error_description}`
      )
    }

    return json
  }

  async connect() {
    try {
      // Initialise oAuth client
      const db = getGlobalDB()
      let googleConfig = await getScopedConfig(db, {
        type: Configs.GOOGLE,
      })

      if (!googleConfig) {
        googleConfig = {
          clientID: env.GOOGLE_CLIENT_ID,
          clientSecret: env.GOOGLE_CLIENT_SECRET,
        }
      }

      const oauthClient = new OAuth2Client({
        clientId: googleConfig.clientID,
        clientSecret: googleConfig.clientSecret,
      })

      const tokenResponse = await this.fetchAccessToken({
        client_id: googleConfig.clientID,
        client_secret: googleConfig.clientSecret,
        refresh_token: this.config.auth.refreshToken,
      })

      oauthClient.setCredentials({
        refresh_token: this.config.auth.refreshToken,
        access_token: tokenResponse.access_token,
      })

      this.client.useOAuth2Client(oauthClient)
      await this.client.loadInfo()
    } catch (err) {
      console.error("Error connecting to google sheets", err)
      throw err
    }
  }

  async buildSchema(datasourceId: string) {
    await this.connect()
    const sheets = await this.client.sheetsByIndex
    const tables: Record<string, Table> = {}
    for (let sheet of sheets) {
      // must fetch rows to determine schema
      await sheet.getRows()

      // build schema from headers
      const schema: TableSchema = {}
      for (let header of sheet.headerValues) {
        schema[header] = {
          name: header,
          type: FieldTypes.STRING,
        }
      }

      // create tables
      tables[sheet.title] = {
        _id: buildExternalTableId(datasourceId, sheet.title),
        name: sheet.title,
        primary: ["rowNumber"],
        schema,
      }
    }

    this.tables = tables
  }

  async query(json: QueryJson) {
    const sheet = json.endpoint.entityId

    const handlers = {
      [DataSourceOperation.CREATE]: () =>
        this.create({ sheet, row: json.body }),
      [DataSourceOperation.READ]: () => this.read({ sheet }),
      [DataSourceOperation.UPDATE]: () =>
        this.update({
          // exclude the header row and zero index
          rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
          sheet,
          row: json.body,
        }),
      [DataSourceOperation.DELETE]: () =>
        this.delete({
          // exclude the header row and zero index
          rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
          sheet,
        }),
      [DataSourceOperation.CREATE_TABLE]: () =>
        this.createTable(json?.table?.name),
      [DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table),
      [DataSourceOperation.DELETE_TABLE]: () =>
        this.deleteTable(json?.table?.name),
    }

    const internalQueryMethod = handlers[json.endpoint.operation]

    return await internalQueryMethod()
  }

  buildRowObject(headers: string[], values: string[], rowNumber: number) {
    const rowObject: { rowNumber: number; [key: string]: any } = { rowNumber }
    for (let i = 0; i < headers.length; i++) {
      rowObject._id = rowNumber
      rowObject[headers[i]] = values[i]
    }
    return rowObject
  }

  async createTable(name?: string) {
    try {
      await this.connect()
      const sheet = await this.client.addSheet({ title: name })
      return sheet
    } catch (err) {
      console.error("Error creating new table in google sheets", err)
      throw err
    }
  }

  async updateTable(table?: any) {
    try {
      await this.connect()
      const sheet = await this.client.sheetsByTitle[table.name]
      await sheet.loadHeaderRow()

      if (table._rename) {
        const headers = []
        for (let header of sheet.headerValues) {
          if (header === table._rename.old) {
            headers.push(table._rename.updated)
          } else {
            headers.push(header)
          }
        }
        await sheet.setHeaderRow(headers)
      } else {
        let newField = Object.keys(table.schema).find(
          key => !sheet.headerValues.includes(key)
        )
        await sheet.setHeaderRow([...sheet.headerValues, newField])
      }
    } catch (err) {
      console.error("Error updating table in google sheets", err)
      throw err
    }
  }

  async deleteTable(sheet: any) {
    try {
      await this.connect()
      const sheetToDelete = await this.client.sheetsByTitle[sheet]
      return await sheetToDelete.delete()
    } catch (err) {
      console.error("Error deleting table in google sheets", err)
      throw err
    }
  }

  async create(query: { sheet: string; row: any }) {
    try {
      await this.connect()
      const sheet = await this.client.sheetsByTitle[query.sheet]
      const rowToInsert =
        typeof query.row === "string" ? JSON.parse(query.row) : query.row
      const row = await sheet.addRow(rowToInsert)
      return [
        this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
      ]
    } catch (err) {
      console.error("Error writing to google sheets", err)
      throw err
    }
  }

  async read(query: { sheet: string }) {
    try {
      await this.connect()
      const sheet = await this.client.sheetsByTitle[query.sheet]
      const rows = await sheet.getRows()
      const headerValues = sheet.headerValues
      const response = []
      for (let row of rows) {
        response.push(
          this.buildRowObject(headerValues, row._rawData, row._rowNumber)
        )
      }
      return response
    } catch (err) {
      console.error("Error reading from google sheets", err)
      throw err
    }
  }

  async update(query: { sheet: string; rowIndex: number; row: any }) {
    try {
      await this.connect()
      const sheet = await this.client.sheetsByTitle[query.sheet]
      const rows = await sheet.getRows()
      const row = rows[query.rowIndex]
      if (row) {
        const updateValues = query.row
        for (let key in updateValues) {
          row[key] = updateValues[key]
        }
        await row.save()
        return [
          this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
        ]
      } else {
        throw new Error("Row does not exist.")
      }
    } catch (err) {
      console.error("Error reading from google sheets", err)
      throw err
    }
  }

  async delete(query: { sheet: string; rowIndex: number }) {
    await this.connect()
    const sheet = await this.client.sheetsByTitle[query.sheet]
    const rows = await sheet.getRows()
    const row = rows[query.rowIndex]
    if (row) {
      await row.delete()
      return [{ deleted: query.rowIndex }]
    } else {
      throw new Error("Row does not exist.")
    }
  }
}

export default {
  schema: SCHEMA,
  integration: GoogleSheetsIntegration,
}

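A note on the rowNumber arithmetic in the googlesheets query handlers above: sheet row numbers are 1-based and row 1 is the header, while google-spreadsheet's getRows() returns only data rows, zero-indexed. Hence rowIndex = rowNumber - 2; for instance the first data row (sheet row 2) maps to rows[0].
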
@@ -1,22 +1,24 @@
const postgres = require("./postgres")
const dynamodb = require("./dynamodb")
const mongodb = require("./mongodb")
const elasticsearch = require("./elasticsearch")
const couchdb = require("./couchdb")
const sqlServer = require("./microsoftSqlServer")
const s3 = require("./s3")
const airtable = require("./airtable")
const mysql = require("./mysql")
const arangodb = require("./arangodb")
const rest = require("./rest")
const googlesheets = require("./googlesheets")
const firebase = require("./firebase")
const redis = require("./redis")
const snowflake = require("./snowflake")
const { SourceName } = require("@budibase/types")
import postgres from "./postgres"
import dynamodb from "./dynamodb"
import mongodb from "./mongodb"
import elasticsearch from "./elasticsearch"
import couchdb from "./couchdb"
import sqlServer from "./microsoftSqlServer"
import s3 from "./s3"
import airtable from "./airtable"
import mysql from "./mysql"
import arangodb from "./arangodb"
import rest from "./rest"
import googlesheets from "./googlesheets"
import firebase from "./firebase"
import redis from "./redis"
import snowflake from "./snowflake"
import { getPlugins } from "../api/controllers/plugin"
import { SourceName, Integration, PluginType } from "@budibase/types"
const environment = require("../environment")
const { cloneDeep } = require("lodash")

const DEFINITIONS = {
const DEFINITIONS: { [key: string]: Integration } = {
  [SourceName.POSTGRES]: postgres.schema,
  [SourceName.DYNAMODB]: dynamodb.schema,
  [SourceName.MONGODB]: mongodb.schema,

@@ -33,7 +35,7 @@ const DEFINITIONS = {
  [SourceName.SNOWFLAKE]: snowflake.schema,
}

const INTEGRATIONS = {
const INTEGRATIONS: { [key: string]: any } = {
  [SourceName.POSTGRES]: postgres.integration,
  [SourceName.DYNAMODB]: dynamodb.integration,
  [SourceName.MONGODB]: mongodb.integration,

@@ -48,7 +50,7 @@ const INTEGRATIONS = {
  [SourceName.FIRESTORE]: firebase.integration,
  [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
  [SourceName.REDIS]: redis.integration,
  [SourceName.FIREBASE]: firebase.integration,
  [SourceName.FIRESTORE]: firebase.integration,
  [SourceName.SNOWFLAKE]: snowflake.integration,
}

@@ -64,6 +66,9 @@ if (environment.SELF_HOSTED) {
}

module.exports = {
  definitions: DEFINITIONS,
  getDefinitions: async () => {
    const custom = await getPlugins(PluginType.DATASOURCE)
    return cloneDeep(DEFINITIONS)
  },
  integrations: INTEGRATIONS,
}

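As committed, getDefinitions fetches the custom datasource plugins but still returns only a clone of the built-in DEFINITIONS; the custom result is unused so far. A minimal sketch of the merge this appears to be heading towards, assuming each plugin document carries a unique name and its datasource definition under schema.schema (both assumptions, not confirmed by this commit):

getDefinitions: async () => {
  const defs = cloneDeep(DEFINITIONS)
  const custom = await getPlugins(PluginType.DATASOURCE)
  for (let plugin of custom) {
    // hypothetical: key custom datasources by plugin name
    defs[`plugin/${plugin.name}`] = plugin.schema?.schema
  }
  return defs
},
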
@@ -18,292 +18,290 @@ import {
} from "./utils"
import Sql from "./base/sql"

const sqlServer = require("mssql")
const DEFAULT_SCHEMA = "dbo"

interface MSSQLConfig {
  user: string
  password: string
  server: string
  port: number
  database: string
  schema: string
  encrypt?: boolean
}

interface TablesResponse {
  TABLE_CATALOG: string
  TABLE_SCHEMA: string
  TABLE_NAME: string
  TABLE_TYPE: string
}

const SCHEMA: Integration = {
  docs: "https://github.com/tediousjs/node-mssql",
  plus: true,
  description:
    "Microsoft SQL Server is a relational database management system developed by Microsoft. ",
  friendlyName: "MS SQL Server",
  type: "Relational",
  datasource: {
    user: {
      type: DatasourceFieldType.STRING,
      required: true,
      default: "localhost",
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      required: true,
    },
    server: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      required: false,
      default: 1433,
    },
    database: {
      type: DatasourceFieldType.STRING,
      default: "root",
    },
    schema: {
      type: DatasourceFieldType.STRING,
      default: DEFAULT_SCHEMA,
    },
    encrypt: {
      type: DatasourceFieldType.BOOLEAN,
      default: true,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

class SqlServerIntegration extends Sql implements DatasourcePlus {
  private readonly config: MSSQLConfig
  private index: number = 0
  private readonly pool: any
  private client: any
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  MASTER_TABLES = [
    "spt_fallback_db",
    "spt_fallback_dev",
    "spt_fallback_usg",
    "spt_monitor",
    "MSreplication_options",
  ]
  TABLES_SQL =
    "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"

  constructor(config: MSSQLConfig) {
    super(SqlClient.MS_SQL)
    this.config = config
    const clientCfg = {
      ...this.config,
      options: {
        encrypt: this.config.encrypt,
        enableArithAbort: true,
      },
    }
    delete clientCfg.encrypt
    if (!this.pool) {
      this.pool = new sqlServer.ConnectionPool(clientCfg)
    }
  }

  getBindingIdentifier(): string {
    return `@p${this.index++}`
  }

  getStringConcat(parts: string[]): string {
    return `concat(${parts.join(", ")})`
  }

  async connect() {
    try {
      this.client = await this.pool.connect()
    } catch (err) {
      // @ts-ignore
      throw new Error(err)
    }
  }

  async internalQuery(
    query: SqlQuery,
    operation: string | undefined = undefined
  ) {
    const client = this.client
    const request = client.request()
    this.index = 0
    try {
      if (Array.isArray(query.bindings)) {
        let count = 0
        for (let binding of query.bindings) {
          request.input(`p${count++}`, binding)
        }
      }
      // this is a hack to get the inserted ID back,
      // no way to do this with Knex nicely
      const sql =
        operation === Operation.CREATE
          ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
          : query.sql
      return await request.query(sql)
    } catch (err) {
      // @ts-ignore
      throw new Error(err)
    }
  }

  getDefinitionSQL(tableName: string) {
    return `select *
      from INFORMATION_SCHEMA.COLUMNS
      where TABLE_NAME='${tableName}'`
  }

  getConstraintsSQL(tableName: string) {
    return `SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
      INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
        ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
        AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
        AND KU.table_name='${tableName}'
      ORDER BY
        KU.TABLE_NAME,
        KU.ORDINAL_POSITION;`
  }

  getAutoColumnsSQL(tableName: string) {
    return `SELECT
      COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA+'.'+TABLE_NAME),COLUMN_NAME,'IsComputed')
        AS IS_COMPUTED,
      COLUMNPROPERTY(object_id(TABLE_SCHEMA+'.'+TABLE_NAME), COLUMN_NAME, 'IsIdentity')
        AS IS_IDENTITY,
      *
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME='${tableName}'`
  }

  async runSQL(sql: string) {
    return (await this.internalQuery(getSqlQuery(sql))).recordset
  }

  /**
   * Fetches the tables from the sql server database and assigns them to the datasource.
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    await this.connect()
    let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
    if (tableInfo == null || !Array.isArray(tableInfo)) {
      throw "Unable to get list of tables in database"
    }

    const schema = this.config.schema || DEFAULT_SCHEMA
    const tableNames = tableInfo
      .filter((record: any) => record.TABLE_SCHEMA === schema)
      .map((record: any) => record.TABLE_NAME)
      .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)

    const tables: Record<string, Table> = {}
    for (let tableName of tableNames) {
      // get the column definition (type)
      const definition = await this.runSQL(this.getDefinitionSQL(tableName))
      // find primary key constraints
      const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
      // find the computed and identity columns (auto columns)
      const columns = await this.runSQL(this.getAutoColumnsSQL(tableName))
      const primaryKeys = constraints
        .filter(
          (constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
        )
        .map((constraint: any) => constraint.COLUMN_NAME)
      const autoColumns = columns
        .filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
        .map((col: any) => col.COLUMN_NAME)

      let schema: TableSchema = {}
      for (let def of definition) {
        const name = def.COLUMN_NAME
        if (typeof name !== "string") {
          continue
        }
        schema[name] = {
          autocolumn: !!autoColumns.find((col: string) => col === name),
          name: name,
          ...convertSqlType(def.DATA_TYPE),
          externalType: def.DATA_TYPE,
        }
      }
      tables[tableName] = {
        _id: buildExternalTableId(datasourceId, tableName),
        primary: primaryKeys,
        name: tableName,
        schema,
      }
    }
    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
  }

  async read(query: SqlQuery | string) {
    await this.connect()
    const response = await this.internalQuery(getSqlQuery(query))
    return response.recordset
  }

  async create(query: SqlQuery | string) {
    await this.connect()
    const response = await this.internalQuery(getSqlQuery(query))
    return response.recordset || [{ created: true }]
  }

  async update(query: SqlQuery | string) {
    await this.connect()
    const response = await this.internalQuery(getSqlQuery(query))
    return response.recordset || [{ updated: true }]
  }

  async delete(query: SqlQuery | string) {
    await this.connect()
    const response = await this.internalQuery(getSqlQuery(query))
    return response.recordset || [{ deleted: true }]
  }

  async query(json: QueryJson) {
    const schema = this.config.schema
    await this.connect()
    if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
      json.endpoint.schema = schema
    }
    const operation = this._operation(json)
    const queryFn = (query: any, op: string) => this.internalQuery(query, op)
    const processFn = (result: any) =>
      result.recordset ? result.recordset : [{ [operation]: true }]
    return this.queryWithReturning(json, queryFn, processFn)
  }
}

export default {
  schema: SCHEMA,
  integration: SqlServerIntegration,
}

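The MSSQL binding scheme above works as a pair: getBindingIdentifier stamps @p0, @p1, ... placeholders into the generated SQL, while internalQuery registers inputs named p0, p1, ... from query.bindings in the same order, resetting this.index so consecutive statements restart at @p0. An illustrative pairing, with an invented query:

// generated SQL: SELECT * FROM users WHERE id = @p0 AND name = @p1
request.input("p0", 42)     // fills @p0
request.input("p1", "Bob")  // fills @p1
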
@@ -15,313 +15,309 @@ import {
  CommonOptions,
} from "mongodb"

interface MongoDBConfig {
  connectionString: string
  db: string
}

const SCHEMA: Integration = {
  docs: "https://github.com/mongodb/node-mongodb-native",
  friendlyName: "MongoDB",
  type: "Non-relational",
  description:
    "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
  datasource: {
    connectionString: {
      type: DatasourceFieldType.STRING,
      required: true,
      default: "mongodb://localhost:27017",
    },
    db: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
  },
  query: {
    create: {
      type: QueryType.JSON,
    },
    read: {
      type: QueryType.JSON,
    },
    update: {
      type: QueryType.JSON,
    },
    delete: {
      type: QueryType.JSON,
    },
  },
  extra: {
    collection: {
      displayName: "Collection",
      type: DatasourceFieldType.STRING,
      required: true,
    },
    actionTypes: {
      displayName: "Action Types",
      type: DatasourceFieldType.LIST,
      required: true,
      data: {
        read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"],
        create: ["insertOne", "insertMany"],
        update: ["updateOne", "updateMany"],
        delete: ["deleteOne", "deleteMany"],
      },
    },
  },
}

class MongoIntegration implements IntegrationBase {
  private config: MongoDBConfig
  private client: any

  constructor(config: MongoDBConfig) {
    this.config = config
    this.client = new MongoClient(config.connectionString)
  }

  async connect() {
    return this.client.connect()
  }

  createObjectIds(json: any): object {
    const self = this
    function interpolateObjectIds(json: any) {
      for (let field of Object.keys(json)) {
        if (json[field] instanceof Object) {
          json[field] = self.createObjectIds(json[field])
        }
        if (
          (field === "_id" || field?.startsWith("$")) &&
          typeof json[field] === "string"
        ) {
          const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
          if (id) {
            json[field] = ObjectID.createFromHexString(id)
          }
        }
      }
      return json
    }

    if (Array.isArray(json)) {
      for (let i = 0; i < json.length; i++) {
        json[i] = interpolateObjectIds(json[i])
      }
      return json
    }
    return interpolateObjectIds(json)
  }

  parseQueryParams(params: string, mode: string) {
    let queryParams = []
    let openCount = 0
    let inQuotes = false
    let i = 0
    let startIndex = 0
    for (let c of params) {
      if (c === '"' && i > 0 && params[i - 1] !== "\\") {
        inQuotes = !inQuotes
      }
      if (c === "{" && !inQuotes) {
        openCount++
        if (openCount === 1) {
          startIndex = i
        }
      } else if (c === "}" && !inQuotes) {
        if (openCount === 1) {
          queryParams.push(JSON.parse(params.substring(startIndex, i + 1)))
        }
        openCount--
      }
      i++
    }
    let group1 = queryParams[0] ?? {}
    let group2 = queryParams[1] ?? {}
    let group3 = queryParams[2] ?? {}
    if (mode === "update") {
      return {
        filter: group1,
        update: group2,
        options: group3,
      }
    }
    return {
      filter: group1,
      options: group2,
    }
  }

  async create(query: { json: object; extra: { [key: string]: string } }) {
    try {
      await this.connect()
      const db = this.client.db(this.config.db)
      const collection = db.collection(query.extra.collection)
      let json = this.createObjectIds(query.json)

      // For mongodb we add an extra actionType to specify
      // which method we want to call on the collection
      switch (query.extra.actionTypes) {
        case "insertOne": {
          return await collection.insertOne(json)
        }
        case "insertMany": {
          return await collection.insertMany(json)
        }
        default: {
          throw new Error(
            `actionType ${query.extra.actionTypes} does not exist on DB for create`
          )
        }
      }
    } catch (err) {
      console.error("Error writing to mongodb", err)
      throw err
    } finally {
      await this.client.close()
    }
  }

  async read(query: { json: object; extra: { [key: string]: string } }) {
    try {
      await this.connect()
      const db = this.client.db(this.config.db)
      const collection = db.collection(query.extra.collection)
      let json = this.createObjectIds(query.json)

      switch (query.extra.actionTypes) {
        case "find": {
          return await collection.find(json).toArray()
        }
        case "findOne": {
          return await collection.findOne(json)
        }
        case "findOneAndUpdate": {
          if (typeof query.json === "string") {
            json = this.parseQueryParams(query.json, "update")
          }
          let findAndUpdateJson = this.createObjectIds(json) as {
            filter: FilterQuery<any>
            update: UpdateQuery<any>
            options: FindOneAndUpdateOption<any>
          }
          return await collection.findOneAndUpdate(
            findAndUpdateJson.filter,
            findAndUpdateJson.update,
            findAndUpdateJson.options
          )
        }
        case "count": {
          return await collection.countDocuments(json)
        }
        case "distinct": {
          return await collection.distinct(json)
        }
        default: {
          throw new Error(
            `actionType ${query.extra.actionTypes} does not exist on DB for read`
          )
        }
      }
    } catch (err) {
      console.error("Error querying mongodb", err)
      throw err
    } finally {
      await this.client.close()
    }
  }

  async update(query: { json: object; extra: { [key: string]: string } }) {
    try {
      await this.connect()
      const db = this.client.db(this.config.db)
      const collection = db.collection(query.extra.collection)
      let queryJson = query.json
      if (typeof queryJson === "string") {
        queryJson = this.parseQueryParams(queryJson, "update")
      }
      let json = this.createObjectIds(queryJson) as {
        filter: FilterQuery<any>
        update: UpdateQuery<any>
        options: object
      }

      switch (query.extra.actionTypes) {
        case "updateOne": {
          return await collection.updateOne(
            json.filter,
            json.update,
            json.options as UpdateOneOptions
          )
        }
        case "updateMany": {
          return await collection.updateMany(
            json.filter,
            json.update,
            json.options as UpdateManyOptions
          )
        }
        default: {
          throw new Error(
            `actionType ${query.extra.actionTypes} does not exist on DB for update`
          )
        }
      }
    } catch (err) {
      console.error("Error writing to mongodb", err)
      throw err
    } finally {
      await this.client.close()
    }
  }

  async delete(query: { json: object; extra: { [key: string]: string } }) {
    try {
      await this.connect()
      const db = this.client.db(this.config.db)
      const collection = db.collection(query.extra.collection)
      let queryJson = query.json
      if (typeof queryJson === "string") {
        queryJson = this.parseQueryParams(queryJson, "delete")
      }
      let json = this.createObjectIds(queryJson) as {
        filter: FilterQuery<any>
        options: CommonOptions
      }
      if (!json.options) {
        json = {
          filter: json,
          options: {},
        }
      }

      switch (query.extra.actionTypes) {
        case "deleteOne": {
          return await collection.deleteOne(json.filter, json.options)
        }
        case "deleteMany": {
          return await collection.deleteMany(json.filter, json.options)
        }
        default: {
          throw new Error(
            `actionType ${query.extra.actionTypes} does not exist on DB for delete`
          )
        }
      }
    } catch (err) {
      console.error("Error writing to mongodb", err)
      throw err
    } finally {
      await this.client.close()
    }
  }
}

export default {
  schema: SCHEMA,
  integration: MongoIntegration,
}

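parseQueryParams above collects the top-level {...} groups in a query string, ignoring braces inside quoted values, so update queries can be written as up to three JSON objects back to back. A worked example:

const params = '{"_id": 1} {"$set": {"name": "Bob"}} {"upsert": true}'
// parseQueryParams(params, "update")
// => { filter: { _id: 1 }, update: { $set: { name: "Bob" } }, options: { upsert: true } }
// in any other mode => { filter: group1, options: group2 }
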
@ -19,274 +19,269 @@ import dayjs from "dayjs"
|
|||
const { NUMBER_REGEX } = require("../utilities")
|
||||
import Sql from "./base/sql"
|
||||
|
||||
module MySQLModule {
|
||||
const mysql = require("mysql2/promise")
|
||||
const mysql = require("mysql2/promise")
|
||||
|
||||
interface MySQLConfig {
|
||||
host: string
|
||||
port: number
|
||||
user: string
|
||||
password: string
|
||||
database: string
|
||||
ssl?: { [key: string]: any }
|
||||
rejectUnauthorized: boolean
|
||||
typeCast: Function
|
||||
}
|
||||
interface MySQLConfig {
|
||||
host: string
|
||||
port: number
|
||||
user: string
|
||||
password: string
|
||||
database: string
|
||||
ssl?: { [key: string]: any }
|
||||
rejectUnauthorized: boolean
|
||||
typeCast: Function
|
||||
}
|
||||
|
||||
const SCHEMA: Integration = {
|
||||
docs: "https://github.com/sidorares/node-mysql2",
|
||||
plus: true,
|
||||
friendlyName: "MySQL",
|
||||
type: "Relational",
|
||||
description:
|
||||
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
|
||||
datasource: {
|
||||
host: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
default: "localhost",
|
||||
required: true,
|
||||
},
|
||||
port: {
|
||||
type: DatasourceFieldType.NUMBER,
|
||||
default: 3306,
|
||||
required: false,
|
||||
},
|
||||
user: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
default: "root",
|
||||
required: true,
|
||||
},
|
||||
password: {
|
||||
type: DatasourceFieldType.PASSWORD,
|
||||
default: "root",
|
||||
required: true,
|
||||
},
|
||||
database: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
ssl: {
|
||||
type: DatasourceFieldType.OBJECT,
|
||||
required: false,
|
||||
},
|
||||
rejectUnauthorized: {
|
||||
type: DatasourceFieldType.BOOLEAN,
|
||||
default: true,
|
||||
required: false,
|
||||
},
|
||||
const SCHEMA: Integration = {
|
||||
docs: "https://github.com/sidorares/node-mysql2",
|
||||
plus: true,
|
||||
friendlyName: "MySQL",
|
||||
type: "Relational",
|
||||
description:
|
||||
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
|
||||
datasource: {
|
||||
host: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
default: "localhost",
|
||||
required: true,
|
||||
},
|
||||
query: {
|
||||
create: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
read: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
update: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
delete: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
port: {
|
||||
type: DatasourceFieldType.NUMBER,
|
||||
default: 3306,
|
||||
required: false,
|
||||
},
|
||||
user: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
default: "root",
|
||||
required: true,
|
||||
},
|
||||
password: {
|
||||
type: DatasourceFieldType.PASSWORD,
|
||||
default: "root",
|
||||
required: true,
|
||||
},
|
||||
database: {
|
||||
type: DatasourceFieldType.STRING,
|
||||
required: true,
|
||||
},
|
||||
ssl: {
|
||||
type: DatasourceFieldType.OBJECT,
|
||||
required: false,
|
||||
},
|
||||
rejectUnauthorized: {
|
||||
type: DatasourceFieldType.BOOLEAN,
|
||||
default: true,
|
||||
required: false,
|
||||
},
|
||||
},
|
||||
query: {
|
||||
create: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
read: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
update: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
delete: {
|
||||
type: QueryType.SQL,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const TimezoneAwareDateTypes = ["timestamp"]
|
||||
|
||||
function bindingTypeCoerce(bindings: any[]) {
|
||||
for (let i = 0; i < bindings.length; i++) {
|
||||
const binding = bindings[i]
|
||||
if (typeof binding !== "string") {
|
||||
continue
|
||||
}
|
||||
const matches = binding.match(NUMBER_REGEX)
|
||||
// check if number first
|
||||
if (matches && matches[0] !== "" && !isNaN(Number(matches[0]))) {
|
||||
bindings[i] = parseFloat(binding)
|
||||
}
|
||||
// if not a number, see if it is a date - important to do in this order as any
|
||||
// integer will be considered a valid date
|
||||
else if (/^\d/.test(binding) && dayjs(binding).isValid()) {
|
||||
bindings[i] = dayjs(binding).toDate()
|
||||
}
|
||||
}
|
||||
return bindings
|
||||
}

class MySQLIntegration extends Sql implements DatasourcePlus {
  private config: MySQLConfig
  private client: any
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  constructor(config: MySQLConfig) {
    super(SqlClient.MY_SQL)
    this.config = config
    if (config.ssl && Object.keys(config.ssl).length === 0) {
      delete config.ssl
    }
    // make sure this defaults to true
    if (
      config.rejectUnauthorized != null &&
      !config.rejectUnauthorized &&
      config.ssl
    ) {
      config.ssl.rejectUnauthorized = config.rejectUnauthorized
    }
    // @ts-ignore
    delete config.rejectUnauthorized
    this.config = {
      ...config,
      typeCast: function (field: any, next: any) {
        if (
          field.type == "DATETIME" ||
          field.type === "DATE" ||
          field.type === "TIMESTAMP"
        ) {
          return field.string()
        }
        return next()
      },
    }
  }

  getBindingIdentifier(): string {
    return "?"
  }

  getStringConcat(parts: string[]): string {
    return `concat(${parts.join(", ")})`
  }

  async connect() {
    this.client = await mysql.createConnection(this.config)
  }

  async disconnect() {
    await this.client.end()
  }

  async internalQuery(
    query: SqlQuery,
    opts: { connect?: boolean; disableCoercion?: boolean } = {
      connect: true,
      disableCoercion: false,
    }
  ): Promise<any[] | any> {
    try {
      if (opts?.connect) {
        await this.connect()
      }
      const baseBindings = query.bindings || []
      const bindings = opts?.disableCoercion
        ? baseBindings
        : bindingTypeCoerce(baseBindings)
      // Node MySQL is callback based, so we must wrap our call in a promise
      const response = await this.client.query(query.sql, bindings)
      return response[0]
    } finally {
      if (opts?.connect) {
        await this.disconnect()
      }
    }
  }

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    const tables: { [key: string]: Table } = {}
    const database = this.config.database
    await this.connect()

    try {
      // get the tables first
      const tablesResp = await this.internalQuery(
        { sql: "SHOW TABLES;" },
        { connect: false }
      )
      const tableNames = tablesResp.map(
        (obj: any) =>
          obj[`Tables_in_${database}`] ||
          obj[`Tables_in_${database.toLowerCase()}`]
      )
      for (let tableName of tableNames) {
        const primaryKeys = []
        const schema: TableSchema = {}
        const descResp = await this.internalQuery(
          { sql: `DESCRIBE \`${tableName}\`;` },
          { connect: false }
        )
        for (let column of descResp) {
          const columnName = column.Field
          if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
            primaryKeys.push(columnName)
          }
          const constraints = {
            presence: column.Null !== "YES",
          }
          const isAuto: boolean =
            typeof column.Extra === "string" &&
            (column.Extra === "auto_increment" ||
              column.Extra.toLowerCase().includes("generated"))
          schema[columnName] = {
            name: columnName,
            autocolumn: isAuto,
            constraints,
            ...convertSqlType(column.Type),
            externalType: column.Type,
          }
        }
        if (!tables[tableName]) {
          tables[tableName] = {
            _id: buildExternalTableId(datasourceId, tableName),
            primary: primaryKeys,
            name: tableName,
            schema,
          }
        }
      }
    } finally {
      await this.disconnect()
    }
    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
  }

  async create(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ created: true }]
  }

  async read(query: SqlQuery | string) {
    return this.internalQuery(getSqlQuery(query))
  }

  async update(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ updated: true }]
  }

  async delete(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ deleted: true }]
  }

  async query(json: QueryJson) {
    await this.connect()
    try {
      const queryFn = (query: any) =>
        this.internalQuery(query, { connect: false, disableCoercion: true })
      return await this.queryWithReturning(json, queryFn)
    } finally {
      await this.disconnect()
    }
  }
}

export default {
  schema: SCHEMA,
  integration: MySQLIntegration,
}
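
A minimal usage sketch of the class above (connection values are hypothetical, for illustration only):

  const integration = new MySQLIntegration({
    host: "localhost",
    port: 3306,
    user: "root",
    password: "root",
    database: "app",
  })
  // read() connects, coerces string bindings via bindingTypeCoerce, runs the
  // statement and disconnects again in internalQuery's finally block.
  const rows = await integration.read({
    sql: "SELECT * FROM users WHERE id = ?",
    bindings: ["1"],
  })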

@@ -25,437 +25,430 @@ import oracledb, {
import Sql from "./base/sql"
import { FieldTypes } from "../constants"

oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT

interface OracleConfig {
  host: string
  port: number
  database: string
  user: string
  password: string
}

const SCHEMA: Integration = {
  docs: "https://github.com/oracle/node-oracledb",
  plus: true,
  friendlyName: "Oracle",
  type: "Relational",
  description:
    "Oracle Database is an object-relational database management system developed by Oracle Corporation",
  datasource: {
    host: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      required: true,
      default: 1521,
    },
    database: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
    user: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      required: true,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]

/**
 * Raw query response
 */
interface ColumnsResponse {
  TABLE_NAME: string
  COLUMN_NAME: string
  DATA_TYPE: string
  DATA_DEFAULT: string | null
  COLUMN_ID: number
  CONSTRAINT_NAME: string | null
  CONSTRAINT_TYPE: string | null
  R_CONSTRAINT_NAME: string | null
  SEARCH_CONDITION: string | null
}

/**
 * An oracle constraint
 */
interface OracleConstraint {
  name: string
  type: string
  relatedConstraintName: string | null
  searchCondition: string | null
}

/**
 * An oracle column and its related constraints
 */
interface OracleColumn {
  name: string
  type: string
  default: string | null
  id: number
  constraints: { [key: string]: OracleConstraint }
}

/**
 * An oracle table and its related columns
 */
interface OracleTable {
  name: string
  columns: { [key: string]: OracleColumn }
}

const OracleContraintTypes = {
  PRIMARY: "P",
  NOT_NULL_OR_CHECK: "C",
  FOREIGN_KEY: "R",
  UNIQUE: "U",
}

class OracleIntegration extends Sql implements DatasourcePlus {
  private readonly config: OracleConfig
  private index: number = 1

  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  private readonly COLUMNS_SQL = `
    SELECT
      tabs.table_name,
      cols.column_name,
      cols.data_type,
      cols.data_default,
      cols.column_id,
      cons.constraint_name,
      cons.constraint_type,
      cons.r_constraint_name,
      cons.search_condition
    FROM
      user_tables tabs
    JOIN
      user_tab_columns cols
        ON tabs.table_name = cols.table_name
    LEFT JOIN
      user_cons_columns col_cons
        ON cols.column_name = col_cons.column_name
        AND cols.table_name = col_cons.table_name
    LEFT JOIN
      user_constraints cons
        ON col_cons.constraint_name = cons.constraint_name
        AND cons.table_name = cols.table_name
    WHERE
      (cons.status = 'ENABLED'
        OR cons.status IS NULL)
  `

  constructor(config: OracleConfig) {
    super(SqlClient.ORACLE)
    this.config = config
  }

  getBindingIdentifier(): string {
    return `:${this.index++}`
  }

  getStringConcat(parts: string[]): string {
    return parts.join(" || ")
  }

  /**
   * Map the flat tabular columns and constraints data into a nested object
   */
  private mapColumns(result: Result<ColumnsResponse>): {
    [key: string]: OracleTable
  } {
    const oracleTables: { [key: string]: OracleTable } = {}

    if (result.rows) {
      result.rows.forEach(row => {
        const tableName = row.TABLE_NAME
        const columnName = row.COLUMN_NAME
        const dataType = row.DATA_TYPE
        const dataDefault = row.DATA_DEFAULT
        const columnId = row.COLUMN_ID
        const constraintName = row.CONSTRAINT_NAME
        const constraintType = row.CONSTRAINT_TYPE
        const relatedConstraintName = row.R_CONSTRAINT_NAME
        const searchCondition = row.SEARCH_CONDITION

        let table = oracleTables[tableName]
        if (!table) {
          table = {
            name: tableName,
            columns: {},
          }
          oracleTables[tableName] = table
        }

        let column = table.columns[columnName]
        if (!column) {
          column = {
            name: columnName,
            type: dataType,
            default: dataDefault,
            id: columnId,
            constraints: {},
          }
          table.columns[columnName] = column
        }

        if (constraintName && constraintType) {
          let constraint = column.constraints[constraintName]
          if (!constraint) {
            constraint = {
              name: constraintName,
              type: constraintType,
              relatedConstraintName: relatedConstraintName,
              searchCondition: searchCondition,
            }
          }
          column.constraints[constraintName] = constraint
        }
      })
    }

    return oracleTables
  }

  private static isSupportedColumn(column: OracleColumn) {
    return !UNSUPPORTED_TYPES.includes(column.type)
  }

  private static isAutoColumn(column: OracleColumn) {
    return !!(
      column.default && column.default.toLowerCase().includes("nextval")
    )
  }

  /**
   * No native boolean in oracle. Best we can do is to check if a manual 1 or 0 number constraint has been set up.
   * This matches the default behaviour for generating DDL used in knex.
   */
  private isBooleanType(column: OracleColumn): boolean {
    return (
      column.type.toLowerCase() === "number" &&
      Object.values(column.constraints).filter(c => {
        if (
          c.type === OracleContraintTypes.NOT_NULL_OR_CHECK &&
          c.searchCondition
        ) {
          const condition = c.searchCondition
            .replace(/\s/g, "") // remove spaces
            .replace(/[']+/g, "") // remove quotes
          if (condition.includes("in(0,1)") || condition.includes("in(1,0)")) {
            return true
          }
        }
        return false
      }).length > 0
    )
  }

  private internalConvertType(column: OracleColumn): { type: string } {
    if (this.isBooleanType(column)) {
      return { type: FieldTypes.BOOLEAN }
    }

    return convertSqlType(column.type)
  }

  /**
   * Fetches the tables from the oracle database and assigns them to the datasource.
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    const columnsResponse = await this.internalQuery<ColumnsResponse>({
      sql: this.COLUMNS_SQL,
    })
    const oracleTables = this.mapColumns(columnsResponse)

    const tables: { [key: string]: Table } = {}

    // iterate each table
    Object.values(oracleTables).forEach(oracleTable => {
      let table = tables[oracleTable.name]
      if (!table) {
        table = {
          _id: buildExternalTableId(datasourceId, oracleTable.name),
          primary: [],
          name: oracleTable.name,
          schema: {},
        }
        tables[oracleTable.name] = table
      }

      // iterate each column on the table
      Object.values(oracleTable.columns)
        // remove columns that we can't read / save
        .filter(oracleColumn =>
          OracleIntegration.isSupportedColumn(oracleColumn)
        )
        // match the order of the columns in the db
        .sort((c1, c2) => c1.id - c2.id)
        .forEach(oracleColumn => {
          const columnName = oracleColumn.name
          let fieldSchema = table.schema[columnName]
          if (!fieldSchema) {
            fieldSchema = {
              autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
              name: columnName,
              ...this.internalConvertType(oracleColumn),
            }
            table.schema[columnName] = fieldSchema
          }

          // iterate each constraint on the column
          Object.values(oracleColumn.constraints).forEach(oracleConstraint => {
            if (oracleConstraint.type === OracleContraintTypes.PRIMARY) {
              table.primary!.push(columnName)
            }
          })
        })
    })

    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
  }

  private async internalQuery<T>(query: SqlQuery): Promise<Result<T>> {
    let connection
    try {
      this.index = 1
      connection = await this.getConnection()

      const options: ExecuteOptions = { autoCommit: true }
      const bindings: BindParameters = query.bindings || []

      return await connection.execute<T>(query.sql, bindings, options)
    } finally {
      if (connection) {
        try {
          await connection.close()
        } catch (err) {
          console.error(err)
        }
      }
    }
  }

  private getConnection = async (): Promise<Connection> => {
    //connectString : "(DESCRIPTION =(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))(CONNECT_DATA =(SID= ORCL)))"
    const connectString = `${this.config.host}:${this.config.port || 1521}/${
      this.config.database
    }`
    const attributes: ConnectionAttributes = {
      user: this.config.user,
      password: this.config.password,
      connectString,
    }
    return oracledb.getConnection(attributes)
  }

  async create(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery<any>(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ created: true }]
  }

  async read(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery<any>(getSqlQuery(query))
    return response.rows ? response.rows : []
  }

  async update(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ updated: true }]
  }

  async delete(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ deleted: true }]
  }

  async query(json: QueryJson) {
    const operation = this._operation(json)
    const input = this._query(json, { disableReturning: true })
    if (Array.isArray(input)) {
      const responses = []
      for (let query of input) {
        responses.push(await this.internalQuery(query))
      }
      return responses
    } else {
      // read the row to be deleted up front for the return
      let deletedRows
      if (operation === Operation.DELETE) {
        const queryFn = (query: any) => this.internalQuery(query)
        deletedRows = await this.getReturningRow(queryFn, json)
      }

      // run the query
      const response = await this.internalQuery(input)

      // get the results or return the created / updated / deleted row
      if (deletedRows?.rows?.length) {
        return deletedRows.rows
      } else if (response.rows?.length) {
        return response.rows
      } else {
        // get the last row that was updated
        if (
          response.lastRowid &&
          json.endpoint?.entityId &&
          operation !== Operation.DELETE
        ) {
          const lastRow = await this.internalQuery({
            sql: `SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`,
          })
          return lastRow.rows
        } else {
          return [{ [operation.toLowerCase()]: true }]
        }
      }
    }
  }
}

export default {
  schema: SCHEMA,
  integration: OracleIntegration,
}
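
As a sketch (hypothetical table and values): Oracle bindings are positional and numbered, and the boolean detection above keys off knex-style check constraints.

  const integration = new OracleIntegration({
    host: "localhost",
    port: 1521,
    database: "XE",
    user: "hr",
    password: "hr",
  })
  // getBindingIdentifier() yields ":1", ":2", ... so generated SQL looks like
  //   SELECT * FROM "EMPLOYEES" WHERE "ID" = :1
  const rows = await integration.read({
    sql: 'SELECT * FROM "EMPLOYEES" WHERE "ID" = :1',
    bindings: [7839],
  })
  // A search condition such as "ACTIVE" in (0, 1) collapses to a string
  // containing in(0,1) once spaces and quotes are stripped, so isBooleanType()
  // surfaces the number(1) column as a Budibase boolean.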

@@ -16,317 +16,313 @@ import {
} from "./utils"
import Sql from "./base/sql"

const { Client, types } = require("pg")
const { escapeDangerousCharacters } = require("../utilities")

// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
// types is undefined when running in a test env for some reason.
if (types) {
  types.setTypeParser(1114, (val: any) => val) // timestamp
  types.setTypeParser(1082, (val: any) => val) // date
  types.setTypeParser(1184, (val: any) => val) // timestampz
}

const JSON_REGEX = /'{.*}'::json/s

interface PostgresConfig {
  host: string
  port: number
  database: string
  user: string
  password: string
  schema: string
  ssl?: boolean
  ca?: string
  rejectUnauthorized?: boolean
}

const SCHEMA: Integration = {
  docs: "https://node-postgres.com",
  plus: true,
  friendlyName: "PostgreSQL",
  type: "Relational",
  description:
    "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
  datasource: {
    host: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      required: true,
      default: 5432,
    },
    database: {
      type: DatasourceFieldType.STRING,
      default: "postgres",
      required: true,
    },
    user: {
      type: DatasourceFieldType.STRING,
      default: "root",
      required: true,
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      default: "root",
      required: true,
    },
    schema: {
      type: DatasourceFieldType.STRING,
      default: "public",
      required: true,
    },
    ssl: {
      type: DatasourceFieldType.BOOLEAN,
      default: false,
      required: false,
    },
    rejectUnauthorized: {
      type: DatasourceFieldType.BOOLEAN,
      default: false,
      required: false,
    },
    ca: {
      type: DatasourceFieldType.LONGFORM,
      default: false,
      required: false,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

class PostgresIntegration extends Sql implements DatasourcePlus {
  private readonly client: any
  private readonly config: PostgresConfig
  private index: number = 1
  private open: boolean
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  COLUMNS_SQL!: string

  PRIMARY_KEYS_SQL = `
  select tc.table_schema, tc.table_name, kc.column_name as primary_key
  from information_schema.table_constraints tc
  join
    information_schema.key_column_usage kc on kc.table_name = tc.table_name
    and kc.table_schema = tc.table_schema
    and kc.constraint_name = tc.constraint_name
  where tc.constraint_type = 'PRIMARY KEY';
  `

  constructor(config: PostgresConfig) {
    super(SqlClient.POSTGRES)
    this.config = config

    let newConfig = {
      ...this.config,
      ssl: this.config.ssl
        ? {
            rejectUnauthorized: this.config.rejectUnauthorized,
            ca: this.config.ca,
          }
        : undefined,
    }
    this.client = new Client(newConfig)
    this.open = false
  }

  getBindingIdentifier(): string {
    return `$${this.index++}`
  }

  getStringConcat(parts: string[]): string {
    return parts.join(" || ")
  }

  async openConnection() {
    await this.client.connect()
    if (!this.config.schema) {
      this.config.schema = "public"
    }
    this.client.query(`SET search_path TO ${this.config.schema}`)
    this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
    this.open = true
  }

  closeConnection() {
    const pg = this
    return new Promise<void>((resolve, reject) => {
      this.client.end((err: any) => {
        pg.open = false
        if (err) {
          reject(err)
        } else {
          resolve()
        }
      })
    })
  }

  async internalQuery(query: SqlQuery, close: boolean = true) {
    if (!this.open) {
      await this.openConnection()
    }
    const client = this.client
    this.index = 1
    // need to handle a specific issue with json data types in postgres,
    // new lines inside the JSON data will break it
    if (query && query.sql) {
      const matches = query.sql.match(JSON_REGEX)
      if (matches && matches.length > 0) {
        for (let match of matches) {
          const escaped = escapeDangerousCharacters(match)
          query.sql = query.sql.replace(match, escaped)
        }
      }
    }
    try {
      return await client.query(query.sql, query.bindings || [])
    } catch (err) {
      await this.closeConnection()
      // @ts-ignore
      throw new Error(err)
    } finally {
      if (close) {
        await this.closeConnection()
      }
    }
  }

  /**
   * Fetches the tables from the postgres database and assigns them to the datasource.
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    let tableKeys: { [key: string]: string[] } = {}
    await this.openConnection()
    try {
      const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
      for (let table of primaryKeysResponse.rows) {
        const tableName = table.table_name
        if (!tableKeys[tableName]) {
          tableKeys[tableName] = []
        }
        const key = table.column_name || table.primary_key
        // only add the unique keys
        if (key && tableKeys[tableName].indexOf(key) === -1) {
          tableKeys[tableName].push(key)
        }
      }
    } catch (err) {
      tableKeys = {}
    }

    try {
      const columnsResponse = await this.client.query(this.COLUMNS_SQL)

      const tables: { [key: string]: Table } = {}

      for (let column of columnsResponse.rows) {
        const tableName: string = column.table_name
        const columnName: string = column.column_name

        // table key doesn't exist yet
        if (!tables[tableName] || !tables[tableName].schema) {
          tables[tableName] = {
            _id: buildExternalTableId(datasourceId, tableName),
            primary: tableKeys[tableName] || [],
            name: tableName,
            schema: {},
          }
        }

        const identity = !!(
          column.identity_generation ||
          column.identity_start ||
          column.identity_increment
        )
        const hasDefault =
          typeof column.column_default === "string" &&
          column.column_default.startsWith("nextval")
        const isGenerated =
          column.is_generated && column.is_generated !== "NEVER"
        const isAuto: boolean = hasDefault || identity || isGenerated
        tables[tableName].schema[columnName] = {
          autocolumn: isAuto,
          name: columnName,
          ...convertSqlType(column.data_type),
          externalType: column.data_type,
        }
      }

      const final = finaliseExternalTables(tables, entities)
      this.tables = final.tables
      this.schemaErrors = final.errors
    } catch (err) {
      // @ts-ignore
      throw new Error(err)
    } finally {
      await this.closeConnection()
    }
  }

  async create(query: SqlQuery | string) {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows.length ? response.rows : [{ created: true }]
  }

  async read(query: SqlQuery | string) {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows
  }

  async update(query: SqlQuery | string) {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows.length ? response.rows : [{ updated: true }]
  }

  async delete(query: SqlQuery | string) {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows.length ? response.rows : [{ deleted: true }]
  }

  async query(json: QueryJson) {
    const operation = this._operation(json).toLowerCase()
    const input = this._query(json)
    if (Array.isArray(input)) {
      const responses = []
      for (let query of input) {
        responses.push(await this.internalQuery(query, false))
      }
      await this.closeConnection()
      return responses
    } else {
      const response = await this.internalQuery(input)
      return response.rows.length ? response.rows : [{ [operation]: true }]
    }
  }
}

export default {
  schema: SCHEMA,
  integration: PostgresIntegration,
}
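
A short usage sketch (config values are hypothetical): when ssl is enabled the constructor expands it into the pg client's { rejectUnauthorized, ca } options, and internalQuery escapes newline-bearing '...'::json literals via JSON_REGEX before sending the statement.

  const integration = new PostgresIntegration({
    host: "localhost",
    port: 5432,
    database: "postgres",
    user: "root",
    password: "root",
    schema: "public",
    ssl: false,
  })
  // openConnection() runs lazily on the first query and pins search_path to
  // the configured schema before any statement executes.
  const rows = await integration.read("SELECT * FROM users")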

@@ -1,149 +1,147 @@
import { DatasourceFieldType, Integration, QueryType } from "@budibase/types"
import Redis from "ioredis"

interface RedisConfig {
  host: string
  port: number
  username: string
  password?: string
}

const SCHEMA: Integration = {
  docs: "https://redis.io/docs/",
  description: "",
  friendlyName: "Redis",
  type: "Non-relational",
  datasource: {
    host: {
      type: "string",
      required: true,
      default: "localhost",
    },
    port: {
      type: "number",
      required: true,
      default: 6379,
    },
    username: {
      type: "string",
      required: false,
    },
    password: {
      type: "password",
      required: false,
    },
  },
  query: {
    create: {
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
        value: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
        ttl: {
          type: DatasourceFieldType.NUMBER,
        },
      },
    },
    read: {
      readable: true,
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
      },
    },
    delete: {
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
      },
    },
    command: {
      readable: true,
      displayName: "Redis Command",
      type: QueryType.JSON,
    },
  },
}

class RedisIntegration {
  private readonly config: RedisConfig
  private client: any

  constructor(config: RedisConfig) {
    this.config = config
    this.client = new Redis({
      host: this.config.host,
      port: this.config.port,
      username: this.config.username,
      password: this.config.password,
    })
  }

  async disconnect() {
    this.client.disconnect()
  }

  async redisContext(query: Function) {
    try {
      return await query()
    } catch (err) {
      throw new Error(`Redis error: ${err}`)
    } finally {
      this.disconnect()
    }
  }

  async create(query: { key: string; value: string; ttl: number }) {
    return this.redisContext(async () => {
      const response = await this.client.set(query.key, query.value)
      if (query.ttl) {
        await this.client.expire(query.key, query.ttl)
      }
      return response
    })
  }

  async read(query: { key: string }) {
    return this.redisContext(async () => {
      const response = await this.client.get(query.key)
      return response
    })
  }

  async delete(query: { key: string }) {
    return this.redisContext(async () => {
      const response = await this.client.del(query.key)
      return response
    })
  }

  async command(query: { json: string }) {
    return this.redisContext(async () => {
      const commands = query.json.trim().split(" ")
      const pipeline = this.client.pipeline([commands])
      const result = await pipeline.exec()
      return {
        response: result[0][1],
      }
    })
  }
}

export default {
  schema: SCHEMA,
  integration: RedisIntegration,
}
|
||||
|
|
|
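For illustration only (not part of the commit): a minimal sketch of driving this class directly, assuming an ioredis-style `Redis` client and a `RedisConfig` carrying the host/port/username/password fields the constructor reads. Because `redisContext` disconnects the client in its `finally` block, each instance is only good for a single query.

async function redisDemo() {
  // Hypothetical connection values - RedisConfig's exact shape is assumed
  // from the constructor above.
  const config = { host: "localhost", port: 6379 } as RedisConfig

  // One instance per query, since redisContext() disconnects the client
  // after every call.
  await new RedisIntegration(config).create({ key: "greeting", value: "hello", ttl: 60 })
  const value = await new RedisIntegration(config).read({ key: "greeting" })
  console.log(value) // "hello"
}
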
@@ -14,6 +14,11 @@ import {
  BearerAuthConfig,
} from "../definitions/datasource"
import { get } from "lodash"
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
const FormData = require("form-data")
const { URLSearchParams } = require("url")

const BodyTypes = {
  NONE: "none",

@@ -50,363 +55,353 @@ const coreFields = {
  },
}

const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")

const SCHEMA: Integration = {
  docs: "https://github.com/node-fetch/node-fetch",
  description:
    "With the REST API datasource, you can connect, query and pull data from multiple REST APIs. You can then use the retrieved data to build apps.",
  friendlyName: "REST API",
  type: "API",
  datasource: {
    url: {
      type: DatasourceFieldType.STRING,
      default: "",
      required: false,
      deprecated: true,
    },
    defaultHeaders: {
      type: DatasourceFieldType.OBJECT,
      required: false,
      default: {},
    },
  },
  query: {
    create: {
      readable: true,
      displayName: "POST",
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    read: {
      displayName: "GET",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    update: {
      displayName: "PUT",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    patch: {
      displayName: "PATCH",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    delete: {
      displayName: "DELETE",
      type: QueryType.FIELDS,
      fields: coreFields,
    },
  },
}

class RestIntegration implements IntegrationBase {
  private config: RestConfig
  private headers: {
    [key: string]: string
  } = {}
  private startTimeMs: number = performance.now()

  constructor(config: RestConfig) {
    this.config = config
  }

  async parseResponse(response: any, pagination: PaginationConfig | null) {
    let data, raw, headers
    const contentType = response.headers.get("content-type") || ""
    try {
      if (contentType.includes("application/json")) {
        data = await response.json()
        raw = JSON.stringify(data)
      } else if (
        contentType.includes("text/xml") ||
        contentType.includes("application/xml")
      ) {
        const rawXml = await response.text()
        data =
          (await xmlParser(rawXml, {
            explicitArray: false,
            trim: true,
            explicitRoot: false,
          })) || {}
        // there is only one structure, its an array, return the array so it appears as rows
        const keys = Object.keys(data)
        if (keys.length === 1 && Array.isArray(data[keys[0]])) {
          data = data[keys[0]]
        }
        raw = rawXml
      } else {
        data = await response.text()
        raw = data
      }
    } catch (err) {
      throw "Failed to parse response body."
    }
    const size = formatBytes(
      response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
    )
    const time = `${Math.round(performance.now() - this.startTimeMs)}ms`
    headers = response.headers.raw()
    for (let [key, value] of Object.entries(headers)) {
      headers[key] = Array.isArray(value) ? value[0] : value
    }

    // Check if a pagination cursor exists in the response
    let nextCursor = null
    if (pagination?.responseParam) {
      nextCursor = get(data, pagination.responseParam)
    }

    return {
      data,
      info: {
        code: response.status,
        size,
        time,
      },
      extra: {
        raw,
        headers,
      },
      pagination: {
        cursor: nextCursor,
      },
    }
  }

  getUrl(
    path: string,
    queryString: string,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
  ): string {
    // Add pagination params to query string if required
    if (pagination?.location === "query" && paginationValues) {
      const { pageParam, sizeParam } = pagination
      const params = new URLSearchParams()

      // Append page number or cursor param if configured
      if (pageParam && paginationValues.page != null) {
        params.append(pageParam, paginationValues.page)
      }

      // Append page size param if configured
      if (sizeParam && paginationValues.limit != null) {
        params.append(sizeParam, paginationValues.limit)
      }

      // Prepend query string with pagination params
      let paginationString = params.toString()
      if (paginationString) {
        queryString = `${paginationString}&${queryString}`
      }
    }

    const main = `${path}?${queryString}`
    let complete = main
    if (this.config.url && !main.startsWith("http")) {
      complete = !this.config.url ? main : `${this.config.url}/${main}`
    }
    if (!complete.startsWith("http")) {
      complete = `http://${complete}`
    }
    return complete
  }

  addBody(
    bodyType: string,
    body: string | any,
    input: any,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
  ) {
    if (!input.headers) {
      input.headers = {}
    }
    if (bodyType === BodyTypes.NONE) {
      return input
    }
    let error,
      object: any = {},
      string = ""
    try {
      if (body) {
        string = typeof body !== "string" ? JSON.stringify(body) : body
        object = typeof body === "object" ? body : JSON.parse(body)
      }
    } catch (err) {
      error = err
    }

    // Util to add pagination values to a certain body type
    const addPaginationToBody = (insertFn: Function) => {
      if (pagination?.location === "body") {
        if (pagination?.pageParam && paginationValues?.page != null) {
          insertFn(pagination.pageParam, paginationValues.page)
        }
        if (pagination?.sizeParam && paginationValues?.limit != null) {
          insertFn(pagination.sizeParam, paginationValues.limit)
        }
      }
    }

    switch (bodyType) {
      case BodyTypes.TEXT:
        // content type defaults to plaintext
        input.body = string
        break
      case BodyTypes.ENCODED:
        const params = new URLSearchParams()
        for (let [key, value] of Object.entries(object)) {
          params.append(key, value)
        }
        addPaginationToBody((key: string, value: any) => {
          params.append(key, value)
        })
        input.body = params
        break
      case BodyTypes.FORM_DATA:
        const form = new FormData()
        for (let [key, value] of Object.entries(object)) {
          form.append(key, value)
        }
        addPaginationToBody((key: string, value: any) => {
          form.append(key, value)
        })
        input.body = form
        break
      case BodyTypes.XML:
        if (object != null && Object.keys(object).length) {
          string = new XmlBuilder().buildObject(object)
        }
        input.body = string
        input.headers["Content-Type"] = "application/xml"
        break
      case BodyTypes.JSON:
        // if JSON error, throw it
        if (error) {
          throw "Invalid JSON for request body"
        }
        addPaginationToBody((key: string, value: any) => {
          object[key] = value
        })
        input.body = JSON.stringify(object)
        input.headers["Content-Type"] = "application/json"
        break
    }
    return input
  }

  getAuthHeaders(authConfigId: string): { [key: string]: any } {
    let headers: any = {}

    if (this.config.authConfigs && authConfigId) {
      const authConfig = this.config.authConfigs.filter(
        c => c._id === authConfigId
      )[0]
      // check the config still exists before proceeding
      // if not - do nothing
      if (authConfig) {
        let config
        switch (authConfig.type) {
          case AuthType.BASIC:
            config = authConfig.config as BasicAuthConfig
            headers.Authorization = `Basic ${Buffer.from(
              `${config.username}:${config.password}`
            ).toString("base64")}`
            break
          case AuthType.BEARER:
            config = authConfig.config as BearerAuthConfig
            headers.Authorization = `Bearer ${config.token}`
            break
        }
      }
    }

    return headers
  }

  async _req(query: RestQuery) {
    const {
      path = "",
      queryString = "",
      headers = {},
      method = "GET",
      disabledHeaders,
      bodyType,
      requestBody,
      authConfigId,
      pagination,
      paginationValues,
    } = query
    const authHeaders = this.getAuthHeaders(authConfigId)

    this.headers = {
      ...this.config.defaultHeaders,
      ...headers,
      ...authHeaders,
    }

    if (disabledHeaders) {
      for (let headerKey of Object.keys(this.headers)) {
        if (disabledHeaders[headerKey]) {
          delete this.headers[headerKey]
        }
      }
    }

    let input: any = { method, headers: this.headers }
    input = this.addBody(
      bodyType,
      requestBody,
      input,
      pagination,
      paginationValues
    )

    this.startTimeMs = performance.now()
    const url = this.getUrl(path, queryString, pagination, paginationValues)
    const response = await fetch(url, input)
    return await this.parseResponse(response, pagination)
  }

  async create(opts: RestQuery) {
    return this._req({ ...opts, method: "POST" })
  }

  async read(opts: RestQuery) {
    return this._req({ ...opts, method: "GET" })
  }

  async update(opts: RestQuery) {
    return this._req({ ...opts, method: "PUT" })
  }

  async patch(opts: RestQuery) {
    return this._req({ ...opts, method: "PATCH" })
  }

  async delete(opts: RestQuery) {
    return this._req({ ...opts, method: "DELETE" })
  }
}

export default {
  schema: SCHEMA,
  integration: RestIntegration,
  AuthType,
}

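As a rough usage sketch (again, not from the commit), the request flow above could be exercised like this; the base URL, pagination field names, and exact `RestConfig`/`RestQuery` shapes are assumptions read off the code:

async function restDemo() {
  const rest = new RestIntegration({
    url: "https://api.example.com", // placeholder base URL
    defaultHeaders: { Accept: "application/json" },
  } as RestConfig)

  // getUrl() prepends the pagination params, so this requests
  // https://api.example.com/users?page=2&limit=50&active=true
  const { data, info, pagination } = await rest.read({
    path: "users",
    queryString: "active=true",
    pagination: {
      location: "query",
      pageParam: "page",
      sizeParam: "limit",
      responseParam: "next", // cursor is read from the "next" field of the JSON body
    },
    paginationValues: { page: 2, limit: 50 },
  } as RestQuery)

  console.log(info.code, info.size, info.time, pagination.cursor, data)
}
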
@@ -1,86 +1,83 @@
import { Integration, QueryType, IntegrationBase } from "@budibase/types"

const AWS = require("aws-sdk")

interface S3Config {
  region: string
  accessKeyId: string
  secretAccessKey: string
  s3ForcePathStyle: boolean
  endpoint?: string
}

const SCHEMA: Integration = {
  docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
  description:
    "Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
  friendlyName: "Amazon S3",
  type: "Object store",
  datasource: {
    region: {
      type: "string",
      required: false,
      default: "us-east-1",
    },
    accessKeyId: {
      type: "password",
      required: true,
    },
    secretAccessKey: {
      type: "password",
      required: true,
    },
    endpoint: {
      type: "string",
      required: false,
    },
    signatureVersion: {
      type: "string",
      required: false,
      default: "v4",
    },
  },
  query: {
    read: {
      type: QueryType.FIELDS,
      fields: {
        bucket: {
          type: "string",
          required: true,
        },
      },
    },
  },
}

class S3Integration implements IntegrationBase {
  private readonly config: S3Config
  private client: any

  constructor(config: S3Config) {
    this.config = config
    if (this.config.endpoint) {
      this.config.s3ForcePathStyle = true
    } else {
      delete this.config.endpoint
    }

    this.client = new AWS.S3(this.config)
  }

  async read(query: { bucket: string }) {
    const response = await this.client
      .listObjects({
        Bucket: query.bucket,
      })
      .promise()
    return response.Contents
  }
}

export default {
  schema: SCHEMA,
  integration: S3Integration,
}

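A brief sketch of the read path, assuming valid AWS credentials; every value here is a placeholder:

async function s3Demo() {
  const s3 = new S3Integration({
    region: "us-east-1",
    accessKeyId: "AKIA-PLACEHOLDER",
    secretAccessKey: "placeholder-secret",
    s3ForcePathStyle: false,
  })

  // read() returns the Contents array from listObjects - one entry per object.
  const objects = await s3.read({ bucket: "my-bucket" })
  console.log(objects?.map((o: any) => o.Key))
}
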
@@ -1,99 +1,97 @@
import { Integration, QueryType, SqlQuery } from "@budibase/types"
import { Snowflake } from "snowflake-promise"

interface SnowflakeConfig {
  account: string
  username: string
  password: string
  warehouse: string
  database: string
  schema: string
}

const SCHEMA: Integration = {
  docs: "https://developers.snowflake.com/",
  description:
    "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.",
  friendlyName: "Snowflake",
  type: "Relational",
  datasource: {
    account: {
      type: "string",
      required: true,
    },
    username: {
      type: "string",
      required: true,
    },
    password: {
      type: "password",
      required: true,
    },
    warehouse: {
      type: "string",
      required: true,
    },
    database: {
      type: "string",
      required: true,
    },
    schema: {
      type: "string",
      required: true,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

class SnowflakeIntegration {
  private client: Snowflake

  constructor(config: SnowflakeConfig) {
    this.client = new Snowflake(config)
  }

  async internalQuery(query: SqlQuery) {
    await this.client.connect()
    try {
      return await this.client.execute(query.sql)
    } catch (err: any) {
      throw err?.message.split(":")[1] || err?.message
    }
  }

  async create(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async read(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async update(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async delete(query: SqlQuery) {
    return this.internalQuery(query)
  }
}

export default {
  schema: SCHEMA,
  integration: SnowflakeIntegration,
}

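And a sketch for Snowflake, with placeholder connection details; since `internalQuery` calls connect() on every query, a fresh instance per query is the safe pattern here too:

async function snowflakeDemo() {
  const snowflake = new SnowflakeIntegration({
    account: "myorg-myaccount", // placeholder account identifier
    username: "user",
    password: "pass",
    warehouse: "COMPUTE_WH",
    database: "MYDB",
    schema: "PUBLIC",
  })

  // create/read/update/delete all funnel through internalQuery()
  const rows = await snowflake.read({ sql: "SELECT CURRENT_TIMESTAMP" })
  console.log(rows)
}
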
@@ -1,5 +1,6 @@
export * from "./account"
export * from "./app"
export * from "./global"
export * from "./plugin"
export * from "./platform"
export * from "./document"

@@ -0,0 +1,4 @@
export enum PluginType {
  DATASOURCE = "datasource",
  COMPONENT = "component",
}
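One hedged example of what the new enum enables (illustrative only, not part of the commit): validating an untrusted string against the declared plugin kinds.

// Hypothetical helper - returns the matching enum member, or undefined.
function parsePluginType(value: string): PluginType | undefined {
  return (Object.values(PluginType) as string[]).includes(value)
    ? (value as PluginType)
    : undefined
}

parsePluginType("datasource") // PluginType.DATASOURCE
parsePluginType("theme") // undefined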