Making integrations more like what custom integrations look like (to simplify integration).
parent 67298ff44d
commit 97466f183b

@@ -1,12 +1,11 @@
-const { cloneDeep } = require("lodash")
-const { definitions } = require("../../integrations")
+const { getDefinitions } = require("../../integrations")
 const { SourceName } = require("@budibase/types")
 const googlesheets = require("../../integrations/googlesheets")
 const { featureFlags } = require("@budibase/backend-core")
 
 exports.fetch = async function (ctx) {
   ctx.status = 200
-  const defs = cloneDeep(definitions)
+  const defs = await getDefinitions()
 
   // for google sheets integration google verification
   if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
@@ -17,6 +16,7 @@ exports.fetch = async function (ctx) {
 }
 
 exports.find = async function (ctx) {
+  const defs = await getDefinitions()
   ctx.status = 200
-  ctx.body = definitions[ctx.params.type]
+  ctx.body = defs[ctx.params.type]
 }
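
As context for the two hunks above: getDefinitions replaces the old static definitions object. A minimal sketch of the shape it could have, assuming it simply deep-clones the built-in definitions before (eventually) merging in plugin-provided ones — the real implementation lives in ../../integrations and is not part of this diff:

import { cloneDeep } from "lodash"

// Hypothetical sketch only; BUILTIN_DEFINITIONS stands in for the real map
// of built-in integration schemas kept in ../../integrations.
const BUILTIN_DEFINITIONS: Record<string, any> = {
  // e.g. AIRTABLE: airtable.schema, COUCHDB: couchdb.schema, ...
}

export async function getDefinitions() {
  // cloned so callers such as exports.fetch can mutate the result safely
  const definitions = cloneDeep(BUILTIN_DEFINITIONS)
  // being async leaves room to merge plugin-provided definitions here
  return definitions
}
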
@@ -3,6 +3,22 @@ import { extractPluginTarball } from "../../utilities/fileSystem"
 import { getGlobalDB } from "@budibase/backend-core/tenancy"
 import { generatePluginID, getPluginParams } from "../../db/utils"
 import { uploadDirectory } from "@budibase/backend-core/objectStore"
+import { PluginType } from "@budibase/types"
 
+export async function getPlugins(type?: PluginType) {
+  const db = getGlobalDB()
+  const response = await db.allDocs(
+    getPluginParams(null, {
+      include_docs: true,
+    })
+  )
+  const plugins = response.rows.map((row: any) => row.doc)
+  if (type) {
+    return plugins.filter((plugin: any) => plugin.schema?.type === type)
+  } else {
+    return plugins
+  }
+}
+
 export async function upload(ctx: any) {
   const plugins =
@@ -68,13 +84,7 @@ export async function upload(ctx: any) {
 }
 
 export async function fetch(ctx: any) {
-  const db = getGlobalDB()
-  const response = await db.allDocs(
-    getPluginParams(null, {
-      include_docs: true,
-    })
-  )
-  ctx.body = response.rows.map((row: any) => row.doc)
+  ctx.body = await getPlugins()
 }
 
 export async function destroy(ctx: any) {}
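
The new getPlugins helper doubles as a filtered lookup. An illustrative call, assuming PluginType.DATASOURCE is one of the enum members exported by @budibase/types:

import { PluginType } from "@budibase/types"

// Illustration only: list the names of installed datasource plugins.
async function listDatasourcePluginNames() {
  const plugins = await getPlugins(PluginType.DATASOURCE)
  return plugins.map((plugin: any) => plugin.name)
}
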
@@ -5,146 +5,144 @@ import {
   IntegrationBase,
 } from "@budibase/types"
 
-module AirtableModule {
-  const Airtable = require("airtable")
+const Airtable = require("airtable")
 
 interface AirtableConfig {
   apiKey: string
   base: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://airtable.com/api",
   description:
     "Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
   friendlyName: "Airtable",
   type: "Spreadsheet",
   datasource: {
     apiKey: {
       type: DatasourceFieldType.PASSWORD,
       default: "enter api key",
       required: true,
     },
     base: {
       type: DatasourceFieldType.STRING,
       default: "mybase",
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         view: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         numRecords: {
           type: DatasourceFieldType.NUMBER,
           default: 10,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         id: {
           display: "Record ID",
           type: DatasourceFieldType.STRING,
           required: true,
         },
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.JSON,
     },
   },
 }
 
 class AirtableIntegration implements IntegrationBase {
   private config: AirtableConfig
   private client: any
 
   constructor(config: AirtableConfig) {
     this.config = config
     this.client = new Airtable(config).base(config.base)
   }
 
   async create(query: { table: any; json: any }) {
     const { table, json } = query
 
     try {
       return await this.client(table).create([
         {
           fields: json,
         },
       ])
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }
 
   async read(query: { table: any; numRecords: any; view: any }) {
     try {
       const records = await this.client(query.table)
         .select({ maxRecords: query.numRecords || 10, view: query.view })
         .firstPage()
       // @ts-ignore
       return records.map(({ fields }) => fields)
     } catch (err) {
       console.error("Error writing to airtable", err)
       return []
     }
   }
 
   async update(query: { table: any; id: any; json: any }) {
     const { table, id, json } = query
 
     try {
       return await this.client(table).update([
         {
           id,
           fields: json,
         },
       ])
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }
 
   async delete(query: { table: any; ids: any }) {
     try {
       return await this.client(query.table).destroy(query.ids)
     } catch (err) {
       console.error("Error writing to airtable", err)
       throw err
     }
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: AirtableIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: AirtableIntegration,
+}
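
The point of the change above is that a built-in integration now has the same shape as a custom one: a default export carrying the schema and the integration class. A sketch of how a consumer could use that shape (the credentials below are placeholders):

import airtable from "./airtable"

async function example() {
  // the default export pairs the schema with the integration class
  const AirtableIntegration = airtable.integration
  const client = new AirtableIntegration({ apiKey: "key...", base: "app..." })
  // read() maps Airtable records down to their plain field objects
  return client.read({ table: "Users", view: "Grid view", numRecords: 5 })
}
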
@@ -5,106 +5,104 @@ import {
   IntegrationBase,
 } from "@budibase/types"
 
-module ArangoModule {
-  const { Database, aql } = require("arangojs")
+const { Database, aql } = require("arangojs")
 
 interface ArangodbConfig {
   url: string
   username: string
   password: string
   databaseName: string
   collection: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://github.com/arangodb/arangojs",
   friendlyName: "ArangoDB",
   type: "Non-relational",
   description:
     "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       default: "http://localhost:8529",
       required: true,
     },
     username: {
       type: DatasourceFieldType.STRING,
       default: "root",
       required: true,
     },
     password: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     databaseName: {
       type: DatasourceFieldType.STRING,
       default: "_system",
       required: true,
     },
     collection: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     read: {
       type: QueryType.SQL,
     },
     create: {
       type: QueryType.JSON,
     },
   },
 }
 
 class ArangoDBIntegration implements IntegrationBase {
   private config: ArangodbConfig
   private client: any
 
   constructor(config: ArangodbConfig) {
     const newConfig = {
       auth: {
         username: config.username,
         password: config.password,
       },
     }
 
     this.config = config
     this.client = new Database(newConfig)
   }
 
   async read(query: { sql: any }) {
     try {
       const result = await this.client.query(query.sql)
       return result.all()
     } catch (err) {
       // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {
       this.client.close()
     }
   }
 
   async create(query: { json: any }) {
     const clc = this.client.collection(this.config.collection)
     try {
       const result = await this.client.query(
         aql`INSERT ${query.json} INTO ${clc} RETURN NEW`
       )
       return result.all()
     } catch (err) {
       // @ts-ignore
       console.error("Error querying arangodb", err.message)
       throw err
     } finally {
       this.client.close()
     }
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: ArangoDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: ArangoDBIntegration,
+}
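
One detail worth calling out in create() above: aql is a template tag from arangojs, so interpolated values travel as bind parameters rather than being spliced into the query string. A small illustration (collection and field names invented):

import { aql } from "arangojs"

const name = "bob"
const q = aql`FOR u IN users FILTER u.name == ${name} RETURN u`
// q.query    -> "FOR u IN users FILTER u.name == @value0 RETURN u"
// q.bindVars -> { value0: "bob" }
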
@@ -5,109 +5,103 @@ import {
   IntegrationBase,
 } from "@budibase/types"
 
-module CouchDBModule {
-  const PouchDB = require("pouchdb")
+const PouchDB = require("pouchdb")
 
 interface CouchDBConfig {
   url: string
   database: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://docs.couchdb.org/en/stable/",
   friendlyName: "CouchDB",
   type: "Non-relational",
   description:
     "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "http://localhost:5984",
     },
     database: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.JSON,
     },
     read: {
       type: QueryType.JSON,
     },
     update: {
       type: QueryType.JSON,
     },
     delete: {
       type: QueryType.FIELDS,
       fields: {
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }
 
 class CouchDBIntegration implements IntegrationBase {
   private config: CouchDBConfig
   private readonly client: any
 
   constructor(config: CouchDBConfig) {
     this.config = config
     this.client = new PouchDB(`${config.url}/${config.database}`)
   }
 
   async query(
     command: string,
     errorMsg: string,
     query: { json?: object; id?: string }
   ) {
     try {
       const response = await this.client[command](query.id || query.json)
       await this.client.close()
       return response
     } catch (err) {
       console.error(errorMsg, err)
       throw err
     }
   }
 
   async create(query: { json: object }) {
     return this.query("post", "Error writing to couchDB", query)
   }
 
   async read(query: { json: object }) {
     const result = await this.query("allDocs", "Error querying couchDB", {
       json: {
         include_docs: true,
         ...query.json,
       },
     })
     return result.rows.map((row: { doc: object }) => row.doc)
   }
 
   async update(query: { json: object }) {
     return this.query("put", "Error updating couchDB document", query)
   }
 
   async delete(query: { id: string }) {
-    const doc = await this.query(
-      "get",
-      "Cannot find doc to be deleted",
-      query
-    )
+    const doc = await this.query("get", "Cannot find doc to be deleted", query)
     return this.query("remove", "Error deleting couchDB document", {
       json: doc,
     })
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: CouchDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: CouchDBIntegration,
+}
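
The generic query() method above dispatches on the PouchDB method name, which is what keeps each CRUD method to a line or two. Roughly what this.query("post", ..., { json: doc }) boils down to, with a placeholder database URL:

const PouchDB = require("pouchdb")

// Rough equivalence sketch of the dynamic this.client[command](...) dispatch.
async function directEquivalent(doc: object) {
  const pouch = new PouchDB("http://localhost:5984/mydb") // placeholder URL
  const response = await pouch.post(doc) // this.client["post"](query.json)
  await pouch.close()
  return response
}
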
@@ -5,228 +5,226 @@ import {
   IntegrationBase,
 } from "@budibase/types"
 
-module DynamoModule {
-  const AWS = require("aws-sdk")
-  const { AWS_REGION } = require("../db/dynamoClient")
+const AWS = require("aws-sdk")
+const { AWS_REGION } = require("../db/dynamoClient")
 
 interface DynamoDBConfig {
   region: string
   accessKeyId: string
   secretAccessKey: string
   endpoint: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
   description:
     "Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
   friendlyName: "DynamoDB",
   type: "Non-relational",
   datasource: {
     region: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "us-east-1",
     },
     accessKeyId: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     secretAccessKey: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     endpoint: {
       type: DatasourceFieldType.STRING,
       required: false,
       default: "https://dynamodb.us-east-1.amazonaws.com",
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
         },
       },
     },
     scan: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
         },
       },
     },
     describe: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     get: {
       type: QueryType.FIELDS,
       customisable: true,
       readable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         table: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }
 
 class DynamoDBIntegration implements IntegrationBase {
   private config: DynamoDBConfig
   private client: any
 
   constructor(config: DynamoDBConfig) {
     this.config = config
     if (this.config.endpoint && !this.config.endpoint.includes("localhost")) {
       this.connect()
     }
     let options = {
       correctClockSkew: true,
       region: this.config.region || AWS_REGION,
       endpoint: config.endpoint ? config.endpoint : undefined,
     }
     this.client = new AWS.DynamoDB.DocumentClient(options)
   }
 
   end() {
     this.disconnect()
   }
 
   connect() {
     AWS.config.update(this.config)
   }
 
   disconnect() {
     AWS.config.update({
       secretAccessKey: undefined,
       accessKeyId: undefined,
       region: AWS_REGION,
     })
   }
 
   async create(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.put(params).promise()
   }
 
   async read(query: { table: string; json: object; index: null | string }) {
     const params = {
       TableName: query.table,
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
     const response = await this.client.query(params).promise()
     if (response.Items) {
       return response.Items
     }
     return response
   }
 
   async scan(query: { table: string; json: object; index: null | string }) {
     const params = {
       TableName: query.table,
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
     const response = await this.client.scan(params).promise()
     if (response.Items) {
       return response.Items
     }
     return response
   }
 
   async describe(query: { table: string }) {
     const params = {
       TableName: query.table,
     }
     return new AWS.DynamoDB().describeTable(params).promise()
   }
 
   async get(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.get(params).promise()
   }
 
   async update(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.update(params).promise()
   }
 
   async delete(query: { table: string; json: object }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
     return this.client.delete(params).promise()
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: DynamoDBIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: DynamoDBIntegration,
+}
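
Because read() and friends spread query.json straight into the DocumentClient params, callers pass ordinary DynamoDB request syntax through unchanged. An illustrative read() call (table and key values invented):

// Illustration only: query.json carries standard DocumentClient parameters.
async function exampleRead(integration: DynamoDBIntegration) {
  return integration.read({
    table: "users",
    index: null,
    json: {
      KeyConditionExpression: "#pk = :pk",
      ExpressionAttributeNames: { "#pk": "userId" },
      ExpressionAttributeValues: { ":pk": "u_123" },
    },
  })
}
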
@@ -5,151 +5,149 @@ import {
   IntegrationBase,
 } from "@budibase/types"
 
-module ElasticsearchModule {
-  const { Client } = require("@elastic/elasticsearch")
+const { Client } = require("@elastic/elasticsearch")
 
 interface ElasticsearchConfig {
   url: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   description:
     "Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
   friendlyName: "ElasticSearch",
   type: "Non-relational",
   datasource: {
     url: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "http://localhost:9200",
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       customisable: true,
       fields: {
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.FIELDS,
       fields: {
         index: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         id: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
   },
 }
 
 class ElasticSearchIntegration implements IntegrationBase {
   private config: ElasticsearchConfig
   private client: any
 
   constructor(config: ElasticsearchConfig) {
     this.config = config
     this.client = new Client({ node: config.url })
   }
 
   async create(query: { index: string; json: object }) {
     const { index, json } = query
 
     try {
       const result = await this.client.index({
         index,
         body: json,
       })
       return result.body
     } catch (err) {
       console.error("Error writing to elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }
 
   async read(query: { index: string; json: object }) {
     const { index, json } = query
     try {
       const result = await this.client.search({
         index: index,
         body: json,
       })
       return result.body.hits.hits.map(({ _source }: any) => _source)
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }
 
   async update(query: { id: string; index: string; json: object }) {
     const { id, index, json } = query
     try {
       const result = await this.client.update({
         id,
         index,
         body: json,
       })
       return result.body
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }
 
   async delete(query: object) {
     try {
       const result = await this.client.delete(query)
       return result.body
     } catch (err) {
       console.error("Error deleting from elasticsearch", err)
       throw err
     } finally {
       await this.client.close()
     }
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: ElasticSearchIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: ElasticSearchIntegration,
+}
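
Similarly, read() hands query.json to the client as the search body, so callers use the regular Elasticsearch query DSL. For instance (index and field names invented):

// Illustration only: json is passed through as the search request body.
async function exampleSearch(integration: ElasticSearchIntegration) {
  return integration.read({
    index: "products",
    json: { query: { match: { name: "chair" } } },
  })
}
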
@@ -6,184 +6,182 @@ import {
 } from "@budibase/types"
 import { Firestore, WhereFilterOp } from "@google-cloud/firestore"
 
-module Firebase {
 interface FirebaseConfig {
   email: string
   privateKey: string
   projectId: string
 }
 
 const SCHEMA: Integration = {
   docs: "https://firebase.google.com/docs/firestore/quickstart",
   friendlyName: "Firestore",
   type: "Non-relational",
   description:
     "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.",
   datasource: {
     email: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
     privateKey: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
     projectId: {
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.JSON,
     },
     read: {
       type: QueryType.JSON,
     },
     update: {
       type: QueryType.JSON,
     },
     delete: {
       type: QueryType.JSON,
     },
   },
   extra: {
     collection: {
       displayName: "Collection",
       type: DatasourceFieldType.STRING,
       required: true,
     },
     filterField: {
       displayName: "Filter field",
       type: DatasourceFieldType.STRING,
       required: false,
     },
     filter: {
       displayName: "Filter comparison",
       type: DatasourceFieldType.LIST,
       required: false,
       data: {
         read: [
           "==",
           "<",
           "<=",
           "!=",
           ">=",
           ">",
           "array-contains",
           "in",
           "not-in",
           "array-contains-any",
         ],
       },
     },
     filterValue: {
       displayName: "Filter value",
       type: DatasourceFieldType.STRING,
       required: false,
     },
   },
 }
 
 class FirebaseIntegration implements IntegrationBase {
   private config: FirebaseConfig
   private client: Firestore
 
   constructor(config: FirebaseConfig) {
     this.config = config
     this.client = new Firestore({
       projectId: config.projectId,
       credentials: {
         client_email: config.email,
         private_key: config.privateKey?.replace(/\\n/g, "\n"),
       },
     })
   }
 
   async create(query: { json: object; extra: { [key: string]: string } }) {
     try {
       const documentReference = this.client
         .collection(query.extra.collection)
         .doc()
       await documentReference.set({ ...query.json, id: documentReference.id })
       const snapshot = await documentReference.get()
       return snapshot.data()
     } catch (err) {
       console.error("Error writing to Firestore", err)
       throw err
     }
   }
 
   async read(query: { json: object; extra: { [key: string]: string } }) {
     try {
       let snapshot
       const collectionRef = this.client.collection(query.extra.collection)
       if (
         query.extra.filterField &&
         query.extra.filter &&
         query.extra.filterValue
       ) {
         snapshot = await collectionRef
           .where(
             query.extra.filterField,
             query.extra.filter as WhereFilterOp,
             query.extra.filterValue
           )
           .get()
       } else {
         snapshot = await collectionRef.get()
       }
       const result: any[] = []
       snapshot.forEach(doc => result.push(doc.data()))
 
       return result
     } catch (err) {
       console.error("Error querying Firestore", err)
       throw err
     }
   }
 
   async update(query: {
     json: Record<string, any>
     extra: { [key: string]: string }
   }) {
     try {
       await this.client
         .collection(query.extra.collection)
         .doc(query.json.id)
         .update(query.json)
 
       return (
         await this.client
           .collection(query.extra.collection)
           .doc(query.json.id)
           .get()
       ).data()
     } catch (err) {
       console.error("Error writing to Firestore", err)
       throw err
     }
   }
 
   async delete(query: {
     json: { id: string }
     extra: { [key: string]: string }
   }) {
     try {
       await this.client
         .collection(query.extra.collection)
         .doc(query.json.id)
         .delete()
       return true
     } catch (err) {
       console.error("Error deleting from Firestore", err)
       throw err
     }
   }
 }
 
-  module.exports = {
-    schema: SCHEMA,
-    integration: FirebaseIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: FirebaseIntegration,
+}
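
The extra block in the schema above is what drives the optional where() clause in read(). A call that exercises it could look like this (collection and filter values invented):

// Illustration only: maps to collectionRef.where("age", ">=", "21").get().
async function exampleFilteredRead(integration: FirebaseIntegration) {
  return integration.read({
    json: {},
    extra: {
      collection: "users",
      filterField: "age",
      filter: ">=",
      filterValue: "21",
    },
  })
}
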
@@ -13,409 +13,400 @@ import { DataSourceOperation, FieldTypes } from "../constants"
 import { GoogleSpreadsheet } from "google-spreadsheet"
 import env from "../environment"
 
-module GoogleSheetsModule {
-  const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-  const { getScopedConfig } = require("@budibase/backend-core/db")
-  const { Configs } = require("@budibase/backend-core/constants")
-  const fetch = require("node-fetch")
+const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { getScopedConfig } = require("@budibase/backend-core/db")
+const { Configs } = require("@budibase/backend-core/constants")
+const fetch = require("node-fetch")
 
 interface GoogleSheetsConfig {
   spreadsheetId: string
   auth: OAuthClientConfig
 }
 
 interface OAuthClientConfig {
   appId: string
   accessToken: string
   refreshToken: string
 }
 
 interface AuthTokenRequest {
   client_id: string
   client_secret: string
   refresh_token: string
 }
 
 interface AuthTokenResponse {
   access_token: string
 }
 
 const SCHEMA: Integration = {
   plus: true,
   auth: {
     type: "google",
   },
   relationships: false,
   docs: "https://developers.google.com/sheets/api/quickstart/nodejs",
   description:
     "Create and collaborate on online spreadsheets in real-time and from any device. ",
   friendlyName: "Google Sheets",
   type: "Spreadsheet",
   datasource: {
     spreadsheetId: {
       display: "Google Sheet URL",
       type: DatasourceFieldType.STRING,
       required: true,
     },
   },
   query: {
     create: {
       type: QueryType.FIELDS,
       fields: {
         sheet: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         row: {
           type: QueryType.JSON,
           required: true,
         },
       },
     },
     read: {
       type: QueryType.FIELDS,
       fields: {
         sheet: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
       },
     },
     update: {
       type: QueryType.FIELDS,
       fields: {
         sheet: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         rowIndex: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         row: {
           type: QueryType.JSON,
           required: true,
         },
       },
     },
     delete: {
       type: QueryType.FIELDS,
       fields: {
         sheet: {
           type: DatasourceFieldType.STRING,
           required: true,
         },
         rowIndex: {
           type: DatasourceFieldType.NUMBER,
           required: true,
         },
       },
     },
   },
 }
 
 class GoogleSheetsIntegration implements DatasourcePlus {
   private readonly config: GoogleSheetsConfig
   private client: any
   public tables: Record<string, Table> = {}
   public schemaErrors: Record<string, string> = {}
 
   constructor(config: GoogleSheetsConfig) {
     this.config = config
     const spreadsheetId = this.cleanSpreadsheetUrl(this.config.spreadsheetId)
     this.client = new GoogleSpreadsheet(spreadsheetId)
   }
 
   getBindingIdentifier() {
     return ""
   }
 
   getStringConcat(parts: string[]) {
     return ""
   }
 
   /**
    * Pull the spreadsheet ID out from a valid google sheets URL
    * @param spreadsheetId - the URL or standard spreadsheetId of the google sheet
    * @returns spreadsheet Id of the google sheet
    */
   cleanSpreadsheetUrl(spreadsheetId: string) {
     if (!spreadsheetId) {
       throw new Error(
         "You must set a spreadsheet ID in your configuration to fetch tables."
|
|
||||||
)
|
|
||||||
}
|
|
||||||
const parts = spreadsheetId.split("/")
|
|
||||||
return parts.length > 5 ? parts[5] : spreadsheetId
|
|
||||||
}
|
|
||||||
|
|
||||||
async fetchAccessToken(
|
|
||||||
payload: AuthTokenRequest
|
|
||||||
): Promise<AuthTokenResponse> {
|
|
||||||
const response = await fetch(
|
|
||||||
"https://www.googleapis.com/oauth2/v4/token",
|
|
||||||
{
|
|
||||||
method: "POST",
|
|
||||||
body: JSON.stringify({
|
|
||||||
...payload,
|
|
||||||
grant_type: "refresh_token",
|
|
||||||
}),
|
|
||||||
headers: {
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
|
}
|
||||||
|
const parts = spreadsheetId.split("/")
|
||||||
|
return parts.length > 5 ? parts[5] : spreadsheetId
|
||||||
|
}
|
||||||
|
|
||||||
const json = await response.json()
|
async fetchAccessToken(
|
||||||
|
payload: AuthTokenRequest
|
||||||
|
): Promise<AuthTokenResponse> {
|
||||||
|
const response = await fetch("https://www.googleapis.com/oauth2/v4/token", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify({
|
||||||
|
...payload,
|
||||||
|
grant_type: "refresh_token",
|
||||||
|
}),
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
if (response.status !== 200) {
|
const json = await response.json()
|
||||||
throw new Error(
|
|
||||||
`Error authenticating with google sheets. ${json.error_description}`
|
if (response.status !== 200) {
|
||||||
|
throw new Error(
|
||||||
|
`Error authenticating with google sheets. ${json.error_description}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return json
|
||||||
|
}
|
||||||
|
|
||||||
|
async connect() {
|
||||||
|
try {
|
||||||
|
// Initialise oAuth client
|
||||||
|
const db = getGlobalDB()
|
||||||
|
let googleConfig = await getScopedConfig(db, {
|
||||||
|
type: Configs.GOOGLE,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!googleConfig) {
|
||||||
|
googleConfig = {
|
||||||
|
clientID: env.GOOGLE_CLIENT_ID,
|
||||||
|
clientSecret: env.GOOGLE_CLIENT_SECRET,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const oauthClient = new OAuth2Client({
|
||||||
|
clientId: googleConfig.clientID,
|
||||||
|
clientSecret: googleConfig.clientSecret,
|
||||||
|
})
|
||||||
|
|
||||||
|
const tokenResponse = await this.fetchAccessToken({
|
||||||
|
client_id: googleConfig.clientID,
|
||||||
|
client_secret: googleConfig.clientSecret,
|
||||||
|
refresh_token: this.config.auth.refreshToken,
|
||||||
|
})
|
||||||
|
|
||||||
|
oauthClient.setCredentials({
|
||||||
|
refresh_token: this.config.auth.refreshToken,
|
||||||
|
access_token: tokenResponse.access_token,
|
||||||
|
})
|
||||||
|
|
||||||
|
this.client.useOAuth2Client(oauthClient)
|
||||||
|
await this.client.loadInfo()
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error connecting to google sheets", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async buildSchema(datasourceId: string) {
|
||||||
|
await this.connect()
|
||||||
|
const sheets = await this.client.sheetsByIndex
|
||||||
|
const tables: Record<string, Table> = {}
|
||||||
|
for (let sheet of sheets) {
|
||||||
|
// must fetch rows to determine schema
|
||||||
|
await sheet.getRows()
|
||||||
|
// build schema
|
||||||
|
const schema: TableSchema = {}
|
||||||
|
|
||||||
|
// build schema from headers
|
||||||
|
for (let header of sheet.headerValues) {
|
||||||
|
schema[header] = {
|
||||||
|
name: header,
|
||||||
|
type: FieldTypes.STRING,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// create tables
|
||||||
|
tables[sheet.title] = {
|
||||||
|
_id: buildExternalTableId(datasourceId, sheet.title),
|
||||||
|
name: sheet.title,
|
||||||
|
primary: ["rowNumber"],
|
||||||
|
schema,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.tables = tables
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(json: QueryJson) {
|
||||||
|
const sheet = json.endpoint.entityId
|
||||||
|
|
||||||
|
const handlers = {
|
||||||
|
[DataSourceOperation.CREATE]: () =>
|
||||||
|
this.create({ sheet, row: json.body }),
|
||||||
|
[DataSourceOperation.READ]: () => this.read({ sheet }),
|
||||||
|
[DataSourceOperation.UPDATE]: () =>
|
||||||
|
this.update({
|
||||||
|
// exclude the header row and zero index
|
||||||
|
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
|
||||||
|
sheet,
|
||||||
|
row: json.body,
|
||||||
|
}),
|
||||||
|
[DataSourceOperation.DELETE]: () =>
|
||||||
|
this.delete({
|
||||||
|
// exclude the header row and zero index
|
||||||
|
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
|
||||||
|
sheet,
|
||||||
|
}),
|
||||||
|
[DataSourceOperation.CREATE_TABLE]: () =>
|
||||||
|
this.createTable(json?.table?.name),
|
||||||
|
[DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table),
|
||||||
|
[DataSourceOperation.DELETE_TABLE]: () =>
|
||||||
|
this.deleteTable(json?.table?.name),
|
||||||
|
}
|
||||||
|
|
||||||
|
const internalQueryMethod = handlers[json.endpoint.operation]
|
||||||
|
|
||||||
|
return await internalQueryMethod()
|
||||||
|
}
|
||||||
|
|
||||||
|
buildRowObject(headers: string[], values: string[], rowNumber: number) {
|
||||||
|
const rowObject: { rowNumber: number; [key: string]: any } = { rowNumber }
|
||||||
|
for (let i = 0; i < headers.length; i++) {
|
||||||
|
rowObject._id = rowNumber
|
||||||
|
rowObject[headers[i]] = values[i]
|
||||||
|
}
|
||||||
|
return rowObject
|
||||||
|
}
|
||||||
|
|
||||||
|
async createTable(name?: string) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const sheet = await this.client.addSheet({ title: name })
|
||||||
|
return sheet
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error creating new table in google sheets", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateTable(table?: any) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const sheet = await this.client.sheetsByTitle[table.name]
|
||||||
|
await sheet.loadHeaderRow()
|
||||||
|
|
||||||
|
if (table._rename) {
|
||||||
|
const headers = []
|
||||||
|
for (let header of sheet.headerValues) {
|
||||||
|
if (header === table._rename.old) {
|
||||||
|
headers.push(table._rename.updated)
|
||||||
|
} else {
|
||||||
|
headers.push(header)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await sheet.setHeaderRow(headers)
|
||||||
|
} else {
|
||||||
|
let newField = Object.keys(table.schema).find(
|
||||||
|
key => !sheet.headerValues.includes(key)
|
||||||
|
)
|
||||||
|
await sheet.setHeaderRow([...sheet.headerValues, newField])
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error updating table in google sheets", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteTable(sheet: any) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const sheetToDelete = await this.client.sheetsByTitle[sheet]
|
||||||
|
return await sheetToDelete.delete()
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error deleting table in google sheets", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { sheet: string; row: any }) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const sheet = await this.client.sheetsByTitle[query.sheet]
|
||||||
|
const rowToInsert =
|
||||||
|
typeof query.row === "string" ? JSON.parse(query.row) : query.row
|
||||||
|
const row = await sheet.addRow(rowToInsert)
|
||||||
|
return [
|
||||||
|
this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
|
||||||
|
]
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to google sheets", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { sheet: string }) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const sheet = await this.client.sheetsByTitle[query.sheet]
|
||||||
|
const rows = await sheet.getRows()
|
||||||
|
const headerValues = sheet.headerValues
|
||||||
|
const response = []
|
||||||
|
for (let row of rows) {
|
||||||
|
response.push(
|
||||||
|
this.buildRowObject(headerValues, row._rawData, row._rowNumber)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
return response
|
||||||
return json
|
} catch (err) {
|
||||||
|
console.error("Error reading from google sheets", err)
|
||||||
|
throw err
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async connect() {
|
async update(query: { sheet: string; rowIndex: number; row: any }) {
|
||||||
try {
|
try {
|
||||||
// Initialise oAuth client
|
|
||||||
const db = getGlobalDB()
|
|
||||||
let googleConfig = await getScopedConfig(db, {
|
|
||||||
type: Configs.GOOGLE,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!googleConfig) {
|
|
||||||
googleConfig = {
|
|
||||||
clientID: env.GOOGLE_CLIENT_ID,
|
|
||||||
clientSecret: env.GOOGLE_CLIENT_SECRET,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const oauthClient = new OAuth2Client({
|
|
||||||
clientId: googleConfig.clientID,
|
|
||||||
clientSecret: googleConfig.clientSecret,
|
|
||||||
})
|
|
||||||
|
|
||||||
const tokenResponse = await this.fetchAccessToken({
|
|
||||||
client_id: googleConfig.clientID,
|
|
||||||
client_secret: googleConfig.clientSecret,
|
|
||||||
refresh_token: this.config.auth.refreshToken,
|
|
||||||
})
|
|
||||||
|
|
||||||
oauthClient.setCredentials({
|
|
||||||
refresh_token: this.config.auth.refreshToken,
|
|
||||||
access_token: tokenResponse.access_token,
|
|
||||||
})
|
|
||||||
|
|
||||||
this.client.useOAuth2Client(oauthClient)
|
|
||||||
await this.client.loadInfo()
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error connecting to google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async buildSchema(datasourceId: string) {
|
|
||||||
await this.connect()
|
|
||||||
const sheets = await this.client.sheetsByIndex
|
|
||||||
const tables: Record<string, Table> = {}
|
|
||||||
for (let sheet of sheets) {
|
|
||||||
// must fetch rows to determine schema
|
|
||||||
await sheet.getRows()
|
|
||||||
// build schema
|
|
||||||
const schema: TableSchema = {}
|
|
||||||
|
|
||||||
// build schema from headers
|
|
||||||
for (let header of sheet.headerValues) {
|
|
||||||
schema[header] = {
|
|
||||||
name: header,
|
|
||||||
type: FieldTypes.STRING,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create tables
|
|
||||||
tables[sheet.title] = {
|
|
||||||
_id: buildExternalTableId(datasourceId, sheet.title),
|
|
||||||
name: sheet.title,
|
|
||||||
primary: ["rowNumber"],
|
|
||||||
schema,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.tables = tables
|
|
||||||
}
|
|
||||||
|
|
||||||
async query(json: QueryJson) {
|
|
||||||
const sheet = json.endpoint.entityId
|
|
||||||
|
|
||||||
const handlers = {
|
|
||||||
[DataSourceOperation.CREATE]: () =>
|
|
||||||
this.create({ sheet, row: json.body }),
|
|
||||||
[DataSourceOperation.READ]: () => this.read({ sheet }),
|
|
||||||
[DataSourceOperation.UPDATE]: () =>
|
|
||||||
this.update({
|
|
||||||
// exclude the header row and zero index
|
|
||||||
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
|
|
||||||
sheet,
|
|
||||||
row: json.body,
|
|
||||||
}),
|
|
||||||
[DataSourceOperation.DELETE]: () =>
|
|
||||||
this.delete({
|
|
||||||
// exclude the header row and zero index
|
|
||||||
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
|
|
||||||
sheet,
|
|
||||||
}),
|
|
||||||
[DataSourceOperation.CREATE_TABLE]: () =>
|
|
||||||
this.createTable(json?.table?.name),
|
|
||||||
[DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table),
|
|
||||||
[DataSourceOperation.DELETE_TABLE]: () =>
|
|
||||||
this.deleteTable(json?.table?.name),
|
|
||||||
}
|
|
||||||
|
|
||||||
const internalQueryMethod = handlers[json.endpoint.operation]
|
|
||||||
|
|
||||||
return await internalQueryMethod()
|
|
||||||
}
|
|
||||||
|
|
||||||
buildRowObject(headers: string[], values: string[], rowNumber: number) {
|
|
||||||
const rowObject: { rowNumber: number; [key: string]: any } = { rowNumber }
|
|
||||||
for (let i = 0; i < headers.length; i++) {
|
|
||||||
rowObject._id = rowNumber
|
|
||||||
rowObject[headers[i]] = values[i]
|
|
||||||
}
|
|
||||||
return rowObject
|
|
||||||
}
|
|
||||||
|
|
||||||
async createTable(name?: string) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheet = await this.client.addSheet({ title: name })
|
|
||||||
return sheet
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error creating new table in google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateTable(table?: any) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheet = await this.client.sheetsByTitle[table.name]
|
|
||||||
await sheet.loadHeaderRow()
|
|
||||||
|
|
||||||
if (table._rename) {
|
|
||||||
const headers = []
|
|
||||||
for (let header of sheet.headerValues) {
|
|
||||||
if (header === table._rename.old) {
|
|
||||||
headers.push(table._rename.updated)
|
|
||||||
} else {
|
|
||||||
headers.push(header)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await sheet.setHeaderRow(headers)
|
|
||||||
} else {
|
|
||||||
let newField = Object.keys(table.schema).find(
|
|
||||||
key => !sheet.headerValues.includes(key)
|
|
||||||
)
|
|
||||||
await sheet.setHeaderRow([...sheet.headerValues, newField])
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error updating table in google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteTable(sheet: any) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheetToDelete = await this.client.sheetsByTitle[sheet]
|
|
||||||
return await sheetToDelete.delete()
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error deleting table in google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query: { sheet: string; row: any }) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheet = await this.client.sheetsByTitle[query.sheet]
|
|
||||||
const rowToInsert =
|
|
||||||
typeof query.row === "string" ? JSON.parse(query.row) : query.row
|
|
||||||
const row = await sheet.addRow(rowToInsert)
|
|
||||||
return [
|
|
||||||
this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
|
|
||||||
]
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query: { sheet: string }) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheet = await this.client.sheetsByTitle[query.sheet]
|
|
||||||
const rows = await sheet.getRows()
|
|
||||||
const headerValues = sheet.headerValues
|
|
||||||
const response = []
|
|
||||||
for (let row of rows) {
|
|
||||||
response.push(
|
|
||||||
this.buildRowObject(headerValues, row._rawData, row._rowNumber)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return response
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error reading from google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query: { sheet: string; rowIndex: number; row: any }) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const sheet = await this.client.sheetsByTitle[query.sheet]
|
|
||||||
const rows = await sheet.getRows()
|
|
||||||
const row = rows[query.rowIndex]
|
|
||||||
if (row) {
|
|
||||||
const updateValues = query.row
|
|
||||||
for (let key in updateValues) {
|
|
||||||
row[key] = updateValues[key]
|
|
||||||
}
|
|
||||||
await row.save()
|
|
||||||
return [
|
|
||||||
this.buildRowObject(
|
|
||||||
sheet.headerValues,
|
|
||||||
row._rawData,
|
|
||||||
row._rowNumber
|
|
||||||
),
|
|
||||||
]
|
|
||||||
} else {
|
|
||||||
throw new Error("Row does not exist.")
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error reading from google sheets", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query: { sheet: string; rowIndex: number }) {
|
|
||||||
await this.connect()
|
await this.connect()
|
||||||
const sheet = await this.client.sheetsByTitle[query.sheet]
|
const sheet = await this.client.sheetsByTitle[query.sheet]
|
||||||
const rows = await sheet.getRows()
|
const rows = await sheet.getRows()
|
||||||
const row = rows[query.rowIndex]
|
const row = rows[query.rowIndex]
|
||||||
if (row) {
|
if (row) {
|
||||||
await row.delete()
|
const updateValues = query.row
|
||||||
return [{ deleted: query.rowIndex }]
|
for (let key in updateValues) {
|
||||||
|
row[key] = updateValues[key]
|
||||||
|
}
|
||||||
|
await row.save()
|
||||||
|
return [
|
||||||
|
this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
|
||||||
|
]
|
||||||
} else {
|
} else {
|
||||||
throw new Error("Row does not exist.")
|
throw new Error("Row does not exist.")
|
||||||
}
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error reading from google sheets", err)
|
||||||
|
throw err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = {
|
async delete(query: { sheet: string; rowIndex: number }) {
|
||||||
schema: SCHEMA,
|
await this.connect()
|
||||||
integration: GoogleSheetsIntegration,
|
const sheet = await this.client.sheetsByTitle[query.sheet]
|
||||||
|
const rows = await sheet.getRows()
|
||||||
|
const row = rows[query.rowIndex]
|
||||||
|
if (row) {
|
||||||
|
await row.delete()
|
||||||
|
return [{ deleted: query.rowIndex }]
|
||||||
|
} else {
|
||||||
|
throw new Error("Row does not exist.")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export default {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: GoogleSheetsIntegration,
|
||||||
|
}
|
||||||
|
|
|
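With the namespace wrapper gone, every built-in datasource file now has the same shape as a custom integration: a default export carrying a schema object and an integration class. A minimal sketch of that contract, assuming the Integration/IntegrationBase/QueryType names from "@budibase/types" as imported at the top of these files (the "Echo" datasource itself is hypothetical, purely for illustration):

import { Integration, IntegrationBase, QueryType } from "@budibase/types"

const SCHEMA: Integration = {
  friendlyName: "Echo",
  type: "Non-relational",
  description: "Hypothetical datasource that echoes its query back.",
  datasource: {},
  query: {
    read: {
      type: QueryType.JSON,
    },
  },
}

class EchoIntegration implements IntegrationBase {
  async read(query: { json: object }) {
    // echo the parsed query straight back to the caller
    return [query.json]
  }
}

// the same { schema, integration } shape for built-ins and plugins alike
export default {
  schema: SCHEMA,
  integration: EchoIntegration,
}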
@@ -1,22 +1,24 @@
-const postgres = require("./postgres")
-const dynamodb = require("./dynamodb")
-const mongodb = require("./mongodb")
-const elasticsearch = require("./elasticsearch")
-const couchdb = require("./couchdb")
-const sqlServer = require("./microsoftSqlServer")
-const s3 = require("./s3")
-const airtable = require("./airtable")
-const mysql = require("./mysql")
-const arangodb = require("./arangodb")
-const rest = require("./rest")
-const googlesheets = require("./googlesheets")
-const firebase = require("./firebase")
-const redis = require("./redis")
-const snowflake = require("./snowflake")
-const { SourceName } = require("@budibase/types")
+import postgres from "./postgres"
+import dynamodb from "./dynamodb"
+import mongodb from "./mongodb"
+import elasticsearch from "./elasticsearch"
+import couchdb from "./couchdb"
+import sqlServer from "./microsoftSqlServer"
+import s3 from "./s3"
+import airtable from "./airtable"
+import mysql from "./mysql"
+import arangodb from "./arangodb"
+import rest from "./rest"
+import googlesheets from "./googlesheets"
+import firebase from "./firebase"
+import redis from "./redis"
+import snowflake from "./snowflake"
+import { getPlugins } from "../api/controllers/plugin"
+import { SourceName, Integration, PluginType } from "@budibase/types"
 const environment = require("../environment")
+const { cloneDeep } = require("lodash")

-const DEFINITIONS = {
+const DEFINITIONS: { [key: string]: Integration } = {
   [SourceName.POSTGRES]: postgres.schema,
   [SourceName.DYNAMODB]: dynamodb.schema,
   [SourceName.MONGODB]: mongodb.schema,
@@ -33,7 +35,7 @@ const DEFINITIONS = {
   [SourceName.SNOWFLAKE]: snowflake.schema,
 }

-const INTEGRATIONS = {
+const INTEGRATIONS: { [key: string]: any } = {
   [SourceName.POSTGRES]: postgres.integration,
   [SourceName.DYNAMODB]: dynamodb.integration,
   [SourceName.MONGODB]: mongodb.integration,
@@ -48,7 +50,7 @@ const INTEGRATIONS = {
   [SourceName.FIRESTORE]: firebase.integration,
   [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
   [SourceName.REDIS]: redis.integration,
-  [SourceName.FIREBASE]: firebase.integration,
+  [SourceName.FIRESTORE]: firebase.integration,
   [SourceName.SNOWFLAKE]: snowflake.integration,
 }
@@ -64,6 +66,9 @@ if (environment.SELF_HOSTED) {
 }

 module.exports = {
-  definitions: DEFINITIONS,
+  getDefinitions: async () => {
+    const custom = await getPlugins(PluginType.DATASOURCE)
+    return cloneDeep(DEFINITIONS)
+  },
   integrations: INTEGRATIONS,
 }
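The new async getDefinitions() already fetches the DATASOURCE plugins, though at this point it still returns only a clone of the built-in map. A sketch of the merging step this sets up, assuming it lives beside DEFINITIONS in the integrations index and that plugin documents carry name and schema fields (an assumption based on the plugin controller filtering on plugin.schema?.type):

import { cloneDeep } from "lodash"
import { Integration, PluginType } from "@budibase/types"
import { getPlugins } from "../api/controllers/plugin"

export async function getDefinitionsWithPlugins(): Promise<{
  [key: string]: Integration
}> {
  const definitions = cloneDeep(DEFINITIONS)
  // fold each custom datasource plugin's schema in beside the built-ins,
  // keyed by the plugin's name (hypothetical field) rather than a SourceName
  const custom = await getPlugins(PluginType.DATASOURCE)
  for (const plugin of custom) {
    definitions[plugin.name] = plugin.schema
  }
  return definitions
}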
@@ -18,292 +18,290 @@ import {
 } from "./utils"
 import Sql from "./base/sql"

-module MSSQLModule {
 const sqlServer = require("mssql")
 const DEFAULT_SCHEMA = "dbo"

 interface MSSQLConfig {
   user: string
   password: string
   server: string
   port: number
   database: string
   schema: string
   encrypt?: boolean
 }

 interface TablesResponse {
   TABLE_CATALOG: string
   TABLE_SCHEMA: string
   TABLE_NAME: string
   TABLE_TYPE: string
 }

 const SCHEMA: Integration = {
   docs: "https://github.com/tediousjs/node-mssql",
   plus: true,
   description:
     "Microsoft SQL Server is a relational database management system developed by Microsoft. ",
   friendlyName: "MS SQL Server",
   type: "Relational",
   datasource: {
     user: {
       type: DatasourceFieldType.STRING,
       required: true,
       default: "localhost",
     },
     password: {
       type: DatasourceFieldType.PASSWORD,
       required: true,
     },
     server: {
       type: DatasourceFieldType.STRING,
       default: "localhost",
     },
     port: {
       type: DatasourceFieldType.NUMBER,
       required: false,
       default: 1433,
     },
     database: {
       type: DatasourceFieldType.STRING,
       default: "root",
     },
     schema: {
       type: DatasourceFieldType.STRING,
       default: DEFAULT_SCHEMA,
     },
     encrypt: {
       type: DatasourceFieldType.BOOLEAN,
       default: true,
     },
   },
   query: {
     create: {
       type: QueryType.SQL,
     },
     read: {
       type: QueryType.SQL,
     },
     update: {
       type: QueryType.SQL,
     },
     delete: {
       type: QueryType.SQL,
     },
   },
 }

 class SqlServerIntegration extends Sql implements DatasourcePlus {
   private readonly config: MSSQLConfig
   private index: number = 0
   private readonly pool: any
   private client: any
   public tables: Record<string, Table> = {}
   public schemaErrors: Record<string, string> = {}

   MASTER_TABLES = [
     "spt_fallback_db",
     "spt_fallback_dev",
     "spt_fallback_usg",
     "spt_monitor",
     "MSreplication_options",
   ]
   TABLES_SQL =
     "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"

   constructor(config: MSSQLConfig) {
     super(SqlClient.MS_SQL)
     this.config = config
     const clientCfg = {
       ...this.config,
       options: {
         encrypt: this.config.encrypt,
         enableArithAbort: true,
       },
     }
     delete clientCfg.encrypt
     if (!this.pool) {
       this.pool = new sqlServer.ConnectionPool(clientCfg)
     }
   }

   getBindingIdentifier(): string {
     return `@p${this.index++}`
   }

   getStringConcat(parts: string[]): string {
     return `concat(${parts.join(", ")})`
   }

   async connect() {
     try {
       this.client = await this.pool.connect()
     } catch (err) {
       // @ts-ignore
       throw new Error(err)
     }
   }

   async internalQuery(
     query: SqlQuery,
     operation: string | undefined = undefined
   ) {
     const client = this.client
     const request = client.request()
     this.index = 0
     try {
       if (Array.isArray(query.bindings)) {
         let count = 0
         for (let binding of query.bindings) {
           request.input(`p${count++}`, binding)
         }
       }
       // this is a hack to get the inserted ID back,
       // no way to do this with Knex nicely
       const sql =
         operation === Operation.CREATE
           ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
           : query.sql
       return await request.query(sql)
     } catch (err) {
       // @ts-ignore
       throw new Error(err)
     }
   }

   getDefinitionSQL(tableName: string) {
     return `select *
             from INFORMATION_SCHEMA.COLUMNS
             where TABLE_NAME='${tableName}'`
   }

   getConstraintsSQL(tableName: string) {
     return `SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
             INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
               ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
               AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
               AND KU.table_name='${tableName}'
             ORDER BY
               KU.TABLE_NAME,
               KU.ORDINAL_POSITION;`
   }

   getAutoColumnsSQL(tableName: string) {
     return `SELECT
             COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA+'.'+TABLE_NAME),COLUMN_NAME,'IsComputed')
               AS IS_COMPUTED,
             COLUMNPROPERTY(object_id(TABLE_SCHEMA+'.'+TABLE_NAME), COLUMN_NAME, 'IsIdentity')
               AS IS_IDENTITY,
             *
             FROM INFORMATION_SCHEMA.COLUMNS
             WHERE TABLE_NAME='${tableName}'`
   }

   async runSQL(sql: string) {
     return (await this.internalQuery(getSqlQuery(sql))).recordset
   }

   /**
    * Fetches the tables from the sql server database and assigns them to the datasource.
    * @param {*} datasourceId - datasourceId to fetch
    * @param entities - the tables that are to be built
    */
   async buildSchema(datasourceId: string, entities: Record<string, Table>) {
     await this.connect()
     let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
     if (tableInfo == null || !Array.isArray(tableInfo)) {
       throw "Unable to get list of tables in database"
     }

     const schema = this.config.schema || DEFAULT_SCHEMA
     const tableNames = tableInfo
       .filter((record: any) => record.TABLE_SCHEMA === schema)
       .map((record: any) => record.TABLE_NAME)
       .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)

     const tables: Record<string, Table> = {}
     for (let tableName of tableNames) {
       // get the column definition (type)
       const definition = await this.runSQL(this.getDefinitionSQL(tableName))
       // find primary key constraints
       const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
       // find the computed and identity columns (auto columns)
       const columns = await this.runSQL(this.getAutoColumnsSQL(tableName))
       const primaryKeys = constraints
         .filter(
           (constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
         )
         .map((constraint: any) => constraint.COLUMN_NAME)
       const autoColumns = columns
         .filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
         .map((col: any) => col.COLUMN_NAME)

       let schema: TableSchema = {}
       for (let def of definition) {
         const name = def.COLUMN_NAME
         if (typeof name !== "string") {
           continue
         }
         schema[name] = {
           autocolumn: !!autoColumns.find((col: string) => col === name),
           name: name,
           ...convertSqlType(def.DATA_TYPE),
           externalType: def.DATA_TYPE,
         }
       }
       tables[tableName] = {
         _id: buildExternalTableId(datasourceId, tableName),
         primary: primaryKeys,
         name: tableName,
         schema,
       }
     }
     const final = finaliseExternalTables(tables, entities)
     this.tables = final.tables
     this.schemaErrors = final.errors
   }

   async read(query: SqlQuery | string) {
     await this.connect()
     const response = await this.internalQuery(getSqlQuery(query))
     return response.recordset
   }

   async create(query: SqlQuery | string) {
     await this.connect()
     const response = await this.internalQuery(getSqlQuery(query))
     return response.recordset || [{ created: true }]
   }

   async update(query: SqlQuery | string) {
     await this.connect()
     const response = await this.internalQuery(getSqlQuery(query))
     return response.recordset || [{ updated: true }]
   }

   async delete(query: SqlQuery | string) {
     await this.connect()
     const response = await this.internalQuery(getSqlQuery(query))
     return response.recordset || [{ deleted: true }]
   }

   async query(json: QueryJson) {
     const schema = this.config.schema
     await this.connect()
     if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
       json.endpoint.schema = schema
     }
     const operation = this._operation(json)
     const queryFn = (query: any, op: string) => this.internalQuery(query, op)
     const processFn = (result: any) =>
       result.recordset ? result.recordset : [{ [operation]: true }]
     return this.queryWithReturning(json, queryFn, processFn)
   }
 }

-  module.exports = {
-    schema: SCHEMA,
-    integration: SqlServerIntegration,
-  }
-}
+export default {
+  schema: SCHEMA,
+  integration: SqlServerIntegration,
+}
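SqlServerIntegration binds parameters by handing Knex @p0, @p1, ... placeholders from getBindingIdentifier() and then registering the matching inputs on the mssql request in internalQuery. A minimal sketch of the same round trip against the mssql driver directly (the table and column names are placeholders):

import * as sqlServer from "mssql"

async function findPersonByName(pool: sqlServer.ConnectionPool, name: string) {
  const request = pool.request()
  // the SQL references @p0; the input is registered without the @ prefix,
  // mirroring getBindingIdentifier() / request.input(`p${count++}`, binding)
  request.input("p0", name)
  const result = await request.query("SELECT * FROM people WHERE name = @p0")
  return result.recordset
}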
@ -15,313 +15,309 @@ import {
|
||||||
CommonOptions,
|
CommonOptions,
|
||||||
} from "mongodb"
|
} from "mongodb"
|
||||||
|
|
||||||
module MongoDBModule {
|
interface MongoDBConfig {
|
||||||
interface MongoDBConfig {
|
connectionString: string
|
||||||
connectionString: string
|
db: string
|
||||||
db: string
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://github.com/mongodb/node-mongodb-native",
|
||||||
|
friendlyName: "MongoDB",
|
||||||
|
type: "Non-relational",
|
||||||
|
description:
|
||||||
|
"MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
|
||||||
|
datasource: {
|
||||||
|
connectionString: {
|
||||||
|
type: DatasourceFieldType.STRING,
|
||||||
|
required: true,
|
||||||
|
default: "mongodb://localhost:27017",
|
||||||
|
},
|
||||||
|
db: {
|
||||||
|
type: DatasourceFieldType.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryType.JSON,
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryType.JSON,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryType.JSON,
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryType.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
extra: {
|
||||||
|
collection: {
|
||||||
|
displayName: "Collection",
|
||||||
|
type: DatasourceFieldType.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
actionTypes: {
|
||||||
|
displayName: "Action Types",
|
||||||
|
type: DatasourceFieldType.LIST,
|
||||||
|
required: true,
|
||||||
|
data: {
|
||||||
|
read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"],
|
||||||
|
create: ["insertOne", "insertMany"],
|
||||||
|
update: ["updateOne", "updateMany"],
|
||||||
|
delete: ["deleteOne", "deleteMany"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class MongoIntegration implements IntegrationBase {
|
||||||
|
private config: MongoDBConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: MongoDBConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.client = new MongoClient(config.connectionString)
|
||||||
}
|
}
|
||||||
|
|
||||||
const SCHEMA: Integration = {
|
async connect() {
|
||||||
docs: "https://github.com/mongodb/node-mongodb-native",
|
return this.client.connect()
|
||||||
friendlyName: "MongoDB",
|
|
||||||
type: "Non-relational",
|
|
||||||
description:
|
|
||||||
"MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
|
|
||||||
datasource: {
|
|
||||||
connectionString: {
|
|
||||||
type: DatasourceFieldType.STRING,
|
|
||||||
required: true,
|
|
||||||
default: "mongodb://localhost:27017",
|
|
||||||
},
|
|
||||||
db: {
|
|
||||||
type: DatasourceFieldType.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QueryType.JSON,
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QueryType.JSON,
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QueryType.JSON,
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QueryType.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
extra: {
|
|
||||||
collection: {
|
|
||||||
displayName: "Collection",
|
|
||||||
type: DatasourceFieldType.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
actionTypes: {
|
|
||||||
displayName: "Action Types",
|
|
||||||
type: DatasourceFieldType.LIST,
|
|
||||||
required: true,
|
|
||||||
data: {
|
|
||||||
read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"],
|
|
||||||
create: ["insertOne", "insertMany"],
|
|
||||||
update: ["updateOne", "updateMany"],
|
|
||||||
delete: ["deleteOne", "deleteMany"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
class MongoIntegration implements IntegrationBase {
|
createObjectIds(json: any): object {
|
||||||
private config: MongoDBConfig
|
const self = this
|
||||||
private client: any
|
function interpolateObjectIds(json: any) {
|
||||||
|
for (let field of Object.keys(json)) {
|
||||||
constructor(config: MongoDBConfig) {
|
if (json[field] instanceof Object) {
|
||||||
this.config = config
|
json[field] = self.createObjectIds(json[field])
|
||||||
this.client = new MongoClient(config.connectionString)
|
}
|
||||||
|
if (
|
||||||
|
(field === "_id" || field?.startsWith("$")) &&
|
||||||
|
typeof json[field] === "string"
|
||||||
|
) {
|
||||||
|
const id = json[field].match(/(?<=objectid\(['"]).*(?=['"]\))/gi)?.[0]
|
||||||
|
if (id) {
|
||||||
|
json[field] = ObjectID.createFromHexString(id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return json
|
||||||
}
|
}
|
||||||
|
|
||||||
async connect() {
|
if (Array.isArray(json)) {
|
||||||
return this.client.connect()
|
for (let i = 0; i < json.length; i++) {
|
||||||
|
json[i] = interpolateObjectIds(json[i])
|
||||||
|
}
|
||||||
|
return json
|
||||||
}
|
}
|
||||||
|
return interpolateObjectIds(json)
|
||||||
|
}
|
||||||
|
|
||||||
createObjectIds(json: any): object {
|
parseQueryParams(params: string, mode: string) {
|
||||||
const self = this
|
let queryParams = []
|
||||||
function interpolateObjectIds(json: any) {
|
let openCount = 0
|
||||||
for (let field of Object.keys(json)) {
|
let inQuotes = false
|
||||||
if (json[field] instanceof Object) {
|
let i = 0
|
||||||
json[field] = self.createObjectIds(json[field])
|
let startIndex = 0
|
||||||
}
|
for (let c of params) {
|
||||||
if (
|
if (c === '"' && i > 0 && params[i - 1] !== "\\") {
|
||||||
(field === "_id" || field?.startsWith("$")) &&
|
inQuotes = !inQuotes
|
||||||
typeof json[field] === "string"
|
|
||||||
) {
|
|
||||||
const id = json[field].match(
|
|
||||||
/(?<=objectid\(['"]).*(?=['"]\))/gi
|
|
||||||
)?.[0]
|
|
||||||
if (id) {
|
|
||||||
json[field] = ObjectID.createFromHexString(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return json
|
|
||||||
}
|
}
|
||||||
|
if (c === "{" && !inQuotes) {
|
||||||
if (Array.isArray(json)) {
|
openCount++
|
||||||
for (let i = 0; i < json.length; i++) {
|
if (openCount === 1) {
|
||||||
json[i] = interpolateObjectIds(json[i])
|
startIndex = i
|
||||||
}
|
}
|
||||||
return json
|
} else if (c === "}" && !inQuotes) {
|
||||||
|
if (openCount === 1) {
|
||||||
|
queryParams.push(JSON.parse(params.substring(startIndex, i + 1)))
|
||||||
|
}
|
||||||
|
openCount--
|
||||||
}
|
}
|
||||||
return interpolateObjectIds(json)
|
i++
|
||||||
}
|
}
|
||||||
|
let group1 = queryParams[0] ?? {}
|
||||||
parseQueryParams(params: string, mode: string) {
|
let group2 = queryParams[1] ?? {}
|
||||||
let queryParams = []
|
let group3 = queryParams[2] ?? {}
|
||||||
let openCount = 0
|
if (mode === "update") {
|
||||||
let inQuotes = false
|
|
||||||
let i = 0
|
|
||||||
let startIndex = 0
|
|
||||||
for (let c of params) {
|
|
||||||
if (c === '"' && i > 0 && params[i - 1] !== "\\") {
|
|
||||||
inQuotes = !inQuotes
|
|
||||||
}
|
|
||||||
if (c === "{" && !inQuotes) {
|
|
||||||
openCount++
|
|
||||||
if (openCount === 1) {
|
|
||||||
startIndex = i
|
|
||||||
}
|
|
||||||
} else if (c === "}" && !inQuotes) {
|
|
||||||
if (openCount === 1) {
|
|
||||||
queryParams.push(JSON.parse(params.substring(startIndex, i + 1)))
|
|
||||||
}
|
|
||||||
openCount--
|
|
||||||
}
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
let group1 = queryParams[0] ?? {}
|
|
||||||
let group2 = queryParams[1] ?? {}
|
|
||||||
let group3 = queryParams[2] ?? {}
|
|
||||||
if (mode === "update") {
|
|
||||||
return {
|
|
||||||
filter: group1,
|
|
||||||
update: group2,
|
|
||||||
options: group3,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return {
|
return {
|
||||||
filter: group1,
|
filter: group1,
|
||||||
options: group2,
|
update: group2,
|
||||||
|
options: group3,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return {
|
||||||
async create(query: { json: object; extra: { [key: string]: string } }) {
|
filter: group1,
|
||||||
try {
|
options: group2,
|
||||||
await this.connect()
|
|
||||||
const db = this.client.db(this.config.db)
|
|
||||||
const collection = db.collection(query.extra.collection)
|
|
||||||
let json = this.createObjectIds(query.json)
|
|
||||||
|
|
||||||
// For mongodb we add an extra actionType to specify
|
|
||||||
// which method we want to call on the collection
|
|
||||||
switch (query.extra.actionTypes) {
|
|
||||||
case "insertOne": {
|
|
||||||
return await collection.insertOne(json)
|
|
||||||
}
|
|
||||||
case "insertMany": {
|
|
||||||
return await collection.insertMany(json)
|
|
||||||
}
|
|
||||||
default: {
|
|
||||||
throw new Error(
|
|
||||||
`actionType ${query.extra.actionTypes} does not exist on DB for create`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to mongodb", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query: { json: object; extra: { [key: string]: string } }) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const db = this.client.db(this.config.db)
|
|
||||||
const collection = db.collection(query.extra.collection)
|
|
||||||
let json = this.createObjectIds(query.json)
|
|
||||||
|
|
||||||
switch (query.extra.actionTypes) {
|
|
||||||
case "find": {
|
|
||||||
return await collection.find(json).toArray()
|
|
||||||
}
|
|
||||||
case "findOne": {
|
|
||||||
return await collection.findOne(json)
|
|
||||||
}
|
|
||||||
case "findOneAndUpdate": {
|
|
||||||
if (typeof query.json === "string") {
|
|
||||||
json = this.parseQueryParams(query.json, "update")
|
|
||||||
}
|
|
||||||
let findAndUpdateJson = this.createObjectIds(json) as {
|
|
||||||
filter: FilterQuery<any>
|
|
||||||
update: UpdateQuery<any>
|
|
||||||
options: FindOneAndUpdateOption<any>
|
|
||||||
}
|
|
||||||
return await collection.findOneAndUpdate(
|
|
||||||
findAndUpdateJson.filter,
|
|
||||||
findAndUpdateJson.update,
|
|
||||||
findAndUpdateJson.options
|
|
||||||
)
|
|
||||||
}
|
|
||||||
case "count": {
|
|
||||||
return await collection.countDocuments(json)
|
|
||||||
}
|
|
||||||
case "distinct": {
|
|
||||||
return await collection.distinct(json)
|
|
||||||
}
|
|
||||||
default: {
|
|
||||||
throw new Error(
|
|
||||||
`actionType ${query.extra.actionTypes} does not exist on DB for read`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying mongodb", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query: { json: object; extra: { [key: string]: string } }) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const db = this.client.db(this.config.db)
|
|
||||||
const collection = db.collection(query.extra.collection)
|
|
||||||
let queryJson = query.json
|
|
||||||
if (typeof queryJson === "string") {
|
|
||||||
queryJson = this.parseQueryParams(queryJson, "update")
|
|
||||||
}
|
|
||||||
let json = this.createObjectIds(queryJson) as {
|
|
||||||
filter: FilterQuery<any>
|
|
||||||
update: UpdateQuery<any>
|
|
||||||
options: object
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (query.extra.actionTypes) {
|
|
||||||
case "updateOne": {
|
|
||||||
return await collection.updateOne(
|
|
||||||
json.filter,
|
|
||||||
json.update,
|
|
||||||
json.options as UpdateOneOptions
|
|
||||||
)
|
|
||||||
}
|
|
||||||
case "updateMany": {
|
|
||||||
return await collection.updateMany(
|
|
||||||
json.filter,
|
|
||||||
json.update,
|
|
||||||
json.options as UpdateManyOptions
|
|
||||||
)
|
|
||||||
}
|
|
||||||
default: {
|
|
||||||
throw new Error(
|
|
||||||
`actionType ${query.extra.actionTypes} does not exist on DB for update`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to mongodb", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||

  async delete(query: { json: object; extra: { [key: string]: string } }) {
    try {
      await this.connect()
      const db = this.client.db(this.config.db)
      const collection = db.collection(query.extra.collection)
      let queryJson = query.json
      if (typeof queryJson === "string") {
        queryJson = this.parseQueryParams(queryJson, "delete")
      }
      let json = this.createObjectIds(queryJson) as {
        filter: FilterQuery<any>
        options: CommonOptions
      }
      if (!json.options) {
        json = {
          filter: json,
          options: {},
        }
      }

      switch (query.extra.actionTypes) {
        case "deleteOne": {
          return await collection.deleteOne(json.filter, json.options)
        }
        case "deleteMany": {
          return await collection.deleteMany(json.filter, json.options)
        }
        default: {
          throw new Error(
            `actionType ${query.extra.actionTypes} does not exist on DB for delete`
          )
        }
      }
    } catch (err) {
      console.error("Error writing to mongodb", err)
      throw err
    } finally {
      await this.client.close()
    }
  }
}

export default {
  schema: SCHEMA,
  integration: MongoIntegration,
}
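
// A minimal usage sketch (not part of the diff): driving the integration
// directly. The config shape and values here are assumptions for
// illustration, not taken from this commit.
async function exampleMongoRead() {
  const mongo = new MongoIntegration({
    connectionString: "mongodb://localhost:27017", // assumed config field
    db: "test", // assumed config field
  })
  // "find" routes to collection.find(json).toArray() in read() above
  return await mongo.read({
    json: { status: "active" },
    extra: { collection: "users", actionTypes: "find" },
  })
}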
@ -19,274 +19,269 @@ import dayjs from "dayjs"
const { NUMBER_REGEX } = require("../utilities")
import Sql from "./base/sql"

const mysql = require("mysql2/promise")

interface MySQLConfig {
  host: string
  port: number
  user: string
  password: string
  database: string
  ssl?: { [key: string]: any }
  rejectUnauthorized: boolean
  typeCast: Function
}

const SCHEMA: Integration = {
  docs: "https://github.com/sidorares/node-mysql2",
  plus: true,
  friendlyName: "MySQL",
  type: "Relational",
  description:
    "MySQL Database Service is a fully managed database service to deploy cloud-native applications.",
  datasource: {
    host: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      default: 3306,
      required: false,
    },
    user: {
      type: DatasourceFieldType.STRING,
      default: "root",
      required: true,
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      default: "root",
      required: true,
    },
    database: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
    ssl: {
      type: DatasourceFieldType.OBJECT,
      required: false,
    },
    rejectUnauthorized: {
      type: DatasourceFieldType.BOOLEAN,
      default: true,
      required: false,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

const TimezoneAwareDateTypes = ["timestamp"]

function bindingTypeCoerce(bindings: any[]) {
  for (let i = 0; i < bindings.length; i++) {
    const binding = bindings[i]
    if (typeof binding !== "string") {
      continue
    }
    const matches = binding.match(NUMBER_REGEX)
    // check if number first
    if (matches && matches[0] !== "" && !isNaN(Number(matches[0]))) {
      bindings[i] = parseFloat(binding)
    }
    // if not a number, see if it is a date - important to do in this order as any
    // integer will be considered a valid date
    else if (/^\d/.test(binding) && dayjs(binding).isValid()) {
      bindings[i] = dayjs(binding).toDate()
    }
  }
  return bindings
}
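
// A hedged sketch of the coercion above, assuming NUMBER_REGEX only matches
// fully numeric strings: purely numeric bindings become numbers, digit-led
// date strings become Date objects, and anything else passes through, e.g.
//   bindingTypeCoerce(["42", "2022-01-31", "plain text"])
//   // -> [42, new Date("2022-01-31"), "plain text"]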

class MySQLIntegration extends Sql implements DatasourcePlus {
  private config: MySQLConfig
  private client: any
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  constructor(config: MySQLConfig) {
    super(SqlClient.MY_SQL)
    this.config = config
    if (config.ssl && Object.keys(config.ssl).length === 0) {
      delete config.ssl
    }
    // make sure this defaults to true
    if (
      config.rejectUnauthorized != null &&
      !config.rejectUnauthorized &&
      config.ssl
    ) {
      config.ssl.rejectUnauthorized = config.rejectUnauthorized
    }
    // @ts-ignore
    delete config.rejectUnauthorized
    this.config = {
      ...config,
      typeCast: function (field: any, next: any) {
        if (
          field.type == "DATETIME" ||
          field.type === "DATE" ||
          field.type === "TIMESTAMP"
        ) {
          return field.string()
        }
        return next()
      },
    }
  }

  getBindingIdentifier(): string {
    return "?"
  }

  getStringConcat(parts: string[]): string {
    return `concat(${parts.join(", ")})`
  }

  async connect() {
    this.client = await mysql.createConnection(this.config)
  }

  async disconnect() {
    await this.client.end()
  }

  async internalQuery(
    query: SqlQuery,
    opts: { connect?: boolean; disableCoercion?: boolean } = {
      connect: true,
      disableCoercion: false,
    }
  ): Promise<any[] | any> {
    try {
      if (opts?.connect) {
        await this.connect()
      }
      const baseBindings = query.bindings || []
      const bindings = opts?.disableCoercion
        ? baseBindings
        : bindingTypeCoerce(baseBindings)
      // Node MySQL is callback based, so we must wrap our call in a promise
      const response = await this.client.query(query.sql, bindings)
      return response[0]
    } finally {
      if (opts?.connect) {
        await this.disconnect()
      }
    }
  }
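
  // Illustrative only: callers that manage the connection themselves (for
  // example buildSchema below) pass { connect: false }, so several statements
  // can share one connection:
  //   await this.internalQuery({ sql: "SHOW TABLES;" }, { connect: false })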

  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    const tables: { [key: string]: Table } = {}
    const database = this.config.database
    await this.connect()

    try {
      // get the tables first
      const tablesResp = await this.internalQuery(
        { sql: "SHOW TABLES;" },
        { connect: false }
      )
      const tableNames = tablesResp.map(
        (obj: any) =>
          obj[`Tables_in_${database}`] ||
          obj[`Tables_in_${database.toLowerCase()}`]
      )
      for (let tableName of tableNames) {
        const primaryKeys = []
        const schema: TableSchema = {}
        const descResp = await this.internalQuery(
          { sql: `DESCRIBE \`${tableName}\`;` },
          { connect: false }
        )
        for (let column of descResp) {
          const columnName = column.Field
          if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
            primaryKeys.push(columnName)
          }
          const constraints = {
            presence: column.Null !== "YES",
          }
          const isAuto: boolean =
            typeof column.Extra === "string" &&
            (column.Extra === "auto_increment" ||
              column.Extra.toLowerCase().includes("generated"))
          schema[columnName] = {
            name: columnName,
            autocolumn: isAuto,
            constraints,
            ...convertSqlType(column.Type),
            externalType: column.Type,
          }
        }
        if (!tables[tableName]) {
          tables[tableName] = {
            _id: buildExternalTableId(datasourceId, tableName),
            primary: primaryKeys,
            name: tableName,
            schema,
          }
        }
      }
    } finally {
      await this.disconnect()
    }
    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
  }
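
  // A hedged sketch of the mapping above: a DESCRIBE row such as
  //   { Field: "id", Type: "int(11)", Null: "NO", Key: "PRI", Extra: "auto_increment" }
  // becomes a schema entry with autocolumn: true, presence required, and the
  // SQL type converted through convertSqlType.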

  async create(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ created: true }]
  }

  async read(query: SqlQuery | string) {
    return this.internalQuery(getSqlQuery(query))
  }

  async update(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ updated: true }]
  }

  async delete(query: SqlQuery | string) {
    const results = await this.internalQuery(getSqlQuery(query))
    return results.length ? results : [{ deleted: true }]
  }

  async query(json: QueryJson) {
    await this.connect()
    try {
      const queryFn = (query: any) =>
        this.internalQuery(query, { connect: false, disableCoercion: true })
      return await this.queryWithReturning(json, queryFn)
    } finally {
      await this.disconnect()
    }
  }
}

export default {
  schema: SCHEMA,
  integration: MySQLIntegration,
}
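
// A minimal usage sketch (not part of the diff): running a parameterised read
// through the integration. The connection values are assumptions for
// illustration only.
async function exampleMySQLRead() {
  const db = new MySQLIntegration({
    host: "localhost",
    port: 3306,
    user: "root",
    password: "root",
    database: "test",
  } as MySQLConfig)
  // string bindings are coerced via bindingTypeCoerce before execution
  return await db.read({
    sql: "SELECT * FROM users WHERE id = ?",
    bindings: ["1"],
  })
}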
@ -25,437 +25,430 @@ import oracledb, {
import Sql from "./base/sql"
import { FieldTypes } from "../constants"

oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT

interface OracleConfig {
  host: string
  port: number
  database: string
  user: string
  password: string
}

const SCHEMA: Integration = {
  docs: "https://github.com/oracle/node-oracledb",
  plus: true,
  friendlyName: "Oracle",
  type: "Relational",
  description:
    "Oracle Database is an object-relational database management system developed by Oracle Corporation",
  datasource: {
    host: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      required: true,
      default: 1521,
    },
    database: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
    user: {
      type: DatasourceFieldType.STRING,
      required: true,
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      required: true,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]

/**
 * Raw query response
 */
interface ColumnsResponse {
  TABLE_NAME: string
  COLUMN_NAME: string
  DATA_TYPE: string
  DATA_DEFAULT: string | null
  COLUMN_ID: number
  CONSTRAINT_NAME: string | null
  CONSTRAINT_TYPE: string | null
  R_CONSTRAINT_NAME: string | null
  SEARCH_CONDITION: string | null
}

/**
 * An oracle constraint
 */
interface OracleConstraint {
  name: string
  type: string
  relatedConstraintName: string | null
  searchCondition: string | null
}

/**
 * An oracle column and its related constraints
 */
interface OracleColumn {
  name: string
  type: string
  default: string | null
  id: number
  constraints: { [key: string]: OracleConstraint }
}

/**
 * An oracle table and its related columns
 */
interface OracleTable {
  name: string
  columns: { [key: string]: OracleColumn }
}

const OracleContraintTypes = {
  PRIMARY: "P",
  NOT_NULL_OR_CHECK: "C",
  FOREIGN_KEY: "R",
  UNIQUE: "U",
}
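
// Illustrative only: one flat row of the COLUMNS_SQL result consumed by
// mapColumns below, before it is nested into OracleTable / OracleColumn /
// OracleConstraint. All values are assumed for the sketch; the "nextval"
// default is what isAutoColumn keys off.
const exampleColumnsRow: ColumnsResponse = {
  TABLE_NAME: "EMPLOYEES",
  COLUMN_NAME: "ID",
  DATA_TYPE: "NUMBER",
  DATA_DEFAULT: '"SYS"."ISEQ$$_123".nextval',
  COLUMN_ID: 1,
  CONSTRAINT_NAME: "SYS_C007",
  CONSTRAINT_TYPE: "P",
  R_CONSTRAINT_NAME: null,
  SEARCH_CONDITION: null,
}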

class OracleIntegration extends Sql implements DatasourcePlus {
  private readonly config: OracleConfig
  private index: number = 1

  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  private readonly COLUMNS_SQL = `
    SELECT
      tabs.table_name,
      cols.column_name,
      cols.data_type,
      cols.data_default,
      cols.column_id,
      cons.constraint_name,
      cons.constraint_type,
      cons.r_constraint_name,
      cons.search_condition
    FROM
      user_tables tabs
    JOIN
      user_tab_columns cols
        ON tabs.table_name = cols.table_name
    LEFT JOIN
      user_cons_columns col_cons
        ON cols.column_name = col_cons.column_name
        AND cols.table_name = col_cons.table_name
    LEFT JOIN
      user_constraints cons
        ON col_cons.constraint_name = cons.constraint_name
        AND cons.table_name = cols.table_name
    WHERE
      (cons.status = 'ENABLED'
        OR cons.status IS NULL)
  `

  constructor(config: OracleConfig) {
    super(SqlClient.ORACLE)
    this.config = config
  }

  getBindingIdentifier(): string {
    return `:${this.index++}`
  }

  getStringConcat(parts: string[]): string {
    return parts.join(" || ")
  }

  /**
   * Map the flat tabular columns and constraints data into a nested object
   */
  private mapColumns(result: Result<ColumnsResponse>): {
    [key: string]: OracleTable
  } {
    const oracleTables: { [key: string]: OracleTable } = {}

    if (result.rows) {
      result.rows.forEach(row => {
        const tableName = row.TABLE_NAME
        const columnName = row.COLUMN_NAME
        const dataType = row.DATA_TYPE
        const dataDefault = row.DATA_DEFAULT
        const columnId = row.COLUMN_ID
        const constraintName = row.CONSTRAINT_NAME
        const constraintType = row.CONSTRAINT_TYPE
        const relatedConstraintName = row.R_CONSTRAINT_NAME
        const searchCondition = row.SEARCH_CONDITION

        let table = oracleTables[tableName]
        if (!table) {
          table = {
            name: tableName,
            columns: {},
          }
          oracleTables[tableName] = table
        }

        let column = table.columns[columnName]
        if (!column) {
          column = {
            name: columnName,
            type: dataType,
            default: dataDefault,
            id: columnId,
            constraints: {},
          }
          table.columns[columnName] = column
        }

        if (constraintName && constraintType) {
          let constraint = column.constraints[constraintName]
          if (!constraint) {
            constraint = {
              name: constraintName,
              type: constraintType,
              relatedConstraintName: relatedConstraintName,
              searchCondition: searchCondition,
            }
          }
          column.constraints[constraintName] = constraint
        }
      })
    }

    return oracleTables
  }

  private static isSupportedColumn(column: OracleColumn) {
    return !UNSUPPORTED_TYPES.includes(column.type)
  }

  private static isAutoColumn(column: OracleColumn) {
    return !!(
      column.default && column.default.toLowerCase().includes("nextval")
    )
  }

  /**
   * No native boolean in oracle. Best we can do is to check if a manual 1 or 0 number constraint has been set up
   * This matches the default behaviour for generating DDL used in knex.
   */
  private isBooleanType(column: OracleColumn): boolean {
    return (
      column.type.toLowerCase() === "number" &&
      Object.values(column.constraints).filter(c => {
        if (
          c.type === OracleContraintTypes.NOT_NULL_OR_CHECK &&
          c.searchCondition
        ) {
          const condition = c.searchCondition
            .replace(/\s/g, "") // remove spaces
            .replace(/[']+/g, "") // remove quotes
          if (condition.includes("in(0,1)") || condition.includes("in(1,0)")) {
            return true
          }
        }
        return false
      }).length > 0
    )
  }

  private internalConvertType(column: OracleColumn): { type: string } {
    if (this.isBooleanType(column)) {
      return { type: FieldTypes.BOOLEAN }
    }

    return convertSqlType(column.type)
  }

  /**
   * Fetches the tables from the oracle table and assigns them to the datasource.
   * @param {*} datasourceId - datasourceId to fetch
   * @param entities - the tables that are to be built
   */
  async buildSchema(datasourceId: string, entities: Record<string, Table>) {
    const columnsResponse = await this.internalQuery<ColumnsResponse>({
      sql: this.COLUMNS_SQL,
    })
    const oracleTables = this.mapColumns(columnsResponse)

    const tables: { [key: string]: Table } = {}

    // iterate each table
    Object.values(oracleTables).forEach(oracleTable => {
      let table = tables[oracleTable.name]
      if (!table) {
        table = {
          _id: buildExternalTableId(datasourceId, oracleTable.name),
          primary: [],
          name: oracleTable.name,
          schema: {},
        }
        tables[oracleTable.name] = table
      }

      // iterate each column on the table
      Object.values(oracleTable.columns)
        // remove columns that we can't read / save
        .filter(oracleColumn =>
          OracleIntegration.isSupportedColumn(oracleColumn)
        )
        // match the order of the columns in the db
        .sort((c1, c2) => c1.id - c2.id)
        .forEach(oracleColumn => {
          const columnName = oracleColumn.name
          let fieldSchema = table.schema[columnName]
          if (!fieldSchema) {
            fieldSchema = {
              autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
              name: columnName,
              ...this.internalConvertType(oracleColumn),
            }
            table.schema[columnName] = fieldSchema
          }

          // iterate each constraint on the column
          Object.values(oracleColumn.constraints).forEach(oracleConstraint => {
            if (oracleConstraint.type === OracleContraintTypes.PRIMARY) {
              table.primary!.push(columnName)
            }
          })
        })
    })

    const final = finaliseExternalTables(tables, entities)
    this.tables = final.tables
    this.schemaErrors = final.errors
  }

  private async internalQuery<T>(query: SqlQuery): Promise<Result<T>> {
    let connection
    try {
      this.index = 1
      connection = await this.getConnection()

      const options: ExecuteOptions = { autoCommit: true }
      const bindings: BindParameters = query.bindings || []

      return await connection.execute<T>(query.sql, bindings, options)
    } finally {
      if (connection) {
        try {
          await connection.close()
        } catch (err) {
          console.error(err)
        }
      }
    }
  }

  private getConnection = async (): Promise<Connection> => {
    //connectString : "(DESCRIPTION =(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))(CONNECT_DATA =(SID= ORCL)))"
    const connectString = `${this.config.host}:${this.config.port || 1521}/${
      this.config.database
    }`
    const attributes: ConnectionAttributes = {
      user: this.config.user,
      password: this.config.password,
      connectString,
    }
    return oracledb.getConnection(attributes)
  }
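
  // Illustrative only: with host "localhost", the default port and an assumed
  // service name "XEPDB1", the easy-connect string built above resolves to
  // "localhost:1521/XEPDB1".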

  async create(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery<any>(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ created: true }]
  }

  async read(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery<any>(getSqlQuery(query))
    return response.rows ? response.rows : []
  }

  async update(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ updated: true }]
  }

  async delete(query: SqlQuery | string): Promise<any[]> {
    const response = await this.internalQuery(getSqlQuery(query))
    return response.rows && response.rows.length
      ? response.rows
      : [{ deleted: true }]
  }

  async query(json: QueryJson) {
    const operation = this._operation(json)
    const input = this._query(json, { disableReturning: true })
    if (Array.isArray(input)) {
      const responses = []
      for (let query of input) {
        responses.push(await this.internalQuery(query))
      }
      return responses
    } else {
      // read the row to be deleted up front for the return
      let deletedRows
      if (operation === Operation.DELETE) {
        const queryFn = (query: any) => this.internalQuery(query)
        deletedRows = await this.getReturningRow(queryFn, json)
      }

      // run the query
      const response = await this.internalQuery(input)

      // get the results or return the created / updated / deleted row
      if (deletedRows?.rows?.length) {
        return deletedRows.rows
      } else if (response.rows?.length) {
        return response.rows
      } else {
        // get the last row that was updated
        if (
          response.lastRowid &&
          json.endpoint?.entityId &&
          operation !== Operation.DELETE
        ) {
          const lastRow = await this.internalQuery({
            sql: `SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`,
          })
          return lastRow.rows
        } else {
          return [{ [operation.toLowerCase()]: true }]
        }
      }
    }
  }
}

export default {
  schema: SCHEMA,
  integration: OracleIntegration,
}
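
// A minimal usage sketch (not part of the diff): reading rows with positional
// bindings, which getBindingIdentifier() numbers :1, :2, ... per query.
// Connection values are assumptions for illustration only.
async function exampleOracleRead() {
  const db = new OracleIntegration({
    host: "localhost",
    port: 1521,
    database: "XEPDB1", // assumed service name
    user: "system",
    password: "oracle",
  })
  return await db.read({
    sql: "SELECT * FROM employees WHERE id = :1",
    bindings: [1],
  })
}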
@ -16,317 +16,313 @@ import {
} from "./utils"
import Sql from "./base/sql"

const { Client, types } = require("pg")
const { escapeDangerousCharacters } = require("../utilities")

// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
// types is undefined when running in a test env for some reason.
if (types) {
  types.setTypeParser(1114, (val: any) => val) // timestamp
  types.setTypeParser(1082, (val: any) => val) // date
  types.setTypeParser(1184, (val: any) => val) // timestampz
}

const JSON_REGEX = /'{.*}'::json/s
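
// A hedged sketch of what the regex above is for: it picks out inline
// '...'::json literals so the newline handling in internalQuery below can
// normalise them, e.g. in a statement like
//   insert into docs (body) values ('{
//     "a": 1 }'::json)
// the whole quoted-json span, including its embedded newline, is matched.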

interface PostgresConfig {
  host: string
  port: number
  database: string
  user: string
  password: string
  schema: string
  ssl?: boolean
  ca?: string
  rejectUnauthorized?: boolean
}

const SCHEMA: Integration = {
  docs: "https://node-postgres.com",
  plus: true,
  friendlyName: "PostgreSQL",
  type: "Relational",
  description:
    "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
  datasource: {
    host: {
      type: DatasourceFieldType.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldType.NUMBER,
      required: true,
      default: 5432,
    },
    database: {
      type: DatasourceFieldType.STRING,
      default: "postgres",
      required: true,
    },
    user: {
      type: DatasourceFieldType.STRING,
      default: "root",
      required: true,
    },
    password: {
      type: DatasourceFieldType.PASSWORD,
      default: "root",
      required: true,
    },
    schema: {
      type: DatasourceFieldType.STRING,
      default: "public",
      required: true,
    },
    ssl: {
      type: DatasourceFieldType.BOOLEAN,
      default: false,
      required: false,
    },
    rejectUnauthorized: {
      type: DatasourceFieldType.BOOLEAN,
      default: false,
      required: false,
    },
    ca: {
      type: DatasourceFieldType.LONGFORM,
      default: false,
      required: false,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

class PostgresIntegration extends Sql implements DatasourcePlus {
  private readonly client: any
  private readonly config: PostgresConfig
  private index: number = 1
  private open: boolean
  public tables: Record<string, Table> = {}
  public schemaErrors: Record<string, string> = {}

  COLUMNS_SQL!: string

  PRIMARY_KEYS_SQL = `
  select tc.table_schema, tc.table_name, kc.column_name as primary_key
  from information_schema.table_constraints tc
  join
    information_schema.key_column_usage kc on kc.table_name = tc.table_name
    and kc.table_schema = tc.table_schema
    and kc.constraint_name = tc.constraint_name
  where tc.constraint_type = 'PRIMARY KEY';
  `
constructor(config: PostgresConfig) {
|
constructor(config: PostgresConfig) {
|
||||||
super(SqlClient.POSTGRES)
|
super(SqlClient.POSTGRES)
|
||||||
this.config = config
|
this.config = config
|
||||||
|
|
||||||
let newConfig = {
|
let newConfig = {
|
||||||
...this.config,
|
...this.config,
|
||||||
ssl: this.config.ssl
|
ssl: this.config.ssl
|
||||||
? {
|
? {
|
||||||
rejectUnauthorized: this.config.rejectUnauthorized,
|
rejectUnauthorized: this.config.rejectUnauthorized,
|
||||||
ca: this.config.ca,
|
ca: this.config.ca,
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
}
|
|
||||||
this.client = new Client(newConfig)
|
|
||||||
this.open = false
|
|
||||||
}
|
|
||||||
|
|
||||||
getBindingIdentifier(): string {
|
|
||||||
return `$${this.index++}`
|
|
||||||
}
|
|
||||||
|
|
||||||
getStringConcat(parts: string[]): string {
|
|
||||||
return parts.join(" || ")
|
|
||||||
}
|
|
||||||
|
|
||||||
async openConnection() {
|
|
||||||
await this.client.connect()
|
|
||||||
if (!this.config.schema) {
|
|
||||||
this.config.schema = "public"
|
|
||||||
}
|
|
||||||
this.client.query(`SET search_path TO ${this.config.schema}`)
|
|
||||||
this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
|
|
||||||
this.open = true
|
|
||||||
}
|
|
||||||
|
|
||||||
closeConnection() {
|
|
||||||
const pg = this
|
|
||||||
return new Promise<void>((resolve, reject) => {
|
|
||||||
this.client.end((err: any) => {
|
|
||||||
pg.open = false
|
|
||||||
if (err) {
|
|
||||||
reject(err)
|
|
||||||
} else {
|
|
||||||
resolve()
|
|
||||||
}
|
}
|
||||||
})
|
: undefined,
|
||||||
|
}
|
||||||
|
this.client = new Client(newConfig)
|
||||||
|
this.open = false
|
||||||
|
}
|
||||||
|
|
||||||
|
getBindingIdentifier(): string {
|
||||||
|
return `$${this.index++}`
|
||||||
|
}
|
||||||
|
|
||||||
|
getStringConcat(parts: string[]): string {
|
||||||
|
return parts.join(" || ")
|
||||||
|
}
|
||||||
|
|
||||||
|
async openConnection() {
|
||||||
|
await this.client.connect()
|
||||||
|
if (!this.config.schema) {
|
||||||
|
this.config.schema = "public"
|
||||||
|
}
|
||||||
|
this.client.query(`SET search_path TO ${this.config.schema}`)
|
||||||
|
this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
|
||||||
|
this.open = true
|
||||||
|
}
|
||||||
|
|
||||||
|
closeConnection() {
|
||||||
|
const pg = this
|
||||||
|
return new Promise<void>((resolve, reject) => {
|
||||||
|
this.client.end((err: any) => {
|
||||||
|
pg.open = false
|
||||||
|
if (err) {
|
||||||
|
reject(err)
|
||||||
|
} else {
|
||||||
|
resolve()
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
})
|
||||||
|
}
|
||||||
|
|
||||||
async internalQuery(query: SqlQuery, close: boolean = true) {
|
async internalQuery(query: SqlQuery, close: boolean = true) {
|
||||||
if (!this.open) {
|
if (!this.open) {
|
||||||
await this.openConnection()
|
|
||||||
}
|
|
||||||
const client = this.client
|
|
||||||
this.index = 1
|
|
||||||
// need to handle a specific issue with json data types in postgres,
|
|
||||||
// new lines inside the JSON data will break it
|
|
||||||
if (query && query.sql) {
|
|
||||||
const matches = query.sql.match(JSON_REGEX)
|
|
||||||
if (matches && matches.length > 0) {
|
|
||||||
for (let match of matches) {
|
|
||||||
const escaped = escapeDangerousCharacters(match)
|
|
||||||
query.sql = query.sql.replace(match, escaped)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
return await client.query(query.sql, query.bindings || [])
|
|
||||||
} catch (err) {
|
|
||||||
await this.closeConnection()
|
|
||||||
// @ts-ignore
|
|
||||||
throw new Error(err)
|
|
||||||
} finally {
|
|
||||||
if (close) {
|
|
||||||
await this.closeConnection()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetches the tables from the postgres table and assigns them to the datasource.
|
|
||||||
* @param {*} datasourceId - datasourceId to fetch
|
|
||||||
* @param entities - the tables that are to be built
|
|
||||||
*/
|
|
||||||
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
|
|
||||||
let tableKeys: { [key: string]: string[] } = {}
|
|
||||||
await this.openConnection()
|
await this.openConnection()
|
||||||
try {
|
}
|
||||||
const primaryKeysResponse = await this.client.query(
|
const client = this.client
|
||||||
this.PRIMARY_KEYS_SQL
|
this.index = 1
|
||||||
)
|
// need to handle a specific issue with json data types in postgres,
|
||||||
for (let table of primaryKeysResponse.rows) {
|
// new lines inside the JSON data will break it
|
||||||
const tableName = table.table_name
|
if (query && query.sql) {
|
||||||
if (!tableKeys[tableName]) {
|
const matches = query.sql.match(JSON_REGEX)
|
||||||
tableKeys[tableName] = []
|
if (matches && matches.length > 0) {
|
||||||
}
|
for (let match of matches) {
|
||||||
const key = table.column_name || table.primary_key
|
const escaped = escapeDangerousCharacters(match)
|
||||||
// only add the unique keys
|
query.sql = query.sql.replace(match, escaped)
|
||||||
if (key && tableKeys[tableName].indexOf(key) === -1) {
|
|
||||||
tableKeys[tableName].push(key)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} catch (err) {
|
|
||||||
tableKeys = {}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const columnsResponse = await this.client.query(this.COLUMNS_SQL)
|
|
||||||
|
|
||||||
const tables: { [key: string]: Table } = {}
|
|
||||||
|
|
||||||
for (let column of columnsResponse.rows) {
|
|
||||||
const tableName: string = column.table_name
|
|
||||||
const columnName: string = column.column_name
|
|
||||||
|
|
||||||
// table key doesn't exist yet
|
|
||||||
if (!tables[tableName] || !tables[tableName].schema) {
|
|
||||||
tables[tableName] = {
|
|
||||||
_id: buildExternalTableId(datasourceId, tableName),
|
|
||||||
primary: tableKeys[tableName] || [],
|
|
||||||
name: tableName,
|
|
||||||
schema: {},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const identity = !!(
|
|
||||||
column.identity_generation ||
|
|
||||||
column.identity_start ||
|
|
||||||
column.identity_increment
|
|
||||||
)
|
|
||||||
const hasDefault =
|
|
||||||
typeof column.column_default === "string" &&
|
|
||||||
column.column_default.startsWith("nextval")
|
|
||||||
const isGenerated =
|
|
||||||
column.is_generated && column.is_generated !== "NEVER"
|
|
||||||
const isAuto: boolean = hasDefault || identity || isGenerated
|
|
||||||
tables[tableName].schema[columnName] = {
|
|
||||||
autocolumn: isAuto,
|
|
||||||
name: columnName,
|
|
||||||
...convertSqlType(column.data_type),
|
|
||||||
externalType: column.data_type,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const final = finaliseExternalTables(tables, entities)
|
|
||||||
this.tables = final.tables
|
|
||||||
this.schemaErrors = final.errors
|
|
||||||
} catch (err) {
|
|
||||||
// @ts-ignore
|
|
||||||
throw new Error(err)
|
|
||||||
} finally {
|
|
||||||
await this.closeConnection()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
try {
|
||||||
async create(query: SqlQuery | string) {
|
return await client.query(query.sql, query.bindings || [])
|
||||||
const response = await this.internalQuery(getSqlQuery(query))
|
} catch (err) {
|
||||||
return response.rows.length ? response.rows : [{ created: true }]
|
await this.closeConnection()
|
||||||
}
|
// @ts-ignore
|
||||||
|
throw new Error(err)
|
||||||
async read(query: SqlQuery | string) {
|
} finally {
|
||||||
const response = await this.internalQuery(getSqlQuery(query))
|
if (close) {
|
||||||
return response.rows
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query: SqlQuery | string) {
|
|
||||||
const response = await this.internalQuery(getSqlQuery(query))
|
|
||||||
return response.rows.length ? response.rows : [{ updated: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query: SqlQuery | string) {
|
|
||||||
const response = await this.internalQuery(getSqlQuery(query))
|
|
||||||
return response.rows.length ? response.rows : [{ deleted: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async query(json: QueryJson) {
|
|
||||||
const operation = this._operation(json).toLowerCase()
|
|
||||||
const input = this._query(json)
|
|
||||||
if (Array.isArray(input)) {
|
|
||||||
const responses = []
|
|
||||||
for (let query of input) {
|
|
||||||
responses.push(await this.internalQuery(query, false))
|
|
||||||
}
|
|
||||||
await this.closeConnection()
|
await this.closeConnection()
|
||||||
return responses
|
|
||||||
} else {
|
|
||||||
const response = await this.internalQuery(input)
|
|
||||||
return response.rows.length ? response.rows : [{ [operation]: true }]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = {
|
/**
|
||||||
schema: SCHEMA,
|
* Fetches the tables from the postgres table and assigns them to the datasource.
|
||||||
integration: PostgresIntegration,
|
* @param {*} datasourceId - datasourceId to fetch
|
||||||
|
* @param entities - the tables that are to be built
|
||||||
|
*/
|
||||||
|
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
|
||||||
|
let tableKeys: { [key: string]: string[] } = {}
|
||||||
|
await this.openConnection()
|
||||||
|
try {
|
||||||
|
const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
|
||||||
|
for (let table of primaryKeysResponse.rows) {
|
||||||
|
const tableName = table.table_name
|
||||||
|
if (!tableKeys[tableName]) {
|
||||||
|
tableKeys[tableName] = []
|
||||||
|
}
|
||||||
|
const key = table.column_name || table.primary_key
|
||||||
|
// only add the unique keys
|
||||||
|
if (key && tableKeys[tableName].indexOf(key) === -1) {
|
||||||
|
tableKeys[tableName].push(key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
tableKeys = {}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const columnsResponse = await this.client.query(this.COLUMNS_SQL)
|
||||||
|
|
||||||
|
const tables: { [key: string]: Table } = {}
|
||||||
|
|
||||||
|
for (let column of columnsResponse.rows) {
|
||||||
|
const tableName: string = column.table_name
|
||||||
|
const columnName: string = column.column_name
|
||||||
|
|
||||||
|
// table key doesn't exist yet
|
||||||
|
if (!tables[tableName] || !tables[tableName].schema) {
|
||||||
|
tables[tableName] = {
|
||||||
|
_id: buildExternalTableId(datasourceId, tableName),
|
||||||
|
primary: tableKeys[tableName] || [],
|
||||||
|
name: tableName,
|
||||||
|
schema: {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const identity = !!(
|
||||||
|
column.identity_generation ||
|
||||||
|
column.identity_start ||
|
||||||
|
column.identity_increment
|
||||||
|
)
|
||||||
|
const hasDefault =
|
||||||
|
typeof column.column_default === "string" &&
|
||||||
|
column.column_default.startsWith("nextval")
|
||||||
|
const isGenerated =
|
||||||
|
column.is_generated && column.is_generated !== "NEVER"
|
||||||
|
const isAuto: boolean = hasDefault || identity || isGenerated
|
||||||
|
tables[tableName].schema[columnName] = {
|
||||||
|
autocolumn: isAuto,
|
||||||
|
name: columnName,
|
||||||
|
...convertSqlType(column.data_type),
|
||||||
|
externalType: column.data_type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const final = finaliseExternalTables(tables, entities)
|
||||||
|
this.tables = final.tables
|
||||||
|
this.schemaErrors = final.errors
|
||||||
|
} catch (err) {
|
||||||
|
// @ts-ignore
|
||||||
|
throw new Error(err)
|
||||||
|
} finally {
|
||||||
|
await this.closeConnection()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: SqlQuery | string) {
|
||||||
|
const response = await this.internalQuery(getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ created: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: SqlQuery | string) {
|
||||||
|
const response = await this.internalQuery(getSqlQuery(query))
|
||||||
|
return response.rows
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: SqlQuery | string) {
|
||||||
|
const response = await this.internalQuery(getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ updated: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: SqlQuery | string) {
|
||||||
|
const response = await this.internalQuery(getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ deleted: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(json: QueryJson) {
|
||||||
|
const operation = this._operation(json).toLowerCase()
|
||||||
|
const input = this._query(json)
|
||||||
|
if (Array.isArray(input)) {
|
||||||
|
const responses = []
|
||||||
|
for (let query of input) {
|
||||||
|
responses.push(await this.internalQuery(query, false))
|
||||||
|
}
|
||||||
|
await this.closeConnection()
|
||||||
|
return responses
|
||||||
|
} else {
|
||||||
|
const response = await this.internalQuery(input)
|
||||||
|
return response.rows.length ? response.rows : [{ [operation]: true }]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export default {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: PostgresIntegration,
|
||||||
|
}
|
||||||
|
|
|
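For illustration, a minimal sketch (not part of this commit) of how the uniform `{ schema, integration }` default export can be consumed; the import path and every config value below are assumptions:

// Sketch only: path and credentials are assumed for illustration.
import postgres from "./postgres"

const { schema, integration: PostgresIntegration } = postgres

// schema.datasource lists the fields above; the class takes a matching config.
const pg = new PostgresIntegration({
  host: "localhost",
  port: 5432,
  database: "postgres",
  user: "root",
  password: "root",
  schema: "public",
  ssl: false,
  rejectUnauthorized: false,
  ca: "",
})
const rows = await pg.read({ sql: "select 1 as one" })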
@@ -1,149 +1,147 @@
import { DatasourceFieldType, Integration, QueryType } from "@budibase/types"
import Redis from "ioredis"

interface RedisConfig {
  host: string
  port: number
  username: string
  password?: string
}

const SCHEMA: Integration = {
  docs: "https://redis.io/docs/",
  description: "",
  friendlyName: "Redis",
  type: "Non-relational",
  datasource: {
    host: {
      type: "string",
      required: true,
      default: "localhost",
    },
    port: {
      type: "number",
      required: true,
      default: 6379,
    },
    username: {
      type: "string",
      required: false,
    },
    password: {
      type: "password",
      required: false,
    },
  },
  query: {
    create: {
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
        value: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
        ttl: {
          type: DatasourceFieldType.NUMBER,
        },
      },
    },
    read: {
      readable: true,
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
      },
    },
    delete: {
      type: QueryType.FIELDS,
      fields: {
        key: {
          type: DatasourceFieldType.STRING,
          required: true,
        },
      },
    },
    command: {
      readable: true,
      displayName: "Redis Command",
      type: QueryType.JSON,
    },
  },
}

class RedisIntegration {
  private readonly config: RedisConfig
  private client: any

  constructor(config: RedisConfig) {
    this.config = config
    this.client = new Redis({
      host: this.config.host,
      port: this.config.port,
      username: this.config.username,
      password: this.config.password,
    })
  }

  async disconnect() {
    this.client.disconnect()
  }

  async redisContext(query: Function) {
    try {
      return await query()
    } catch (err) {
      throw new Error(`Redis error: ${err}`)
    } finally {
      this.disconnect()
    }
  }

  async create(query: { key: string; value: string; ttl: number }) {
    return this.redisContext(async () => {
      const response = await this.client.set(query.key, query.value)
      if (query.ttl) {
        await this.client.expire(query.key, query.ttl)
      }
      return response
    })
  }

  async read(query: { key: string }) {
    return this.redisContext(async () => {
      const response = await this.client.get(query.key)
      return response
    })
  }

  async delete(query: { key: string }) {
    return this.redisContext(async () => {
      const response = await this.client.del(query.key)
      return response
    })
  }

  async command(query: { json: string }) {
    return this.redisContext(async () => {
      const commands = query.json.trim().split(" ")
      const pipeline = this.client.pipeline([commands])
      const result = await pipeline.exec()
      return {
        response: result[0][1],
      }
    })
  }
}

export default {
  schema: SCHEMA,
  integration: RedisIntegration,
}
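A hedged usage sketch of the Redis integration above (not part of the commit; the host and key are assumptions). Note that `redisContext` disconnects the client after each query, so an instance effectively serves one request:

// Sketch only: assumes a local Redis instance and the default export above.
import redis from "./redis"

const client = new redis.integration({ host: "localhost", port: 6379, username: "" })
// The JSON body of a `command` query is a space-separated raw Redis command,
// executed through an ioredis pipeline.
const { response } = await client.command({ json: "LPUSH jobs task-1" })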
@@ -14,6 +14,11 @@ import {
  BearerAuthConfig,
} from "../definitions/datasource"
import { get } from "lodash"
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
const FormData = require("form-data")
const { URLSearchParams } = require("url")

const BodyTypes = {
  NONE: "none",

@@ -50,363 +55,353 @@ const coreFields = {
  },
}

const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")

const SCHEMA: Integration = {
  docs: "https://github.com/node-fetch/node-fetch",
  description:
    "With the REST API datasource, you can connect, query and pull data from multiple REST APIs. You can then use the retrieved data to build apps.",
  friendlyName: "REST API",
  type: "API",
  datasource: {
    url: {
      type: DatasourceFieldType.STRING,
      default: "",
      required: false,
      deprecated: true,
    },
    defaultHeaders: {
      type: DatasourceFieldType.OBJECT,
      required: false,
      default: {},
    },
  },
  query: {
    create: {
      readable: true,
      displayName: "POST",
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    read: {
      displayName: "GET",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    update: {
      displayName: "PUT",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    patch: {
      displayName: "PATCH",
      readable: true,
      type: QueryType.FIELDS,
      fields: coreFields,
    },
    delete: {
      displayName: "DELETE",
      type: QueryType.FIELDS,
      fields: coreFields,
    },
  },
}

class RestIntegration implements IntegrationBase {
  private config: RestConfig
  private headers: {
    [key: string]: string
  } = {}
  private startTimeMs: number = performance.now()

  constructor(config: RestConfig) {
    this.config = config
  }

  async parseResponse(response: any, pagination: PaginationConfig | null) {
    let data, raw, headers
    const contentType = response.headers.get("content-type") || ""
    try {
      if (contentType.includes("application/json")) {
        data = await response.json()
        raw = JSON.stringify(data)
      } else if (
        contentType.includes("text/xml") ||
        contentType.includes("application/xml")
      ) {
        const rawXml = await response.text()
        data =
          (await xmlParser(rawXml, {
            explicitArray: false,
            trim: true,
            explicitRoot: false,
          })) || {}
        // there is only one structure, its an array, return the array so it appears as rows
        const keys = Object.keys(data)
        if (keys.length === 1 && Array.isArray(data[keys[0]])) {
          data = data[keys[0]]
        }
        raw = rawXml
      } else {
        data = await response.text()
        raw = data
      }
    } catch (err) {
      throw "Failed to parse response body."
    }
    const size = formatBytes(
      response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
    )
    const time = `${Math.round(performance.now() - this.startTimeMs)}ms`
    headers = response.headers.raw()
    for (let [key, value] of Object.entries(headers)) {
      headers[key] = Array.isArray(value) ? value[0] : value
    }

    // Check if a pagination cursor exists in the response
    let nextCursor = null
    if (pagination?.responseParam) {
      nextCursor = get(data, pagination.responseParam)
    }

    return {
      data,
      info: {
        code: response.status,
        size,
        time,
      },
      extra: {
        raw,
        headers,
      },
      pagination: {
        cursor: nextCursor,
      },
    }
  }

  getUrl(
    path: string,
    queryString: string,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
  ): string {
    // Add pagination params to query string if required
    if (pagination?.location === "query" && paginationValues) {
      const { pageParam, sizeParam } = pagination
      const params = new URLSearchParams()

      // Append page number or cursor param if configured
      if (pageParam && paginationValues.page != null) {
        params.append(pageParam, paginationValues.page)
      }

      // Append page size param if configured
      if (sizeParam && paginationValues.limit != null) {
        params.append(sizeParam, paginationValues.limit)
      }

      // Prepend query string with pagination params
      let paginationString = params.toString()
      if (paginationString) {
        queryString = `${paginationString}&${queryString}`
      }
    }

    const main = `${path}?${queryString}`
    let complete = main
    if (this.config.url && !main.startsWith("http")) {
      complete = !this.config.url ? main : `${this.config.url}/${main}`
    }
    if (!complete.startsWith("http")) {
      complete = `http://${complete}`
    }
    return complete
  }

  addBody(
    bodyType: string,
    body: string | any,
    input: any,
    pagination: PaginationConfig | null,
    paginationValues: PaginationValues | null
  ) {
    if (!input.headers) {
      input.headers = {}
    }
    if (bodyType === BodyTypes.NONE) {
      return input
    }
    let error,
      object: any = {},
      string = ""
    try {
      if (body) {
        string = typeof body !== "string" ? JSON.stringify(body) : body
        object = typeof body === "object" ? body : JSON.parse(body)
      }
    } catch (err) {
      error = err
    }

    // Util to add pagination values to a certain body type
    const addPaginationToBody = (insertFn: Function) => {
      if (pagination?.location === "body") {
        if (pagination?.pageParam && paginationValues?.page != null) {
          insertFn(pagination.pageParam, paginationValues.page)
        }
        if (pagination?.sizeParam && paginationValues?.limit != null) {
          insertFn(pagination.sizeParam, paginationValues.limit)
        }
      }
    }

    switch (bodyType) {
      case BodyTypes.TEXT:
        // content type defaults to plaintext
        input.body = string
        break
      case BodyTypes.ENCODED:
        const params = new URLSearchParams()
        for (let [key, value] of Object.entries(object)) {
          params.append(key, value)
        }
        addPaginationToBody((key: string, value: any) => {
          params.append(key, value)
        })
        input.body = params
        break
      case BodyTypes.FORM_DATA:
        const form = new FormData()
        for (let [key, value] of Object.entries(object)) {
          form.append(key, value)
        }
        addPaginationToBody((key: string, value: any) => {
          form.append(key, value)
        })
        input.body = form
        break
      case BodyTypes.XML:
        if (object != null && Object.keys(object).length) {
          string = new XmlBuilder().buildObject(object)
        }
        input.body = string
        input.headers["Content-Type"] = "application/xml"
        break
      case BodyTypes.JSON:
        // if JSON error, throw it
        if (error) {
          throw "Invalid JSON for request body"
        }
        addPaginationToBody((key: string, value: any) => {
          object[key] = value
        })
        input.body = JSON.stringify(object)
        input.headers["Content-Type"] = "application/json"
        break
    }
    return input
  }

  getAuthHeaders(authConfigId: string): { [key: string]: any } {
    let headers: any = {}

    if (this.config.authConfigs && authConfigId) {
      const authConfig = this.config.authConfigs.filter(
        c => c._id === authConfigId
      )[0]
      // check the config still exists before proceeding
      // if not - do nothing
      if (authConfig) {
        let config
        switch (authConfig.type) {
          case AuthType.BASIC:
            config = authConfig.config as BasicAuthConfig
            headers.Authorization = `Basic ${Buffer.from(
              `${config.username}:${config.password}`
            ).toString("base64")}`
            break
          case AuthType.BEARER:
            config = authConfig.config as BearerAuthConfig
            headers.Authorization = `Bearer ${config.token}`
            break
        }
      }
    }

    return headers
  }

  async _req(query: RestQuery) {
    const {
      path = "",
      queryString = "",
      headers = {},
      method = "GET",
      disabledHeaders,
      bodyType,
      requestBody,
      authConfigId,
      pagination,
      paginationValues,
    } = query
    const authHeaders = this.getAuthHeaders(authConfigId)

    this.headers = {
      ...this.config.defaultHeaders,
      ...headers,
      ...authHeaders,
    }

    if (disabledHeaders) {
      for (let headerKey of Object.keys(this.headers)) {
        if (disabledHeaders[headerKey]) {
          delete this.headers[headerKey]
        }
      }
    }

    let input: any = { method, headers: this.headers }
    input = this.addBody(
      bodyType,
      requestBody,
      input,
      pagination,
      paginationValues
    )

    this.startTimeMs = performance.now()
    const url = this.getUrl(path, queryString, pagination, paginationValues)
    const response = await fetch(url, input)
    return await this.parseResponse(response, pagination)
  }

  async create(opts: RestQuery) {
    return this._req({ ...opts, method: "POST" })
  }

  async read(opts: RestQuery) {
    return this._req({ ...opts, method: "GET" })
  }

  async update(opts: RestQuery) {
    return this._req({ ...opts, method: "PUT" })
  }

  async patch(opts: RestQuery) {
    return this._req({ ...opts, method: "PATCH" })
  }

  async delete(opts: RestQuery) {
    return this._req({ ...opts, method: "DELETE" })
  }
}

export default {
  schema: SCHEMA,
  integration: RestIntegration,
  AuthType,
}
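To make the pagination plumbing in `getUrl` concrete, a sketch (not part of the commit; base URL and param names are assumed, and the config objects are cast with `as any` because only the fields shown here are assumptions about their shape):

// Sketch only: values are assumed for illustration.
const rest = new RestIntegration({
  url: "https://api.example.com",
  defaultHeaders: {},
} as any)
const url = rest.getUrl(
  "users",
  "active=true",
  { location: "query", pageParam: "page", sizeParam: "limit" } as any,
  { page: 2, limit: 50 } as any
)
// -> "https://api.example.com/users?page=2&limit=50&active=true"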
@@ -1,86 +1,83 @@
import { Integration, QueryType, IntegrationBase } from "@budibase/types"

const AWS = require("aws-sdk")

interface S3Config {
  region: string
  accessKeyId: string
  secretAccessKey: string
  s3ForcePathStyle: boolean
  endpoint?: string
}

const SCHEMA: Integration = {
  docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
  description:
    "Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
  friendlyName: "Amazon S3",
  type: "Object store",
  datasource: {
    region: {
      type: "string",
      required: false,
      default: "us-east-1",
    },
    accessKeyId: {
      type: "password",
      required: true,
    },
    secretAccessKey: {
      type: "password",
      required: true,
    },
    endpoint: {
      type: "string",
      required: false,
    },
    signatureVersion: {
      type: "string",
      required: false,
      default: "v4",
    },
  },
  query: {
    read: {
      type: QueryType.FIELDS,
      fields: {
        bucket: {
          type: "string",
          required: true,
        },
      },
    },
  },
}

class S3Integration implements IntegrationBase {
  private readonly config: S3Config
  private client: any

  constructor(config: S3Config) {
    this.config = config
    if (this.config.endpoint) {
      this.config.s3ForcePathStyle = true
    } else {
      delete this.config.endpoint
    }

    this.client = new AWS.S3(this.config)
  }

  async read(query: { bucket: string }) {
    const response = await this.client
      .listObjects({
        Bucket: query.bucket,
      })
      .promise()
    return response.Contents
  }
}

export default {
  schema: SCHEMA,
  integration: S3Integration,
}
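One design note on the S3 constructor above, shown as a sketch (not part of the commit; the endpoint and credentials are assumptions): supplying a custom `endpoint` forces path-style addressing, which S3-compatible stores such as MinIO typically require.

// Sketch only: values are assumed for illustration.
const s3 = new S3Integration({
  region: "us-east-1",
  accessKeyId: "minio-access-key",
  secretAccessKey: "minio-secret-key",
  s3ForcePathStyle: false, // flipped to true by the constructor because endpoint is set
  endpoint: "http://localhost:9000",
})
const objects = await s3.read({ bucket: "my-bucket" })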
@@ -1,99 +1,97 @@
import { Integration, QueryType, SqlQuery } from "@budibase/types"
import { Snowflake } from "snowflake-promise"

interface SnowflakeConfig {
  account: string
  username: string
  password: string
  warehouse: string
  database: string
  schema: string
}

const SCHEMA: Integration = {
  docs: "https://developers.snowflake.com/",
  description:
    "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.",
  friendlyName: "Snowflake",
  type: "Relational",
  datasource: {
    account: {
      type: "string",
      required: true,
    },
    username: {
      type: "string",
      required: true,
    },
    password: {
      type: "password",
      required: true,
    },
    warehouse: {
      type: "string",
      required: true,
    },
    database: {
      type: "string",
      required: true,
    },
    schema: {
      type: "string",
      required: true,
    },
  },
  query: {
    create: {
      type: QueryType.SQL,
    },
    read: {
      type: QueryType.SQL,
    },
    update: {
      type: QueryType.SQL,
    },
    delete: {
      type: QueryType.SQL,
    },
  },
}

class SnowflakeIntegration {
  private client: Snowflake

  constructor(config: SnowflakeConfig) {
    this.client = new Snowflake(config)
  }

  async internalQuery(query: SqlQuery) {
    await this.client.connect()
    try {
      return await this.client.execute(query.sql)
    } catch (err: any) {
      throw err?.message.split(":")[1] || err?.message
    }
  }

  async create(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async read(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async update(query: SqlQuery) {
    return this.internalQuery(query)
  }

  async delete(query: SqlQuery) {
    return this.internalQuery(query)
  }
}

export default {
  schema: SCHEMA,
  integration: SnowflakeIntegration,
}
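The Snowflake error handling above rethrows only part of the driver's message; a sketch (not part of the commit; connection values are assumed) of what a caller would see:

// Sketch only: credentials are assumed for illustration.
const snowflake = new SnowflakeIntegration({
  account: "my-account",
  username: "user",
  password: "secret",
  warehouse: "compute_wh",
  database: "db",
  schema: "public",
})
try {
  await snowflake.read({ sql: "select * from missing_table" })
} catch (message) {
  // internalQuery strips the leading error code, throwing just the segment
  // after the first ":" of the Snowflake message (or the whole message).
  console.error(message)
}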
@@ -1,5 +1,6 @@
export * from "./account"
export * from "./app"
export * from "./global"
export * from "./plugin"
export * from "./platform"
export * from "./document"
@@ -0,0 +1,4 @@
export enum PluginType {
  DATASOURCE = "datasource",
  COMPONENT = "component",
}
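Finally, a sketch of how the new `PluginType` enum might be used by a plugin's schema to declare what it provides; the manifest shape here is a hypothetical assumption, only the enum itself comes from this commit:

import { PluginType } from "@budibase/types"

// Hypothetical manifest fragment for a datasource plugin.
const pluginSchema = {
  type: PluginType.DATASOURCE,
  // ...the integration definition would follow, mirroring the SCHEMA
  // objects in the files above.
}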