Merge branch 'develop' of github.com:Budibase/budibase into feature/sql-relationships
This commit is contained in:
commit
856be36d9e
|
@ -28,8 +28,8 @@ jobs:
|
||||||
with:
|
with:
|
||||||
node-version: ${{ matrix.node-version }}
|
node-version: ${{ matrix.node-version }}
|
||||||
- run: yarn
|
- run: yarn
|
||||||
- run: yarn lint
|
|
||||||
- run: yarn bootstrap
|
- run: yarn bootstrap
|
||||||
|
- run: yarn lint
|
||||||
- run: yarn build
|
- run: yarn build
|
||||||
- run: yarn test
|
- run: yarn test
|
||||||
env:
|
env:
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"packages": [
|
"packages": [
|
||||||
"packages/*"
|
"packages/*"
|
||||||
|
|
|
@ -37,7 +37,8 @@
|
||||||
"lint": "yarn run lint:eslint && yarn run lint:prettier",
|
"lint": "yarn run lint:eslint && yarn run lint:prettier",
|
||||||
"lint:fix:eslint": "eslint --fix packages",
|
"lint:fix:eslint": "eslint --fix packages",
|
||||||
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,svelte}\"",
|
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,svelte}\"",
|
||||||
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
|
"lint:fix:ts": "lerna run lint:fix",
|
||||||
|
"lint:fix": "yarn run lint:fix:ts && yarn run lint:fix:prettier && yarn run lint:fix:eslint",
|
||||||
"test:e2e": "lerna run cy:test",
|
"test:e2e": "lerna run cy:test",
|
||||||
"test:e2e:ci": "lerna run cy:ci",
|
"test:e2e:ci": "lerna run cy:ci",
|
||||||
"build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh && cd -",
|
"build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh && cd -",
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/auth",
|
"name": "@budibase/auth",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"description": "Authentication middlewares for budibase builder and apps",
|
"description": "Authentication middlewares for budibase builder and apps",
|
||||||
"main": "src/index.js",
|
"main": "src/index.js",
|
||||||
"author": "Budibase",
|
"author": "Budibase",
|
||||||
|
|
|
@ -18,6 +18,8 @@ function connectionError(timeout, err) {
|
||||||
if (CLOSED) {
|
if (CLOSED) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
CLIENT.end()
|
||||||
|
CLOSED = true
|
||||||
// always clear this on error
|
// always clear this on error
|
||||||
clearTimeout(timeout)
|
clearTimeout(timeout)
|
||||||
CONNECTED = false
|
CONNECTED = false
|
||||||
|
@ -41,7 +43,7 @@ function init() {
|
||||||
// start the timer - only allowed 5 seconds to connect
|
// start the timer - only allowed 5 seconds to connect
|
||||||
timeout = setTimeout(() => {
|
timeout = setTimeout(() => {
|
||||||
if (!CONNECTED) {
|
if (!CONNECTED) {
|
||||||
connectionError(timeout)
|
connectionError(timeout, "Did not successfully connect in timeout")
|
||||||
}
|
}
|
||||||
}, STARTUP_TIMEOUT_MS)
|
}, STARTUP_TIMEOUT_MS)
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/bbui",
|
"name": "@budibase/bbui",
|
||||||
"description": "A UI solution used in the different Budibase projects.",
|
"description": "A UI solution used in the different Budibase projects.",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"svelte": "src/index.js",
|
"svelte": "src/index.js",
|
||||||
"module": "dist/bbui.es.js",
|
"module": "dist/bbui.es.js",
|
||||||
|
|
|
@ -4,6 +4,8 @@ const path = require("path")
|
||||||
const tmpdir = path.join(require("os").tmpdir(), ".budibase")
|
const tmpdir = path.join(require("os").tmpdir(), ".budibase")
|
||||||
|
|
||||||
// these run on ports we don't normally use so that they can run alongside the
|
// these run on ports we don't normally use so that they can run alongside the
|
||||||
|
const fs = require("fs")
|
||||||
|
|
||||||
// normal development system
|
// normal development system
|
||||||
const WORKER_PORT = "10002"
|
const WORKER_PORT = "10002"
|
||||||
const MAIN_PORT = cypressConfig.env.PORT
|
const MAIN_PORT = cypressConfig.env.PORT
|
||||||
|
@ -27,10 +29,14 @@ process.env.LOG_LEVEL = "error"
|
||||||
|
|
||||||
async function run() {
|
async function run() {
|
||||||
// require("dotenv").config({ path: resolve(dir, ".env") })
|
// require("dotenv").config({ path: resolve(dir, ".env") })
|
||||||
|
if (!fs.existsSync("../server/dist")) {
|
||||||
|
console.error("Unable to run cypress, need to build server first")
|
||||||
|
process.exit(-1)
|
||||||
|
}
|
||||||
|
|
||||||
// dont make this a variable or top level require
|
// dont make this a variable or top level require
|
||||||
// it will cause environment module to be loaded prematurely
|
// it will cause environment module to be loaded prematurely
|
||||||
const server = require("../../server/src/app")
|
const server = require("../../server/dist/app")
|
||||||
process.env.PORT = WORKER_PORT
|
process.env.PORT = WORKER_PORT
|
||||||
const worker = require("../../worker/src/index")
|
const worker = require("../../worker/src/index")
|
||||||
// reload main port for rest of system
|
// reload main port for rest of system
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/builder",
|
"name": "@budibase/builder",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
@ -65,10 +65,10 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/bbui": "^0.9.60",
|
"@budibase/bbui": "^0.9.65",
|
||||||
"@budibase/client": "^0.9.60",
|
"@budibase/client": "^0.9.65",
|
||||||
"@budibase/colorpicker": "1.1.2",
|
"@budibase/colorpicker": "1.1.2",
|
||||||
"@budibase/string-templates": "^0.9.60",
|
"@budibase/string-templates": "^0.9.65",
|
||||||
"@sentry/browser": "5.19.1",
|
"@sentry/browser": "5.19.1",
|
||||||
"@spectrum-css/page": "^3.0.1",
|
"@spectrum-css/page": "^3.0.1",
|
||||||
"@spectrum-css/vars": "^3.0.1",
|
"@spectrum-css/vars": "^3.0.1",
|
||||||
|
|
|
@ -14,7 +14,6 @@
|
||||||
$: allowDeleteTrigger = !steps.length
|
$: allowDeleteTrigger = !steps.length
|
||||||
|
|
||||||
function deleteStep() {
|
function deleteStep() {
|
||||||
console.log("Running")
|
|
||||||
automationStore.actions.deleteAutomationBlock(block)
|
automationStore.actions.deleteAutomationBlock(block)
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
|
@ -16,11 +16,13 @@
|
||||||
$: automation = $automationStore.selectedAutomation?.automation
|
$: automation = $automationStore.selectedAutomation?.automation
|
||||||
|
|
||||||
onMount(async () => {
|
onMount(async () => {
|
||||||
// save the automation initially
|
if (!automation?.definition?.trigger?.inputs.schemaUrl) {
|
||||||
await automationStore.actions.save({
|
// save the automation initially
|
||||||
instanceId,
|
await automationStore.actions.save({
|
||||||
automation,
|
instanceId,
|
||||||
})
|
automation,
|
||||||
|
})
|
||||||
|
}
|
||||||
interval = setInterval(async () => {
|
interval = setInterval(async () => {
|
||||||
await automationStore.actions.fetch()
|
await automationStore.actions.fetch()
|
||||||
const outputs = automation?.definition?.trigger.schema.outputs?.properties
|
const outputs = automation?.definition?.trigger.schema.outputs?.properties
|
||||||
|
|
|
@ -9,6 +9,9 @@
|
||||||
$: appUrl = $hostingStore.appUrl
|
$: appUrl = $hostingStore.appUrl
|
||||||
|
|
||||||
function fullWebhookURL(uri) {
|
function fullWebhookURL(uri) {
|
||||||
|
if (!uri) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
if (production) {
|
if (production) {
|
||||||
return `${appUrl}/${uri}`
|
return `${appUrl}/${uri}`
|
||||||
} else {
|
} else {
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/cli",
|
"name": "@budibase/cli",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
||||||
"main": "src/index.js",
|
"main": "src/index.js",
|
||||||
"bin": {
|
"bin": {
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/client",
|
"name": "@budibase/client",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"license": "MPL-2.0",
|
"license": "MPL-2.0",
|
||||||
"module": "dist/budibase-client.js",
|
"module": "dist/budibase-client.js",
|
||||||
"main": "dist/budibase-client.js",
|
"main": "dist/budibase-client.js",
|
||||||
|
@ -18,9 +18,9 @@
|
||||||
"dev:builder": "rollup -cw"
|
"dev:builder": "rollup -cw"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/bbui": "^0.9.60",
|
"@budibase/bbui": "^0.9.65",
|
||||||
"@budibase/standard-components": "^0.9.60",
|
"@budibase/standard-components": "^0.9.65",
|
||||||
"@budibase/string-templates": "^0.9.60",
|
"@budibase/string-templates": "^0.9.65",
|
||||||
"regexparam": "^1.3.0",
|
"regexparam": "^1.3.0",
|
||||||
"shortid": "^2.2.15",
|
"shortid": "^2.2.15",
|
||||||
"svelte-spa-router": "^3.0.5"
|
"svelte-spa-router": "^3.0.5"
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
"emit": true,
|
"emit": true,
|
||||||
"key": true
|
"key": true
|
||||||
},
|
},
|
||||||
|
"parser": "@typescript-eslint/parser",
|
||||||
"env": {
|
"env": {
|
||||||
"node": true
|
"node": true
|
||||||
},
|
},
|
||||||
|
|
|
@ -9,6 +9,7 @@ ENV BUDIBASE_ENVIRONMENT=PRODUCTION
|
||||||
# copy files and install dependencies
|
# copy files and install dependencies
|
||||||
COPY . ./
|
COPY . ./
|
||||||
RUN yarn
|
RUN yarn
|
||||||
|
RUN yarn build
|
||||||
|
|
||||||
EXPOSE 4001
|
EXPOSE 4001
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
const elastic = {}
|
const elastic: any = {}
|
||||||
|
|
||||||
elastic.Client = function () {
|
elastic.Client = function () {
|
||||||
this.index = jest.fn().mockResolvedValue({ body: [] })
|
this.index = jest.fn().mockResolvedValue({ body: [] })
|
|
@ -1,18 +0,0 @@
|
||||||
class Email {
|
|
||||||
constructor() {
|
|
||||||
this.apiKey = null
|
|
||||||
}
|
|
||||||
|
|
||||||
setApiKey(apiKey) {
|
|
||||||
this.apiKey = apiKey
|
|
||||||
}
|
|
||||||
|
|
||||||
async send(msg) {
|
|
||||||
if (msg.to === "invalid@test.com") {
|
|
||||||
throw "Invalid"
|
|
||||||
}
|
|
||||||
return msg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = new Email()
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
module SendgridMock {
|
||||||
|
class Email {
|
||||||
|
constructor() {
|
||||||
|
// @ts-ignore
|
||||||
|
this.apiKey = null
|
||||||
|
}
|
||||||
|
|
||||||
|
setApiKey(apiKey: any) {
|
||||||
|
// @ts-ignore
|
||||||
|
this.apiKey = apiKey
|
||||||
|
}
|
||||||
|
|
||||||
|
async send(msg: any) {
|
||||||
|
if (msg.to === "invalid@test.com") {
|
||||||
|
throw "Invalid"
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = new Email()
|
||||||
|
}
|
|
@ -1,5 +0,0 @@
|
||||||
function Airtable() {
|
|
||||||
this.base = jest.fn()
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = Airtable
|
|
|
@ -0,0 +1,8 @@
|
||||||
|
module AirtableMock {
|
||||||
|
function Airtable() {
|
||||||
|
// @ts-ignore
|
||||||
|
this.base = jest.fn()
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Airtable
|
||||||
|
}
|
|
@ -1,21 +0,0 @@
|
||||||
const arangodb = {}
|
|
||||||
|
|
||||||
arangodb.Database = function () {
|
|
||||||
this.query = jest.fn(() => ({
|
|
||||||
all: jest.fn(),
|
|
||||||
}))
|
|
||||||
this.collection = jest.fn(() => "collection")
|
|
||||||
this.close = jest.fn()
|
|
||||||
}
|
|
||||||
|
|
||||||
arangodb.aql = (strings, ...args) => {
|
|
||||||
let str = strings.join("{}")
|
|
||||||
|
|
||||||
for (let arg of args) {
|
|
||||||
str = str.replace("{}", arg)
|
|
||||||
}
|
|
||||||
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = arangodb
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
module ArangoMock {
|
||||||
|
const arangodb: any = {}
|
||||||
|
|
||||||
|
arangodb.Database = function () {
|
||||||
|
this.query = jest.fn(() => ({
|
||||||
|
all: jest.fn(),
|
||||||
|
}))
|
||||||
|
this.collection = jest.fn(() => "collection")
|
||||||
|
this.close = jest.fn()
|
||||||
|
}
|
||||||
|
|
||||||
|
// @ts-ignore
|
||||||
|
arangodb.aql = (strings, ...args) => {
|
||||||
|
let str = strings.join("{}")
|
||||||
|
|
||||||
|
for (let arg of args) {
|
||||||
|
str = str.replace("{}", arg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return str
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = arangodb
|
||||||
|
}
|
|
@ -1,38 +0,0 @@
|
||||||
const aws = {}
|
|
||||||
|
|
||||||
const response = body => () => ({ promise: () => body })
|
|
||||||
|
|
||||||
function DocumentClient() {
|
|
||||||
this.put = jest.fn(response({}))
|
|
||||||
this.query = jest.fn(
|
|
||||||
response({
|
|
||||||
Items: [],
|
|
||||||
})
|
|
||||||
)
|
|
||||||
this.scan = jest.fn(
|
|
||||||
response({
|
|
||||||
Items: [
|
|
||||||
{
|
|
||||||
Name: "test",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
})
|
|
||||||
)
|
|
||||||
this.get = jest.fn(response({}))
|
|
||||||
this.update = jest.fn(response({}))
|
|
||||||
this.delete = jest.fn(response({}))
|
|
||||||
}
|
|
||||||
|
|
||||||
function S3() {
|
|
||||||
this.listObjects = jest.fn(
|
|
||||||
response({
|
|
||||||
Contents: {},
|
|
||||||
})
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
aws.DynamoDB = { DocumentClient }
|
|
||||||
aws.S3 = S3
|
|
||||||
aws.config = { update: jest.fn() }
|
|
||||||
|
|
||||||
module.exports = aws
|
|
|
@ -0,0 +1,47 @@
|
||||||
|
module AwsMock {
|
||||||
|
const aws: any = {}
|
||||||
|
|
||||||
|
const response = (body: any) => () => ({promise: () => body})
|
||||||
|
|
||||||
|
function DocumentClient() {
|
||||||
|
// @ts-ignore
|
||||||
|
this.put = jest.fn(response({}))
|
||||||
|
// @ts-ignore
|
||||||
|
this.query = jest.fn(
|
||||||
|
response({
|
||||||
|
Items: [],
|
||||||
|
})
|
||||||
|
)
|
||||||
|
// @ts-ignore
|
||||||
|
this.scan = jest.fn(
|
||||||
|
response({
|
||||||
|
Items: [
|
||||||
|
{
|
||||||
|
Name: "test",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
)
|
||||||
|
// @ts-ignore
|
||||||
|
this.get = jest.fn(response({}))
|
||||||
|
// @ts-ignore
|
||||||
|
this.update = jest.fn(response({}))
|
||||||
|
// @ts-ignore
|
||||||
|
this.delete = jest.fn(response({}))
|
||||||
|
}
|
||||||
|
|
||||||
|
function S3() {
|
||||||
|
// @ts-ignore
|
||||||
|
this.listObjects = jest.fn(
|
||||||
|
response({
|
||||||
|
Contents: {},
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
aws.DynamoDB = {DocumentClient}
|
||||||
|
aws.S3 = S3
|
||||||
|
aws.config = {update: jest.fn()}
|
||||||
|
|
||||||
|
module.exports = aws
|
||||||
|
}
|
|
@ -1,19 +0,0 @@
|
||||||
const mongodb = {}
|
|
||||||
|
|
||||||
mongodb.MongoClient = function () {
|
|
||||||
this.connect = jest.fn()
|
|
||||||
this.close = jest.fn()
|
|
||||||
this.insertOne = jest.fn()
|
|
||||||
this.find = jest.fn(() => ({ toArray: () => [] }))
|
|
||||||
|
|
||||||
this.collection = jest.fn(() => ({
|
|
||||||
insertOne: this.insertOne,
|
|
||||||
find: this.find,
|
|
||||||
}))
|
|
||||||
|
|
||||||
this.db = () => ({
|
|
||||||
collection: this.collection,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = mongodb
|
|
|
@ -0,0 +1,21 @@
|
||||||
|
module MongoMock {
|
||||||
|
const mongodb: any = {}
|
||||||
|
|
||||||
|
mongodb.MongoClient = function () {
|
||||||
|
this.connect = jest.fn()
|
||||||
|
this.close = jest.fn()
|
||||||
|
this.insertOne = jest.fn()
|
||||||
|
this.find = jest.fn(() => ({toArray: () => []}))
|
||||||
|
|
||||||
|
this.collection = jest.fn(() => ({
|
||||||
|
insertOne: this.insertOne,
|
||||||
|
find: this.find,
|
||||||
|
}))
|
||||||
|
|
||||||
|
this.db = () => ({
|
||||||
|
collection: this.collection,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = mongodb
|
||||||
|
}
|
|
@ -1,22 +0,0 @@
|
||||||
const mssql = {}
|
|
||||||
|
|
||||||
mssql.query = jest.fn(() => ({
|
|
||||||
recordset: [
|
|
||||||
{
|
|
||||||
a: "string",
|
|
||||||
b: 1,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}))
|
|
||||||
|
|
||||||
// mssql.connect = jest.fn(() => ({ recordset: [] }))
|
|
||||||
|
|
||||||
mssql.ConnectionPool = jest.fn(() => ({
|
|
||||||
connect: jest.fn(() => ({
|
|
||||||
request: jest.fn(() => ({
|
|
||||||
query: jest.fn(() => ({})),
|
|
||||||
})),
|
|
||||||
})),
|
|
||||||
}))
|
|
||||||
|
|
||||||
module.exports = mssql
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
module MsSqlMock {
|
||||||
|
const mssql: any = {}
|
||||||
|
|
||||||
|
mssql.query = jest.fn(() => ({
|
||||||
|
recordset: [
|
||||||
|
{
|
||||||
|
a: "string",
|
||||||
|
b: 1,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}))
|
||||||
|
|
||||||
|
// mssql.connect = jest.fn(() => ({ recordset: [] }))
|
||||||
|
|
||||||
|
mssql.ConnectionPool = jest.fn(() => ({
|
||||||
|
connect: jest.fn(() => ({
|
||||||
|
request: jest.fn(() => ({
|
||||||
|
query: jest.fn(() => ({})),
|
||||||
|
})),
|
||||||
|
})),
|
||||||
|
}))
|
||||||
|
|
||||||
|
module.exports = mssql
|
||||||
|
}
|
|
@ -1,12 +0,0 @@
|
||||||
const mysql = {}
|
|
||||||
|
|
||||||
const client = {
|
|
||||||
connect: jest.fn(),
|
|
||||||
query: jest.fn((query, bindings, fn) => {
|
|
||||||
fn(null, [])
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
|
|
||||||
mysql.createConnection = jest.fn(() => client)
|
|
||||||
|
|
||||||
module.exports = mysql
|
|
|
@ -0,0 +1,14 @@
|
||||||
|
module MySQLMock {
|
||||||
|
const mysql: any = {}
|
||||||
|
|
||||||
|
const client = {
|
||||||
|
connect: jest.fn(),
|
||||||
|
query: jest.fn((query, bindings, fn) => {
|
||||||
|
fn(null, [])
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
|
||||||
|
mysql.createConnection = jest.fn(() => client)
|
||||||
|
|
||||||
|
module.exports = mysql
|
||||||
|
}
|
|
@ -1,53 +0,0 @@
|
||||||
const fetch = jest.requireActual("node-fetch")
|
|
||||||
|
|
||||||
module.exports = async (url, opts) => {
|
|
||||||
function json(body, status = 200) {
|
|
||||||
return {
|
|
||||||
status,
|
|
||||||
json: async () => {
|
|
||||||
return body
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (url.includes("/api/admin")) {
|
|
||||||
return json({
|
|
||||||
email: "test@test.com",
|
|
||||||
_id: "us_test@test.com",
|
|
||||||
status: "active",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// mocked data based on url
|
|
||||||
else if (url.includes("api/apps")) {
|
|
||||||
return json({
|
|
||||||
app1: {
|
|
||||||
url: "/app1",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} else if (url.includes("test.com")) {
|
|
||||||
return json({
|
|
||||||
body: opts.body,
|
|
||||||
url,
|
|
||||||
method: opts.method,
|
|
||||||
})
|
|
||||||
} else if (url.includes("invalid.com")) {
|
|
||||||
return json(
|
|
||||||
{
|
|
||||||
invalid: true,
|
|
||||||
},
|
|
||||||
404
|
|
||||||
)
|
|
||||||
} else if (url.includes("_search")) {
|
|
||||||
return json({
|
|
||||||
rows: [
|
|
||||||
{
|
|
||||||
doc: {
|
|
||||||
_id: "test",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
bookmark: "test",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return fetch(url, opts)
|
|
||||||
}
|
|
|
@ -0,0 +1,60 @@
|
||||||
|
module FetchMock {
|
||||||
|
const fetch = jest.requireActual("node-fetch")
|
||||||
|
|
||||||
|
module.exports = async (url: any, opts: any) => {
|
||||||
|
function json(body: any, status = 200) {
|
||||||
|
return {
|
||||||
|
status,
|
||||||
|
headers: {
|
||||||
|
get: () => {
|
||||||
|
return ["application/json"]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
json: async () => {
|
||||||
|
return body
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url.includes("/api/admin")) {
|
||||||
|
return json({
|
||||||
|
email: "test@test.com",
|
||||||
|
_id: "us_test@test.com",
|
||||||
|
status: "active",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
// mocked data based on url
|
||||||
|
else if (url.includes("api/apps")) {
|
||||||
|
return json({
|
||||||
|
app1: {
|
||||||
|
url: "/app1",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} else if (url.includes("test.com")) {
|
||||||
|
return json({
|
||||||
|
body: opts.body,
|
||||||
|
url,
|
||||||
|
method: opts.method,
|
||||||
|
})
|
||||||
|
} else if (url.includes("invalid.com")) {
|
||||||
|
return json(
|
||||||
|
{
|
||||||
|
invalid: true,
|
||||||
|
},
|
||||||
|
404
|
||||||
|
)
|
||||||
|
} else if (url.includes("_search")) {
|
||||||
|
return json({
|
||||||
|
rows: [
|
||||||
|
{
|
||||||
|
doc: {
|
||||||
|
_id: "test",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
bookmark: "test",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return fetch(url, opts)
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,29 +0,0 @@
|
||||||
const pg = {}
|
|
||||||
|
|
||||||
const query = jest.fn(() => ({
|
|
||||||
rows: [
|
|
||||||
{
|
|
||||||
a: "string",
|
|
||||||
b: 1,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}))
|
|
||||||
|
|
||||||
// constructor
|
|
||||||
function Client() {}
|
|
||||||
|
|
||||||
Client.prototype.query = query
|
|
||||||
Client.prototype.connect = jest.fn()
|
|
||||||
Client.prototype.release = jest.fn()
|
|
||||||
|
|
||||||
function Pool() {}
|
|
||||||
Pool.prototype.query = query
|
|
||||||
Pool.prototype.connect = jest.fn(() => {
|
|
||||||
return new Client()
|
|
||||||
})
|
|
||||||
|
|
||||||
pg.Client = Client
|
|
||||||
pg.Pool = Pool
|
|
||||||
pg.queryMock = query
|
|
||||||
|
|
||||||
module.exports = pg
|
|
|
@ -0,0 +1,35 @@
|
||||||
|
module PgMock {
|
||||||
|
const pg: any = {}
|
||||||
|
|
||||||
|
const query = jest.fn(() => ({
|
||||||
|
rows: [
|
||||||
|
{
|
||||||
|
a: "string",
|
||||||
|
b: 1,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}))
|
||||||
|
|
||||||
|
// constructor
|
||||||
|
function Client() {
|
||||||
|
}
|
||||||
|
|
||||||
|
Client.prototype.query = query
|
||||||
|
Client.prototype.connect = jest.fn()
|
||||||
|
Client.prototype.release = jest.fn()
|
||||||
|
|
||||||
|
function Pool() {
|
||||||
|
}
|
||||||
|
|
||||||
|
Pool.prototype.query = query
|
||||||
|
Pool.prototype.connect = jest.fn(() => {
|
||||||
|
// @ts-ignore
|
||||||
|
return new Client()
|
||||||
|
})
|
||||||
|
|
||||||
|
pg.Client = Client
|
||||||
|
pg.Pool = Pool
|
||||||
|
pg.queryMock = query
|
||||||
|
|
||||||
|
module.exports = pg
|
||||||
|
}
|
|
@ -1,27 +1,30 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/server",
|
"name": "@budibase/server",
|
||||||
"email": "hi@budibase.com",
|
"email": "hi@budibase.com",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"description": "Budibase Web Server",
|
"description": "Budibase Web Server",
|
||||||
"main": "src/electron.js",
|
"main": "src/index.js",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/Budibase/budibase.git"
|
"url": "https://github.com/Budibase/budibase.git"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "jest --testPathIgnorePatterns=routes && yarn run test:integration",
|
"build": "rm -rf dist/ && tsc && mv dist/src/* dist/ && rmdir dist/src/",
|
||||||
"test:integration": "jest --coverage --detectOpenHandles",
|
"test": "jest --coverage --maxWorkers=2",
|
||||||
"test:watch": "jest --watch",
|
"test:watch": "jest --watch",
|
||||||
"build:docker": "docker build . -t app-service",
|
"build:docker": "docker build . -t app-service",
|
||||||
"run:docker": "node src/index",
|
"run:docker": "node dist/index.js",
|
||||||
"dev:stack:up": "node scripts/dev/manage.js up",
|
"dev:stack:up": "node scripts/dev/manage.js up",
|
||||||
"dev:stack:down": "node scripts/dev/manage.js down",
|
"dev:stack:down": "node scripts/dev/manage.js down",
|
||||||
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
|
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
|
||||||
"dev:builder": "yarn run dev:stack:up && nodemon src/index.js",
|
"dev:builder": "yarn run dev:stack:up && ts-node src/index.ts",
|
||||||
|
"format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
|
||||||
"lint": "eslint --fix src/",
|
"lint": "eslint --fix src/",
|
||||||
|
"lint:fix": "yarn run format && yarn run lint",
|
||||||
"initialise": "node scripts/initialise.js"
|
"initialise": "node scripts/initialise.js"
|
||||||
},
|
},
|
||||||
"jest": {
|
"jest": {
|
||||||
|
"preset": "ts-jest",
|
||||||
"testEnvironment": "node",
|
"testEnvironment": "node",
|
||||||
"setupFiles": [
|
"setupFiles": [
|
||||||
"./scripts/jestSetup.js"
|
"./scripts/jestSetup.js"
|
||||||
|
@ -55,9 +58,9 @@
|
||||||
"author": "Budibase",
|
"author": "Budibase",
|
||||||
"license": "AGPL-3.0-or-later",
|
"license": "AGPL-3.0-or-later",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/auth": "^0.9.60",
|
"@budibase/auth": "^0.9.65",
|
||||||
"@budibase/client": "^0.9.60",
|
"@budibase/client": "^0.9.65",
|
||||||
"@budibase/string-templates": "^0.9.60",
|
"@budibase/string-templates": "^0.9.65",
|
||||||
"@elastic/elasticsearch": "7.10.0",
|
"@elastic/elasticsearch": "7.10.0",
|
||||||
"@koa/router": "8.0.0",
|
"@koa/router": "8.0.0",
|
||||||
"@sendgrid/mail": "7.1.1",
|
"@sendgrid/mail": "7.1.1",
|
||||||
|
@ -110,16 +113,26 @@
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@babel/core": "^7.14.3",
|
"@babel/core": "^7.14.3",
|
||||||
"@babel/preset-env": "^7.14.4",
|
"@babel/preset-env": "^7.14.4",
|
||||||
"@budibase/standard-components": "^0.9.60",
|
"@budibase/standard-components": "^0.9.65",
|
||||||
"@jest/test-sequencer": "^24.8.0",
|
"@jest/test-sequencer": "^24.8.0",
|
||||||
|
"@types/bull": "^3.15.1",
|
||||||
|
"@types/jest": "^26.0.23",
|
||||||
|
"@types/koa": "^2.13.3",
|
||||||
|
"@types/koa-router": "^7.4.2",
|
||||||
|
"@types/node": "^15.12.4",
|
||||||
|
"@typescript-eslint/parser": "^4.28.0",
|
||||||
"babel-jest": "^27.0.2",
|
"babel-jest": "^27.0.2",
|
||||||
"docker-compose": "^0.23.6",
|
"docker-compose": "^0.23.6",
|
||||||
"eslint": "^6.8.0",
|
"eslint": "^6.8.0",
|
||||||
"express": "^4.17.1",
|
"express": "^4.17.1",
|
||||||
"jest": "^24.8.0",
|
"jest": "^27.0.5",
|
||||||
"nodemon": "^2.0.4",
|
"nodemon": "^2.0.4",
|
||||||
"pouchdb-adapter-memory": "^7.2.1",
|
"pouchdb-adapter-memory": "^7.2.1",
|
||||||
"supertest": "^4.0.2"
|
"prettier": "^2.3.1",
|
||||||
|
"supertest": "^4.0.2",
|
||||||
|
"ts-jest": "^27.0.3",
|
||||||
|
"ts-node": "^10.0.0",
|
||||||
|
"typescript": "^4.3.4"
|
||||||
},
|
},
|
||||||
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
|
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,3 +6,4 @@ env._set("JWT_SECRET", "test-jwtsecret")
|
||||||
env._set("CLIENT_ID", "test-client-id")
|
env._set("CLIENT_ID", "test-client-id")
|
||||||
env._set("BUDIBASE_DIR", tmpdir("budibase-unittests"))
|
env._set("BUDIBASE_DIR", tmpdir("budibase-unittests"))
|
||||||
env._set("LOG_LEVEL", "silent")
|
env._set("LOG_LEVEL", "silent")
|
||||||
|
env._set("PORT", 0)
|
||||||
|
|
|
@ -88,6 +88,8 @@ async function checkForCronTriggers({ appId, oldAuto, newAuto }) {
|
||||||
async function checkForWebhooks({ appId, oldAuto, newAuto }) {
|
async function checkForWebhooks({ appId, oldAuto, newAuto }) {
|
||||||
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
|
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
|
||||||
const newTrigger = newAuto ? newAuto.definition.trigger : null
|
const newTrigger = newAuto ? newAuto.definition.trigger : null
|
||||||
|
const triggerChanged =
|
||||||
|
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
|
||||||
function isWebhookTrigger(auto) {
|
function isWebhookTrigger(auto) {
|
||||||
return (
|
return (
|
||||||
auto &&
|
auto &&
|
||||||
|
@ -98,25 +100,32 @@ async function checkForWebhooks({ appId, oldAuto, newAuto }) {
|
||||||
// need to delete webhook
|
// need to delete webhook
|
||||||
if (
|
if (
|
||||||
isWebhookTrigger(oldAuto) &&
|
isWebhookTrigger(oldAuto) &&
|
||||||
!isWebhookTrigger(newAuto) &&
|
(!isWebhookTrigger(newAuto) || triggerChanged) &&
|
||||||
oldTrigger.webhookId
|
oldTrigger.webhookId
|
||||||
) {
|
) {
|
||||||
let db = new CouchDB(appId)
|
try {
|
||||||
// need to get the webhook to get the rev
|
let db = new CouchDB(appId)
|
||||||
const webhook = await db.get(oldTrigger.webhookId)
|
// need to get the webhook to get the rev
|
||||||
const ctx = {
|
const webhook = await db.get(oldTrigger.webhookId)
|
||||||
appId,
|
const ctx = {
|
||||||
params: { id: webhook._id, rev: webhook._rev },
|
appId,
|
||||||
|
params: { id: webhook._id, rev: webhook._rev },
|
||||||
|
}
|
||||||
|
// might be updating - reset the inputs to remove the URLs
|
||||||
|
if (newTrigger) {
|
||||||
|
delete newTrigger.webhookId
|
||||||
|
newTrigger.inputs = {}
|
||||||
|
}
|
||||||
|
await webhooks.destroy(ctx)
|
||||||
|
} catch (err) {
|
||||||
|
// don't worry about not being able to delete, if it doesn't exist all good
|
||||||
}
|
}
|
||||||
// might be updating - reset the inputs to remove the URLs
|
|
||||||
if (newTrigger) {
|
|
||||||
delete newTrigger.webhookId
|
|
||||||
newTrigger.inputs = {}
|
|
||||||
}
|
|
||||||
await webhooks.destroy(ctx)
|
|
||||||
}
|
}
|
||||||
// need to create webhook
|
// need to create webhook
|
||||||
else if (!isWebhookTrigger(oldAuto) && isWebhookTrigger(newAuto)) {
|
if (
|
||||||
|
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
|
||||||
|
isWebhookTrigger(newAuto)
|
||||||
|
) {
|
||||||
const ctx = {
|
const ctx = {
|
||||||
appId,
|
appId,
|
||||||
request: {
|
request: {
|
||||||
|
|
|
@ -1,7 +1,3 @@
|
||||||
const { clearAllApps, checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
|
||||||
const setup = require("./utilities")
|
|
||||||
const { AppStatus } = require("../../../db/utils")
|
|
||||||
|
|
||||||
jest.mock("../../../utilities/redis", () => ({
|
jest.mock("../../../utilities/redis", () => ({
|
||||||
init: jest.fn(),
|
init: jest.fn(),
|
||||||
getAllLocks: () => {
|
getAllLocks: () => {
|
||||||
|
@ -15,6 +11,10 @@ jest.mock("../../../utilities/redis", () => ({
|
||||||
checkDebounce: jest.fn(),
|
checkDebounce: jest.fn(),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
const { clearAllApps, checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||||
|
const setup = require("./utilities")
|
||||||
|
const { AppStatus } = require("../../../db/utils")
|
||||||
|
|
||||||
describe("/applications", () => {
|
describe("/applications", () => {
|
||||||
let request = setup.getRequest()
|
let request = setup.getRequest()
|
||||||
let config = setup.getConfig()
|
let config = setup.getConfig()
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
|
jest.mock("../../../utilities/fileSystem/utilities")
|
||||||
|
|
||||||
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||||
const setup = require("./utilities")
|
const setup = require("./utilities")
|
||||||
|
|
||||||
jest.mock("../../../utilities/fileSystem/utilities")
|
|
||||||
|
|
||||||
describe("/backups", () => {
|
describe("/backups", () => {
|
||||||
let request = setup.getRequest()
|
let request = setup.getRequest()
|
||||||
let config = setup.getConfig()
|
let config = setup.getConfig()
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
|
jest.mock("pg")
|
||||||
|
|
||||||
let setup = require("./utilities")
|
let setup = require("./utilities")
|
||||||
let { basicDatasource } = setup.structures
|
let { basicDatasource } = setup.structures
|
||||||
let { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
let { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||||
|
|
||||||
jest.mock("pg")
|
|
||||||
const pg = require("pg")
|
const pg = require("pg")
|
||||||
|
|
||||||
describe("/datasources", () => {
|
describe("/datasources", () => {
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
// need to load environment first
|
// need to load environment first
|
||||||
|
import { ExtendableContext } from "koa"
|
||||||
|
|
||||||
const env = require("./environment")
|
const env = require("./environment")
|
||||||
const CouchDB = require("./db")
|
const CouchDB = require("./db")
|
||||||
require("@budibase/auth").init(CouchDB)
|
require("@budibase/auth").init(CouchDB)
|
||||||
|
@ -40,7 +42,7 @@ app.use(
|
||||||
|
|
||||||
if (!env.isTest()) {
|
if (!env.isTest()) {
|
||||||
const bullApp = bullboard.init()
|
const bullApp = bullboard.init()
|
||||||
app.use(async (ctx, next) => {
|
app.use(async (ctx: ExtendableContext, next: () => any) => {
|
||||||
if (ctx.path.startsWith(bullboard.pathPrefix)) {
|
if (ctx.path.startsWith(bullboard.pathPrefix)) {
|
||||||
ctx.status = 200
|
ctx.status = 200
|
||||||
ctx.respond = false
|
ctx.respond = false
|
||||||
|
@ -61,9 +63,9 @@ if (env.isProd()) {
|
||||||
env._set("NODE_ENV", "production")
|
env._set("NODE_ENV", "production")
|
||||||
Sentry.init()
|
Sentry.init()
|
||||||
|
|
||||||
app.on("error", (err, ctx) => {
|
app.on("error", (err: any, ctx: ExtendableContext) => {
|
||||||
Sentry.withScope(function (scope) {
|
Sentry.withScope(function (scope: any) {
|
||||||
scope.addEventProcessor(function (event) {
|
scope.addEventProcessor(function (event: any) {
|
||||||
return Sentry.Handlers.parseRequest(event, ctx.request)
|
return Sentry.Handlers.parseRequest(event, ctx.request)
|
||||||
})
|
})
|
||||||
Sentry.captureException(err)
|
Sentry.captureException(err)
|
|
@ -77,14 +77,34 @@ module.exports.run = async function ({ inputs }) {
|
||||||
requestBody.length !== 0 &&
|
requestBody.length !== 0 &&
|
||||||
BODY_REQUESTS.indexOf(requestMethod) !== -1
|
BODY_REQUESTS.indexOf(requestMethod) !== -1
|
||||||
) {
|
) {
|
||||||
request.body = JSON.parse(requestBody)
|
request.body =
|
||||||
|
typeof requestBody === "string"
|
||||||
|
? requestBody
|
||||||
|
: JSON.stringify(requestBody)
|
||||||
|
request.headers = {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// do a quick JSON parse if there is a body, to generate an error if its invalid
|
||||||
|
if (request.body) {
|
||||||
|
JSON.parse(request.body)
|
||||||
|
}
|
||||||
const response = await fetch(url, request)
|
const response = await fetch(url, request)
|
||||||
|
const contentType = response.headers.get("content-type")
|
||||||
|
const success = response.status === 200
|
||||||
|
let resp
|
||||||
|
if (!success) {
|
||||||
|
resp = response.statusText
|
||||||
|
} else if (contentType && contentType.indexOf("application/json") !== -1) {
|
||||||
|
resp = await response.json()
|
||||||
|
} else {
|
||||||
|
resp = await response.text()
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
response: await response.json(),
|
response: resp,
|
||||||
success: response.status === 200,
|
success: success,
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
/* istanbul ignore next */
|
/* istanbul ignore next */
|
||||||
|
|
|
@ -1,18 +1,4 @@
|
||||||
require("../../environment")
|
|
||||||
const automation = require("../index")
|
|
||||||
const usageQuota = require("../../utilities/usageQuota")
|
|
||||||
const thread = require("../thread")
|
|
||||||
const triggers = require("../triggers")
|
|
||||||
const { basicAutomation, basicTable } = require("../../tests/utilities/structures")
|
|
||||||
const { wait } = require("../../utilities")
|
|
||||||
const { makePartial } = require("../../tests/utilities")
|
|
||||||
const { cleanInputValues } = require("../automationUtils")
|
|
||||||
const setup = require("./utilities")
|
|
||||||
|
|
||||||
let workerJob
|
|
||||||
|
|
||||||
jest.mock("../../utilities/usageQuota")
|
jest.mock("../../utilities/usageQuota")
|
||||||
usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
|
|
||||||
jest.mock("../thread")
|
jest.mock("../thread")
|
||||||
jest.spyOn(global.console, "error")
|
jest.spyOn(global.console, "error")
|
||||||
jest.mock("worker-farm", () => {
|
jest.mock("worker-farm", () => {
|
||||||
|
@ -30,6 +16,21 @@ jest.mock("worker-farm", () => {
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
require("../../environment")
|
||||||
|
const automation = require("../index")
|
||||||
|
const usageQuota = require("../../utilities/usageQuota")
|
||||||
|
const thread = require("../thread")
|
||||||
|
const triggers = require("../triggers")
|
||||||
|
const { basicAutomation, basicTable } = require("../../tests/utilities/structures")
|
||||||
|
const { wait } = require("../../utilities")
|
||||||
|
const { makePartial } = require("../../tests/utilities")
|
||||||
|
const { cleanInputValues } = require("../automationUtils")
|
||||||
|
const setup = require("./utilities")
|
||||||
|
|
||||||
|
let workerJob
|
||||||
|
|
||||||
|
usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
|
||||||
|
|
||||||
describe("Run through some parts of the automations system", () => {
|
describe("Run through some parts of the automations system", () => {
|
||||||
let config = setup.getConfig()
|
let config = setup.getConfig()
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
const usageQuota = require("../../utilities/usageQuota")
|
|
||||||
const env = require("../../environment")
|
|
||||||
const setup = require("./utilities")
|
|
||||||
|
|
||||||
jest.mock("../../utilities/usageQuota")
|
jest.mock("../../utilities/usageQuota")
|
||||||
|
|
||||||
|
const usageQuota = require("../../utilities/usageQuota")
|
||||||
|
const setup = require("./utilities")
|
||||||
|
|
||||||
describe("test the create row action", () => {
|
describe("test the create row action", () => {
|
||||||
let table, row
|
let table, row
|
||||||
let config = setup.getConfig()
|
let config = setup.getConfig()
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
const usageQuota = require("../../utilities/usageQuota")
|
|
||||||
const env = require("../../environment")
|
|
||||||
const setup = require("./utilities")
|
|
||||||
|
|
||||||
jest.mock("../../utilities/usageQuota")
|
jest.mock("../../utilities/usageQuota")
|
||||||
|
|
||||||
|
const usageQuota = require("../../utilities/usageQuota")
|
||||||
|
const setup = require("./utilities")
|
||||||
|
|
||||||
describe("test the delete row action", () => {
|
describe("test the delete row action", () => {
|
||||||
let table, row, inputs
|
let table, row, inputs
|
||||||
let config = setup.getConfig()
|
let config = setup.getConfig()
|
||||||
|
|
|
@ -25,7 +25,7 @@ describe("test the outgoing webhook action", () => {
|
||||||
expect(res.success).toEqual(true)
|
expect(res.success).toEqual(true)
|
||||||
expect(res.response.url).toEqual("http://www.test.com")
|
expect(res.response.url).toEqual("http://www.test.com")
|
||||||
expect(res.response.method).toEqual("POST")
|
expect(res.response.method).toEqual("POST")
|
||||||
expect(res.response.body.a).toEqual(1)
|
expect(JSON.parse(res.response.body).a).toEqual(1)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return an error if something goes wrong in fetch", async () => {
|
it("should return an error if something goes wrong in fetch", async () => {
|
||||||
|
|
|
@ -0,0 +1,28 @@
|
||||||
|
export interface Table {
|
||||||
|
_id: string
|
||||||
|
_rev?: string
|
||||||
|
type?: string
|
||||||
|
views?: {}
|
||||||
|
name?: string
|
||||||
|
primary?: string[]
|
||||||
|
schema: {
|
||||||
|
[key: string]: {
|
||||||
|
// TODO: replace with field types enum when done
|
||||||
|
type: string
|
||||||
|
fieldName?: string
|
||||||
|
name: string
|
||||||
|
constraints?: {
|
||||||
|
type?: string
|
||||||
|
email?: boolean
|
||||||
|
inclusion?: string[]
|
||||||
|
length?: {
|
||||||
|
minimum?: string | number
|
||||||
|
maximum?: string | number
|
||||||
|
}
|
||||||
|
presence?: boolean
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
primaryDisplay?: string
|
||||||
|
sourceId?: string
|
||||||
|
}
|
|
@ -17,6 +17,7 @@ exports.FieldTypes = {
|
||||||
LINK: "link",
|
LINK: "link",
|
||||||
FORMULA: "formula",
|
FORMULA: "formula",
|
||||||
AUTO: "auto",
|
AUTO: "auto",
|
||||||
|
JSON: "json",
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.RelationshipTypes = {
|
exports.RelationshipTypes = {
|
||||||
|
|
|
@ -87,33 +87,27 @@ async function getFullLinkedDocs(appId, links) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
|
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
|
||||||
* @param {string} eventType states what type of change which is occurring, means this can be expanded upon in the
|
* @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the
|
||||||
* future quite easily (all updates go through one function).
|
* future quite easily (all updates go through one function).
|
||||||
* @param {string} appId The ID of the instance in which the change is occurring.
|
* @param {string} args.appId The ID of the instance in which the change is occurring.
|
||||||
* @param {string} tableId The ID of the of the table which is being changed.
|
* @param {string} args.tableId The ID of the of the table which is being changed.
|
||||||
* @param {object|null} row The row which is changing, e.g. created, updated or deleted.
|
* @param {object|null} args.row The row which is changing, e.g. created, updated or deleted.
|
||||||
* @param {object|null} table If the table has already been retrieved this can be used to reduce database gets.
|
* @param {object|null} args.table If the table has already been retrieved this can be used to reduce database gets.
|
||||||
* @param {object|null} oldTable If the table is being updated then the old table can be provided for differencing.
|
* @param {object|null} args.oldTable If the table is being updated then the old table can be provided for differencing.
|
||||||
* @returns {Promise<object>} When the update is complete this will respond successfully. Returns the row for
|
* @returns {Promise<object>} When the update is complete this will respond successfully. Returns the row for
|
||||||
* row operations and the table for table operations.
|
* row operations and the table for table operations.
|
||||||
*/
|
*/
|
||||||
exports.updateLinks = async function ({
|
exports.updateLinks = async function (args) {
|
||||||
eventType,
|
const { eventType, appId, row, tableId, table, oldTable } = args
|
||||||
appId,
|
|
||||||
row,
|
|
||||||
tableId,
|
|
||||||
table,
|
|
||||||
oldTable,
|
|
||||||
}) {
|
|
||||||
const baseReturnObj = row == null ? table : row
|
const baseReturnObj = row == null ? table : row
|
||||||
if (appId == null) {
|
if (appId == null) {
|
||||||
throw "Cannot operate without an instance ID."
|
throw "Cannot operate without an instance ID."
|
||||||
}
|
}
|
||||||
// make sure table ID is set
|
// make sure table ID is set
|
||||||
if (tableId == null && table != null) {
|
if (tableId == null && table != null) {
|
||||||
arguments[0].tableId = table._id
|
args.tableId = table._id
|
||||||
}
|
}
|
||||||
let linkController = new LinkController(arguments[0])
|
let linkController = new LinkController(args)
|
||||||
try {
|
try {
|
||||||
if (
|
if (
|
||||||
!(await linkController.doesTableHaveLinkedFields()) &&
|
!(await linkController.doesTableHaveLinkedFields()) &&
|
||||||
|
|
|
@ -17,24 +17,20 @@ exports.createLinkView = createLinkView
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the linking documents, not the linked documents themselves.
|
* Gets the linking documents, not the linked documents themselves.
|
||||||
* @param {string} appId The instance in which we are searching for linked rows.
|
* @param {string} args.appId The instance in which we are searching for linked rows.
|
||||||
* @param {string} tableId The table which we are searching for linked rows against.
|
* @param {string} args.tableId The table which we are searching for linked rows against.
|
||||||
* @param {string|null} fieldName The name of column/field which is being altered, only looking for
|
* @param {string|null} args.fieldName The name of column/field which is being altered, only looking for
|
||||||
* linking documents that are related to it. If this is not specified then the table level will be assumed.
|
* linking documents that are related to it. If this is not specified then the table level will be assumed.
|
||||||
* @param {string|null} rowId The ID of the row which we want to find linking documents for -
|
* @param {string|null} args.rowId The ID of the row which we want to find linking documents for -
|
||||||
* if this is not specified then it will assume table or field level depending on whether the
|
* if this is not specified then it will assume table or field level depending on whether the
|
||||||
* field name has been specified.
|
* field name has been specified.
|
||||||
* @param {boolean|null} includeDocs whether to include docs in the response call, this is considerably slower so only
|
* @param {boolean|null} args.includeDocs whether to include docs in the response call, this is considerably slower so only
|
||||||
* use this if actually interested in the docs themselves.
|
* use this if actually interested in the docs themselves.
|
||||||
* @returns {Promise<object[]>} This will return an array of the linking documents that were found
|
* @returns {Promise<object[]>} This will return an array of the linking documents that were found
|
||||||
* (if any).
|
* (if any).
|
||||||
*/
|
*/
|
||||||
exports.getLinkDocuments = async function ({
|
exports.getLinkDocuments = async function (args) {
|
||||||
appId,
|
const { appId, tableId, rowId, includeDocs } = args
|
||||||
tableId,
|
|
||||||
rowId,
|
|
||||||
includeDocs,
|
|
||||||
}) {
|
|
||||||
const db = new CouchDB(appId)
|
const db = new CouchDB(appId)
|
||||||
let params
|
let params
|
||||||
if (rowId != null) {
|
if (rowId != null) {
|
||||||
|
|
|
@ -1,15 +0,0 @@
|
||||||
exports.QUERY_TYPES = {
|
|
||||||
SQL: "sql",
|
|
||||||
JSON: "json",
|
|
||||||
FIELDS: "fields",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.FIELD_TYPES = {
|
|
||||||
STRING: "string",
|
|
||||||
BOOLEAN: "boolean",
|
|
||||||
NUMBER: "number",
|
|
||||||
PASSWORD: "password",
|
|
||||||
LIST: "list",
|
|
||||||
OBJECT: "object",
|
|
||||||
JSON: "json",
|
|
||||||
}
|
|
|
@ -1,130 +0,0 @@
|
||||||
const Airtable = require("airtable")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://airtable.com/api",
|
|
||||||
description:
|
|
||||||
"Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
|
|
||||||
friendlyName: "Airtable",
|
|
||||||
datasource: {
|
|
||||||
apiKey: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
default: "enter api key",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
base: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "mybase",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
view: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
numRecords: {
|
|
||||||
type: FIELD_TYPES.NUMBER,
|
|
||||||
default: 10,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
id: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class AirtableIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.client = new Airtable(config).base(config.base)
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
const { table, json } = query
|
|
||||||
|
|
||||||
try {
|
|
||||||
const records = await this.client(table).create([
|
|
||||||
{
|
|
||||||
fields: json,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
return records
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to airtable", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
try {
|
|
||||||
const records = await this.client(query.table)
|
|
||||||
.select({ maxRecords: query.numRecords || 10, view: query.view })
|
|
||||||
.firstPage()
|
|
||||||
return records.map(({ fields }) => fields)
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to airtable", err)
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
const { table, id, json } = query
|
|
||||||
|
|
||||||
try {
|
|
||||||
const records = await this.client(table).update([
|
|
||||||
{
|
|
||||||
id,
|
|
||||||
fields: json,
|
|
||||||
},
|
|
||||||
])
|
|
||||||
return records
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to airtable", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
try {
|
|
||||||
const records = await this.client(query.table).destroy(query.ids)
|
|
||||||
return records
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to airtable", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: AirtableIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,143 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module AirtableModule {
|
||||||
|
const Airtable = require("airtable")
|
||||||
|
|
||||||
|
interface AirtableConfig {
|
||||||
|
apiKey: string
|
||||||
|
base: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://airtable.com/api",
|
||||||
|
description:
|
||||||
|
"Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.",
|
||||||
|
friendlyName: "Airtable",
|
||||||
|
datasource: {
|
||||||
|
apiKey: {
|
||||||
|
type: DatasourceFieldTypes.PASSWORD,
|
||||||
|
default: "enter api key",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
base: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "mybase",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
view: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
numRecords: {
|
||||||
|
type: DatasourceFieldTypes.NUMBER,
|
||||||
|
default: 10,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
id: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryTypes.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class AirtableIntegration {
|
||||||
|
private config: AirtableConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: AirtableConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.client = new Airtable(config).base(config.base)
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { table: any; json: any }) {
|
||||||
|
const { table, json } = query
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await this.client(table).create([
|
||||||
|
{
|
||||||
|
fields: json,
|
||||||
|
},
|
||||||
|
])
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to airtable", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { table: any; numRecords: any; view: any }) {
|
||||||
|
try {
|
||||||
|
const records = await this.client(query.table)
|
||||||
|
.select({ maxRecords: query.numRecords || 10, view: query.view })
|
||||||
|
.firstPage()
|
||||||
|
// @ts-ignore
|
||||||
|
return records.map(({ fields }) => fields)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to airtable", err)
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: { table: any; id: any; json: any }) {
|
||||||
|
const { table, id, json } = query
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await this.client(table).update([
|
||||||
|
{
|
||||||
|
id,
|
||||||
|
fields: json,
|
||||||
|
},
|
||||||
|
])
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to airtable", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: { table: any; ids: any }) {
|
||||||
|
try {
|
||||||
|
return await this.client(query.table).destroy(query.ids)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to airtable", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: AirtableIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,86 +0,0 @@
|
||||||
const { Database, aql } = require("arangojs")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/arangodb/arangojs",
|
|
||||||
friendlyName: "ArangoDB",
|
|
||||||
description:
|
|
||||||
"ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
|
|
||||||
datasource: {
|
|
||||||
url: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "http://localhost:8529",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
username: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "root",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
password: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
databaseName: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "_system",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
collection: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.SQL,
|
|
||||||
},
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class ArangoDBIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
config.auth = {
|
|
||||||
username: config.username,
|
|
||||||
password: config.password,
|
|
||||||
}
|
|
||||||
|
|
||||||
this.config = config
|
|
||||||
this.client = new Database(config)
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.query(query.sql)
|
|
||||||
return result.all()
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying arangodb", err.message)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
const clc = this.client.collection(this.config.collection)
|
|
||||||
try {
|
|
||||||
const result = await this.client.query(
|
|
||||||
aql`INSERT ${query.json} INTO ${clc} RETURN NEW`
|
|
||||||
)
|
|
||||||
return result.all()
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying arangodb", err.message)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: ArangoDBIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,106 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module ArangoModule {
|
||||||
|
const { Database, aql } = require("arangojs")
|
||||||
|
|
||||||
|
interface ArangodbConfig {
|
||||||
|
url: string
|
||||||
|
username: string
|
||||||
|
password: string
|
||||||
|
databaseName: string
|
||||||
|
collection: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://github.com/arangodb/arangojs",
|
||||||
|
friendlyName: "ArangoDB",
|
||||||
|
description:
|
||||||
|
"ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
|
||||||
|
datasource: {
|
||||||
|
url: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "http://localhost:8529",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
username: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
password: {
|
||||||
|
type: DatasourceFieldTypes.PASSWORD,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
databaseName: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "_system",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
collection: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.SQL,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class ArangoDBIntegration {
|
||||||
|
private config: ArangodbConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: ArangodbConfig) {
|
||||||
|
const newConfig = {
|
||||||
|
auth: {
|
||||||
|
username: config.username,
|
||||||
|
password: config.password,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
this.config = config
|
||||||
|
this.client = new Database(newConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { sql: any }) {
|
||||||
|
try {
|
||||||
|
const result = await this.client.query(query.sql)
|
||||||
|
return result.all()
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying arangodb", err.message)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { json: any }) {
|
||||||
|
const clc = this.client.collection(this.config.collection)
|
||||||
|
try {
|
||||||
|
const result = await this.client.query(
|
||||||
|
aql`INSERT ${query.json} INTO ${clc} RETURN NEW`
|
||||||
|
)
|
||||||
|
return result.all()
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying arangodb", err.message)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: ArangoDBIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,11 +0,0 @@
|
||||||
exports.Operation = {
|
|
||||||
CREATE: "CREATE",
|
|
||||||
READ: "READ",
|
|
||||||
UPDATE: "UPDATE",
|
|
||||||
DELETE: "DELETE",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.SortDirection = {
|
|
||||||
ASCENDING: "ASCENDING",
|
|
||||||
DESCENDING: "DESCENDING",
|
|
||||||
}
|
|
|
@ -0,0 +1,121 @@
|
||||||
|
export enum Operation {
|
||||||
|
CREATE = "CREATE",
|
||||||
|
READ = "READ",
|
||||||
|
UPDATE = "UPDATE",
|
||||||
|
DELETE = "DELETE",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum SortDirection {
|
||||||
|
ASCENDING = "ASCENDING",
|
||||||
|
DESCENDING = "DESCENDING",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum QueryTypes {
|
||||||
|
SQL = "sql",
|
||||||
|
JSON = "json",
|
||||||
|
FIELDS = "fields",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum DatasourceFieldTypes {
|
||||||
|
STRING = "string",
|
||||||
|
BOOLEAN = "boolean",
|
||||||
|
NUMBER = "number",
|
||||||
|
PASSWORD = "password",
|
||||||
|
LIST = "list",
|
||||||
|
OBJECT = "object",
|
||||||
|
JSON = "json",
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueryDefinition {
|
||||||
|
type: QueryTypes,
|
||||||
|
displayName?: string,
|
||||||
|
readable?: boolean,
|
||||||
|
customisable?: boolean,
|
||||||
|
fields?: object,
|
||||||
|
urlDisplay?: boolean,
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Integration {
|
||||||
|
docs: string,
|
||||||
|
plus?: boolean,
|
||||||
|
description: string,
|
||||||
|
friendlyName: string,
|
||||||
|
datasource: {},
|
||||||
|
query: {
|
||||||
|
[key: string]: QueryDefinition,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SearchFilters {
|
||||||
|
allOr: boolean,
|
||||||
|
string?: {
|
||||||
|
[key: string]: string,
|
||||||
|
},
|
||||||
|
fuzzy?: {
|
||||||
|
[key: string]: string,
|
||||||
|
},
|
||||||
|
range?: {
|
||||||
|
[key: string]: {
|
||||||
|
high: number | string,
|
||||||
|
low: number | string,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
equal?: {
|
||||||
|
[key: string]: any,
|
||||||
|
},
|
||||||
|
notEqual?: {
|
||||||
|
[key: string]: any,
|
||||||
|
},
|
||||||
|
empty?: {
|
||||||
|
[key: string]: any,
|
||||||
|
},
|
||||||
|
notEmpty?: {
|
||||||
|
[key: string]: any,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RelationshipsJson {
|
||||||
|
through?: {
|
||||||
|
from: string,
|
||||||
|
to: string,
|
||||||
|
tableName: string,
|
||||||
|
},
|
||||||
|
from: string,
|
||||||
|
to: string,
|
||||||
|
tableName: string,
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueryJson {
|
||||||
|
endpoint: {
|
||||||
|
datasourceId: string,
|
||||||
|
entityId: string,
|
||||||
|
operation: Operation,
|
||||||
|
},
|
||||||
|
resource: {
|
||||||
|
fields: string[],
|
||||||
|
},
|
||||||
|
filters?: SearchFilters,
|
||||||
|
sort?: {
|
||||||
|
[key: string]: SortDirection,
|
||||||
|
},
|
||||||
|
paginate?: {
|
||||||
|
limit: number,
|
||||||
|
page: string | number,
|
||||||
|
},
|
||||||
|
body?: object,
|
||||||
|
extra?: {
|
||||||
|
idFilter?: SearchFilters,
|
||||||
|
},
|
||||||
|
relationships?: RelationshipsJson[],
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SqlQuery {
|
||||||
|
sql: string,
|
||||||
|
bindings?: {
|
||||||
|
[key: string]: any,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueryOptions {
|
||||||
|
disableReturning?: boolean,
|
||||||
|
}
|
|
@ -1,9 +1,24 @@
|
||||||
const { DataSourceOperation, SortDirection } = require("../../constants")
|
import { Knex, knex } from "knex"
|
||||||
|
|
||||||
const BASE_LIMIT = 5000
|
const BASE_LIMIT = 5000
|
||||||
|
import {
|
||||||
|
QueryJson,
|
||||||
|
SearchFilters,
|
||||||
|
QueryOptions,
|
||||||
|
SortDirection,
|
||||||
|
Operation,
|
||||||
|
RelationshipsJson,
|
||||||
|
} from "./definitions"
|
||||||
|
|
||||||
function addFilters(query, filters) {
|
type KnexQuery = Knex.QueryBuilder | Knex
|
||||||
function iterate(structure, fn) {
|
|
||||||
|
function addFilters(
|
||||||
|
query: KnexQuery,
|
||||||
|
filters: SearchFilters | undefined
|
||||||
|
): KnexQuery {
|
||||||
|
function iterate(
|
||||||
|
structure: { [key: string]: any },
|
||||||
|
fn: (key: string, value: any) => void
|
||||||
|
) {
|
||||||
for (let [key, value] of Object.entries(structure)) {
|
for (let [key, value] of Object.entries(structure)) {
|
||||||
fn(key, value)
|
fn(key, value)
|
||||||
}
|
}
|
||||||
|
@ -12,7 +27,7 @@ function addFilters(query, filters) {
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
// if all or specified in filters, then everything is an or
|
// if all or specified in filters, then everything is an or
|
||||||
const allOr = !!filters.allOr
|
const allOr = filters.allOr
|
||||||
if (filters.string) {
|
if (filters.string) {
|
||||||
iterate(filters.string, (key, value) => {
|
iterate(filters.string, (key, value) => {
|
||||||
const fnc = allOr ? "orWhere" : "where"
|
const fnc = allOr ? "orWhere" : "where"
|
||||||
|
@ -55,7 +70,7 @@ function addFilters(query, filters) {
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
function addRelationships(query, fromTable, relationships) {
|
function addRelationships(query: KnexQuery, fromTable: string, relationships: RelationshipsJson[] | undefined): KnexQuery {
|
||||||
if (!relationships) {
|
if (!relationships) {
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
@ -63,10 +78,12 @@ function addRelationships(query, fromTable, relationships) {
|
||||||
const from = `${fromTable}.${relationship.from}`
|
const from = `${fromTable}.${relationship.from}`
|
||||||
const to = `${relationship.tableName}.${relationship.to}`
|
const to = `${relationship.tableName}.${relationship.to}`
|
||||||
if (!relationship.through) {
|
if (!relationship.through) {
|
||||||
|
// @ts-ignore
|
||||||
query = query.innerJoin(relationship.tableName, from, to)
|
query = query.innerJoin(relationship.tableName, from, to)
|
||||||
} else {
|
} else {
|
||||||
const through = relationship
|
const through = relationship
|
||||||
query = query
|
query = query
|
||||||
|
// @ts-ignore
|
||||||
.innerJoin(through.tableName, from, through.from)
|
.innerJoin(through.tableName, from, through.from)
|
||||||
.innerJoin(relationship.tableName, to, through.to)
|
.innerJoin(relationship.tableName, to, through.to)
|
||||||
}
|
}
|
||||||
|
@ -74,9 +91,9 @@ function addRelationships(query, fromTable, relationships) {
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildCreate(knex, json, opts) {
|
function buildCreate(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
||||||
const { endpoint, body } = json
|
const { endpoint, body } = json
|
||||||
let query = knex(endpoint.entityId)
|
let query: KnexQuery = knex(endpoint.entityId)
|
||||||
// mysql can't use returning
|
// mysql can't use returning
|
||||||
if (opts.disableReturning) {
|
if (opts.disableReturning) {
|
||||||
return query.insert(body)
|
return query.insert(body)
|
||||||
|
@ -85,10 +102,10 @@ function buildCreate(knex, json, opts) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildRead(knex, json, limit) {
|
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
|
||||||
let { endpoint, resource, filters, sort, paginate, relationships } = json
|
let { endpoint, resource, filters, sort, paginate, relationships } = json
|
||||||
const tableName = endpoint.entityId
|
const tableName = endpoint.entityId
|
||||||
let query = knex(tableName)
|
let query: KnexQuery = knex(tableName)
|
||||||
// select all if not specified
|
// select all if not specified
|
||||||
if (!resource) {
|
if (!resource) {
|
||||||
resource = { fields: [] }
|
resource = { fields: [] }
|
||||||
|
@ -112,6 +129,7 @@ function buildRead(knex, json, limit) {
|
||||||
}
|
}
|
||||||
// handle pagination
|
// handle pagination
|
||||||
if (paginate && paginate.page && paginate.limit) {
|
if (paginate && paginate.page && paginate.limit) {
|
||||||
|
// @ts-ignore
|
||||||
const page = paginate.page <= 1 ? 0 : paginate.page - 1
|
const page = paginate.page <= 1 ? 0 : paginate.page - 1
|
||||||
const offset = page * paginate.limit
|
const offset = page * paginate.limit
|
||||||
query = query.offset(offset).limit(paginate.limit)
|
query = query.offset(offset).limit(paginate.limit)
|
||||||
|
@ -123,9 +141,9 @@ function buildRead(knex, json, limit) {
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildUpdate(knex, json, opts) {
|
function buildUpdate(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
||||||
const { endpoint, body, filters } = json
|
const { endpoint, body, filters } = json
|
||||||
let query = knex(endpoint.entityId)
|
let query: KnexQuery = knex(endpoint.entityId)
|
||||||
query = addFilters(query, filters)
|
query = addFilters(query, filters)
|
||||||
// mysql can't use returning
|
// mysql can't use returning
|
||||||
if (opts.disableReturning) {
|
if (opts.disableReturning) {
|
||||||
|
@ -135,9 +153,9 @@ function buildUpdate(knex, json, opts) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildDelete(knex, json, opts) {
|
function buildDelete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
||||||
const { endpoint, filters } = json
|
const { endpoint, filters } = json
|
||||||
let query = knex(endpoint.entityId)
|
let query: KnexQuery = knex(endpoint.entityId)
|
||||||
query = addFilters(query, filters)
|
query = addFilters(query, filters)
|
||||||
// mysql can't use returning
|
// mysql can't use returning
|
||||||
if (opts.disableReturning) {
|
if (opts.disableReturning) {
|
||||||
|
@ -148,20 +166,19 @@ function buildDelete(knex, json, opts) {
|
||||||
}
|
}
|
||||||
|
|
||||||
class SqlQueryBuilder {
|
class SqlQueryBuilder {
|
||||||
|
private readonly sqlClient: string
|
||||||
|
private readonly limit: number
|
||||||
// pass through client to get flavour of SQL
|
// pass through client to get flavour of SQL
|
||||||
constructor(client, limit = BASE_LIMIT) {
|
constructor(client: string, limit: number = BASE_LIMIT) {
|
||||||
this._client = client
|
this.sqlClient = client
|
||||||
this._limit = limit
|
this.limit = limit
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param json the input JSON structure from which an SQL query will be built.
|
* @param json the input JSON structure from which an SQL query will be built.
|
||||||
* @return {string} the operation that was found in the JSON.
|
* @return {string} the operation that was found in the JSON.
|
||||||
*/
|
*/
|
||||||
_operation(json) {
|
_operation(json: QueryJson): Operation {
|
||||||
if (!json || !json.endpoint) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return json.endpoint.operation
|
return json.endpoint.operation
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -171,25 +188,27 @@ class SqlQueryBuilder {
|
||||||
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
|
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
|
||||||
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
|
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
|
||||||
*/
|
*/
|
||||||
_query(json, opts = {}) {
|
_query(json: QueryJson, opts: QueryOptions = {}) {
|
||||||
const knex = require("knex")({ client: this._client })
|
const client = knex({ client: this.sqlClient })
|
||||||
let query
|
let query
|
||||||
switch (this._operation(json)) {
|
switch (this._operation(json)) {
|
||||||
case DataSourceOperation.CREATE:
|
case Operation.CREATE:
|
||||||
query = buildCreate(knex, json, opts)
|
query = buildCreate(client, json, opts)
|
||||||
break
|
break
|
||||||
case DataSourceOperation.READ:
|
case Operation.READ:
|
||||||
query = buildRead(knex, json, this._limit, opts)
|
query = buildRead(client, json, this.limit)
|
||||||
break
|
break
|
||||||
case DataSourceOperation.UPDATE:
|
case Operation.UPDATE:
|
||||||
query = buildUpdate(knex, json, opts)
|
query = buildUpdate(client, json, opts)
|
||||||
break
|
break
|
||||||
case DataSourceOperation.DELETE:
|
case Operation.DELETE:
|
||||||
query = buildDelete(knex, json, opts)
|
query = buildDelete(client, json, opts)
|
||||||
break
|
break
|
||||||
default:
|
default:
|
||||||
throw `Operation type is not supported by SQL query builder`
|
throw `Operation type is not supported by SQL query builder`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// @ts-ignore
|
||||||
return query.toSQL().toNative()
|
return query.toSQL().toNative()
|
||||||
}
|
}
|
||||||
}
|
}
|
|
@ -1,95 +0,0 @@
|
||||||
const PouchDB = require("pouchdb")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://docs.couchdb.org/en/stable/",
|
|
||||||
friendlyName: "CouchDB",
|
|
||||||
description:
|
|
||||||
"Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
|
|
||||||
datasource: {
|
|
||||||
url: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
default: "http://localhost:5984",
|
|
||||||
},
|
|
||||||
database: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
fields: {
|
|
||||||
id: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class CouchDBIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.client = new PouchDB(`${config.url}/${config.database}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.post(query.json)
|
|
||||||
return result
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to couchDB", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.allDocs({
|
|
||||||
include_docs: true,
|
|
||||||
...query.json,
|
|
||||||
})
|
|
||||||
return result.rows.map(row => row.doc)
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying couchDB", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.put(query.json)
|
|
||||||
return result
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error updating couchDB document", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.remove(query.id)
|
|
||||||
return result
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error deleting couchDB document", err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: CouchDBIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,107 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module CouchDBModule {
|
||||||
|
const PouchDB = require("pouchdb")
|
||||||
|
|
||||||
|
interface CouchDBConfig {
|
||||||
|
url: string
|
||||||
|
database: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://docs.couchdb.org/en/stable/",
|
||||||
|
friendlyName: "CouchDB",
|
||||||
|
description:
|
||||||
|
"Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
|
||||||
|
datasource: {
|
||||||
|
url: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
default: "http://localhost:5984",
|
||||||
|
},
|
||||||
|
database: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.JSON,
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.JSON,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryTypes.JSON,
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
fields: {
|
||||||
|
id: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class CouchDBIntegration {
|
||||||
|
private config: CouchDBConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: CouchDBConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.client = new PouchDB(`${config.url}/${config.database}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { json: object }) {
|
||||||
|
try {
|
||||||
|
return this.client.post(query.json)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to couchDB", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { json: object }) {
|
||||||
|
try {
|
||||||
|
const result = await this.client.allDocs({
|
||||||
|
include_docs: true,
|
||||||
|
...query.json,
|
||||||
|
})
|
||||||
|
return result.rows.map((row: { doc: object }) => row.doc)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying couchDB", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: { json: object }) {
|
||||||
|
try {
|
||||||
|
return this.client.put(query.json)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error updating couchDB document", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: { id: string }) {
|
||||||
|
try {
|
||||||
|
return await this.client.remove(query.id)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error deleting couchDB document", err)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: CouchDBIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,200 +0,0 @@
|
||||||
const AWS = require("aws-sdk")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
const { AWS_REGION } = require("../db/dynamoClient")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
|
|
||||||
description:
|
|
||||||
"Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
|
|
||||||
friendlyName: "DynamoDB",
|
|
||||||
datasource: {
|
|
||||||
region: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
default: "us-east-1",
|
|
||||||
},
|
|
||||||
accessKeyId: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
secretAccessKey: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
endpoint: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: false,
|
|
||||||
default: "https://dynamodb.us-east-1.amazonaws.com",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
readable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
scan: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
readable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
get: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
readable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
table: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class DynamoDBIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.connect()
|
|
||||||
let options = {
|
|
||||||
correctClockSkew: true,
|
|
||||||
}
|
|
||||||
if (config.endpoint) {
|
|
||||||
options.endpoint = config.endpoint
|
|
||||||
}
|
|
||||||
this.client = new AWS.DynamoDB.DocumentClient({
|
|
||||||
correctClockSkew: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
end() {
|
|
||||||
this.disconnect()
|
|
||||||
}
|
|
||||||
|
|
||||||
connect() {
|
|
||||||
AWS.config.update(this.config)
|
|
||||||
}
|
|
||||||
|
|
||||||
disconnect() {
|
|
||||||
AWS.config.update({
|
|
||||||
secretAccessKey: undefined,
|
|
||||||
accessKeyId: undefined,
|
|
||||||
region: AWS_REGION,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
return this.client.put(params).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
if (query.index) {
|
|
||||||
params.IndexName = query.index
|
|
||||||
}
|
|
||||||
const response = await this.client.query(params).promise()
|
|
||||||
if (response.Items) {
|
|
||||||
return response.Items
|
|
||||||
}
|
|
||||||
return response
|
|
||||||
}
|
|
||||||
|
|
||||||
async scan(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
if (query.index) {
|
|
||||||
params.IndexName = query.index
|
|
||||||
}
|
|
||||||
const response = await this.client.scan(params).promise()
|
|
||||||
if (response.Items) {
|
|
||||||
return response.Items
|
|
||||||
}
|
|
||||||
return response
|
|
||||||
}
|
|
||||||
|
|
||||||
async get(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
return this.client.get(params).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
return this.client.update(params).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
const params = {
|
|
||||||
TableName: query.table,
|
|
||||||
...query.json,
|
|
||||||
}
|
|
||||||
return this.client.delete(params).promise()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: DynamoDBIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,211 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module DynamoModule {
|
||||||
|
const AWS = require("aws-sdk")
|
||||||
|
const { AWS_REGION } = require("../db/dynamoClient")
|
||||||
|
|
||||||
|
interface DynamoDBConfig {
|
||||||
|
region: string
|
||||||
|
accessKeyId: string
|
||||||
|
secretAccessKey: string
|
||||||
|
endpoint: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://github.com/dabit3/dynamodb-documentclient-cheat-sheet",
|
||||||
|
description:
|
||||||
|
"Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.",
|
||||||
|
friendlyName: "DynamoDB",
|
||||||
|
datasource: {
|
||||||
|
region: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
default: "us-east-1",
|
||||||
|
},
|
||||||
|
accessKeyId: {
|
||||||
|
type: DatasourceFieldTypes.PASSWORD,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
secretAccessKey: {
|
||||||
|
type: DatasourceFieldTypes.PASSWORD,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
endpoint: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: false,
|
||||||
|
default: "https://dynamodb.us-east-1.amazonaws.com",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
readable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
scan: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
readable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
get: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
readable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
table: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class DynamoDBIntegration {
|
||||||
|
private config: DynamoDBConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: DynamoDBConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.connect()
|
||||||
|
let options = {
|
||||||
|
correctClockSkew: true,
|
||||||
|
endpoint: config.endpoint ? config.endpoint : undefined,
|
||||||
|
}
|
||||||
|
this.client = new AWS.DynamoDB.DocumentClient(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
end() {
|
||||||
|
this.disconnect()
|
||||||
|
}
|
||||||
|
|
||||||
|
connect() {
|
||||||
|
AWS.config.update(this.config)
|
||||||
|
}
|
||||||
|
|
||||||
|
disconnect() {
|
||||||
|
AWS.config.update({
|
||||||
|
secretAccessKey: undefined,
|
||||||
|
accessKeyId: undefined,
|
||||||
|
region: AWS_REGION,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { table: string; json: object }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
return this.client.put(params).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { table: string; json: object; index: null | string }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
IndexName: query.index ? query.index : undefined,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
if (query.index) {
|
||||||
|
const response = await this.client.query(params).promise()
|
||||||
|
if (response.Items) {
|
||||||
|
return response.Items
|
||||||
|
}
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async scan(query: { table: string; json: object; index: null | string }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
IndexName: query.index ? query.index : undefined,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
const response = await this.client.scan(params).promise()
|
||||||
|
if (response.Items) {
|
||||||
|
return response.Items
|
||||||
|
}
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(query: { table: string; json: object }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
return this.client.get(params).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: { table: string; json: object }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
return this.client.update(params).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: { table: string; json: object }) {
|
||||||
|
const params = {
|
||||||
|
TableName: query.table,
|
||||||
|
...query.json,
|
||||||
|
}
|
||||||
|
return this.client.delete(params).promise()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: DynamoDBIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,139 +0,0 @@
|
||||||
const { Client } = require("@elastic/elasticsearch")
|
|
||||||
const { QUERY_TYPES, FIELD_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
|
|
||||||
description:
|
|
||||||
"Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
|
|
||||||
friendlyName: "ElasticSearch",
|
|
||||||
datasource: {
|
|
||||||
url: {
|
|
||||||
type: "string",
|
|
||||||
required: true,
|
|
||||||
default: "http://localhost:9200",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
customisable: true,
|
|
||||||
fields: {
|
|
||||||
id: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
fields: {
|
|
||||||
index: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
id: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class ElasticSearchIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.client = new Client({ node: config.url })
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
const { index, json } = query
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = await this.client.index({
|
|
||||||
index,
|
|
||||||
body: json,
|
|
||||||
})
|
|
||||||
return result.body
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to elasticsearch", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
const { index, json } = query
|
|
||||||
try {
|
|
||||||
const result = await this.client.search({
|
|
||||||
index: index,
|
|
||||||
body: json,
|
|
||||||
})
|
|
||||||
return result.body.hits.hits.map(({ _source }) => _source)
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying elasticsearch", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
const { id, index, json } = query
|
|
||||||
try {
|
|
||||||
const result = await this.client.update({
|
|
||||||
id,
|
|
||||||
index,
|
|
||||||
body: json,
|
|
||||||
})
|
|
||||||
return result.body
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying elasticsearch", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
try {
|
|
||||||
const result = await this.client.delete(query)
|
|
||||||
return result.body
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error deleting from elasticsearch", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: ElasticSearchIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,153 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module ElasticsearchModule {
|
||||||
|
const { Client } = require("@elastic/elasticsearch")
|
||||||
|
|
||||||
|
interface ElasticsearchConfig {
|
||||||
|
url: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
|
||||||
|
description:
|
||||||
|
"Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
|
||||||
|
friendlyName: "ElasticSearch",
|
||||||
|
datasource: {
|
||||||
|
url: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
default: "http://localhost:9200",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
customisable: true,
|
||||||
|
fields: {
|
||||||
|
id: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
fields: {
|
||||||
|
index: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
id: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class ElasticSearchIntegration {
|
||||||
|
private config: ElasticsearchConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: ElasticsearchConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.client = new Client({ node: config.url })
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { index: string; json: object }) {
|
||||||
|
const { index, json } = query
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await this.client.index({
|
||||||
|
index,
|
||||||
|
body: json,
|
||||||
|
})
|
||||||
|
return result.body
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to elasticsearch", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { index: string; json: object }) {
|
||||||
|
const { index, json } = query
|
||||||
|
try {
|
||||||
|
const result = await this.client.search({
|
||||||
|
index: index,
|
||||||
|
body: json,
|
||||||
|
})
|
||||||
|
return result.body.hits.hits.map(({ _source }: any) => _source)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying elasticsearch", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: { id: string; index: string; json: object }) {
|
||||||
|
const { id, index, json } = query
|
||||||
|
try {
|
||||||
|
const result = await this.client.update({
|
||||||
|
id,
|
||||||
|
index,
|
||||||
|
body: json,
|
||||||
|
})
|
||||||
|
return result.body
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying elasticsearch", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: object) {
|
||||||
|
try {
|
||||||
|
const result = await this.client.delete(query)
|
||||||
|
return result.body
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error deleting from elasticsearch", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expose the manifest and the integration class to the integration loader.
module.exports = {
  schema: SCHEMA,
  integration: ElasticSearchIntegration,
}
|
||||||
|
}
|
|
@ -1,123 +0,0 @@
|
||||||
const sqlServer = require("mssql")
|
|
||||||
const { FIELD_TYPES } = require("./Integration")
|
|
||||||
const Sql = require("./base/sql")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/tediousjs/node-mssql",
|
|
||||||
description:
|
|
||||||
"Microsoft SQL Server is a relational database management system developed by Microsoft. ",
|
|
||||||
friendlyName: "MS SQL Server",
|
|
||||||
datasource: {
|
|
||||||
user: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
default: "localhost",
|
|
||||||
},
|
|
||||||
password: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
server: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "localhost",
|
|
||||||
},
|
|
||||||
port: {
|
|
||||||
type: FIELD_TYPES.NUMBER,
|
|
||||||
required: false,
|
|
||||||
default: 1433,
|
|
||||||
},
|
|
||||||
database: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "root",
|
|
||||||
},
|
|
||||||
encrypt: {
|
|
||||||
type: FIELD_TYPES.BOOLEAN,
|
|
||||||
default: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async function internalQuery(client, query) {
|
|
||||||
const sql = typeof query === "string" ? query : query.sql
|
|
||||||
const bindings = typeof query === "string" ? {} : query.bindings
|
|
||||||
try {
|
|
||||||
return await client.query(sql, bindings)
|
|
||||||
} catch (err) {
|
|
||||||
throw new Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class SqlServerIntegration extends Sql {
|
|
||||||
static pool
|
|
||||||
|
|
||||||
constructor(config) {
|
|
||||||
super("mssql")
|
|
||||||
this.config = config
|
|
||||||
this.config.options = {
|
|
||||||
encrypt: this.config.encrypt,
|
|
||||||
}
|
|
||||||
delete this.config.encrypt
|
|
||||||
if (!this.pool) {
|
|
||||||
this.pool = new sqlServer.ConnectionPool(this.config)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async connect() {
|
|
||||||
try {
|
|
||||||
const client = await this.pool.connect()
|
|
||||||
this.client = client.request()
|
|
||||||
} catch (err) {
|
|
||||||
throw new Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
await this.connect()
|
|
||||||
const response = await internalQuery(this.client, query)
|
|
||||||
return response.recordset
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
await this.connect()
|
|
||||||
const response = await internalQuery(this.client, query)
|
|
||||||
return response.recordset || [{ created: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
await this.connect()
|
|
||||||
const response = await internalQuery(this.client, query)
|
|
||||||
return response.recordset || [{ updated: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
await this.connect()
|
|
||||||
const response = await internalQuery(this.client, query)
|
|
||||||
return response.recordset || [{ deleted: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async query(json) {
|
|
||||||
const operation = this._operation(json).toLowerCase()
|
|
||||||
const input = this._query(json)
|
|
||||||
const response = await internalQuery(this.client, input)
|
|
||||||
return response.recordset ? response.recordset : [{ [operation]: true }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: SqlServerIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,144 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
QueryJson,
|
||||||
|
SqlQuery,
|
||||||
|
} from "./base/definitions"
|
||||||
|
import { getSqlQuery } from "./utils"
|
||||||
|
|
||||||
|
module MSSQLModule {
|
||||||
|
const sqlServer = require("mssql")
|
||||||
|
const Sql = require("./base/sql")
|
||||||
|
|
||||||
|
// Connection settings for the MS SQL Server datasource.
interface MSSQLConfig {
  user: string
  password: string
  server: string
  port: number
  database: string
  // When set, copied into the driver's `options.encrypt` (TLS) by the
  // integration constructor.
  encrypt?: boolean
}
|
||||||
|
|
||||||
|
// Static manifest for the MS SQL Server integration: connection fields shown
// to the user and the supported query types (all raw SQL).
const SCHEMA: Integration = {
  docs: "https://github.com/tediousjs/node-mssql",
  description:
    "Microsoft SQL Server is a relational database management system developed by Microsoft. ",
  friendlyName: "MS SQL Server",
  datasource: {
    user: {
      type: DatasourceFieldTypes.STRING,
      required: true,
      // NOTE(review): "localhost" is an odd default for a username — looks
      // copied from the server field; confirm intent.
      default: "localhost",
    },
    password: {
      type: DatasourceFieldTypes.PASSWORD,
      required: true,
    },
    server: {
      type: DatasourceFieldTypes.STRING,
      default: "localhost",
    },
    port: {
      type: DatasourceFieldTypes.NUMBER,
      required: false,
      default: 1433,
    },
    database: {
      type: DatasourceFieldTypes.STRING,
      default: "root",
    },
    encrypt: {
      type: DatasourceFieldTypes.BOOLEAN,
      default: true,
    },
  },
  query: {
    create: {
      type: QueryTypes.SQL,
    },
    read: {
      type: QueryTypes.SQL,
    },
    update: {
      type: QueryTypes.SQL,
    },
    delete: {
      type: QueryTypes.SQL,
    },
  },
}
|
||||||
|
|
||||||
|
async function internalQuery(client: any, query: SqlQuery) {
|
||||||
|
try {
|
||||||
|
return await client.query(query.sql, query.bindings || {})
|
||||||
|
} catch (err) {
|
||||||
|
throw new Error(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class SqlServerIntegration extends Sql {
|
||||||
|
private readonly config: MSSQLConfig
|
||||||
|
static pool: any
|
||||||
|
|
||||||
|
constructor(config: MSSQLConfig) {
|
||||||
|
super("mssql")
|
||||||
|
this.config = config
|
||||||
|
const clientCfg = {
|
||||||
|
...this.config,
|
||||||
|
options: {
|
||||||
|
encrypt: this.config.encrypt,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
delete clientCfg.encrypt
|
||||||
|
if (!this.pool) {
|
||||||
|
this.pool = new sqlServer.ConnectionPool(clientCfg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async connect() {
|
||||||
|
try {
|
||||||
|
const client = await this.pool.connect()
|
||||||
|
this.client = client.request()
|
||||||
|
} catch (err) {
|
||||||
|
throw new Error(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: SqlQuery | string) {
|
||||||
|
await this.connect()
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.recordset
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: SqlQuery | string) {
|
||||||
|
await this.connect()
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.recordset || [{ created: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: SqlQuery | string) {
|
||||||
|
await this.connect()
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.recordset || [{ updated: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: SqlQuery | string) {
|
||||||
|
await this.connect()
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.recordset || [{ deleted: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(json: QueryJson) {
|
||||||
|
const operation = this._operation(json).toLowerCase()
|
||||||
|
const input = this._query(json)
|
||||||
|
const response = await internalQuery(this.client, input)
|
||||||
|
return response.recordset ? response.recordset : [{ [operation]: true }]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expose the manifest and the integration class to the integration loader.
module.exports = {
  schema: SCHEMA,
  integration: SqlServerIntegration,
}
|
||||||
|
}
|
|
@ -1,78 +0,0 @@
|
||||||
const { MongoClient } = require("mongodb")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/mongodb/node-mongodb-native",
|
|
||||||
friendlyName: "MongoDB",
|
|
||||||
description:
|
|
||||||
"MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
|
|
||||||
datasource: {
|
|
||||||
connectionString: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
default: "mongodb://localhost:27017",
|
|
||||||
},
|
|
||||||
db: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
collection: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class MongoIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.client = new MongoClient(config.connectionString)
|
|
||||||
}
|
|
||||||
|
|
||||||
async connect() {
|
|
||||||
return this.client.connect()
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const db = this.client.db(this.config.db)
|
|
||||||
const collection = db.collection(this.config.collection)
|
|
||||||
const result = await collection.insertOne(query.json)
|
|
||||||
return result
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error writing to mongodb", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
try {
|
|
||||||
await this.connect()
|
|
||||||
const db = this.client.db(this.config.db)
|
|
||||||
const collection = db.collection(this.config.collection)
|
|
||||||
const result = await collection.find(query.json).toArray()
|
|
||||||
return result
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Error querying mongodb", err)
|
|
||||||
throw err
|
|
||||||
} finally {
|
|
||||||
await this.client.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: MongoIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,92 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module MongoDBModule {
|
||||||
|
const { MongoClient } = require("mongodb")
|
||||||
|
|
||||||
|
// Connection settings for the MongoDB datasource.
interface MongoDBConfig {
  // Full connection URI, e.g. "mongodb://localhost:27017".
  connectionString: string
  // Database and collection every query is scoped to.
  db: string
  collection: string
}
|
||||||
|
|
||||||
|
// Static manifest for the MongoDB integration. Only create and read queries
// are exposed; both take a free-form JSON body.
const SCHEMA: Integration = {
  docs: "https://github.com/mongodb/node-mongodb-native",
  friendlyName: "MongoDB",
  description:
    "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
  datasource: {
    connectionString: {
      type: DatasourceFieldTypes.STRING,
      required: true,
      default: "mongodb://localhost:27017",
    },
    db: {
      type: DatasourceFieldTypes.STRING,
      required: true,
    },
    collection: {
      type: DatasourceFieldTypes.STRING,
      required: true,
    },
  },
  query: {
    create: {
      type: QueryTypes.JSON,
    },
    read: {
      type: QueryTypes.JSON,
    },
  },
}
|
||||||
|
|
||||||
|
class MongoIntegration {
|
||||||
|
private config: MongoDBConfig
|
||||||
|
private client: any
|
||||||
|
|
||||||
|
constructor(config: MongoDBConfig) {
|
||||||
|
this.config = config
|
||||||
|
this.client = new MongoClient(config.connectionString)
|
||||||
|
}
|
||||||
|
|
||||||
|
async connect() {
|
||||||
|
return this.client.connect()
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: { json: object }) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const db = this.client.db(this.config.db)
|
||||||
|
const collection = db.collection(this.config.collection)
|
||||||
|
return collection.insertOne(query.json)
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error writing to mongodb", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { json: object }) {
|
||||||
|
try {
|
||||||
|
await this.connect()
|
||||||
|
const db = this.client.db(this.config.db)
|
||||||
|
const collection = db.collection(this.config.collection)
|
||||||
|
return collection.find(query.json).toArray()
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Error querying mongodb", err)
|
||||||
|
throw err
|
||||||
|
} finally {
|
||||||
|
await this.client.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expose the manifest and the integration class to the integration loader.
module.exports = {
  schema: SCHEMA,
  integration: MongoIntegration,
}
|
||||||
|
}
|
|
@ -1,225 +0,0 @@
|
||||||
const mysql = require("mysql")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
const Sql = require("./base/sql")
|
|
||||||
const { buildExternalTableId, convertType } = require("./utils")
|
|
||||||
const { FieldTypes } = require("../constants")
|
|
||||||
const { Operation } = require("./base/constants")
|
|
||||||
|
|
||||||
const TYPE_MAP = {
|
|
||||||
text: FieldTypes.LONGFORM,
|
|
||||||
blob: FieldTypes.LONGFORM,
|
|
||||||
enum: FieldTypes.STRING,
|
|
||||||
varchar: FieldTypes.STRING,
|
|
||||||
int: FieldTypes.NUMBER,
|
|
||||||
numeric: FieldTypes.NUMBER,
|
|
||||||
bigint: FieldTypes.NUMBER,
|
|
||||||
mediumint: FieldTypes.NUMBER,
|
|
||||||
decimal: FieldTypes.NUMBER,
|
|
||||||
dec: FieldTypes.NUMBER,
|
|
||||||
double: FieldTypes.NUMBER,
|
|
||||||
real: FieldTypes.NUMBER,
|
|
||||||
fixed: FieldTypes.NUMBER,
|
|
||||||
smallint: FieldTypes.NUMBER,
|
|
||||||
timestamp: FieldTypes.DATETIME,
|
|
||||||
date: FieldTypes.DATETIME,
|
|
||||||
datetime: FieldTypes.DATETIME,
|
|
||||||
time: FieldTypes.DATETIME,
|
|
||||||
tinyint: FieldTypes.BOOLEAN,
|
|
||||||
json: FIELD_TYPES.JSON,
|
|
||||||
}
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/mysqljs/mysql",
|
|
||||||
plus: true,
|
|
||||||
friendlyName: "MySQL",
|
|
||||||
description:
|
|
||||||
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
|
|
||||||
datasource: {
|
|
||||||
host: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "localhost",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
port: {
|
|
||||||
type: FIELD_TYPES.NUMBER,
|
|
||||||
default: 3306,
|
|
||||||
required: false,
|
|
||||||
},
|
|
||||||
user: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "root",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
password: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
default: "root",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
database: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
ssl: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
required: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: QUERY_TYPES.SQL,
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: QUERY_TYPES.SQL,
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: QUERY_TYPES.SQL,
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: QUERY_TYPES.SQL,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
function internalQuery(client, query, connect = true) {
|
|
||||||
const sql = typeof query === "string" ? query : query.sql
|
|
||||||
const bindings = typeof query === "string" ? {} : query.bindings
|
|
||||||
// Node MySQL is callback based, so we must wrap our call in a promise
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
if (connect) {
|
|
||||||
client.connect()
|
|
||||||
}
|
|
||||||
return client.query(sql, bindings, (error, results) => {
|
|
||||||
if (error) {
|
|
||||||
reject(error)
|
|
||||||
} else {
|
|
||||||
resolve(results)
|
|
||||||
}
|
|
||||||
if (connect) {
|
|
||||||
client.end()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
class MySQLIntegration extends Sql {
|
|
||||||
constructor(config) {
|
|
||||||
super("mysql")
|
|
||||||
this.config = config
|
|
||||||
if (config.ssl && Object.keys(config.ssl).length === 0) {
|
|
||||||
delete config.ssl
|
|
||||||
}
|
|
||||||
this.client = mysql.createConnection(config)
|
|
||||||
}
|
|
||||||
|
|
||||||
async buildSchema(datasourceId) {
|
|
||||||
const tables = {}
|
|
||||||
const database = this.config.database
|
|
||||||
this.client.connect()
|
|
||||||
|
|
||||||
// get the tables first
|
|
||||||
const tablesResp = await internalQuery(this.client, "SHOW TABLES;", false)
|
|
||||||
const tableNames = tablesResp.map(obj => obj[`Tables_in_${database}`])
|
|
||||||
for (let tableName of tableNames) {
|
|
||||||
const primaryKeys = []
|
|
||||||
const schema = {}
|
|
||||||
const descResp = await internalQuery(
|
|
||||||
this.client,
|
|
||||||
`DESCRIBE ${tableName};`,
|
|
||||||
false
|
|
||||||
)
|
|
||||||
for (let column of descResp) {
|
|
||||||
const columnName = column.Field
|
|
||||||
if (column.Key === "PRI") {
|
|
||||||
primaryKeys.push(columnName)
|
|
||||||
}
|
|
||||||
const constraints = {}
|
|
||||||
if (column.Null !== "YES") {
|
|
||||||
constraints.required = true
|
|
||||||
}
|
|
||||||
schema[columnName] = {
|
|
||||||
name: columnName,
|
|
||||||
type: convertType(column.Type, TYPE_MAP),
|
|
||||||
constraints,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// for now just default to first column
|
|
||||||
if (primaryKeys.length === 0) {
|
|
||||||
primaryKeys.push(descResp[0].Field)
|
|
||||||
}
|
|
||||||
if (!tables[tableName]) {
|
|
||||||
tables[tableName] = {
|
|
||||||
_id: buildExternalTableId(datasourceId, tableName),
|
|
||||||
primary: primaryKeys,
|
|
||||||
name: tableName,
|
|
||||||
schema,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.client.end()
|
|
||||||
this.tables = tables
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(query) {
|
|
||||||
const results = await internalQuery(this.client, query)
|
|
||||||
return results.length ? results : [{ created: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
read(query) {
|
|
||||||
return internalQuery(this.client, query)
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(query) {
|
|
||||||
const results = await internalQuery(this.client, query)
|
|
||||||
return results.length ? results : [{ updated: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(query) {
|
|
||||||
const results = await internalQuery(this.client, query)
|
|
||||||
return results.length ? results : [{ deleted: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async getReturningRow(json) {
|
|
||||||
if (!json.extra.idFilter) {
|
|
||||||
return {}
|
|
||||||
}
|
|
||||||
const input = this._query({
|
|
||||||
endpoint: {
|
|
||||||
...json.endpoint,
|
|
||||||
operation: Operation.READ,
|
|
||||||
},
|
|
||||||
fields: [],
|
|
||||||
filters: json.extra.idFilter,
|
|
||||||
paginate: {
|
|
||||||
limit: 1,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
return internalQuery(this.client, input, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
async query(json) {
|
|
||||||
const operation = this._operation(json)
|
|
||||||
this.client.connect()
|
|
||||||
const input = this._query(json, { disableReturning: true })
|
|
||||||
let row
|
|
||||||
// need to manage returning, a feature mySQL can't do
|
|
||||||
if (operation === Operation.DELETE) {
|
|
||||||
row = this.getReturningRow(json)
|
|
||||||
}
|
|
||||||
const results = await internalQuery(this.client, input, false)
|
|
||||||
// same as delete, manage returning
|
|
||||||
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
|
|
||||||
row = this.getReturningRow(json)
|
|
||||||
}
|
|
||||||
this.client.end()
|
|
||||||
if (operation !== Operation.READ) {
|
|
||||||
return row
|
|
||||||
}
|
|
||||||
return results.length ? results : [{ [operation.toLowerCase()]: true }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: MySQLIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,258 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
Operation,
|
||||||
|
QueryJson,
|
||||||
|
SqlQuery,
|
||||||
|
} from "./base/definitions"
|
||||||
|
import { getSqlQuery } from "./utils"
|
||||||
|
|
||||||
|
module MySQLModule {
|
||||||
|
const mysql = require("mysql")
|
||||||
|
const Sql = require("./base/sql")
|
||||||
|
const { buildExternalTableId, convertType } = require("./utils")
|
||||||
|
const { FieldTypes } = require("../constants")
|
||||||
|
|
||||||
|
// Connection settings for the MySQL datasource.
interface MySQLConfig {
  host: string
  port: number
  user: string
  password: string
  database: string
  // Passed straight to mysql.createConnection; an empty object is stripped
  // by the integration constructor so TLS stays disabled.
  ssl?: object
}
|
||||||
|
|
||||||
|
// Maps MySQL column types (as reported by DESCRIBE) to the internal
// FieldTypes used when building table schemas in buildSchema().
const TYPE_MAP = {
  text: FieldTypes.LONGFORM,
  blob: FieldTypes.LONGFORM,
  enum: FieldTypes.STRING,
  varchar: FieldTypes.STRING,
  int: FieldTypes.NUMBER,
  numeric: FieldTypes.NUMBER,
  bigint: FieldTypes.NUMBER,
  mediumint: FieldTypes.NUMBER,
  decimal: FieldTypes.NUMBER,
  dec: FieldTypes.NUMBER,
  double: FieldTypes.NUMBER,
  real: FieldTypes.NUMBER,
  fixed: FieldTypes.NUMBER,
  smallint: FieldTypes.NUMBER,
  timestamp: FieldTypes.DATETIME,
  date: FieldTypes.DATETIME,
  datetime: FieldTypes.DATETIME,
  time: FieldTypes.DATETIME,
  // NOTE(review): tinyint is treated as boolean (tinyint(1) convention);
  // wider tinyint columns will be coerced too — confirm acceptable.
  tinyint: FieldTypes.BOOLEAN,
  json: DatasourceFieldTypes.JSON,
}
|
||||||
|
|
||||||
|
// Static manifest for the MySQL integration. `plus: true` marks it as
// supporting the structured (table-backed) query path in addition to raw SQL.
const SCHEMA: Integration = {
  docs: "https://github.com/mysqljs/mysql",
  plus: true,
  friendlyName: "MySQL",
  description:
    "MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
  datasource: {
    host: {
      type: DatasourceFieldTypes.STRING,
      default: "localhost",
      required: true,
    },
    port: {
      type: DatasourceFieldTypes.NUMBER,
      default: 3306,
      required: false,
    },
    user: {
      type: DatasourceFieldTypes.STRING,
      default: "root",
      required: true,
    },
    password: {
      type: DatasourceFieldTypes.PASSWORD,
      default: "root",
      required: true,
    },
    database: {
      type: DatasourceFieldTypes.STRING,
      required: true,
    },
    ssl: {
      type: DatasourceFieldTypes.OBJECT,
      required: false,
    },
  },
  query: {
    create: {
      type: QueryTypes.SQL,
    },
    read: {
      type: QueryTypes.SQL,
    },
    update: {
      type: QueryTypes.SQL,
    },
    delete: {
      type: QueryTypes.SQL,
    },
  },
}
|
||||||
|
|
||||||
|
function internalQuery(
|
||||||
|
client: any,
|
||||||
|
query: SqlQuery,
|
||||||
|
connect: boolean = true
|
||||||
|
): Promise<any[]> {
|
||||||
|
// Node MySQL is callback based, so we must wrap our call in a promise
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
if (connect) {
|
||||||
|
client.connect()
|
||||||
|
}
|
||||||
|
return client.query(
|
||||||
|
query.sql,
|
||||||
|
query.bindings || {},
|
||||||
|
(error: any, results: object[]) => {
|
||||||
|
if (error) {
|
||||||
|
reject(error)
|
||||||
|
} else {
|
||||||
|
resolve(results)
|
||||||
|
}
|
||||||
|
if (connect) {
|
||||||
|
client.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
class MySQLIntegration extends Sql {
|
||||||
|
private config: MySQLConfig
|
||||||
|
private readonly client: any
|
||||||
|
|
||||||
|
constructor(config: MySQLConfig) {
|
||||||
|
super("mysql")
|
||||||
|
this.config = config
|
||||||
|
if (config.ssl && Object.keys(config.ssl).length === 0) {
|
||||||
|
delete config.ssl
|
||||||
|
}
|
||||||
|
this.client = mysql.createConnection(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
async buildSchema(datasourceId: string) {
|
||||||
|
const tables: any = {}
|
||||||
|
const database = this.config.database
|
||||||
|
this.client.connect()
|
||||||
|
|
||||||
|
// get the tables first
|
||||||
|
const tablesResp = await internalQuery(
|
||||||
|
this.client,
|
||||||
|
{ sql: "SHOW TABLES;" },
|
||||||
|
false
|
||||||
|
)
|
||||||
|
const tableNames = tablesResp.map(
|
||||||
|
(obj: any) => obj[`Tables_in_${database}`]
|
||||||
|
)
|
||||||
|
for (let tableName of tableNames) {
|
||||||
|
const primaryKeys = []
|
||||||
|
const schema: any = {}
|
||||||
|
const descResp = await internalQuery(
|
||||||
|
this.client,
|
||||||
|
{ sql: `DESCRIBE ${tableName};` },
|
||||||
|
false
|
||||||
|
)
|
||||||
|
for (let column of descResp) {
|
||||||
|
const columnName = column.Field
|
||||||
|
if (column.Key === "PRI") {
|
||||||
|
primaryKeys.push(columnName)
|
||||||
|
}
|
||||||
|
const constraints = {
|
||||||
|
required: column.Null !== "YES",
|
||||||
|
}
|
||||||
|
schema[columnName] = {
|
||||||
|
name: columnName,
|
||||||
|
type: convertType(column.Type, TYPE_MAP),
|
||||||
|
constraints,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// for now just default to first column
|
||||||
|
if (primaryKeys.length === 0) {
|
||||||
|
primaryKeys.push(descResp[0].Field)
|
||||||
|
}
|
||||||
|
if (!tables[tableName]) {
|
||||||
|
tables[tableName] = {
|
||||||
|
_id: buildExternalTableId(datasourceId, tableName),
|
||||||
|
primary: primaryKeys,
|
||||||
|
name: tableName,
|
||||||
|
schema,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.client.end()
|
||||||
|
this.tables = tables
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: SqlQuery | string) {
|
||||||
|
const results = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return results.length ? results : [{ created: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
read(query: SqlQuery | string) {
|
||||||
|
return internalQuery(this.client, getSqlQuery(query))
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: SqlQuery | string) {
|
||||||
|
const results = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return results.length ? results : [{ updated: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: SqlQuery | string) {
|
||||||
|
const results = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return results.length ? results : [{ deleted: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async getReturningRow(json: QueryJson) {
|
||||||
|
if (!json.extra.idFilter) {
|
||||||
|
return {}
|
||||||
|
}
|
||||||
|
const input = this._query({
|
||||||
|
endpoint: {
|
||||||
|
...json.endpoint,
|
||||||
|
operation: Operation.READ,
|
||||||
|
},
|
||||||
|
fields: [],
|
||||||
|
filters: json.extra.idFilter,
|
||||||
|
paginate: {
|
||||||
|
limit: 1,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
return internalQuery(this.client, input, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(json: QueryJson) {
|
||||||
|
const operation = this._operation(json)
|
||||||
|
this.client.connect()
|
||||||
|
const input = this._query(json, { disableReturning: true })
|
||||||
|
let row
|
||||||
|
// need to manage returning, a feature mySQL can't do
|
||||||
|
if (operation === "awdawd") {
|
||||||
|
row = this.getReturningRow(json)
|
||||||
|
}
|
||||||
|
const results = await internalQuery(this.client, input, false)
|
||||||
|
// same as delete, manage returning
|
||||||
|
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
|
||||||
|
row = this.getReturningRow(json)
|
||||||
|
}
|
||||||
|
this.client.end()
|
||||||
|
if (operation !== Operation.READ) {
|
||||||
|
return row
|
||||||
|
}
|
||||||
|
return results.length ? results : [{ [operation.toLowerCase()]: true }]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expose the manifest and the integration class to the integration loader.
module.exports = {
  schema: SCHEMA,
  integration: MySQLIntegration,
}
|
||||||
|
}
|
|
@ -1,205 +0,0 @@
|
||||||
const { Pool } = require("pg")
|
|
||||||
const { FIELD_TYPES } = require("./Integration")
|
|
||||||
const Sql = require("./base/sql")
|
|
||||||
const { FieldTypes } = require("../constants")
|
|
||||||
const { buildExternalTableId, convertType } = require("./utils")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://node-postgres.com",
|
|
||||||
plus: true,
|
|
||||||
friendlyName: "PostgreSQL",
|
|
||||||
description:
|
|
||||||
"PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
|
|
||||||
datasource: {
|
|
||||||
host: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "localhost",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
port: {
|
|
||||||
type: FIELD_TYPES.NUMBER,
|
|
||||||
required: true,
|
|
||||||
default: 5432,
|
|
||||||
},
|
|
||||||
database: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "postgres",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
user: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "root",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
password: {
|
|
||||||
type: FIELD_TYPES.PASSWORD,
|
|
||||||
default: "root",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
ssl: {
|
|
||||||
type: FIELD_TYPES.BOOLEAN,
|
|
||||||
default: false,
|
|
||||||
required: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
type: "sql",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
const TYPE_MAP = {
|
|
||||||
text: FieldTypes.LONGFORM,
|
|
||||||
varchar: FieldTypes.STRING,
|
|
||||||
integer: FieldTypes.NUMBER,
|
|
||||||
bigint: FieldTypes.NUMBER,
|
|
||||||
decimal: FieldTypes.NUMBER,
|
|
||||||
smallint: FieldTypes.NUMBER,
|
|
||||||
timestamp: FieldTypes.DATETIME,
|
|
||||||
time: FieldTypes.DATETIME,
|
|
||||||
boolean: FieldTypes.BOOLEAN,
|
|
||||||
json: FIELD_TYPES.JSON,
|
|
||||||
}
|
|
||||||
|
|
||||||
async function internalQuery(client, query) {
|
|
||||||
const sql = typeof query === "string" ? query : query.sql
|
|
||||||
const bindings = typeof query === "string" ? {} : query.bindings
|
|
||||||
try {
|
|
||||||
return await client.query(sql, bindings)
|
|
||||||
} catch (err) {
|
|
||||||
throw new Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class PostgresIntegration extends Sql {
|
|
||||||
static pool
|
|
||||||
|
|
||||||
COLUMNS_SQL =
|
|
||||||
"select * from information_schema.columns where table_schema = 'public'"
|
|
||||||
|
|
||||||
PRIMARY_KEYS_SQL = `
|
|
||||||
select tc.table_schema, tc.table_name, kc.column_name as primary_key
|
|
||||||
from information_schema.table_constraints tc
|
|
||||||
join
|
|
||||||
information_schema.key_column_usage kc on kc.table_name = tc.table_name
|
|
||||||
and kc.table_schema = tc.table_schema
|
|
||||||
and kc.constraint_name = tc.constraint_name
|
|
||||||
where tc.constraint_type = 'PRIMARY KEY';
|
|
||||||
`
|
|
||||||
|
|
||||||
constructor(config) {
|
|
||||||
super("pg")
|
|
||||||
this.config = config
|
|
||||||
if (this.config.ssl) {
|
|
||||||
this.config.ssl = {
|
|
||||||
rejectUnauthorized: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this.pool) {
|
|
||||||
this.pool = new Pool(this.config)
|
|
||||||
}
|
|
||||||
|
|
||||||
this.client = this.pool
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetches the tables from the postgres table and assigns them to the datasource.
|
|
||||||
* @param {*} datasourceId - datasourceId to fetch
|
|
||||||
*/
|
|
||||||
async buildSchema(datasourceId) {
|
|
||||||
let tableKeys = {}
|
|
||||||
try {
|
|
||||||
const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
|
|
||||||
for (let table of primaryKeysResponse.rows) {
|
|
||||||
const tableName = table.table_name
|
|
||||||
if (!tableKeys[tableName]) {
|
|
||||||
tableKeys[tableName] = []
|
|
||||||
}
|
|
||||||
tableKeys[tableName].push(table.column_name || table.primary_key)
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
tableKeys = {}
|
|
||||||
}
|
|
||||||
|
|
||||||
const columnsResponse = await this.client.query(this.COLUMNS_SQL)
|
|
||||||
const tables = {}
|
|
||||||
|
|
||||||
for (let column of columnsResponse.rows) {
|
|
||||||
const tableName = column.table_name
|
|
||||||
const columnName = column.column_name
|
|
||||||
|
|
||||||
// table key doesn't exist yet
|
|
||||||
if (!tables[tableName]) {
|
|
||||||
tables[tableName] = {
|
|
||||||
_id: buildExternalTableId(datasourceId, tableName),
|
|
||||||
primary: tableKeys[tableName] || ["id"],
|
|
||||||
name: tableName,
|
|
||||||
schema: {},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tables[tableName].schema[columnName] = {
|
|
||||||
name: columnName,
|
|
||||||
type: convertType(column.data_type, TYPE_MAP),
|
|
||||||
}
|
|
||||||
|
|
||||||
// // TODO: hack for testing
|
|
||||||
// if (tableName === "persons") {
|
|
||||||
// tables[tableName].primaryDisplay = "firstname"
|
|
||||||
// }
|
|
||||||
// if (columnName.toLowerCase() === "personid" && tableName === "tasks") {
|
|
||||||
// tables[tableName].schema[columnName] = {
|
|
||||||
// name: columnName,
|
|
||||||
// type: "link",
|
|
||||||
// tableId: buildExternalTableId(datasourceId, "persons"),
|
|
||||||
// relationshipType: "one-to-many",
|
|
||||||
// fieldName: "personid",
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
}
|
|
||||||
this.tables = tables
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(sql) {
|
|
||||||
const response = await internalQuery(this.client, sql)
|
|
||||||
return response.rows.length ? response.rows : [{ created: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(sql) {
|
|
||||||
const response = await internalQuery(this.client, sql)
|
|
||||||
return response.rows
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(sql) {
|
|
||||||
const response = await internalQuery(this.client, sql)
|
|
||||||
return response.rows.length ? response.rows : [{ updated: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(sql) {
|
|
||||||
const response = await internalQuery(this.client, sql)
|
|
||||||
return response.rows.length ? response.rows : [{ deleted: true }]
|
|
||||||
}
|
|
||||||
|
|
||||||
async query(json) {
|
|
||||||
const operation = this._operation(json).toLowerCase()
|
|
||||||
const input = this._query(json)
|
|
||||||
const response = await internalQuery(this.client, input)
|
|
||||||
return response.rows.length ? response.rows : [{ [operation]: true }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: PostgresIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,227 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
QueryJson,
|
||||||
|
SqlQuery,
|
||||||
|
} from "./base/definitions"
|
||||||
|
import { Table } from "../constants/definitions"
|
||||||
|
import { getSqlQuery } from "./utils"
|
||||||
|
|
||||||
|
module PostgresModule {
|
||||||
|
const { Pool } = require("pg")
|
||||||
|
const Sql = require("./base/sql")
|
||||||
|
const { FieldTypes } = require("../constants")
|
||||||
|
const { buildExternalTableId, convertType } = require("./utils")
|
||||||
|
|
||||||
|
interface PostgresConfig {
|
||||||
|
host: string
|
||||||
|
port: number
|
||||||
|
database: string
|
||||||
|
user: string
|
||||||
|
password: string
|
||||||
|
ssl?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://node-postgres.com",
|
||||||
|
plus: true,
|
||||||
|
friendlyName: "PostgreSQL",
|
||||||
|
description:
|
||||||
|
"PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
|
||||||
|
datasource: {
|
||||||
|
host: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "localhost",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
port: {
|
||||||
|
type: DatasourceFieldTypes.NUMBER,
|
||||||
|
required: true,
|
||||||
|
default: 5432,
|
||||||
|
},
|
||||||
|
database: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "postgres",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
user: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
password: {
|
||||||
|
type: DatasourceFieldTypes.PASSWORD,
|
||||||
|
default: "root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
ssl: {
|
||||||
|
type: DatasourceFieldTypes.BOOLEAN,
|
||||||
|
default: false,
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
type: QueryTypes.SQL,
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.SQL,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
type: QueryTypes.SQL,
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
type: QueryTypes.SQL,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const TYPE_MAP = {
|
||||||
|
text: FieldTypes.LONGFORM,
|
||||||
|
varchar: FieldTypes.STRING,
|
||||||
|
integer: FieldTypes.NUMBER,
|
||||||
|
bigint: FieldTypes.NUMBER,
|
||||||
|
decimal: FieldTypes.NUMBER,
|
||||||
|
smallint: FieldTypes.NUMBER,
|
||||||
|
timestamp: FieldTypes.DATETIME,
|
||||||
|
time: FieldTypes.DATETIME,
|
||||||
|
boolean: FieldTypes.BOOLEAN,
|
||||||
|
json: FieldTypes.JSON,
|
||||||
|
}
|
||||||
|
|
||||||
|
async function internalQuery(client: any, query: SqlQuery) {
|
||||||
|
try {
|
||||||
|
return await client.query(query.sql, query.bindings || {})
|
||||||
|
} catch (err) {
|
||||||
|
throw new Error(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PostgresIntegration extends Sql {
|
||||||
|
static pool: any
|
||||||
|
private readonly client: any
|
||||||
|
private readonly config: PostgresConfig
|
||||||
|
|
||||||
|
COLUMNS_SQL =
|
||||||
|
"select * from information_schema.columns where table_schema = 'public'"
|
||||||
|
|
||||||
|
PRIMARY_KEYS_SQL = `
|
||||||
|
select tc.table_schema, tc.table_name, kc.column_name as primary_key
|
||||||
|
from information_schema.table_constraints tc
|
||||||
|
join
|
||||||
|
information_schema.key_column_usage kc on kc.table_name = tc.table_name
|
||||||
|
and kc.table_schema = tc.table_schema
|
||||||
|
and kc.constraint_name = tc.constraint_name
|
||||||
|
where tc.constraint_type = 'PRIMARY KEY';
|
||||||
|
`
|
||||||
|
|
||||||
|
constructor(config: PostgresConfig) {
|
||||||
|
super("pg")
|
||||||
|
this.config = config
|
||||||
|
|
||||||
|
let newConfig = {
|
||||||
|
...this.config,
|
||||||
|
ssl: this.config.ssl ? { rejectUnauthorized: true } : undefined,
|
||||||
|
}
|
||||||
|
if (!this.pool) {
|
||||||
|
this.pool = new Pool(newConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.client = this.pool
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetches the tables from the postgres table and assigns them to the datasource.
|
||||||
|
* @param {*} datasourceId - datasourceId to fetch
|
||||||
|
*/
|
||||||
|
async buildSchema(datasourceId: string) {
|
||||||
|
let tableKeys: { [key: string]: string[] } = {}
|
||||||
|
try {
|
||||||
|
const primaryKeysResponse = await this.client.query(
|
||||||
|
this.PRIMARY_KEYS_SQL
|
||||||
|
)
|
||||||
|
for (let table of primaryKeysResponse.rows) {
|
||||||
|
const tableName = table.table_name
|
||||||
|
if (!tableKeys[tableName]) {
|
||||||
|
tableKeys[tableName] = []
|
||||||
|
}
|
||||||
|
tableKeys[tableName].push(table.column_name || table.primary_key)
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
tableKeys = {}
|
||||||
|
}
|
||||||
|
|
||||||
|
const columnsResponse = await this.client.query(this.COLUMNS_SQL)
|
||||||
|
const tables: { [key: string]: Table } = {}
|
||||||
|
|
||||||
|
for (let column of columnsResponse.rows) {
|
||||||
|
const tableName: string = column.table_name
|
||||||
|
const columnName: string = column.column_name
|
||||||
|
|
||||||
|
// table key doesn't exist yet
|
||||||
|
if (!tables[tableName] || !tables[tableName].schema) {
|
||||||
|
tables[tableName] = {
|
||||||
|
_id: buildExternalTableId(datasourceId, tableName),
|
||||||
|
primary: tableKeys[tableName] || ["id"],
|
||||||
|
name: tableName,
|
||||||
|
schema: {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const type: string = convertType(column.data_type, TYPE_MAP)
|
||||||
|
tables[tableName].schema[columnName] = {
|
||||||
|
name: columnName,
|
||||||
|
type,
|
||||||
|
}
|
||||||
|
|
||||||
|
// // TODO: hack for testing
|
||||||
|
// if (tableName === "persons") {
|
||||||
|
// tables[tableName].primaryDisplay = "firstname"
|
||||||
|
// }
|
||||||
|
// if (columnName.toLowerCase() === "personid" && tableName === "tasks") {
|
||||||
|
// tables[tableName].schema[columnName] = {
|
||||||
|
// name: columnName,
|
||||||
|
// type: "link",
|
||||||
|
// tableId: buildExternalTableId(datasourceId, "persons"),
|
||||||
|
// relationshipType: "one-to-many",
|
||||||
|
// fieldName: "personid",
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
}
|
||||||
|
this.tables = tables
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(query: SqlQuery | string) {
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ created: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: SqlQuery | string) {
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.rows
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(query: SqlQuery | string) {
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ updated: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(query: SqlQuery | string) {
|
||||||
|
const response = await internalQuery(this.client, getSqlQuery(query))
|
||||||
|
return response.rows.length ? response.rows : [{ deleted: true }]
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(json: QueryJson) {
|
||||||
|
const operation = this._operation(json).toLowerCase()
|
||||||
|
const input = this._query(json)
|
||||||
|
const response = await internalQuery(this.client, input)
|
||||||
|
return response.rows.length ? response.rows : [{ [operation]: true }]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: PostgresIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,178 +0,0 @@
|
||||||
const fetch = require("node-fetch")
|
|
||||||
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://github.com/node-fetch/node-fetch",
|
|
||||||
description:
|
|
||||||
"Representational state transfer (REST) is a de-facto standard for a software architecture for interactive applications that typically use multiple Web services. ",
|
|
||||||
friendlyName: "REST API",
|
|
||||||
datasource: {
|
|
||||||
url: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
default: "localhost",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
defaultHeaders: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
required: false,
|
|
||||||
default: {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
create: {
|
|
||||||
readable: true,
|
|
||||||
displayName: "POST",
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
urlDisplay: true,
|
|
||||||
fields: {
|
|
||||||
path: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
queryString: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
headers: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
},
|
|
||||||
requestBody: {
|
|
||||||
type: FIELD_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
read: {
|
|
||||||
displayName: "GET",
|
|
||||||
readable: true,
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
urlDisplay: true,
|
|
||||||
fields: {
|
|
||||||
path: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
queryString: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
headers: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
displayName: "PUT",
|
|
||||||
readable: true,
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
urlDisplay: true,
|
|
||||||
fields: {
|
|
||||||
path: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
queryString: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
headers: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
},
|
|
||||||
requestBody: {
|
|
||||||
type: FIELD_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
delete: {
|
|
||||||
displayName: "DELETE",
|
|
||||||
type: QUERY_TYPES.FIELDS,
|
|
||||||
urlDisplay: true,
|
|
||||||
fields: {
|
|
||||||
path: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
queryString: {
|
|
||||||
type: FIELD_TYPES.STRING,
|
|
||||||
},
|
|
||||||
headers: {
|
|
||||||
type: FIELD_TYPES.OBJECT,
|
|
||||||
},
|
|
||||||
requestBody: {
|
|
||||||
type: FIELD_TYPES.JSON,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class RestIntegration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
}
|
|
||||||
|
|
||||||
async parseResponse(response) {
|
|
||||||
switch (this.headers.Accept) {
|
|
||||||
case "application/json":
|
|
||||||
return await response.json()
|
|
||||||
case "text/html":
|
|
||||||
return await response.text()
|
|
||||||
default:
|
|
||||||
return await response.json()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async create({ path = "", queryString = "", headers = {}, json }) {
|
|
||||||
this.headers = {
|
|
||||||
...this.config.defaultHeaders,
|
|
||||||
...headers,
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(this.config.url + path + queryString, {
|
|
||||||
method: "POST",
|
|
||||||
headers: this.headers,
|
|
||||||
body: JSON.stringify(json),
|
|
||||||
})
|
|
||||||
|
|
||||||
return await this.parseResponse(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
async read({ path = "", queryString = "", headers = {} }) {
|
|
||||||
this.headers = {
|
|
||||||
...this.config.defaultHeaders,
|
|
||||||
...headers,
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(this.config.url + path + queryString, {
|
|
||||||
headers: this.headers,
|
|
||||||
})
|
|
||||||
|
|
||||||
return await this.parseResponse(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
async update({ path = "", queryString = "", headers = {}, json }) {
|
|
||||||
this.headers = {
|
|
||||||
...this.config.defaultHeaders,
|
|
||||||
...headers,
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(this.config.url + path + queryString, {
|
|
||||||
method: "POST",
|
|
||||||
headers: this.headers,
|
|
||||||
body: JSON.stringify(json),
|
|
||||||
})
|
|
||||||
|
|
||||||
return await this.parseResponse(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete({ path = "", queryString = "", headers = {} }) {
|
|
||||||
this.headers = {
|
|
||||||
...this.config.defaultHeaders,
|
|
||||||
...headers,
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(this.config.url + path + queryString, {
|
|
||||||
method: "DELETE",
|
|
||||||
headers: this.headers,
|
|
||||||
})
|
|
||||||
|
|
||||||
return await this.parseResponse(response)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: RestIntegration,
|
|
||||||
}
|
|
|
@ -0,0 +1,197 @@
|
||||||
|
import {
|
||||||
|
Integration,
|
||||||
|
DatasourceFieldTypes,
|
||||||
|
QueryTypes,
|
||||||
|
} from "./base/definitions"
|
||||||
|
|
||||||
|
module RestModule {
|
||||||
|
const fetch = require("node-fetch")
|
||||||
|
|
||||||
|
interface RestConfig {
|
||||||
|
url: string
|
||||||
|
defaultHeaders: {
|
||||||
|
[key: string]: any
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://github.com/node-fetch/node-fetch",
|
||||||
|
description:
|
||||||
|
"Representational state transfer (REST) is a de-facto standard for a software architecture for interactive applications that typically use multiple Web services. ",
|
||||||
|
friendlyName: "REST API",
|
||||||
|
datasource: {
|
||||||
|
url: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
default: "localhost",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
defaultHeaders: {
|
||||||
|
type: DatasourceFieldTypes.OBJECT,
|
||||||
|
required: false,
|
||||||
|
default: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
create: {
|
||||||
|
readable: true,
|
||||||
|
displayName: "POST",
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
urlDisplay: true,
|
||||||
|
fields: {
|
||||||
|
path: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
queryString: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
type: DatasourceFieldTypes.OBJECT,
|
||||||
|
},
|
||||||
|
requestBody: {
|
||||||
|
type: DatasourceFieldTypes.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
read: {
|
||||||
|
displayName: "GET",
|
||||||
|
readable: true,
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
urlDisplay: true,
|
||||||
|
fields: {
|
||||||
|
path: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
queryString: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
type: DatasourceFieldTypes.OBJECT,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
displayName: "PUT",
|
||||||
|
readable: true,
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
urlDisplay: true,
|
||||||
|
fields: {
|
||||||
|
path: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
queryString: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
type: DatasourceFieldTypes.OBJECT,
|
||||||
|
},
|
||||||
|
requestBody: {
|
||||||
|
type: DatasourceFieldTypes.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
delete: {
|
||||||
|
displayName: "DELETE",
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
urlDisplay: true,
|
||||||
|
fields: {
|
||||||
|
path: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
queryString: {
|
||||||
|
type: DatasourceFieldTypes.STRING,
|
||||||
|
},
|
||||||
|
headers: {
|
||||||
|
type: DatasourceFieldTypes.OBJECT,
|
||||||
|
},
|
||||||
|
requestBody: {
|
||||||
|
type: DatasourceFieldTypes.JSON,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class RestIntegration {
|
||||||
|
private config: RestConfig
|
||||||
|
private headers: {
|
||||||
|
[key: string]: string
|
||||||
|
} = {}
|
||||||
|
|
||||||
|
constructor(config: RestConfig) {
|
||||||
|
this.config = config
|
||||||
|
}
|
||||||
|
|
||||||
|
async parseResponse(response: any) {
|
||||||
|
switch (this.headers.Accept) {
|
||||||
|
case "application/json":
|
||||||
|
return await response.json()
|
||||||
|
case "text/html":
|
||||||
|
return await response.text()
|
||||||
|
default:
|
||||||
|
return await response.json()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async create({ path = "", queryString = "", headers = {}, json = {} }) {
|
||||||
|
this.headers = {
|
||||||
|
...this.config.defaultHeaders,
|
||||||
|
...headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.config.url + path + queryString, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.headers,
|
||||||
|
body: JSON.stringify(json),
|
||||||
|
})
|
||||||
|
|
||||||
|
return await this.parseResponse(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
async read({ path = "", queryString = "", headers = {} }) {
|
||||||
|
this.headers = {
|
||||||
|
...this.config.defaultHeaders,
|
||||||
|
...headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.config.url + path + queryString, {
|
||||||
|
headers: this.headers,
|
||||||
|
})
|
||||||
|
|
||||||
|
return await this.parseResponse(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
async update({ path = "", queryString = "", headers = {}, json = {} }) {
|
||||||
|
this.headers = {
|
||||||
|
...this.config.defaultHeaders,
|
||||||
|
...headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.config.url + path + queryString, {
|
||||||
|
method: "POST",
|
||||||
|
headers: this.headers,
|
||||||
|
body: JSON.stringify(json),
|
||||||
|
})
|
||||||
|
|
||||||
|
return await this.parseResponse(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete({ path = "", queryString = "", headers = {} }) {
|
||||||
|
this.headers = {
|
||||||
|
...this.config.defaultHeaders,
|
||||||
|
...headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.config.url + path + queryString, {
|
||||||
|
method: "DELETE",
|
||||||
|
headers: this.headers,
|
||||||
|
})
|
||||||
|
|
||||||
|
return await this.parseResponse(response)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: RestIntegration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,60 +0,0 @@
|
||||||
const AWS = require("aws-sdk")
|
|
||||||
|
|
||||||
const SCHEMA = {
|
|
||||||
docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
|
|
||||||
description:
|
|
||||||
"Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
|
|
||||||
friendlyName: "Amazon S3",
|
|
||||||
datasource: {
|
|
||||||
region: {
|
|
||||||
type: "string",
|
|
||||||
required: true,
|
|
||||||
default: "us-east-1",
|
|
||||||
},
|
|
||||||
accessKeyId: {
|
|
||||||
type: "password",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
secretAccessKey: {
|
|
||||||
type: "password",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
read: {
|
|
||||||
type: "fields",
|
|
||||||
fields: {
|
|
||||||
bucket: {
|
|
||||||
type: "string",
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
class S3Integration {
|
|
||||||
constructor(config) {
|
|
||||||
this.config = config
|
|
||||||
this.connect()
|
|
||||||
this.client = new AWS.S3()
|
|
||||||
}
|
|
||||||
|
|
||||||
async connect() {
|
|
||||||
AWS.config.update(this.config)
|
|
||||||
}
|
|
||||||
|
|
||||||
async read(query) {
|
|
||||||
const response = await this.client
|
|
||||||
.listObjects({
|
|
||||||
Bucket: query.bucket,
|
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
return response.Contents
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
schema: SCHEMA,
|
|
||||||
integration: S3Integration,
|
|
||||||
}
|
|
|
@ -0,0 +1,74 @@
|
||||||
|
import { Integration, QueryTypes } from "./base/definitions"
|
||||||
|
|
||||||
|
module S3Module {
|
||||||
|
const AWS = require("aws-sdk")
|
||||||
|
|
||||||
|
interface S3Config {
|
||||||
|
region: string
|
||||||
|
accessKeyId: string
|
||||||
|
secretAccessKey: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const SCHEMA: Integration = {
|
||||||
|
docs: "https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
|
||||||
|
description:
|
||||||
|
"Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.",
|
||||||
|
friendlyName: "Amazon S3",
|
||||||
|
datasource: {
|
||||||
|
region: {
|
||||||
|
type: "string",
|
||||||
|
required: true,
|
||||||
|
default: "us-east-1",
|
||||||
|
},
|
||||||
|
accessKeyId: {
|
||||||
|
type: "password",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
secretAccessKey: {
|
||||||
|
type: "password",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
query: {
|
||||||
|
read: {
|
||||||
|
type: QueryTypes.FIELDS,
|
||||||
|
fields: {
|
||||||
|
bucket: {
|
||||||
|
type: "string",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
class S3Integration {
|
||||||
|
private readonly config: S3Config
|
||||||
|
private client: any
|
||||||
|
private connectionPromise: Promise<any>
|
||||||
|
|
||||||
|
constructor(config: S3Config) {
|
||||||
|
this.config = config
|
||||||
|
this.connectionPromise = this.connect()
|
||||||
|
this.client = new AWS.S3()
|
||||||
|
}
|
||||||
|
|
||||||
|
async connect() {
|
||||||
|
AWS.config.update(this.config)
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(query: { bucket: string }) {
|
||||||
|
const response = await this.client
|
||||||
|
.listObjects({
|
||||||
|
Bucket: query.bucket,
|
||||||
|
})
|
||||||
|
.promise()
|
||||||
|
return response.Contents
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
schema: SCHEMA,
|
||||||
|
integration: S3Integration,
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,5 +1,3 @@
|
||||||
const PouchDB = require("pouchdb")
|
|
||||||
const CouchDBIntegration = require("../couchdb")
|
|
||||||
jest.mock("pouchdb", () => function CouchDBMock() {
|
jest.mock("pouchdb", () => function CouchDBMock() {
|
||||||
this.post = jest.fn()
|
this.post = jest.fn()
|
||||||
this.allDocs = jest.fn(() => ({ rows: [] }))
|
this.allDocs = jest.fn(() => ({ rows: [] }))
|
||||||
|
@ -8,6 +6,7 @@ jest.mock("pouchdb", () => function CouchDBMock() {
|
||||||
this.plugin = jest.fn()
|
this.plugin = jest.fn()
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const CouchDBIntegration = require("../couchdb")
|
||||||
|
|
||||||
class TestConfiguration {
|
class TestConfiguration {
|
||||||
constructor(config = {}) {
|
constructor(config = {}) {
|
||||||
|
|
|
@ -20,7 +20,7 @@ describe("MS SQL Server Integration", () => {
|
||||||
const response = await config.integration.create({
|
const response = await config.integration.create({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined)
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the read method with the correct params", async () => {
|
it("calls the read method with the correct params", async () => {
|
||||||
|
@ -28,7 +28,7 @@ describe("MS SQL Server Integration", () => {
|
||||||
const response = await config.integration.read({
|
const response = await config.integration.read({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined)
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("no rows returned", () => {
|
describe("no rows returned", () => {
|
||||||
|
|
|
@ -19,7 +19,7 @@ describe("MySQL Integration", () => {
|
||||||
await config.integration.create({
|
await config.integration.create({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the read method with the correct params", async () => {
|
it("calls the read method with the correct params", async () => {
|
||||||
|
@ -27,7 +27,7 @@ describe("MySQL Integration", () => {
|
||||||
await config.integration.read({
|
await config.integration.read({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the update method with the correct params", async () => {
|
it("calls the update method with the correct params", async () => {
|
||||||
|
@ -35,7 +35,7 @@ describe("MySQL Integration", () => {
|
||||||
await config.integration.update({
|
await config.integration.update({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the delete method with the correct params", async () => {
|
it("calls the delete method with the correct params", async () => {
|
||||||
|
@ -43,7 +43,7 @@ describe("MySQL Integration", () => {
|
||||||
await config.integration.delete({
|
await config.integration.delete({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
|
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("no rows returned", () => {
|
describe("no rows returned", () => {
|
||||||
|
|
|
@ -20,7 +20,7 @@ describe("Postgres Integration", () => {
|
||||||
const response = await config.integration.create({
|
const response = await config.integration.create({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
|
expect(pg.queryMock).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the read method with the correct params", async () => {
|
it("calls the read method with the correct params", async () => {
|
||||||
|
@ -28,7 +28,7 @@ describe("Postgres Integration", () => {
|
||||||
const response = await config.integration.read({
|
const response = await config.integration.read({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
|
expect(pg.queryMock).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the update method with the correct params", async () => {
|
it("calls the update method with the correct params", async () => {
|
||||||
|
@ -36,7 +36,7 @@ describe("Postgres Integration", () => {
|
||||||
const response = await config.integration.update({
|
const response = await config.integration.update({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
|
expect(pg.queryMock).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
it("calls the delete method with the correct params", async () => {
|
it("calls the delete method with the correct params", async () => {
|
||||||
|
@ -44,7 +44,7 @@ describe("Postgres Integration", () => {
|
||||||
await config.integration.delete({
|
await config.integration.delete({
|
||||||
sql
|
sql
|
||||||
})
|
})
|
||||||
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
|
expect(pg.queryMock).toHaveBeenCalledWith(sql, {})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("no rows returned", () => {
|
describe("no rows returned", () => {
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
|
jest.mock("node-fetch", () => jest.fn(() => ({ json: jest.fn(), text: jest.fn() })))
|
||||||
const fetch = require("node-fetch")
|
const fetch = require("node-fetch")
|
||||||
const RestIntegration = require("../rest")
|
const RestIntegration = require("../rest")
|
||||||
jest.mock("node-fetch", () => jest.fn(() => ({ json: jest.fn(), text: jest.fn() })))
|
|
||||||
|
|
||||||
class TestConfiguration {
|
class TestConfiguration {
|
||||||
constructor(config = {}) {
|
constructor(config = {}) {
|
||||||
|
|
|
@ -1,17 +1,18 @@
|
||||||
|
import { SqlQuery } from "./base/definitions"
|
||||||
const { DocumentTypes, SEPARATOR } = require("../db/utils")
|
const { DocumentTypes, SEPARATOR } = require("../db/utils")
|
||||||
const { FieldTypes } = require("../constants")
|
const { FieldTypes } = require("../constants")
|
||||||
|
|
||||||
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
|
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
|
||||||
|
|
||||||
exports.isExternalTable = tableId => {
|
export function isExternalTable(tableId: string) {
|
||||||
return tableId.includes(DocumentTypes.DATASOURCE)
|
return tableId.includes(DocumentTypes.DATASOURCE)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.buildExternalTableId = (datasourceId, tableName) => {
|
export function buildExternalTableId(datasourceId: string, tableName: string) {
|
||||||
return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
|
return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.breakExternalTableId = tableId => {
|
export function breakExternalTableId(tableId: string) {
|
||||||
const parts = tableId.split(DOUBLE_SEPARATOR)
|
const parts = tableId.split(DOUBLE_SEPARATOR)
|
||||||
let tableName = parts.pop()
|
let tableName = parts.pop()
|
||||||
// if they need joined
|
// if they need joined
|
||||||
|
@ -19,23 +20,23 @@ exports.breakExternalTableId = tableId => {
|
||||||
return { datasourceId, tableName }
|
return { datasourceId, tableName }
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.generateRowIdField = (keyProps = []) => {
|
export function generateRowIdField(keyProps: any[] = []) {
|
||||||
if (!Array.isArray(keyProps)) {
|
if (!Array.isArray(keyProps)) {
|
||||||
keyProps = [keyProps]
|
keyProps = [keyProps]
|
||||||
}
|
}
|
||||||
// this conserves order and types
|
// this conserves order and types
|
||||||
return encodeURIComponent(JSON.stringify(keyProps))
|
return encodeURIComponent(JSON.stringify(keyProps).replace(/"/g, "'"))
|
||||||
}
|
}
|
||||||
|
|
||||||
// should always return an array
|
// should always return an array
|
||||||
exports.breakRowIdField = _id => {
|
export function breakRowIdField(_id: string) {
|
||||||
if (!_id) {
|
if (!_id) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
return JSON.parse(decodeURIComponent(_id))
|
return JSON.parse(decodeURIComponent(_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.convertType = (type, map) => {
|
export function convertType(type: string, map: { [key: string]: any }) {
|
||||||
for (let [external, internal] of Object.entries(map)) {
|
for (let [external, internal] of Object.entries(map)) {
|
||||||
if (type.toLowerCase().includes(external)) {
|
if (type.toLowerCase().includes(external)) {
|
||||||
return internal
|
return internal
|
||||||
|
@ -43,3 +44,11 @@ exports.convertType = (type, map) => {
|
||||||
}
|
}
|
||||||
return FieldTypes.STRING
|
return FieldTypes.STRING
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function getSqlQuery(query: SqlQuery | string): SqlQuery {
|
||||||
|
if (typeof query === "string") {
|
||||||
|
return { sql: query }
|
||||||
|
} else {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,6 +1,3 @@
|
||||||
const authorizedMiddleware = require("../authorized")
|
|
||||||
const env = require("../../environment")
|
|
||||||
const { PermissionTypes, PermissionLevels } = require("@budibase/auth/permissions")
|
|
||||||
jest.mock("../../environment", () => ({
|
jest.mock("../../environment", () => ({
|
||||||
prod: false,
|
prod: false,
|
||||||
isTest: () => true,
|
isTest: () => true,
|
||||||
|
@ -10,6 +7,9 @@ jest.mock("../../environment", () => ({
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
|
const authorizedMiddleware = require("../authorized")
|
||||||
|
const env = require("../../environment")
|
||||||
|
const { PermissionTypes, PermissionLevels } = require("@budibase/auth/permissions")
|
||||||
|
|
||||||
class TestConfiguration {
|
class TestConfiguration {
|
||||||
constructor(role) {
|
constructor(role) {
|
||||||
|
@ -77,17 +77,6 @@ describe("Authorization middleware", () => {
|
||||||
config = new TestConfiguration()
|
config = new TestConfiguration()
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("external web hook call", () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
config = new TestConfiguration()
|
|
||||||
config.setEnvironment(true)
|
|
||||||
config.setRequestHeaders({
|
|
||||||
"x-api-key": "abc123",
|
|
||||||
"x-instanceid": "instance123",
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe("non-webhook call", () => {
|
describe("non-webhook call", () => {
|
||||||
let config
|
let config
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,3 @@
|
||||||
const usageQuotaMiddleware = require("../usageQuota")
|
|
||||||
const usageQuota = require("../../utilities/usageQuota")
|
|
||||||
const CouchDB = require("../../db")
|
|
||||||
const env = require("../../environment")
|
|
||||||
|
|
||||||
jest.mock("../../db")
|
jest.mock("../../db")
|
||||||
jest.mock("../../utilities/usageQuota")
|
jest.mock("../../utilities/usageQuota")
|
||||||
jest.mock("../../environment", () => ({
|
jest.mock("../../environment", () => ({
|
||||||
|
@ -12,6 +7,11 @@ jest.mock("../../environment", () => ({
|
||||||
_set: () => {},
|
_set: () => {},
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
const usageQuotaMiddleware = require("../usageQuota")
|
||||||
|
const usageQuota = require("../../utilities/usageQuota")
|
||||||
|
const CouchDB = require("../../db")
|
||||||
|
const env = require("../../environment")
|
||||||
|
|
||||||
class TestConfiguration {
|
class TestConfiguration {
|
||||||
constructor() {
|
constructor() {
|
||||||
this.throw = jest.fn()
|
this.throw = jest.fn()
|
||||||
|
|
|
@ -26,7 +26,8 @@ const PASSWORD = "babs_password"
|
||||||
class TestConfiguration {
|
class TestConfiguration {
|
||||||
constructor(openServer = true) {
|
constructor(openServer = true) {
|
||||||
if (openServer) {
|
if (openServer) {
|
||||||
env.PORT = 4002
|
// use a random port because it doesn't matter
|
||||||
|
env.PORT = 0
|
||||||
this.server = require("../../app")
|
this.server = require("../../app")
|
||||||
// we need the request for logging in, involves cookies, hard to fake
|
// we need the request for logging in, involves cookies, hard to fake
|
||||||
this.request = supertest(this.server)
|
this.request = supertest(this.server)
|
||||||
|
@ -90,6 +91,9 @@ class TestConfiguration {
|
||||||
}
|
}
|
||||||
|
|
||||||
end() {
|
end() {
|
||||||
|
if (!this) {
|
||||||
|
return
|
||||||
|
}
|
||||||
if (this.server) {
|
if (this.server) {
|
||||||
this.server.close()
|
this.server.close()
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "es5",
|
||||||
|
"module": "commonjs",
|
||||||
|
"lib": ["es6"],
|
||||||
|
"allowJs": true,
|
||||||
|
"outDir": "dist",
|
||||||
|
"strict": true,
|
||||||
|
"noImplicitAny": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"resolveJsonModule": true
|
||||||
|
},
|
||||||
|
"include": [
|
||||||
|
"./src/**/*"
|
||||||
|
],
|
||||||
|
"exclude": [
|
||||||
|
"node_modules",
|
||||||
|
"**/*.json",
|
||||||
|
"**/*.spec.ts",
|
||||||
|
"**/*.spec.js"
|
||||||
|
]
|
||||||
|
}
|
File diff suppressed because it is too large
Load Diff
|
@ -242,6 +242,7 @@
|
||||||
"styleable": true,
|
"styleable": true,
|
||||||
"illegalChildren": ["section"],
|
"illegalChildren": ["section"],
|
||||||
"hasChildren": true,
|
"hasChildren": true,
|
||||||
|
"showSettingsBar": true,
|
||||||
"settings": [
|
"settings": [
|
||||||
{
|
{
|
||||||
"type": "dataProvider",
|
"type": "dataProvider",
|
||||||
|
@ -258,6 +259,93 @@
|
||||||
"type": "filter",
|
"type": "filter",
|
||||||
"label": "Filtering",
|
"label": "Filtering",
|
||||||
"key": "filter"
|
"key": "filter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "select",
|
||||||
|
"label": "Direction",
|
||||||
|
"key": "direction",
|
||||||
|
"showInBar": true,
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"label": "Column",
|
||||||
|
"value": "column",
|
||||||
|
"barIcon": "ViewRow",
|
||||||
|
"barTitle": "Column layout"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Row",
|
||||||
|
"value": "row",
|
||||||
|
"barIcon": "ViewColumn",
|
||||||
|
"barTitle": "Row layout"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"defaultValue": "column"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "select",
|
||||||
|
"label": "Horiz. Align",
|
||||||
|
"key": "hAlign",
|
||||||
|
"showInBar": true,
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"label": "Left",
|
||||||
|
"value": "left",
|
||||||
|
"barIcon": "AlignLeft",
|
||||||
|
"barTitle": "Align left"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Center",
|
||||||
|
"value": "center",
|
||||||
|
"barIcon": "AlignCenter",
|
||||||
|
"barTitle": "Align center"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Right",
|
||||||
|
"value": "right",
|
||||||
|
"barIcon": "AlignRight",
|
||||||
|
"barTitle": "Align right"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Stretch",
|
||||||
|
"value": "stretch",
|
||||||
|
"barIcon": "MoveLeftRight",
|
||||||
|
"barTitle": "Align stretched horizontally"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"defaultValue": "stretch"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "select",
|
||||||
|
"label": "Vert. Align",
|
||||||
|
"key": "vAlign",
|
||||||
|
"showInBar": "true",
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"label": "Top",
|
||||||
|
"value": "top",
|
||||||
|
"barIcon": "AlignTop",
|
||||||
|
"barTitle": "Align top"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Middle",
|
||||||
|
"value": "middle",
|
||||||
|
"barIcon": "AlignMiddle",
|
||||||
|
"barTitle": "Align middle"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Bottom",
|
||||||
|
"value": "bottom",
|
||||||
|
"barIcon": "AlignBottom",
|
||||||
|
"barTitle": "Align bottom"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Stretch",
|
||||||
|
"value": "stretch",
|
||||||
|
"barIcon": "MoveUpDown",
|
||||||
|
"barTitle": "Align stretched vertically"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"defaultValue": "top"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"context": {
|
"context": {
|
||||||
|
|
|
@ -29,11 +29,11 @@
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"svelte"
|
"svelte"
|
||||||
],
|
],
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc",
|
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/bbui": "^0.9.60",
|
"@budibase/bbui": "^0.9.65",
|
||||||
"@spectrum-css/link": "^3.1.3",
|
"@spectrum-css/link": "^3.1.3",
|
||||||
"@spectrum-css/page": "^3.0.1",
|
"@spectrum-css/page": "^3.0.1",
|
||||||
"@spectrum-css/vars": "^3.0.1",
|
"@spectrum-css/vars": "^3.0.1",
|
||||||
|
|
|
@ -1,18 +1,22 @@
|
||||||
<script>
|
<script>
|
||||||
import { getContext } from "svelte"
|
import { getContext } from "svelte"
|
||||||
import Placeholder from "./Placeholder.svelte"
|
import Placeholder from "./Placeholder.svelte"
|
||||||
|
import Container from "./Container.svelte"
|
||||||
|
|
||||||
export let dataProvider
|
export let dataProvider
|
||||||
export let noRowsMessage
|
export let noRowsMessage
|
||||||
|
export let direction
|
||||||
|
export let hAlign
|
||||||
|
export let vAlign
|
||||||
|
|
||||||
const { styleable, Provider } = getContext("sdk")
|
const { Provider } = getContext("sdk")
|
||||||
const component = getContext("component")
|
const component = getContext("component")
|
||||||
|
|
||||||
$: rows = dataProvider?.rows ?? []
|
$: rows = dataProvider?.rows ?? []
|
||||||
$: loaded = dataProvider?.loaded ?? true
|
$: loaded = dataProvider?.loaded ?? true
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<div use:styleable={$component.styles}>
|
<Container {direction} {hAlign} {vAlign}>
|
||||||
{#if $component.empty}
|
{#if $component.empty}
|
||||||
<Placeholder />
|
<Placeholder />
|
||||||
{:else if rows.length > 0}
|
{:else if rows.length > 0}
|
||||||
|
@ -24,7 +28,7 @@
|
||||||
{:else if loaded && noRowsMessage}
|
{:else if loaded && noRowsMessage}
|
||||||
<div class="noRows"><i class="ri-list-check-2" />{noRowsMessage}</div>
|
<div class="noRows"><i class="ri-list-check-2" />{noRowsMessage}</div>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</Container>
|
||||||
|
|
||||||
<style>
|
<style>
|
||||||
.noRows {
|
.noRows {
|
||||||
|
|
|
@ -16,7 +16,7 @@
|
||||||
if (type !== "Update") {
|
if (type !== "Update") {
|
||||||
return {}
|
return {}
|
||||||
}
|
}
|
||||||
// Only inherit values for forms targetting internal tables
|
// Only inherit values for forms targeting internal tables
|
||||||
if (!dataSource?.tableId) {
|
if (!dataSource?.tableId) {
|
||||||
return {}
|
return {}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/string-templates",
|
"name": "@budibase/string-templates",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"description": "Handlebars wrapper for Budibase templating.",
|
"description": "Handlebars wrapper for Budibase templating.",
|
||||||
"main": "src/index.cjs",
|
"main": "src/index.cjs",
|
||||||
"module": "dist/bundle.mjs",
|
"module": "dist/bundle.mjs",
|
||||||
|
|
|
@ -20,10 +20,13 @@ const HELPERS = [
|
||||||
// this help is applied to all statements
|
// this help is applied to all statements
|
||||||
new Helper(HelperFunctionNames.ALL, value => {
|
new Helper(HelperFunctionNames.ALL, value => {
|
||||||
// null/undefined values produce bad results
|
// null/undefined values produce bad results
|
||||||
if (value == null) {
|
if (value == null || typeof value !== "string") {
|
||||||
return ""
|
return value || ""
|
||||||
}
|
}
|
||||||
let text = new SafeString(unescape(value).replace(/&/g, "&"))
|
if (value && value.string) {
|
||||||
|
value = value.string
|
||||||
|
}
|
||||||
|
let text = new SafeString(value.replace(/&/g, "&"))
|
||||||
if (text == null || typeof text !== "string") {
|
if (text == null || typeof text !== "string") {
|
||||||
return text
|
return text
|
||||||
}
|
}
|
||||||
|
|
|
@ -442,4 +442,15 @@ describe("Cover a few complex use cases", () => {
|
||||||
const output = await processObject(input, tableJson)
|
const output = await processObject(input, tableJson)
|
||||||
expect(output.dataProvider).not.toBe("Invalid Binding")
|
expect(output.dataProvider).not.toBe("Invalid Binding")
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it("should be able to handle external ids", async () => {
|
||||||
|
const input = {
|
||||||
|
dataProvider: "{{ literal [_id] }}",
|
||||||
|
}
|
||||||
|
const context = {
|
||||||
|
_id: "%5B%221%22%2C%221%22%5D",
|
||||||
|
}
|
||||||
|
const output = await processObject(input, context)
|
||||||
|
expect(output.dataProvider).toBe("%5B%221%22%2C%221%22%5D")
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/worker",
|
"name": "@budibase/worker",
|
||||||
"email": "hi@budibase.com",
|
"email": "hi@budibase.com",
|
||||||
"version": "0.9.60",
|
"version": "0.9.65",
|
||||||
"description": "Budibase background service",
|
"description": "Budibase background service",
|
||||||
"main": "src/index.js",
|
"main": "src/index.js",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
@ -21,8 +21,8 @@
|
||||||
"author": "Budibase",
|
"author": "Budibase",
|
||||||
"license": "AGPL-3.0-or-later",
|
"license": "AGPL-3.0-or-later",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/auth": "^0.9.60",
|
"@budibase/auth": "^0.9.65",
|
||||||
"@budibase/string-templates": "^0.9.60",
|
"@budibase/string-templates": "^0.9.65",
|
||||||
"@koa/router": "^8.0.0",
|
"@koa/router": "^8.0.0",
|
||||||
"aws-sdk": "^2.811.0",
|
"aws-sdk": "^2.811.0",
|
||||||
"bcryptjs": "^2.4.3",
|
"bcryptjs": "^2.4.3",
|
||||||
|
|
Loading…
Reference in New Issue