Merge remote-tracking branch 'origin/develop' into feature/export-from-client

commit e3670d6737
@@ -38,6 +38,17 @@ jobs:
           fi
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

+      - name: Tag and release Proxy service docker image
+        run: |
+          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
+          yarn build:docker:proxy:prod
+          docker tag budibase/proxy:$release_tag budibase/proxy:$PROD_TAG
+          docker push budibase/proxy:$PROD_TAG
+        env:
+          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
+          PROD_TAG: k8s
+
       - name: Configure AWS Credentials
         uses: aws-actions/configure-aws-credentials@v1
         with:

@@ -23,12 +23,24 @@ jobs:
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Get the latest budibase release version
        id: version
        run: |
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

+      - name: Tag and release Proxy service docker image
+        run: |
+          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
+          yarn build:docker:proxy:preprod
+          docker tag budibase/proxy:$release_tag budibase/proxy:$PREPROD_TAG
+          docker push budibase/proxy:$PREPROD_TAG
+        env:
+          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
+          PREPROD_TAG: k8s-preprod
+
      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \

@@ -19,13 +19,7 @@ http {
     tcp_nodelay on;
     server_tokens off;
     types_hash_max_size 2048;
-    {{#if compose}}
-    resolver 127.0.0.11 ipv6=off;
-    {{/if}}
-    {{#if k8s}}
-    resolver kube-dns.kube-system.svc.cluster.local valid=10s;
-    {{/if}}
-
+    resolver {{ resolver }} valid=10s ipv6=off;

     # buffering
     client_body_buffer_size 1K;
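A sketch of what this templating change presumably produces (the generateProxyConfig script itself is not shown in this diff; resolver values are inferred from the removed {{#if}} branches, and processStringSync is the templating helper this repo uses elsewhere):

  // hypothetical illustration only
  const { processStringSync } = require("@budibase/string-templates")
  const line = "resolver {{ resolver }} valid=10s ipv6=off;"
  processStringSync(line, { resolver: "127.0.0.11" })
  // -> "resolver 127.0.0.11 valid=10s ipv6=off;"  (compose)
  processStringSync(line, { resolver: "kube-dns.kube-system.svc.cluster.local" })
  // -> the k8s/preprod/prod variant, replacing both hard-coded branches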
@@ -55,7 +49,7 @@ http {
     add_header X-Frame-Options SAMEORIGIN always;
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self'; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
+    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self' https:; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;

     # upstreams
     set $apps {{ apps }};

@@ -1,145 +0,0 @@
-user nginx;
-error_log /var/log/nginx/error.log debug;
-pid /var/run/nginx.pid;
-worker_processes auto;
-worker_rlimit_nofile 33282;
-
-events {
-  worker_connections 1024;
-}
-
-http {
-  limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
-  include /etc/nginx/mime.types;
-  default_type application/octet-stream;
-  charset utf-8;
-  sendfile on;
-  tcp_nopush on;
-  tcp_nodelay on;
-  server_tokens off;
-  types_hash_max_size 2048;
-
-  # buffering
-  client_body_buffer_size 1K;
-  client_header_buffer_size 1k;
-  client_max_body_size 1k;
-  ignore_invalid_headers off;
-
-  log_format main '$remote_addr - $remote_user [$time_local] "$request" '
-    '$status $body_bytes_sent "$http_referer" '
-    '"$http_user_agent" "$http_x_forwarded_for"';
-
-  map $http_upgrade $connection_upgrade {
-    default "upgrade";
-  }
-
-  server {
-    listen 10000 default_server;
-    listen [::]:10000 default_server;
-    server_name _;
-    client_max_body_size 1000m;
-    ignore_invalid_headers off;
-    proxy_buffering off;
-    port_in_redirect off;
-
-    # Security Headers
-    add_header X-Frame-Options SAMEORIGIN always;
-    add_header X-Content-Type-Options nosniff always;
-    add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me; frame-src 'self'; img-src http: https: data:; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
-
-    location /app {
-      proxy_pass http://app-service:4002;
-      rewrite ^/app/(.*)$ /$1 break;
-    }
-
-    location = / {
-      port_in_redirect off;
-      proxy_pass http://app-service:4002;
-    }
-
-    location = /v1/update {
-      proxy_pass http://watchtower-service:8080;
-    }
-
-    location /builder/ {
-      port_in_redirect off;
-      proxy_http_version 1.1;
-      proxy_set_header Connection $connection_upgrade;
-      proxy_set_header Upgrade $http_upgrade;
-      proxy_set_header Host $host;
-      proxy_set_header X-Real-IP $remote_addr;
-      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-      proxy_pass http://app-service:4002;
-    }
-
-    location ~ ^/(builder|app_) {
-      port_in_redirect off;
-      proxy_http_version 1.1;
-      proxy_set_header Connection $connection_upgrade;
-      proxy_set_header Upgrade $http_upgrade;
-      proxy_set_header Host $host;
-      proxy_set_header X-Real-IP $remote_addr;
-      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-      proxy_pass http://app-service:4002;
-    }
-
-    location ~ ^/api/(system|admin|global)/ {
-      proxy_pass http://worker-service:4003;
-    }
-
-    location /worker/ {
-      proxy_pass http://worker-service:4003;
-      rewrite ^/worker/(.*)$ /$1 break;
-    }
-
-    location /api/ {
-      # calls to the API are rate limited with bursting
-      limit_req zone=ratelimit burst=20 nodelay;
-
-      # 120s timeout on API requests
-      proxy_read_timeout 120s;
-      proxy_connect_timeout 120s;
-      proxy_send_timeout 120s;
-
-      proxy_http_version 1.1;
-      proxy_set_header Connection $connection_upgrade;
-      proxy_set_header Upgrade $http_upgrade;
-      proxy_set_header Host $host;
-      proxy_set_header X-Real-IP $remote_addr;
-      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-
-      proxy_pass http://app-service:4002;
-    }
-
-    location /db/ {
-      proxy_pass http://couchdb-service:5984;
-      rewrite ^/db/(.*)$ /$1 break;
-    }
-
-    location / {
-      proxy_set_header X-Real-IP $remote_addr;
-      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-      proxy_set_header X-Forwarded-Proto $scheme;
-      proxy_set_header Host $http_host;
-
-      proxy_connect_timeout 300;
-      proxy_http_version 1.1;
-      proxy_set_header Connection "";
-      chunked_transfer_encoding off;
-      proxy_pass http://minio-service:9000;
-    }
-
-    client_header_timeout 60;
-    client_body_timeout 60;
-    keepalive_timeout 60;
-
-    # gzip
-    gzip on;
-    gzip_vary on;
-    gzip_proxied any;
-    gzip_comp_level 6;
-    gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
-  }
-}

@@ -1,5 +1,5 @@
 {
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -47,9 +47,9 @@
     "build:specs": "lerna run specs",
     "build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
     "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
-    "build:docker:proxy:compose": "lerna run generate:proxy:compose && npm run build:docker:proxy",
-    "build:docker:proxy:preprod": "lerna run generate:proxy:preprod && npm run build:docker:proxy",
-    "build:docker:proxy:prod": "lerna run generate:proxy:prod && npm run build:docker:proxy",
+    "build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
+    "build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
+    "build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
     "build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "src/index.js",
   "author": "Budibase",

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "^1.2.1",
-    "@budibase/string-templates": "^1.0.79-alpha.7",
+    "@budibase/string-templates": "^1.0.80-alpha.2",
     "@spectrum-css/actionbutton": "^1.0.1",
     "@spectrum-css/actiongroup": "^1.0.1",
     "@spectrum-css/avatar": "^3.0.2",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.79-alpha.7",
-    "@budibase/client": "^1.0.79-alpha.7",
-    "@budibase/frontend-core": "^1.0.79-alpha.7",
-    "@budibase/string-templates": "^1.0.79-alpha.7",
+    "@budibase/bbui": "^1.0.80-alpha.2",
+    "@budibase/client": "^1.0.80-alpha.2",
+    "@budibase/frontend-core": "^1.0.80-alpha.2",
+    "@budibase/string-templates": "^1.0.80-alpha.2",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

@@ -7,6 +7,7 @@
   import RoleSelect from "./PropertyControls/RoleSelect.svelte"
   import ResetFieldsButton from "./PropertyControls/ResetFieldsButton.svelte"
   import { getComponentForSettingType } from "./PropertyControls/componentSettings"
+  import { Utils } from "@budibase/frontend-core"

   export let componentDefinition
   export let componentInstance
@@ -40,13 +41,13 @@
     ]
   }

-  const updateProp = async (key, value) => {
+  const updateProp = Utils.sequential(async (key, value) => {
     try {
       await store.actions.components.updateProp(key, value)
     } catch (error) {
       notifications.error("Error updating component prop")
     }
-  }
+  })

   const canRenderControl = setting => {
     const control = getComponentForSettingType(setting?.type)

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.79-alpha.7",
-    "@budibase/frontend-core": "^1.0.79-alpha.7",
-    "@budibase/string-templates": "^1.0.79-alpha.7",
+    "@budibase/bbui": "^1.0.80-alpha.2",
+    "@budibase/frontend-core": "^1.0.80-alpha.2",
+    "@budibase/string-templates": "^1.0.80-alpha.2",
     "@spectrum-css/button": "^3.0.3",
     "@spectrum-css/card": "^3.0.3",
     "@spectrum-css/divider": "^1.0.3",

@@ -17,7 +17,7 @@ export const getOptions = (

   dataProvider?.rows?.forEach(row => {
     const value = row?.[valueColumn]
-    if (value) {
+    if (value != null) {
       const label = row[labelColumn] || value
       optionsSet[value] = { value, label }
     }
@@ -30,7 +30,7 @@ export const getOptions = (
   let optionsSet = {}
   dataProvider?.rows?.forEach(row => {
     const value = row?.[valueColumn]
-    if (value) {
+    if (value != null) {
       const label = row[labelColumn] || value
       optionsSet[value] = { value, label }
     }

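A quick illustration of why the truthiness check was replaced (example values are hypothetical):

  // with a row whose valueColumn is 0 or ""
  if (value) { ... }         // skipped: 0 and "" are falsy
  if (value != null) { ... } // kept: only null and undefined are excluded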
@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "^1.0.79-alpha.7",
+    "@budibase/bbui": "^1.0.80-alpha.2",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }

@@ -30,9 +30,11 @@ export const buildOtherEndpoints = API => ({
    * Gets the version of the installed Budibase environment.
    */
   getBudibaseVersion: async () => {
-    return await API.get({
-      url: "/api/dev/version",
-    }).version
+    return (
+      await API.get({
+        url: "/api/dev/version",
+      })
+    ).version
   },

   /**

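The old code read .version off the unresolved Promise returned by API.get, so the await resolved undefined; the parentheses make the property access happen on the resolved response instead:

  // before: await (promise.version)  -> undefined, a Promise has no .version
  // after:  (await promise).version  -> the version field of the JSON response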
@@ -1,7 +1,5 @@
 export { createAPIClient } from "./api"
-export { createLocalStorageStore } from "./stores/localStorage"
 export { fetchData } from "./fetch/fetchData"
 export * as Constants from "./constants"
-export * as LuceneUtils from "./utils/lucene"
-export * as JSONUtils from "./utils/json"
-export * as CookieUtils from "./utils/cookies"
+export * from "./stores"
+export * from "./utils"

@@ -0,0 +1 @@
+export { createLocalStorageStore } from "./localStorage"

@@ -0,0 +1,4 @@
+export * as LuceneUtils from "./lucene"
+export * as JSONUtils from "./json"
+export * as CookieUtils from "./cookies"
+export * as Utils from "./utils"

@@ -0,0 +1,17 @@
+/**
+ * Utility to wrap an async function and ensure all invocations happen
+ * sequentially.
+ * @param fn the async function to run
+ * @return {Promise} a sequential version of the function
+ */
+export const sequential = fn => {
+  let promise
+  return async (...params) => {
+    if (promise) {
+      await promise
+    }
+    promise = fn(...params)
+    await promise
+    promise = null
+  }
+}

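A minimal usage sketch (hypothetical values; this mirrors how updateProp is wrapped in the builder hunk above):

  const save = sequential(async value => {
    await new Promise(r => setTimeout(r, 100)) // e.g. an API call
    console.log("saved", value)
  })
  save(1)
  save(2) // waits for save(1) to settle first, so writes never interleave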
@@ -0,0 +1,17 @@
+module MySQLMock {
+  const mysql: any = {}
+
+  const client = {
+    connect: jest.fn(),
+    end: jest.fn(),
+    query: jest.fn(async () => {
+      return [[]]
+    }),
+  }
+
+  mysql.createConnection = jest.fn(async () => {
+    return client
+  })
+
+  module.exports = mysql
+}

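Note: query resolving to [[]] mirrors the [rows, fields] tuple of mysql2's promise API, which the refactored MySQL integration later in this diff reads as response[0].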
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "1.0.79-alpha.7",
+  "version": "1.0.80-alpha.2",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {
@@ -21,9 +21,6 @@
     "dev:stack:down": "node scripts/dev/manage.js down",
     "dev:stack:nuke": "node scripts/dev/manage.js nuke",
     "dev:builder": "yarn run dev:stack:up && nodemon",
-    "generate:proxy:compose": "node scripts/proxy/generateProxyConfig compose",
-    "generate:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod",
-    "generate:proxy:prod": "node scripts/proxy/generateProxyConfig prod",
     "format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
     "specs": "node specs/generate.js && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
     "lint": "eslint --fix src/",
@@ -74,9 +71,9 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "^10.0.3",
-    "@budibase/backend-core": "^1.0.79-alpha.7",
-    "@budibase/client": "^1.0.79-alpha.7",
-    "@budibase/string-templates": "^1.0.79-alpha.7",
+    "@budibase/backend-core": "^1.0.80-alpha.2",
+    "@budibase/client": "^1.0.80-alpha.2",
+    "@budibase/string-templates": "^1.0.80-alpha.2",
     "@bull-board/api": "^3.7.0",
     "@bull-board/koa": "^3.7.0",
     "@elastic/elasticsearch": "7.10.0",
@@ -111,6 +108,7 @@
     "koa-send": "5.0.0",
     "koa-session": "5.12.0",
     "koa-static": "5.0.0",
+    "koa2-ratelimit": "^1.1.0",
     "lodash": "4.17.21",
     "memorystream": "^0.3.1",
     "mongodb": "3.6.3",
@@ -126,6 +124,7 @@
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",
+    "redis": "4",
     "server-destroy": "1.0.1",
     "svelte": "^3.38.2",
     "swagger-parser": "^10.0.3",
@@ -151,6 +150,7 @@
     "@types/koa-router": "^7.4.2",
     "@types/node": "^15.12.4",
     "@types/oracledb": "^5.2.1",
+    "@types/redis": "^4.0.11",
     "@typescript-eslint/parser": "4.28.0",
     "apidoc": "^0.50.2",
     "babel-jest": "^27.0.2",

@@ -1,5 +1,10 @@
 USE master;

+IF NOT EXISTS(SELECT 1 FROM sys.schemas WHERE name = 'Chains')
+BEGIN
+    EXEC sys.sp_executesql N'CREATE SCHEMA Chains;'
+END
+
 IF OBJECT_ID ('dbo.products', 'U') IS NOT NULL
     DROP TABLE products;
 GO
@@ -61,3 +66,15 @@ VALUES ('Bob', '30'),
        ('Bobert', '99'),
        ('Jan', '22'),
        ('Megan', '11');
+
+IF OBJECT_ID ('Chains.sizes', 'U') IS NOT NULL
+    DROP TABLE Chains.sizes;
+GO
+CREATE TABLE Chains.sizes
+(
+    sizeid int IDENTITY(1, 1),
+    name varchar(30),
+    CONSTRAINT pk_size PRIMARY KEY NONCLUSTERED (sizeid)
+);
+

@@ -8,9 +8,9 @@

 To install oracle express edition simply run `docker-compose up`

-- A single instance pluggable database (PDB) will be created named `xepdb`
+- A single instance pluggable database (PDB) will be created named `xepdb1`
 - The default password is configured in the compose file as `oracle`
-- The `system`, `sys` and `pdbadmin` users all share this password
+- The `system` and `pdbadmin` users share this password

 ## Instant Client

@@ -1,5 +1,6 @@
 SELECT 'CREATE DATABASE main'
 WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE SCHEMA test;
 CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer');
 CREATE TABLE Persons (
   PersonID SERIAL PRIMARY KEY,
@@ -37,6 +38,10 @@ CREATE TABLE Products_Tasks (
     REFERENCES Tasks(TaskID),
   PRIMARY KEY (ProductID, TaskID)
 );
+CREATE TABLE test.table1 (
+  id SERIAL PRIMARY KEY,
+  Name varchar(255)
+);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa');
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer');
 INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
@@ -48,3 +53,4 @@ INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2);
+INSERT INTO test.table1 (Name) VALUES ('Test');

@@ -58,7 +58,7 @@ module External {
   ) {
     const primary = table.primary
     // if passed in array need to copy for shifting etc
-    let idCopy = cloneDeep(id)
+    let idCopy: undefined | string | any[] = cloneDeep(id)
     if (filters) {
       // need to map over the filters and make sure the _id field isn't present
       for (let filter of Object.values(filters)) {

@@ -8,18 +8,54 @@ import authorized from "../../../middleware/authorized"
 import { paramResource, paramSubResource } from "../../../middleware/resourceId"
 import { CtxFn } from "./utils/Endpoint"
 import mapperMiddleware from "./middleware/mapper"
+import env from "../../../environment"
 // below imports don't have declaration files
 const Router = require("@koa/router")
+const { RateLimit, Stores } = require("koa2-ratelimit")
 const {
   PermissionLevels,
   PermissionTypes,
 } = require("@budibase/backend-core/permissions")
+const { getRedisOptions } = require("@budibase/backend-core/redis").utils

 const PREFIX = "/api/public/v1"
+// allow a lot more requests when in test
+const DEFAULT_API_REQ_LIMIT_PER_SEC = env.isTest() ? 100 : 10
+
+function getApiLimitPerSecond(): number {
+  if (!env.API_REQ_LIMIT_PER_SEC) {
+    return DEFAULT_API_REQ_LIMIT_PER_SEC
+  }
+  return parseInt(env.API_REQ_LIMIT_PER_SEC)
+}
+
+if (!env.isTest()) {
+  const REDIS_OPTS = getRedisOptions()
+  RateLimit.defaultOptions({
+    store: new Stores.Redis({
+      // @ts-ignore
+      socket: {
+        host: REDIS_OPTS.host,
+        port: REDIS_OPTS.port,
+      },
+      password: REDIS_OPTS.opts.password,
+      database: 1,
+    }),
+  })
+}
+// rate limiting, allows for 2 requests per second
+const limiter = RateLimit.middleware({
+  interval: { sec: 1 },
+  // per ip, per interval
+  max: getApiLimitPerSecond(),
+})

 const publicRouter = new Router({
   prefix: PREFIX,
 })

+publicRouter.use(limiter)
+
 function addMiddleware(
   endpoints: any,
   middleware: CtxFn,

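A note on how the limit resolves (values taken from the code above): with API_REQ_LIMIT_PER_SEC unset, each IP gets 10 requests per second in production and 100 under test; setting the env var overrides both, e.g.

  // hypothetical override
  API_REQ_LIMIT_PER_SEC=20  ->  RateLimit.middleware({ interval: { sec: 1 }, max: 20 })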
@@ -153,6 +153,7 @@ export interface QueryJson {
     datasourceId: string
     entityId: string
     operation: Operation
+    schema?: string
   }
   resource: {
     fields: string[]

@@ -45,6 +45,7 @@ module.exports = {
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
   MULTI_TENANCY: process.env.MULTI_TENANCY,
   HTTP_MIGRATIONS: process.env.HTTP_MIGRATIONS,
+  API_REQ_LIMIT_PER_SEC: process.env.API_REQ_LIMIT_PER_SEC,
   // environment
   NODE_ENV: process.env.NODE_ENV,
   JEST_WORKER_ID: process.env.JEST_WORKER_ID,

@@ -5,5 +5,8 @@ export interface DatasourcePlus extends IntegrationBase {
   tables: Record<string, Table>
   schemaErrors: Record<string, string>

+  // if the datasource supports the use of bindings directly (to protect against SQL injection)
+  // this returns the format of the identifier
+  getBindingIdentifier(): string
   buildSchema(datasourceId: string, entities: Record<string, Table>): any
 }

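For reference, the binding formats returned by the implementations later in this diff:

  // MSSQL:         "(@p0)", "(@p1)", ...  (named parameters fed via request.input)
  // MySQL:         "?"                    (positional placeholders)
  // PostgreSQL:    "$1", "$2", ...        (numbered placeholders)
  // Oracle:        ":1", ":2", ...        (numbered bind variables)
  // Google Sheets: ""                     (no SQL, so no binding syntax)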
@@ -249,6 +249,9 @@ class InternalBuilder {
   create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     const parsedBody = parseBody(body)
     // make sure no null values in body for creation
     for (let [key, value] of Object.entries(parsedBody)) {
@@ -267,6 +270,9 @@ class InternalBuilder {
   bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     if (!Array.isArray(body)) {
       return query
     }
@@ -275,7 +281,7 @@ class InternalBuilder {
   }

   read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
-    let { endpoint, resource, filters, sort, paginate, relationships } = json
+    let { endpoint, resource, filters, paginate, relationships } = json
     const tableName = endpoint.entityId
     // select all if not specified
     if (!resource) {
@@ -302,6 +308,9 @@ class InternalBuilder {
     }
     // start building the query
     let query: KnexQuery = knex(tableName).limit(foundLimit)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     if (foundOffset) {
       query = query.offset(foundOffset)
     }
@@ -331,6 +340,9 @@ class InternalBuilder {
   update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, body, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     const parsedBody = parseBody(body)
     query = this.addFilters(query, filters, { tableName: endpoint.entityId })
     // mysql can't use returning
@@ -344,6 +356,9 @@ class InternalBuilder {
   delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     query = this.addFilters(query, filters, { tableName: endpoint.entityId })
     // mysql can't use returning
     if (opts.disableReturning) {

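A minimal sketch of what withSchema changes in the generated SQL (illustrative only, using knex directly; table and schema names come from the MSSQL fixtures above):

  const knex = require("knex")({ client: "mssql" })
  knex("sizes").select("*").toString()
  // -> select * from [sizes]
  knex("sizes").withSchema("Chains").select("*").toString()
  // -> select * from [Chains].[sizes]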
@@ -101,28 +101,28 @@ function generateSchema(
 }

 function buildCreateTable(
-  knex: Knex,
+  knex: SchemaBuilder,
   table: Table,
   tables: Record<string, Table>
 ): SchemaBuilder {
-  return knex.schema.createTable(table.name, schema => {
+  return knex.createTable(table.name, schema => {
     generateSchema(schema, table, tables)
   })
 }

 function buildUpdateTable(
-  knex: Knex,
+  knex: SchemaBuilder,
   table: Table,
   tables: Record<string, Table>,
   oldTable: Table
 ): SchemaBuilder {
-  return knex.schema.alterTable(table.name, schema => {
+  return knex.alterTable(table.name, schema => {
     generateSchema(schema, table, tables, oldTable)
   })
 }

-function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
-  return knex.schema.dropTable(table.name)
+function buildDeleteTable(knex: SchemaBuilder, table: Table): SchemaBuilder {
+  return knex.dropTable(table.name)
 }

 class SqlTableQueryBuilder {
@@ -146,7 +146,11 @@ class SqlTableQueryBuilder {
   }

   _tableQuery(json: QueryJson): any {
-    const client = knex({ client: this.sqlClient })
+    let client = knex({ client: this.sqlClient }).schema
+    if (json?.endpoint?.schema) {
+      client = client.withSchema(json.endpoint.schema)
+    }
+
     let query
     if (!json.table || !json.meta || !json.meta.tables) {
       throw "Cannot execute without table being specified"

@@ -6,11 +6,10 @@ import {
 } from "../definitions/datasource"
 import { OAuth2Client } from "google-auth-library"
 import { DatasourcePlus } from "./base/datasourcePlus"
-import { Row, Table, TableSchema } from "../definitions/common"
+import { Table, TableSchema } from "../definitions/common"
 import { buildExternalTableId } from "./utils"
 import { DataSourceOperation, FieldTypes } from "../constants"
 import { GoogleSpreadsheet } from "google-spreadsheet"
-import { table } from "console"

 module GoogleSheetsModule {
   const { getGlobalDB } = require("@budibase/backend-core/tenancy")
@@ -112,6 +111,10 @@ module GoogleSheetsModule {
     this.client = new GoogleSpreadsheet(spreadsheetId)
   }

+  getBindingIdentifier() {
+    return ""
+  }
+
   /**
    * Pull the spreadsheet ID out from a valid google sheets URL
    * @param spreadsheetId - the URL or standard spreadsheetId of the google sheet

@@ -19,6 +19,7 @@ import { Table, TableSchema } from "../definitions/common"
 module MSSQLModule {
   const sqlServer = require("mssql")
   const Sql = require("./base/sql")
+  const DEFAULT_SCHEMA = "dbo"

   interface MSSQLConfig {
     user: string
@@ -26,9 +27,17 @@ module MSSQLModule {
     server: string
     port: number
     database: string
+    schema: string
     encrypt?: boolean
   }

+  interface TablesResponse {
+    TABLE_CATALOG: string
+    TABLE_SCHEMA: string
+    TABLE_NAME: string
+    TABLE_TYPE: string
+  }
+
   const SCHEMA: Integration = {
     docs: "https://github.com/tediousjs/node-mssql",
     plus: true,
@@ -58,6 +67,10 @@ module MSSQLModule {
         type: DatasourceFieldTypes.STRING,
         default: "root",
       },
+      schema: {
+        type: DatasourceFieldTypes.STRING,
+        default: DEFAULT_SCHEMA,
+      },
       encrypt: {
         type: DatasourceFieldTypes.BOOLEAN,
         default: true,
@@ -79,34 +92,9 @@ module MSSQLModule {
     },
   }

-  async function internalQuery(
-    client: any,
-    query: SqlQuery,
-    operation: string | undefined = undefined
-  ) {
-    const request = client.request()
-    try {
-      if (Array.isArray(query.bindings)) {
-        let count = 0
-        for (let binding of query.bindings) {
-          request.input(`p${count++}`, binding)
-        }
-      }
-      // this is a hack to get the inserted ID back,
-      // no way to do this with Knex nicely
-      const sql =
-        operation === Operation.CREATE
-          ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
-          : query.sql
-      return await request.query(sql)
-    } catch (err) {
-      // @ts-ignore
-      throw new Error(err)
-    }
-  }
-
   class SqlServerIntegration extends Sql implements DatasourcePlus {
     private readonly config: MSSQLConfig
+    private index: number = 0
     static pool: any
     public tables: Record<string, Table> = {}
     public schemaErrors: Record<string, string> = {}
@@ -121,6 +109,62 @@ module MSSQLModule {
     TABLES_SQL =
       "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"

+    constructor(config: MSSQLConfig) {
+      super(SqlClients.MS_SQL)
+      this.config = config
+      const clientCfg = {
+        ...this.config,
+        options: {
+          encrypt: this.config.encrypt,
+          enableArithAbort: true,
+        },
+      }
+      delete clientCfg.encrypt
+      if (!this.pool) {
+        this.pool = new sqlServer.ConnectionPool(clientCfg)
+      }
+    }
+
+    getBindingIdentifier(): string {
+      return `(@p${this.index++})`
+    }
+
+    async connect() {
+      try {
+        this.client = await this.pool.connect()
+      } catch (err) {
+        // @ts-ignore
+        throw new Error(err)
+      }
+    }
+
+    async internalQuery(
+      query: SqlQuery,
+      operation: string | undefined = undefined
+    ) {
+      const client = this.client
+      const request = client.request()
+      this.index = 0
+      try {
+        if (Array.isArray(query.bindings)) {
+          let count = 0
+          for (let binding of query.bindings) {
+            request.input(`p${count++}`, binding)
+          }
+        }
+        // this is a hack to get the inserted ID back,
+        // no way to do this with Knex nicely
+        const sql =
+          operation === Operation.CREATE
+            ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
+            : query.sql
+        return await request.query(sql)
+      } catch (err) {
+        // @ts-ignore
+        throw new Error(err)
+      }
+    }
+
     getDefinitionSQL(tableName: string) {
       return `select *
       from INFORMATION_SCHEMA.COLUMNS
@@ -149,33 +193,8 @@ module MSSQLModule {
       WHERE TABLE_NAME='${tableName}'`
     }

-    constructor(config: MSSQLConfig) {
-      super(SqlClients.MS_SQL)
-      this.config = config
-      const clientCfg = {
-        ...this.config,
-        options: {
-          encrypt: this.config.encrypt,
-          enableArithAbort: true,
-        },
-      }
-      delete clientCfg.encrypt
-      if (!this.pool) {
-        this.pool = new sqlServer.ConnectionPool(clientCfg)
-      }
-    }
-
-    async connect() {
-      try {
-        this.client = await this.pool.connect()
-      } catch (err) {
-        // @ts-ignore
-        throw new Error(err)
-      }
-    }
-
     async runSQL(sql: string) {
-      return (await internalQuery(this.client, getSqlQuery(sql))).recordset
+      return (await this.internalQuery(getSqlQuery(sql))).recordset
     }

     /**
@@ -185,11 +204,14 @@ module MSSQLModule {
      */
     async buildSchema(datasourceId: string, entities: Record<string, Table>) {
       await this.connect()
-      let tableNames = await this.runSQL(this.TABLES_SQL)
-      if (tableNames == null || !Array.isArray(tableNames)) {
+      let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
+      if (tableInfo == null || !Array.isArray(tableInfo)) {
         throw "Unable to get list of tables in database"
       }
-      tableNames = tableNames
+
+      const schema = this.config.schema || DEFAULT_SCHEMA
+      const tableNames = tableInfo
+        .filter((record: any) => record.TABLE_SCHEMA === schema)
         .map((record: any) => record.TABLE_NAME)
         .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)

@@ -238,33 +260,36 @@ module MSSQLModule {

     async read(query: SqlQuery | string) {
       await this.connect()
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.recordset
     }

     async create(query: SqlQuery | string) {
       await this.connect()
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.recordset || [{ created: true }]
     }

     async update(query: SqlQuery | string) {
       await this.connect()
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.recordset || [{ updated: true }]
     }

     async delete(query: SqlQuery | string) {
       await this.connect()
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.recordset || [{ deleted: true }]
     }

     async query(json: QueryJson) {
+      const schema = this.config.schema
       await this.connect()
+      if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
+        json.endpoint.schema = schema
+      }
       const operation = this._operation(json)
-      const queryFn = (query: any, op: string) =>
-        internalQuery(this.client, query, op)
+      const queryFn = (query: any, op: string) => this.internalQuery(query, op)
       const processFn = (result: any) =>
         result.recordset ? result.recordset : [{ [operation]: true }]
       return this.queryWithReturning(json, queryFn, processFn)

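A quick sketch of the node-mssql parameter flow the integration relies on (request.input and @-named parameters are the library's documented API; the table and value are hypothetical):

  const request = pool.request()
  request.input("p0", "widget")  // binds @p0
  await request.query("SELECT * FROM products WHERE name = (@p0)")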
@@ -16,7 +16,7 @@ import {
 import { DatasourcePlus } from "./base/datasourcePlus"

 module MySQLModule {
-  const mysql = require("mysql2")
+  const mysql = require("mysql2/promise")
   const Sql = require("./base/sql")

   interface MySQLConfig {
@@ -29,7 +29,7 @@ module MySQLModule {
   }

   const SCHEMA: Integration = {
-    docs: "https://github.com/mysqljs/mysql",
+    docs: "https://github.com/sidorares/node-mysql2",
     plus: true,
     friendlyName: "MySQL",
     description:
@@ -80,36 +80,9 @@ module MySQLModule {
     },
   }

-  function internalQuery(
-    client: any,
-    query: SqlQuery,
-    connect: boolean = true
-  ): Promise<any[] | any> {
-    // Node MySQL is callback based, so we must wrap our call in a promise
-    return new Promise((resolve, reject) => {
-      if (connect) {
-        client.connect()
-      }
-      return client.query(
-        query.sql,
-        query.bindings || {},
-        (error: any, results: object[]) => {
-          if (error) {
-            reject(error)
-          } else {
-            resolve(results)
-          }
-          if (connect) {
-            client.end()
-          }
-        }
-      )
-    })
-  }
-
   class MySQLIntegration extends Sql implements DatasourcePlus {
     private config: MySQLConfig
-    private readonly client: any
+    private client: any
     public tables: Record<string, Table> = {}
     public schemaErrors: Record<string, string> = {}

@@ -119,93 +92,131 @@ module MySQLModule {
       if (config.ssl && Object.keys(config.ssl).length === 0) {
         delete config.ssl
       }
-      this.client = mysql.createConnection(config)
       this.config = config
     }

+    getBindingIdentifier(): string {
+      return "?"
+    }
+
+    async connect() {
+      this.client = await mysql.createConnection(this.config)
+    }
+
+    async disconnect() {
+      await this.client.end()
+    }
+
+    async internalQuery(
+      query: SqlQuery,
+      connect: boolean = true
+    ): Promise<any[] | any> {
+      try {
+        if (connect) {
+          await this.connect()
+        }
+        // Node MySQL is callback based, so we must wrap our call in a promise
+        const response = await this.client.query(
+          query.sql,
+          query.bindings || []
+        )
+        return response[0]
+      } finally {
+        if (connect) {
+          await this.disconnect()
+        }
+      }
+    }
+
     async buildSchema(datasourceId: string, entities: Record<string, Table>) {
       const tables: { [key: string]: Table } = {}
       const database = this.config.database
-      this.client.connect()
+      await this.connect()

-      // get the tables first
-      const tablesResp = await internalQuery(
-        this.client,
-        { sql: "SHOW TABLES;" },
-        false
-      )
-      const tableNames = tablesResp.map(
-        (obj: any) =>
-          obj[`Tables_in_${database}`] ||
-          obj[`Tables_in_${database.toLowerCase()}`]
-      )
-      for (let tableName of tableNames) {
-        const primaryKeys = []
-        const schema: TableSchema = {}
-        const descResp = await internalQuery(
-          this.client,
-          { sql: `DESCRIBE \`${tableName}\`;` },
-          false
-        )
-        for (let column of descResp) {
-          const columnName = column.Field
-          if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
-            primaryKeys.push(columnName)
-          }
-          const constraints = {
-            presence: column.Null !== "YES",
-          }
-          const isAuto: boolean =
-            typeof column.Extra === "string" &&
-            (column.Extra === "auto_increment" ||
-              column.Extra.toLowerCase().includes("generated"))
-          schema[columnName] = {
-            name: columnName,
-            autocolumn: isAuto,
-            type: convertSqlType(column.Type),
-            constraints,
-          }
-        }
-        if (!tables[tableName]) {
-          tables[tableName] = {
-            _id: buildExternalTableId(datasourceId, tableName),
-            primary: primaryKeys,
-            name: tableName,
-            schema,
-          }
-        }
-      }
-
-      this.client.end()
+      try {
+        // get the tables first
+        const tablesResp = await this.internalQuery(
+          { sql: "SHOW TABLES;" },
+          false
+        )
+        const tableNames = tablesResp.map(
+          (obj: any) =>
+            obj[`Tables_in_${database}`] ||
+            obj[`Tables_in_${database.toLowerCase()}`]
+        )
+        for (let tableName of tableNames) {
+          const primaryKeys = []
+          const schema: TableSchema = {}
+          const descResp = await this.internalQuery(
+            { sql: `DESCRIBE \`${tableName}\`;` },
+            false
+          )
+          for (let column of descResp) {
+            const columnName = column.Field
+            if (
+              column.Key === "PRI" &&
+              primaryKeys.indexOf(column.Key) === -1
+            ) {
+              primaryKeys.push(columnName)
+            }
+            const constraints = {
+              presence: column.Null !== "YES",
+            }
+            const isAuto: boolean =
+              typeof column.Extra === "string" &&
+              (column.Extra === "auto_increment" ||
+                column.Extra.toLowerCase().includes("generated"))
+            schema[columnName] = {
+              name: columnName,
+              autocolumn: isAuto,
+              type: convertSqlType(column.Type),
+              constraints,
+            }
+          }
+          if (!tables[tableName]) {
+            tables[tableName] = {
+              _id: buildExternalTableId(datasourceId, tableName),
+              primary: primaryKeys,
+              name: tableName,
+              schema,
+            }
+          }
+        }
+      } finally {
+        await this.disconnect()
+      }

       const final = finaliseExternalTables(tables, entities)
       this.tables = final.tables
       this.schemaErrors = final.errors
     }

     async create(query: SqlQuery | string) {
-      const results = await internalQuery(this.client, getSqlQuery(query))
+      const results = await this.internalQuery(getSqlQuery(query))
       return results.length ? results : [{ created: true }]
     }

     async read(query: SqlQuery | string) {
-      return internalQuery(this.client, getSqlQuery(query))
+      return this.internalQuery(getSqlQuery(query))
     }

     async update(query: SqlQuery | string) {
-      const results = await internalQuery(this.client, getSqlQuery(query))
+      const results = await this.internalQuery(getSqlQuery(query))
       return results.length ? results : [{ updated: true }]
     }

     async delete(query: SqlQuery | string) {
-      const results = await internalQuery(this.client, getSqlQuery(query))
+      const results = await this.internalQuery(getSqlQuery(query))
       return results.length ? results : [{ deleted: true }]
     }

     async query(json: QueryJson) {
-      this.client.connect()
-      const queryFn = (query: any) => internalQuery(this.client, query, false)
-      const output = await this.queryWithReturning(json, queryFn)
-      this.client.end()
-      return output
+      await this.connect()
+      try {
+        const queryFn = (query: any) => this.internalQuery(query, false)
+        return await this.queryWithReturning(json, queryFn)
+      } finally {
+        await this.disconnect()
+      }
     }
   }

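A minimal sketch of the promise API this refactor targets (mysql2's documented interface; connection values are hypothetical):

  const mysql = require("mysql2/promise")
  const conn = await mysql.createConnection({ host: "localhost", user: "root", database: "main" })
  const [rows, fields] = await conn.query("SELECT * FROM Persons WHERE Age > ?", [21])
  await conn.end()

The [rows, fields] tuple is why internalQuery returns response[0].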
@@ -1,24 +1,24 @@
 import {
-  Integration,
   DatasourceFieldTypes,
+  Integration,
+  Operation,
+  QueryJson,
   QueryTypes,
   SqlQuery,
-  QueryJson,
-  Operation,
 } from "../definitions/datasource"
 import {
-  finaliseExternalTables,
-  getSqlQuery,
   buildExternalTableId,
   convertSqlType,
+  finaliseExternalTables,
+  getSqlQuery,
   SqlClients,
 } from "./utils"
 import oracledb, {
-  ExecuteOptions,
-  Result,
+  BindParameters,
   Connection,
   ConnectionAttributes,
-  BindParameters,
+  ExecuteOptions,
+  Result,
 } from "oracledb"
 import Sql from "./base/sql"
 import { Table } from "../definitions/common"
@@ -137,6 +137,7 @@ module OracleModule {

   class OracleIntegration extends Sql implements DatasourcePlus {
     private readonly config: OracleConfig
+    private index: number = 1

     public tables: Record<string, Table> = {}
     public schemaErrors: Record<string, string> = {}
@@ -174,6 +175,10 @@ module OracleModule {
       this.config = config
     }

+    getBindingIdentifier(): string {
+      return `:${this.index++}`
+    }
+
     /**
      * Map the flat tabular columns and constraints data into a nested object
      */
@@ -233,20 +238,14 @@ module OracleModule {
       return oracleTables
     }

-    private isSupportedColumn(column: OracleColumn) {
-      if (UNSUPPORTED_TYPES.includes(column.type)) {
-        return false
-      }
-
-      return true
+    private static isSupportedColumn(column: OracleColumn) {
+      return !UNSUPPORTED_TYPES.includes(column.type)
     }

-    private isAutoColumn(column: OracleColumn) {
-      if (column.default && column.default.toLowerCase().includes("nextval")) {
-        return true
-      }
-
-      return false
+    private static isAutoColumn(column: OracleColumn) {
+      return !!(
+        column.default && column.default.toLowerCase().includes("nextval")
+      )
     }

     /**
@@ -254,7 +253,7 @@ module OracleModule {
      * This matches the default behaviour for generating DDL used in knex.
      */
     private isBooleanType(column: OracleColumn): boolean {
-      if (
+      return (
         column.type.toLowerCase() === "number" &&
         Object.values(column.constraints).filter(c => {
           if (
@@ -273,11 +272,7 @@ module OracleModule {
         }
         return false
       }).length > 0
-      ) {
-        return true
-      }
-
-      return false
+      )
     }

     private internalConvertType(column: OracleColumn): string {
@@ -317,7 +312,9 @@ module OracleModule {
       // iterate each column on the table
       Object.values(oracleTable.columns)
         // remove columns that we can't read / save
-        .filter(oracleColumn => this.isSupportedColumn(oracleColumn))
+        .filter(oracleColumn =>
+          OracleIntegration.isSupportedColumn(oracleColumn)
+        )
         // match the order of the columns in the db
         .sort((c1, c2) => c1.id - c2.id)
         .forEach(oracleColumn => {
@@ -325,7 +322,7 @@ module OracleModule {
         let fieldSchema = table.schema[columnName]
         if (!fieldSchema) {
           fieldSchema = {
-            autocolumn: this.isAutoColumn(oracleColumn),
+            autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
             name: columnName,
             type: this.internalConvertType(oracleColumn),
           }
@@ -351,18 +348,13 @@ module OracleModule {
     private async internalQuery<T>(query: SqlQuery): Promise<Result<T>> {
       let connection
       try {
+        this.index = 1
         connection = await this.getConnection()

         const options: ExecuteOptions = { autoCommit: true }
         const bindings: BindParameters = query.bindings || []

-        const result: Result<T> = await connection.execute<T>(
-          query.sql,
-          bindings,
-          options
-        )
-
-        return result
+        return await connection.execute<T>(query.sql, bindings, options)
       } finally {
         if (connection) {
           try {

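For orientation, the numbered bind style Oracle expects (a sketch; the table and values are hypothetical):

  // getBindingIdentifier() yields :1, :2, ... in order, e.g.
  //   SELECT * FROM employees WHERE dept = :1 AND age > :2
  // executed as connection.execute(sql, ["sales", 30], { autoCommit: true })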
@@ -103,30 +103,11 @@ module PostgresModule {
     },
   }

-  async function internalQuery(client: any, query: SqlQuery) {
-    // need to handle a specific issue with json data types in postgres,
-    // new lines inside the JSON data will break it
-    if (query && query.sql) {
-      const matches = query.sql.match(JSON_REGEX)
-      if (matches && matches.length > 0) {
-        for (let match of matches) {
-          const escaped = escapeDangerousCharacters(match)
-          query.sql = query.sql.replace(match, escaped)
-        }
-      }
-    }
-    try {
-      return await client.query(query.sql, query.bindings || [])
-    } catch (err) {
-      // @ts-ignore
-      throw new Error(err)
-    }
-  }
-
   class PostgresIntegration extends Sql implements DatasourcePlus {
     static pool: any
     private readonly client: any
     private readonly config: PostgresConfig
+    private index: number = 1
     public tables: Record<string, Table> = {}
     public schemaErrors: Record<string, string> = {}
@@ -163,6 +144,32 @@ module PostgresModule {
       this.setSchema()
     }

+    getBindingIdentifier(): string {
+      return `$${this.index++}`
+    }
+
+    async internalQuery(query: SqlQuery) {
+      const client = this.client
+      this.index = 1
+      // need to handle a specific issue with json data types in postgres,
+      // new lines inside the JSON data will break it
+      if (query && query.sql) {
+        const matches = query.sql.match(JSON_REGEX)
+        if (matches && matches.length > 0) {
+          for (let match of matches) {
+            const escaped = escapeDangerousCharacters(match)
+            query.sql = query.sql.replace(match, escaped)
+          }
+        }
+      }
+      try {
+        return await client.query(query.sql, query.bindings || [])
+      } catch (err) {
+        // @ts-ignore
+        throw new Error(err)
+      }
+    }
+
     setSchema() {
       if (!this.config.schema) {
         this.config.schema = "public"
@@ -241,22 +248,22 @@ module PostgresModule {
     }

     async create(query: SqlQuery | string) {
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.rows.length ? response.rows : [{ created: true }]
     }

     async read(query: SqlQuery | string) {
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.rows
     }

     async update(query: SqlQuery | string) {
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.rows.length ? response.rows : [{ updated: true }]
     }

     async delete(query: SqlQuery | string) {
-      const response = await internalQuery(this.client, getSqlQuery(query))
+      const response = await this.internalQuery(getSqlQuery(query))
       return response.rows.length ? response.rows : [{ deleted: true }]
     }

@@ -266,11 +273,11 @@ module PostgresModule {
       if (Array.isArray(input)) {
         const responses = []
         for (let query of input) {
-          responses.push(await internalQuery(this.client, query))
+          responses.push(await this.internalQuery(query))
         }
         return responses
       } else {
-        const response = await internalQuery(this.client, input)
+        const response = await this.internalQuery(input)
         return response.rows.length ? response.rows : [{ [operation]: true }]
       }
     }

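The same pattern with node-postgres for reference (a sketch; the table and value come from the fixtures above):

  // getBindingIdentifier() yields $1, $2, ... which pg substitutes server-side
  await client.query("SELECT * FROM Persons WHERE City = $1", ["Belfast"])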
@@ -19,7 +19,7 @@ describe("MySQL Integration", () => {
     await config.integration.create({
       sql
     })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
+    expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
   })

   it("calls the read method with the correct params", async () => {
@@ -27,7 +27,7 @@ describe("MySQL Integration", () => {
     await config.integration.read({
       sql
     })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
+    expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
   })

   it("calls the update method with the correct params", async () => {
@@ -35,7 +35,7 @@ describe("MySQL Integration", () => {
     await config.integration.update({
       sql
     })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
+    expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
   })

   it("calls the delete method with the correct params", async () => {
@@ -43,7 +43,7 @@ describe("MySQL Integration", () => {
     await config.integration.delete({
       sql
     })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function))
+    expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
   })

   describe("no rows returned", () => {

@ -6,41 +6,69 @@ import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g

const SQL_TYPE_MAP = {
  text: FieldTypes.LONGFORM,
  varchar: FieldTypes.STRING,
const SQL_NUMBER_TYPE_MAP = {
  integer: FieldTypes.NUMBER,
  int: FieldTypes.NUMBER,
  bigint: FieldTypes.NUMBER,
  decimal: FieldTypes.NUMBER,
  smallint: FieldTypes.NUMBER,
  real: FieldTypes.NUMBER,
  "double precision": FieldTypes.NUMBER,
  timestamp: FieldTypes.DATETIME,
  time: FieldTypes.DATETIME,
  boolean: FieldTypes.BOOLEAN,
  json: FieldTypes.JSON,
  date: FieldTypes.DATETIME,
  blob: FieldTypes.LONGFORM,
  enum: FieldTypes.STRING,
  float: FieldTypes.NUMBER,
  int: FieldTypes.NUMBER,
  numeric: FieldTypes.NUMBER,
  mediumint: FieldTypes.NUMBER,
  dec: FieldTypes.NUMBER,
  double: FieldTypes.NUMBER,
  fixed: FieldTypes.NUMBER,
  datetime: FieldTypes.DATETIME,
  tinyint: FieldTypes.BOOLEAN,
  long: FieldTypes.LONGFORM,
  "double precision": FieldTypes.NUMBER,
  number: FieldTypes.NUMBER,
  binary_float: FieldTypes.NUMBER,
  binary_double: FieldTypes.NUMBER,
  money: FieldTypes.NUMBER,
  smallmoney: FieldTypes.NUMBER,
}

const SQL_DATE_TYPE_MAP = {
  timestamp: FieldTypes.DATETIME,
  time: FieldTypes.DATETIME,
  datetime: FieldTypes.DATETIME,
  smalldatetime: FieldTypes.DATETIME,
  date: FieldTypes.DATETIME,
}

const SQL_STRING_TYPE_MAP = {
  varchar: FieldTypes.STRING,
  char: FieldTypes.STRING,
  nchar: FieldTypes.STRING,
  nvarchar: FieldTypes.STRING,
  ntext: FieldTypes.STRING,
  enum: FieldTypes.STRING,
  blob: FieldTypes.LONGFORM,
  long: FieldTypes.LONGFORM,
  text: FieldTypes.LONGFORM,
}

const SQL_BOOLEAN_TYPE_MAP = {
  boolean: FieldTypes.BOOLEAN,
  bit: FieldTypes.BOOLEAN,
  tinyint: FieldTypes.BOOLEAN,
}

const SQL_MISC_TYPE_MAP = {
  json: FieldTypes.JSON,
}

const SQL_TYPE_MAP = {
  ...SQL_NUMBER_TYPE_MAP,
  ...SQL_DATE_TYPE_MAP,
  ...SQL_STRING_TYPE_MAP,
  ...SQL_BOOLEAN_TYPE_MAP,
  ...SQL_MISC_TYPE_MAP,
}

export enum SqlClients {
  MS_SQL = "mssql",
  POSTGRES = "pg",
  MY_SQL = "mysql",
  MY_SQL = "mysql2",
  ORACLE = "oracledb",
}
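For context, a merged map like SQL_TYPE_MAP is normally read through a small lookup that lowercases the external column type and falls back to a default for anything unmapped. A hypothetical sketch (convertSqlType and the STRING fallback are assumptions for illustration, not necessarily the repo's exact helper):

    // illustrative: resolve an external SQL column type to an internal field type
    function convertSqlType(externalType) {
      for (let [sqlType, fieldType] of Object.entries(SQL_TYPE_MAP)) {
        if (externalType.toLowerCase().startsWith(sqlType)) {
          return fieldType
        }
      }
      // assumption: unknown types are safest treated as plain strings
      return FieldTypes.STRING
    }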

@ -2,8 +2,12 @@ const threadUtils = require("./utils")
threadUtils.threadSetup()
const ScriptRunner = require("../utilities/scriptRunner")
const { integrations } = require("../integrations")
const { processStringSync } = require("@budibase/string-templates")
const {
  processStringSync,
  findHBSBlocks,
} = require("@budibase/string-templates")
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { isSQL } = require("../integrations/utils")

class QueryRunner {
  constructor(input, flags = { noRecursiveQuery: false }) {

@ -23,23 +27,47 @@ class QueryRunner {
    this.hasRerun = false
  }

  interpolateSQL(fields, parameters, integration) {
    let sql = fields.sql
    if (!sql) {
      return fields
    }
    const bindings = findHBSBlocks(sql)
    let variables = []
    for (let binding of bindings) {
      let variable = integration.getBindingIdentifier()
      variables.push(binding)
      sql = sql.replace(binding, variable)
    }
    // replicate the knex structure
    fields.sql = sql
    fields.bindings = this.enrichQueryFields(variables, parameters)
    return fields
  }

  async execute() {
    let { datasource, fields, queryVerb, transformer } = this
    // pre-query, make sure datasource variables are added to parameters
    const parameters = await this.addDatasourceVariables()
    let query = this.enrichQueryFields(fields, parameters)

    // Add pagination values for REST queries
    if (this.pagination) {
      query.paginationValues = this.pagination
    }

    const Integration = integrations[datasource.source]
    if (!Integration) {
      throw "Integration type does not exist."
    }
    const integration = new Integration(datasource.config)

    // pre-query, make sure datasource variables are added to parameters
    const parameters = await this.addDatasourceVariables()
    let query
    // handle SQL injections by interpolating the variables
    if (isSQL(datasource)) {
      query = this.interpolateSQL(fields, parameters, integration)
    } else {
      query = this.enrichQueryFields(fields, parameters)
    }

    // Add pagination values for REST queries
    if (this.pagination) {
      query.paginationValues = this.pagination
    }

    let output = threadUtils.formatResponse(await integration[queryVerb](query))
    let rows = output,
      info = undefined,

@ -179,7 +207,7 @@ class QueryRunner {
  }

  enrichQueryFields(fields, parameters = {}) {
    const enrichedQuery = {}
    const enrichedQuery = Array.isArray(fields) ? [] : {}

    // enrich the fields with dynamic parameters
    for (let key of Object.keys(fields)) {
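interpolateSQL swaps each {{ binding }} it finds for a placeholder supplied by the integration, so user-provided values reach the driver as bindings instead of being spliced into the SQL string. The placeholder syntax differs per client; a sketch of what getBindingIdentifier implementations could look like (these classes are illustrative, not the repo's code):

    // illustrative: per-client binding placeholders consumed by interpolateSQL
    class PostgresBindings {
      constructor() {
        this.index = 0
      }
      getBindingIdentifier() {
        return `$${++this.index}` // pg numbers its placeholders: $1, $2, ...
      }
    }

    class MySQLBindings {
      getBindingIdentifier() {
        return "?" // mysql2 uses positional ? placeholders
      }
    }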

File diff suppressed because it is too large
@ -1,6 +1,6 @@
{
  "name": "@budibase/string-templates",
  "version": "1.0.79-alpha.7",
  "version": "1.0.80-alpha.2",
  "description": "Handlebars wrapper for Budibase templating.",
  "main": "src/index.cjs",
  "module": "dist/bundle.mjs",

@ -18,6 +18,7 @@ module.exports.processObject = templates.processObject
module.exports.doesContainStrings = templates.doesContainStrings
module.exports.doesContainString = templates.doesContainString
module.exports.disableEscaping = templates.disableEscaping
module.exports.findHBSBlocks = templates.findHBSBlocks

/**
 * Use vm2 to run JS scripts in a node env

@ -3,7 +3,11 @@ const { registerAll, registerMinimum } = require("./helpers/index")
const processors = require("./processors")
const { atob, btoa } = require("./utilities")
const manifest = require("../manifest.json")
const { FIND_HBS_REGEX, findDoubleHbsInstances } = require("./utilities")
const {
  FIND_HBS_REGEX,
  FIND_ANY_HBS_REGEX,
  findDoubleHbsInstances,
} = require("./utilities")

const hbsInstance = handlebars.create()
registerAll(hbsInstance)

@ -310,6 +314,21 @@ module.exports.doesContainStrings = (template, strings) => {
  return false
}

/**
 * Given a string, this will return any {{ binding }} or {{{ binding }}} type
 * statements.
 * @param {string} string The string to search within.
 * @return {string[]} The found HBS blocks.
 */
module.exports.findHBSBlocks = string => {
  let regexp = new RegExp(FIND_ANY_HBS_REGEX)
  let matches = string.match(regexp)
  if (matches == null) {
    return []
  }
  return matches
}

/**
 * This function looks in the supplied template for handlebars instances, if they contain
 * JS the JS will be decoded and then the supplied string will be looked for. For example

@ -18,6 +18,7 @@ export const processObject = templates.processObject
export const doesContainStrings = templates.doesContainStrings
export const doesContainString = templates.doesContainString
export const disableEscaping = templates.disableEscaping
export const findHBSBlocks = templates.findHBSBlocks

/**
 * Use polyfilled vm to run JS scripts in a browser Env

@ -1,6 +1,7 @@
const ALPHA_NUMERIC_REGEX = /^[A-Za-z0-9]+$/g

module.exports.FIND_HBS_REGEX = /{{([^{].*?)}}/g
module.exports.FIND_ANY_HBS_REGEX = /{?{{([^{].*?)}}}?/g
module.exports.FIND_TRIPLE_HBS_REGEX = /{{{([^{].*?)}}}/g

// originally this could be done with a single regex using look behinds
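FIND_ANY_HBS_REGEX sits between the existing double- and triple-brace patterns: the optional outer braces let a single pass capture both block styles, which is what the new findHBSBlocks relies on. A quick illustration of the behaviour implied by the pattern:

    // illustrative: one pass captures both double and triple brace blocks
    const { FIND_ANY_HBS_REGEX } = require("./utilities")

    "{{ binding }} and {{{ raw }}}".match(new RegExp(FIND_ANY_HBS_REGEX))
    // → ["{{ binding }}", "{{{ raw }}}"]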

@ -7,6 +7,7 @@ const {
  encodeJSBinding,
  doesContainString,
  disableEscaping,
  findHBSBlocks,
} = require("../src/index.cjs")

describe("Test that the string processing works correctly", () => {

@ -200,3 +201,13 @@ describe("check that disabling escaping function works", () => {
  })
})

describe("check find hbs blocks function", () => {
  it("should find none", () => {
    expect(findHBSBlocks("hello there")).toEqual([])
  })

  it("should find two", () => {
    expect(findHBSBlocks("{{ hello }} there {{{ name }}}")).toEqual(["{{ hello }}", "{{{ name }}}"])
  })
})

@ -1,7 +1,7 @@
{
  "name": "@budibase/worker",
  "email": "hi@budibase.com",
  "version": "1.0.79-alpha.7",
  "version": "1.0.80-alpha.2",
  "description": "Budibase background service",
  "main": "src/index.ts",
  "repository": {

@ -34,8 +34,8 @@
  "author": "Budibase",
  "license": "GPL-3.0",
  "dependencies": {
    "@budibase/backend-core": "^1.0.79-alpha.7",
    "@budibase/string-templates": "^1.0.79-alpha.7",
    "@budibase/backend-core": "^1.0.80-alpha.2",
    "@budibase/string-templates": "^1.0.80-alpha.2",
    "@koa/router": "^8.0.0",
    "@sentry/node": "^6.0.0",
    "@techpass/passport-openidconnect": "^0.3.0",

@ -1,30 +1,63 @@
#!/usr/bin/env node
const path = require("path")
const fs = require("fs")
const { processStringSync } = require("@budibase/string-templates")

function processStringSync(string, env) {
  let output = ""

  // process if statements
  let removal = false
  for (let line of string.split("\n")) {
    if (new RegExp(`{{\/if}}`, "g").test(line)) {
      removal = false
      continue
    }

    if (!removal) {
      const match = line.match(new RegExp(`{{#if (.*)}}`))
      if (match) {
        const key = match[1]
        // check the if statement is true
        if (!env[key]) {
          removal = true
        }
        continue
      }
      output += line + "\n"
    }
  }

  for (let key in env) {
    // replace variables
    const rgx = new RegExp(`{{\\s*${key}\\s*}}`, "g")
    output = output.replace(rgx, env[key])
  }

  return output
}

const Configs = {
  prod: {
    k8s: true,
    apps: "app-service.budibase.svc.cluster.local",
    worker: "worker-service.budibase.svc.cluster.local",
    minio: "minio-service.budibase.svc.cluster.local",
    couchdb: "budibase-prod-svc-couchdb",
    resolver: "kube-dns.kube-system.svc.cluster.local"
  },
  preprod: {
    k8s: true,
    apps: "app-service.budibase.svc.cluster.local",
    worker: "worker-service.budibase.svc.cluster.local",
    minio: "minio-service.budibase.svc.cluster.local",
    couchdb: "budibase-preprod-svc-couchdb",
    resolver: "kube-dns.kube-system.svc.cluster.local"
  },
  compose: {
    compose: true,
    apps: "app-service",
    worker: "worker-service",
    minio: "minio-service",
    couchdb: "couchdb-service",
    watchtower: "watchtower-service",
    resolver: "127.0.0.11"
  },
}
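The script drops its @budibase/string-templates import in favour of a self-contained templater that only understands flat {{#if key}} ... {{/if}} blocks and {{ key }} substitution, which is all the hosting templates need. A usage sketch against the configs above (the template lines are made up for illustration):

    // illustrative: render a tiny template with the prod config
    const template = [
      "{{#if k8s}}",
      "# running inside kubernetes",
      "{{/if}}",
      "couchdb: {{ couchdb }}",
    ].join("\n")

    processStringSync(template, Configs.prod)
    // keeps the comment line (k8s is truthy) and substitutes
    // {{ couchdb }} with "budibase-prod-svc-couchdb"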

@ -36,7 +69,7 @@ const Commands = {

async function init(managementCommand) {
  const config = Configs[managementCommand]
  const hostingPath = path.join(process.cwd(), "..", "..", "hosting")
  const hostingPath = path.join(process.cwd(), "hosting")
  const nginxHbsPath = path.join(hostingPath, "nginx.prod.conf.hbs")
  const nginxOutputPath = path.join(
    hostingPath,