Merge remote-tracking branch 'origin/develop' into feature/export-from-client

Peter Clement 2022-03-07 12:04:56 +00:00
commit e3670d6737
48 changed files with 694 additions and 1422 deletions

View File

@@ -38,6 +38,17 @@ jobs:
fi
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:prod
docker tag budibase/proxy:$release_tag budibase/proxy:$PROD_TAG
docker push budibase/proxy:$PROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PROD_TAG: k8s
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:

View File

@@ -23,12 +23,24 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag budibase/proxy:$release_tag budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \

View File

@@ -19,13 +19,7 @@ http {
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
{{#if compose}}
resolver 127.0.0.11 ipv6=off;
{{/if}}
{{#if k8s}}
resolver kube-dns.kube-system.svc.cluster.local valid=10s;
{{/if}}
resolver {{ resolver }} valid=10s ipv6=off;
# buffering
client_body_buffer_size 1K;
@@ -55,7 +49,7 @@ http {
add_header X-Frame-Options SAMEORIGIN always;
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self'; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self' https:; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
# upstreams
set $apps {{ apps }};

View File

@@ -1,145 +0,0 @@
user nginx;
error_log /var/log/nginx/error.log debug;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 33282;
events {
worker_connections 1024;
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
include /etc/nginx/mime.types;
default_type application/octet-stream;
charset utf-8;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
# buffering
client_body_buffer_size 1K;
client_header_buffer_size 1k;
client_max_body_size 1k;
ignore_invalid_headers off;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
map $http_upgrade $connection_upgrade {
default "upgrade";
}
server {
listen 10000 default_server;
listen [::]:10000 default_server;
server_name _;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
port_in_redirect off;
# Security Headers
add_header X-Frame-Options SAMEORIGIN always;
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me; frame-src 'self'; img-src http: https: data:; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
location /app {
proxy_pass http://app-service:4002;
rewrite ^/app/(.*)$ /$1 break;
}
location = / {
port_in_redirect off;
proxy_pass http://app-service:4002;
}
location = /v1/update {
proxy_pass http://watchtower-service:8080;
}
location /builder/ {
port_in_redirect off;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://app-service:4002;
}
location ~ ^/(builder|app_) {
port_in_redirect off;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://app-service:4002;
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://worker-service:4003;
}
location /worker/ {
proxy_pass http://worker-service:4003;
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://app-service:4002;
}
location /db/ {
proxy_pass http://couchdb-service:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://minio-service:9000;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;
# gzip
gzip on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
}
}

View File

@@ -1,5 +1,5 @@
{
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@@ -47,9 +47,9 @@
"build:specs": "lerna run specs",
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:proxy:compose": "lerna run generate:proxy:compose && npm run build:docker:proxy",
"build:docker:proxy:preprod": "lerna run generate:proxy:preprod && npm run build:docker:proxy",
"build:docker:proxy:prod": "lerna run generate:proxy:prod && npm run build:docker:proxy",
"build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
"build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
"build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
"build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",

View File

@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.79-alpha.7",
"@budibase/string-templates": "^1.0.80-alpha.2",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"license": "GPL-3.0",
"private": true,
"scripts": {
@@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.79-alpha.7",
"@budibase/client": "^1.0.79-alpha.7",
"@budibase/frontend-core": "^1.0.79-alpha.7",
"@budibase/string-templates": "^1.0.79-alpha.7",
"@budibase/bbui": "^1.0.80-alpha.2",
"@budibase/client": "^1.0.80-alpha.2",
"@budibase/frontend-core": "^1.0.80-alpha.2",
"@budibase/string-templates": "^1.0.80-alpha.2",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@@ -7,6 +7,7 @@
import RoleSelect from "./PropertyControls/RoleSelect.svelte"
import ResetFieldsButton from "./PropertyControls/ResetFieldsButton.svelte"
import { getComponentForSettingType } from "./PropertyControls/componentSettings"
import { Utils } from "@budibase/frontend-core"
export let componentDefinition
export let componentInstance
@@ -40,13 +41,13 @@
]
}
const updateProp = async (key, value) => {
const updateProp = Utils.sequential(async (key, value) => {
try {
await store.actions.components.updateProp(key, value)
} catch (error) {
notifications.error("Error updating component prop")
}
}
})
const canRenderControl = setting => {
const control = getComponentForSettingType(setting?.type)

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.79-alpha.7",
"@budibase/frontend-core": "^1.0.79-alpha.7",
"@budibase/string-templates": "^1.0.79-alpha.7",
"@budibase/bbui": "^1.0.80-alpha.2",
"@budibase/frontend-core": "^1.0.80-alpha.2",
"@budibase/string-templates": "^1.0.80-alpha.2",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View File

@@ -17,7 +17,7 @@ export const getOptions = (
dataProvider?.rows?.forEach(row => {
const value = row?.[valueColumn]
if (value) {
if (value != null) {
const label = row[labelColumn] || value
optionsSet[value] = { value, label }
}
@@ -30,7 +30,7 @@ export const getOptions = (
let optionsSet = {}
dataProvider?.rows?.forEach(row => {
const value = row?.[valueColumn]
if (value) {
if (value != null) {
const label = row[labelColumn] || value
optionsSet[value] = { value, label }
}

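A note on the `if (value)` to `if (value != null)` change above: a plain truthiness check silently drops rows whose option value is falsy but legitimate, such as 0 or false, so those options never reach the picker. A minimal sketch of the difference (the rows are hypothetical, not from the commit):

const rows = [{ qty: 0 }, { qty: 1 }, { qty: null }]
// truthiness check: drops the legitimate 0 along with null
const truthy = rows.filter(row => row.qty) // length 1
// loose != null check: keeps 0, drops only null/undefined
const nonNull = rows.filter(row => row.qty != null) // length 2
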
View File

@@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "^1.0.79-alpha.7",
"@budibase/bbui": "^1.0.80-alpha.2",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View File

@@ -30,9 +30,11 @@ export const buildOtherEndpoints = API => ({
* Gets the version of the installed Budibase environment.
*/
getBudibaseVersion: async () => {
return await API.get({
url: "/api/dev/version",
}).version
return (
await API.get({
url: "/api/dev/version",
})
).version
},
/**

View File

@@ -1,7 +1,5 @@
export { createAPIClient } from "./api"
export { createLocalStorageStore } from "./stores/localStorage"
export { fetchData } from "./fetch/fetchData"
export * as Constants from "./constants"
export * as LuceneUtils from "./utils/lucene"
export * as JSONUtils from "./utils/json"
export * as CookieUtils from "./utils/cookies"
export * from "./stores"
export * from "./utils"

View File

@@ -0,0 +1 @@
export { createLocalStorageStore } from "./localStorage"

View File

@@ -0,0 +1,4 @@
export * as LuceneUtils from "./lucene"
export * as JSONUtils from "./json"
export * as CookieUtils from "./cookies"
export * as Utils from "./utils"

View File

@@ -0,0 +1,17 @@
/**
* Utility to wrap an async function and ensure all invocations happen
* sequentially.
* @param fn the async function to run
* @return {Function} a sequential version of the function
*/
export const sequential = fn => {
let promise
return async (...params) => {
if (promise) {
await promise
}
promise = fn(...params)
await promise
promise = null
}
}

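A usage sketch for the `sequential` helper above (the `save` function is illustrative, not from the commit): wrapping an async function so a second rapid invocation waits for the in-flight one instead of interleaving with it.

// hypothetical async function with a shared side effect
const save = async (key: string, value: string) => {
  await new Promise(resolve => setTimeout(resolve, 100)) // simulate I/O
  console.log(`saved ${key}=${value}`)
}
const sequentialSave = sequential(save)
sequentialSave("colour", "red")
sequentialSave("colour", "green") // awaits the first call before running

This is the same pattern the builder's `updateProp` handler adopts earlier in this commit, so fast consecutive edits to a component setting cannot race each other.
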
View File

@@ -0,0 +1,17 @@
module MySQLMock {
const mysql: any = {}
const client = {
connect: jest.fn(),
end: jest.fn(),
query: jest.fn(async () => {
return [[]]
}),
}
mysql.createConnection = jest.fn(async () => {
return client
})
module.exports = mysql
}

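The module above is a Jest manual mock for the MySQL client: `createConnection` resolves to a stub whose promise-based `query` returns `[[]]`, matching the `[rows, fields]` tuple shape of `mysql2/promise`. Assuming it is wired up so Jest substitutes it for the real driver (for example via a `__mocks__` directory; the exact wiring depends on the repo's Jest config), a test can drive the stub directly:

// a hedged sketch; mock resolution path is an assumption
const mysql = require("mysql2/promise")

test("query goes through the mocked client", async () => {
  const client = await mysql.createConnection({ host: "localhost" })
  const [rows] = await client.query("SELECT 1;")
  expect(rows).toEqual([]) // first element of the [[]] tuple
  expect(client.query).toHaveBeenCalledWith("SELECT 1;")
})

The MySQL integration tests near the end of this commit rely on exactly this shape when they assert `toHaveBeenCalledWith(sql, [])`.
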
View File

@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.79-alpha.7",
"version": "1.0.80-alpha.2",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@@ -21,9 +21,6 @@
"dev:stack:down": "node scripts/dev/manage.js down",
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
"dev:builder": "yarn run dev:stack:up && nodemon",
"generate:proxy:compose": "node scripts/proxy/generateProxyConfig compose",
"generate:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod",
"generate:proxy:prod": "node scripts/proxy/generateProxyConfig prod",
"format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write", "format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
"specs": "node specs/generate.js && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts", "specs": "node specs/generate.js && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
"lint": "eslint --fix src/", "lint": "eslint --fix src/",
@ -74,9 +71,9 @@
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"@apidevtools/swagger-parser": "^10.0.3", "@apidevtools/swagger-parser": "^10.0.3",
"@budibase/backend-core": "^1.0.79-alpha.7", "@budibase/backend-core": "^1.0.80-alpha.2",
"@budibase/client": "^1.0.79-alpha.7", "@budibase/client": "^1.0.80-alpha.2",
"@budibase/string-templates": "^1.0.79-alpha.7", "@budibase/string-templates": "^1.0.80-alpha.2",
"@bull-board/api": "^3.7.0", "@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0", "@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0", "@elastic/elasticsearch": "7.10.0",
@ -111,6 +108,7 @@
"koa-send": "5.0.0", "koa-send": "5.0.0",
"koa-session": "5.12.0", "koa-session": "5.12.0",
"koa-static": "5.0.0", "koa-static": "5.0.0",
"koa2-ratelimit": "^1.1.0",
"lodash": "4.17.21", "lodash": "4.17.21",
"memorystream": "^0.3.1", "memorystream": "^0.3.1",
"mongodb": "3.6.3", "mongodb": "3.6.3",
@ -126,6 +124,7 @@
"pouchdb-all-dbs": "1.0.2", "pouchdb-all-dbs": "1.0.2",
"pouchdb-find": "^7.2.2", "pouchdb-find": "^7.2.2",
"pouchdb-replication-stream": "1.2.9", "pouchdb-replication-stream": "1.2.9",
"redis": "4",
"server-destroy": "1.0.1", "server-destroy": "1.0.1",
"svelte": "^3.38.2", "svelte": "^3.38.2",
"swagger-parser": "^10.0.3", "swagger-parser": "^10.0.3",
@ -151,6 +150,7 @@
"@types/koa-router": "^7.4.2", "@types/koa-router": "^7.4.2",
"@types/node": "^15.12.4", "@types/node": "^15.12.4",
"@types/oracledb": "^5.2.1", "@types/oracledb": "^5.2.1",
"@types/redis": "^4.0.11",
"@typescript-eslint/parser": "4.28.0", "@typescript-eslint/parser": "4.28.0",
"apidoc": "^0.50.2", "apidoc": "^0.50.2",
"babel-jest": "^27.0.2", "babel-jest": "^27.0.2",

View File

@@ -1,5 +1,10 @@
USE master;
IF NOT EXISTS(SELECT 1 FROM sys.schemas WHERE name = 'Chains')
BEGIN
EXEC sys.sp_executesql N'CREATE SCHEMA Chains;'
END
IF OBJECT_ID ('dbo.products', 'U') IS NOT NULL
DROP TABLE products;
GO
@@ -61,3 +66,15 @@ VALUES ('Bob', '30'),
('Bobert', '99'),
('Jan', '22'),
('Megan', '11');
IF OBJECT_ID ('Chains.sizes', 'U') IS NOT NULL
DROP TABLE Chains.sizes;
GO
CREATE TABLE Chains.sizes
(
sizeid int IDENTITY(1, 1),
name varchar(30),
CONSTRAINT pk_size PRIMARY KEY NONCLUSTERED (sizeid)
);

View File

@@ -8,9 +8,9 @@
To install oracle express edition simply run `docker-compose up`
- A single instance pluggable database (PDB) will be created named `xepdb`
- A single instance pluggable database (PDB) will be created named `xepdb1`
- The default password is configured in the compose file as `oracle`
- The `system`, `sys` and `pdbadmin` users all share this password
- The `system` and `pdbadmin` users share this password
## Instant Client

View File

@@ -1,5 +1,6 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
CREATE SCHEMA test;
CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer');
CREATE TABLE Persons (
PersonID SERIAL PRIMARY KEY,
@@ -37,6 +38,10 @@ CREATE TABLE Products_Tasks (
REFERENCES Tasks(TaskID),
PRIMARY KEY (ProductID, TaskID)
);
CREATE TABLE test.table1 (
id SERIAL PRIMARY KEY,
Name varchar(255)
);
INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa');
INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer');
INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
@@ -48,3 +53,4 @@ INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1);
INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1);
INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1);
INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2);
INSERT INTO test.table1 (Name) VALUES ('Test');

View File

@@ -58,7 +58,7 @@ module External {
) {
const primary = table.primary
// if passed in array need to copy for shifting etc
let idCopy = cloneDeep(id)
let idCopy: undefined | string | any[] = cloneDeep(id)
if (filters) {
// need to map over the filters and make sure the _id field isn't present
for (let filter of Object.values(filters)) {

View File

@@ -8,18 +8,54 @@ import authorized from "../../../middleware/authorized"
import { paramResource, paramSubResource } from "../../../middleware/resourceId"
import { CtxFn } from "./utils/Endpoint"
import mapperMiddleware from "./middleware/mapper"
import env from "../../../environment"
// below imports don't have declaration files
const Router = require("@koa/router")
const { RateLimit, Stores } = require("koa2-ratelimit")
const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/backend-core/permissions")
const { getRedisOptions } = require("@budibase/backend-core/redis").utils
const PREFIX = "/api/public/v1"
// allow a lot more requests when in test
const DEFAULT_API_REQ_LIMIT_PER_SEC = env.isTest() ? 100 : 10
function getApiLimitPerSecond(): number {
if (!env.API_REQ_LIMIT_PER_SEC) {
return DEFAULT_API_REQ_LIMIT_PER_SEC
}
return parseInt(env.API_REQ_LIMIT_PER_SEC)
}
if (!env.isTest()) {
const REDIS_OPTS = getRedisOptions()
RateLimit.defaultOptions({
store: new Stores.Redis({
// @ts-ignore
socket: {
host: REDIS_OPTS.host,
port: REDIS_OPTS.port,
},
password: REDIS_OPTS.opts.password,
database: 1,
}),
})
}
// rate limiting, allows for 2 requests per second
const limiter = RateLimit.middleware({
interval: { sec: 1 },
// per ip, per interval
max: getApiLimitPerSecond(),
})
const publicRouter = new Router({
prefix: PREFIX,
})
publicRouter.use(limiter)
function addMiddleware(
endpoints: any,
middleware: CtxFn,

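For context on the block above: `RateLimit.middleware` applies the limit per IP per one-second interval, backed by Redis outside of tests, and the `API_REQ_LIMIT_PER_SEC` environment variable overrides the default of 10. Requests over the budget are rejected by koa2-ratelimit with an HTTP 429 status, so a public API consumer should be prepared to back off. A hedged client-side sketch (the endpoint and header name are illustrative assumptions, not part of the commit):

async function callPublicApi(url: string, apiKey: string) {
  let res = await fetch(url, { headers: { "x-budibase-api-key": apiKey } })
  if (res.status === 429) {
    // over the per-second budget: wait out the interval and retry once
    await new Promise(resolve => setTimeout(resolve, 1000))
    res = await fetch(url, { headers: { "x-budibase-api-key": apiKey } })
  }
  return res.json()
}
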
View File

@@ -153,6 +153,7 @@ export interface QueryJson {
datasourceId: string
entityId: string
operation: Operation
schema?: string
}
resource: {
fields: string[]

View File

@@ -45,6 +45,7 @@ module.exports = {
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
MULTI_TENANCY: process.env.MULTI_TENANCY,
HTTP_MIGRATIONS: process.env.HTTP_MIGRATIONS,
API_REQ_LIMIT_PER_SEC: process.env.API_REQ_LIMIT_PER_SEC,
// environment
NODE_ENV: process.env.NODE_ENV,
JEST_WORKER_ID: process.env.JEST_WORKER_ID,

View File

@@ -5,5 +5,8 @@ export interface DatasourcePlus extends IntegrationBase {
tables: Record<string, Table>
schemaErrors: Record<string, string>
// if the datasource supports the use of bindings directly (to protect against SQL injection)
// this returns the format of the identifier
getBindingIdentifier(): string
buildSchema(datasourceId: string, entities: Record<string, Table>): any
}

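The implementations added later in this commit make this contract concrete: each SQL datasource reports its dialect's bind-parameter placeholder so the query layer can send user input as bindings instead of splicing it into the SQL string. A summary sketch of the formats introduced here (Google Sheets returns an empty string, since it has no SQL layer):

// placeholder formats returned by getBindingIdentifier() in this commit
const bindingFormats = {
  mysql: () => "?", // positional, stateless
  postgres: (i: number) => `$${i}`, // index starts at 1 per query
  oracle: (i: number) => `:${i}`, // index starts at 1 per query
  mssql: (i: number) => `(@p${i})`, // index starts at 0 per query
}

The stateful dialects keep an internal counter and reset it at the start of each query, which is why the Postgres, Oracle and MSSQL integrations each gain a private `index` field in this commit.
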
View File

@@ -249,6 +249,9 @@ class InternalBuilder {
create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, body } = json
let query: KnexQuery = knex(endpoint.entityId)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
const parsedBody = parseBody(body)
// make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) {
@@ -267,6 +270,9 @@ class InternalBuilder {
bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
const { endpoint, body } = json
let query: KnexQuery = knex(endpoint.entityId)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
if (!Array.isArray(body)) {
return query
}
@@ -275,7 +281,7 @@ class InternalBuilder {
}
read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
let { endpoint, resource, filters, paginate, relationships } = json
const tableName = endpoint.entityId
// select all if not specified
if (!resource) {
@@ -302,6 +308,9 @@ class InternalBuilder {
}
// start building the query
let query: KnexQuery = knex(tableName).limit(foundLimit)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
if (foundOffset) {
query = query.offset(foundOffset)
}
@@ -331,6 +340,9 @@ class InternalBuilder {
update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, body, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
const parsedBody = parseBody(body)
query = this.addFilters(query, filters, { tableName: endpoint.entityId })
// mysql can't use returning
@@ -344,6 +356,9 @@ class InternalBuilder {
delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
const { endpoint, filters } = json
let query: KnexQuery = knex(endpoint.entityId)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
query = this.addFilters(query, filters, { tableName: endpoint.entityId })
// mysql can't use returning
if (opts.disableReturning) {

View File
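Each of the CRUD builders above now threads an optional `endpoint.schema` through Knex's `withSchema`, which prefixes the table reference in the generated SQL. A minimal sketch of the effect (names are illustrative; `Chains.sizes` echoes the table added to the MSSQL init script earlier in this commit):

import knex from "knex"

const client = knex({ client: "mssql" })
// without a schema: SELECT * FROM [sizes]
client("sizes").select("*").toString()
// with one: SELECT * FROM [Chains].[sizes]
client("sizes").withSchema("Chains").select("*").toString()
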

@@ -101,28 +101,28 @@ function generateSchema(
}
function buildCreateTable(
knex: Knex,
knex: SchemaBuilder,
table: Table,
tables: Record<string, Table>
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
return knex.createTable(table.name, schema => {
generateSchema(schema, table, tables)
})
}
function buildUpdateTable(
knex: Knex,
knex: SchemaBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: Table
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
return knex.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}
function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
function buildDeleteTable(knex: SchemaBuilder, table: Table): SchemaBuilder {
return knex.schema.dropTable(table.name)
return knex.dropTable(table.name)
}
class SqlTableQueryBuilder {
@@ -146,7 +146,11 @@ class SqlTableQueryBuilder {
}
_tableQuery(json: QueryJson): any {
const client = knex({ client: this.sqlClient })
let client = knex({ client: this.sqlClient }).schema
if (json?.endpoint?.schema) {
client = client.withSchema(json.endpoint.schema)
}
let query
if (!json.table || !json.meta || !json.meta.tables) {
throw "Cannot execute without table being specified"

View File

@@ -6,11 +6,10 @@ import {
} from "../definitions/datasource"
import { OAuth2Client } from "google-auth-library"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Row, Table, TableSchema } from "../definitions/common"
import { Table, TableSchema } from "../definitions/common"
import { buildExternalTableId } from "./utils"
import { DataSourceOperation, FieldTypes } from "../constants"
import { GoogleSpreadsheet } from "google-spreadsheet"
import { table } from "console"
module GoogleSheetsModule {
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
@@ -112,6 +111,10 @@ module GoogleSheetsModule {
this.client = new GoogleSpreadsheet(spreadsheetId)
}
getBindingIdentifier() {
return ""
}
/**
* Pull the spreadsheet ID out from a valid google sheets URL
* @param spreadsheetId - the URL or standard spreadsheetId of the google sheet

View File

@@ -19,6 +19,7 @@ import { Table, TableSchema } from "../definitions/common"
module MSSQLModule {
const sqlServer = require("mssql")
const Sql = require("./base/sql")
const DEFAULT_SCHEMA = "dbo"
interface MSSQLConfig {
user: string
@@ -26,9 +27,17 @@ module MSSQLModule {
server: string
port: number
database: string
schema: string
encrypt?: boolean
}
interface TablesResponse {
TABLE_CATALOG: string
TABLE_SCHEMA: string
TABLE_NAME: string
TABLE_TYPE: string
}
const SCHEMA: Integration = {
docs: "https://github.com/tediousjs/node-mssql",
plus: true,
@@ -58,6 +67,10 @@ module MSSQLModule {
type: DatasourceFieldTypes.STRING,
default: "root",
},
schema: {
type: DatasourceFieldTypes.STRING,
default: DEFAULT_SCHEMA,
},
encrypt: {
type: DatasourceFieldTypes.BOOLEAN,
default: true,
@@ -79,34 +92,9 @@ module MSSQLModule {
},
}
async function internalQuery(
client: any,
query: SqlQuery,
operation: string | undefined = undefined
) {
const request = client.request()
try {
if (Array.isArray(query.bindings)) {
let count = 0
for (let binding of query.bindings) {
request.input(`p${count++}`, binding)
}
}
// this is a hack to get the inserted ID back,
// no way to do this with Knex nicely
const sql =
operation === Operation.CREATE
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
: query.sql
return await request.query(sql)
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
private index: number = 0
static pool: any
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
@@ -121,6 +109,62 @@ module MSSQLModule {
TABLES_SQL =
"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
constructor(config: MSSQLConfig) {
super(SqlClients.MS_SQL)
this.config = config
const clientCfg = {
...this.config,
options: {
encrypt: this.config.encrypt,
enableArithAbort: true,
},
}
delete clientCfg.encrypt
if (!this.pool) {
this.pool = new sqlServer.ConnectionPool(clientCfg)
}
}
getBindingIdentifier(): string {
return `(@p${this.index++})`
}
async connect() {
try {
this.client = await this.pool.connect()
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
async internalQuery(
query: SqlQuery,
operation: string | undefined = undefined
) {
const client = this.client
const request = client.request()
this.index = 0
try {
if (Array.isArray(query.bindings)) {
let count = 0
for (let binding of query.bindings) {
request.input(`p${count++}`, binding)
}
}
// this is a hack to get the inserted ID back,
// no way to do this with Knex nicely
const sql =
operation === Operation.CREATE
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
: query.sql
return await request.query(sql)
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
getDefinitionSQL(tableName: string) {
return `select *
from INFORMATION_SCHEMA.COLUMNS
@@ -149,33 +193,8 @@ module MSSQLModule {
WHERE TABLE_NAME='${tableName}'`
}
constructor(config: MSSQLConfig) {
super(SqlClients.MS_SQL)
this.config = config
const clientCfg = {
...this.config,
options: {
encrypt: this.config.encrypt,
enableArithAbort: true,
},
}
delete clientCfg.encrypt
if (!this.pool) {
this.pool = new sqlServer.ConnectionPool(clientCfg)
}
}
async connect() {
try {
this.client = await this.pool.connect()
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
async runSQL(sql: string) {
return (await internalQuery(this.client, getSqlQuery(sql))).recordset
return (await this.internalQuery(getSqlQuery(sql))).recordset
}
/**
@@ -185,11 +204,14 @@ module MSSQLModule {
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
await this.connect()
let tableNames = await this.runSQL(this.TABLES_SQL)
if (tableNames == null || !Array.isArray(tableNames)) {
let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
if (tableInfo == null || !Array.isArray(tableInfo)) {
throw "Unable to get list of tables in database"
}
tableNames = tableNames
const schema = this.config.schema || DEFAULT_SCHEMA
const tableNames = tableInfo
.filter((record: any) => record.TABLE_SCHEMA === schema)
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
@@ -238,33 +260,36 @@ module MSSQLModule {
async read(query: SqlQuery | string) {
await this.connect()
const response = await internalQuery(this.client, getSqlQuery(query))
const response = await this.internalQuery(getSqlQuery(query))
return response.recordset
}
async create(query: SqlQuery | string) {
await this.connect()
const response = await internalQuery(this.client, getSqlQuery(query))
const response = await this.internalQuery(getSqlQuery(query))
return response.recordset || [{ created: true }]
}
async update(query: SqlQuery | string) {
await this.connect()
const response = await internalQuery(this.client, getSqlQuery(query))
const response = await this.internalQuery(getSqlQuery(query))
return response.recordset || [{ updated: true }]
}
async delete(query: SqlQuery | string) {
await this.connect()
const response = await internalQuery(this.client, getSqlQuery(query))
const response = await this.internalQuery(getSqlQuery(query))
return response.recordset || [{ deleted: true }]
}
async query(json: QueryJson) {
const schema = this.config.schema
await this.connect()
if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
json.endpoint.schema = schema
}
const operation = this._operation(json)
const queryFn = (query: any, op: string) =>
internalQuery(this.client, query, op)
const queryFn = (query: any, op: string) => this.internalQuery(query, op)
const processFn = (result: any) =>
result.recordset ? result.recordset : [{ [operation]: true }]
return this.queryWithReturning(json, queryFn, processFn)

View File

@@ -16,7 +16,7 @@ import {
import { DatasourcePlus } from "./base/datasourcePlus"
module MySQLModule {
const mysql = require("mysql2")
const mysql = require("mysql2/promise")
const Sql = require("./base/sql")
interface MySQLConfig {
@ -29,7 +29,7 @@ module MySQLModule {
}
const SCHEMA: Integration = {
docs: "https://github.com/mysqljs/mysql",
docs: "https://github.com/sidorares/node-mysql2",
plus: true,
friendlyName: "MySQL",
description:
@@ -80,36 +80,9 @@ module MySQLModule {
},
}
function internalQuery(
client: any,
query: SqlQuery,
connect: boolean = true
): Promise<any[] | any> {
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
if (connect) {
client.connect()
}
return client.query(
query.sql,
query.bindings || {},
(error: any, results: object[]) => {
if (error) {
reject(error)
} else {
resolve(results)
}
if (connect) {
client.end()
}
}
)
})
}
class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig
private readonly client: any
private client: any
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
@@ -119,93 +92,131 @@ module MySQLModule {
if (config.ssl && Object.keys(config.ssl).length === 0) {
delete config.ssl
}
this.client = mysql.createConnection(config)
this.config = config
}
getBindingIdentifier(): string {
return "?"
}
async connect() {
this.client = await mysql.createConnection(this.config)
}
async disconnect() {
await this.client.end()
}
async internalQuery(
query: SqlQuery,
connect: boolean = true
): Promise<any[] | any> {
try {
if (connect) {
await this.connect()
}
// Node MySQL is callback based, so we must wrap our call in a promise
const response = await this.client.query(
query.sql,
query.bindings || []
)
return response[0]
} finally {
if (connect) {
await this.disconnect()
}
}
} }
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
const tables: { [key: string]: Table } = {}
const database = this.config.database
this.client.connect()
// get the tables first
const tablesResp = await internalQuery(
this.client,
{ sql: "SHOW TABLES;" },
false
)
const tableNames = tablesResp.map(
(obj: any) =>
obj[`Tables_in_${database}`] ||
obj[`Tables_in_${database.toLowerCase()}`]
)
for (let tableName of tableNames) {
const primaryKeys = []
const schema: TableSchema = {}
const descResp = await internalQuery(
this.client,
{ sql: `DESCRIBE \`${tableName}\`;` },
false
)
for (let column of descResp) {
const columnName = column.Field
if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
primaryKeys.push(columnName)
}
const constraints = {
presence: column.Null !== "YES",
}
const isAuto: boolean =
typeof column.Extra === "string" &&
(column.Extra === "auto_increment" ||
column.Extra.toLowerCase().includes("generated"))
schema[columnName] = {
name: columnName,
autocolumn: isAuto,
type: convertSqlType(column.Type),
constraints,
}
}
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: primaryKeys,
name: tableName,
schema,
}
}
}
this.client.end()
await this.connect()
try {
// get the tables first
const tablesResp = await this.internalQuery(
{ sql: "SHOW TABLES;" },
false
)
const tableNames = tablesResp.map(
(obj: any) =>
obj[`Tables_in_${database}`] ||
obj[`Tables_in_${database.toLowerCase()}`]
)
for (let tableName of tableNames) {
const primaryKeys = []
const schema: TableSchema = {}
const descResp = await this.internalQuery(
{ sql: `DESCRIBE \`${tableName}\`;` },
false
)
for (let column of descResp) {
const columnName = column.Field
if (
column.Key === "PRI" &&
primaryKeys.indexOf(column.Key) === -1
) {
primaryKeys.push(columnName)
}
const constraints = {
presence: column.Null !== "YES",
}
const isAuto: boolean =
typeof column.Extra === "string" &&
(column.Extra === "auto_increment" ||
column.Extra.toLowerCase().includes("generated"))
schema[columnName] = {
name: columnName,
autocolumn: isAuto,
type: convertSqlType(column.Type),
constraints,
}
}
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: primaryKeys,
name: tableName,
schema,
}
}
}
} finally {
await this.disconnect()
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
}
async create(query: SqlQuery | string) {
const results = await internalQuery(this.client, getSqlQuery(query))
const results = await this.internalQuery(getSqlQuery(query))
return results.length ? results : [{ created: true }]
}
async read(query: SqlQuery | string) {
return internalQuery(this.client, getSqlQuery(query))
return this.internalQuery(getSqlQuery(query))
}
async update(query: SqlQuery | string) {
const results = await internalQuery(this.client, getSqlQuery(query))
const results = await this.internalQuery(getSqlQuery(query))
return results.length ? results : [{ updated: true }]
}
async delete(query: SqlQuery | string) {
const results = await internalQuery(this.client, getSqlQuery(query))
const results = await this.internalQuery(getSqlQuery(query))
return results.length ? results : [{ deleted: true }]
}
async query(json: QueryJson) {
this.client.connect()
const queryFn = (query: any) => internalQuery(this.client, query, false)
const output = await this.queryWithReturning(json, queryFn)
this.client.end()
return output
await this.connect()
try {
const queryFn = (query: any) => this.internalQuery(query, false)
return await this.queryWithReturning(json, queryFn)
} finally {
await this.disconnect()
}
}
}

View File

@@ -1,24 +1,24 @@
import {
Integration,
DatasourceFieldTypes,
Integration,
Operation,
QueryJson,
QueryTypes,
SqlQuery,
QueryJson,
Operation,
} from "../definitions/datasource"
import {
finaliseExternalTables,
getSqlQuery,
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
getSqlQuery,
SqlClients,
} from "./utils"
import oracledb, {
ExecuteOptions,
BindParameters,
Result,
Connection,
ConnectionAttributes,
BindParameters,
ExecuteOptions,
Result,
} from "oracledb"
import Sql from "./base/sql"
import { Table } from "../definitions/common"
@@ -137,6 +137,7 @@ module OracleModule {
class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig
private index: number = 1
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
@@ -174,6 +175,10 @@ module OracleModule {
this.config = config
}
getBindingIdentifier(): string {
return `:${this.index++}`
}
/**
* Map the flat tabular columns and constraints data into a nested object
*/
@@ -233,20 +238,14 @@ module OracleModule {
return oracleTables
}
private isSupportedColumn(column: OracleColumn) {
if (UNSUPPORTED_TYPES.includes(column.type)) {
return false
}
return true
private static isSupportedColumn(column: OracleColumn) {
return !UNSUPPORTED_TYPES.includes(column.type)
}
private isAutoColumn(column: OracleColumn) {
if (column.default && column.default.toLowerCase().includes("nextval")) {
return true
}
return false
private static isAutoColumn(column: OracleColumn) {
return !!(
column.default && column.default.toLowerCase().includes("nextval")
)
}
/**
@@ -254,7 +253,7 @@ module OracleModule {
* This matches the default behaviour for generating DDL used in knex.
*/
private isBooleanType(column: OracleColumn): boolean {
if (
return (
column.type.toLowerCase() === "number" &&
Object.values(column.constraints).filter(c => {
if (
@@ -273,11 +272,7 @@ module OracleModule {
}
return false
}).length > 0
) {
return true
}
return false
)
}
private internalConvertType(column: OracleColumn): string {
@@ -317,7 +312,9 @@ module OracleModule {
// iterate each column on the table
Object.values(oracleTable.columns)
// remove columns that we can't read / save
.filter(oracleColumn => this.isSupportedColumn(oracleColumn))
.filter(oracleColumn =>
OracleIntegration.isSupportedColumn(oracleColumn)
)
// match the order of the columns in the db
.sort((c1, c2) => c1.id - c2.id)
.forEach(oracleColumn => {
@@ -325,7 +322,7 @@ module OracleModule {
let fieldSchema = table.schema[columnName]
if (!fieldSchema) {
fieldSchema = {
autocolumn: this.isAutoColumn(oracleColumn),
autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
name: columnName,
type: this.internalConvertType(oracleColumn),
}
@@ -351,18 +348,13 @@ module OracleModule {
private async internalQuery<T>(query: SqlQuery): Promise<Result<T>> {
let connection
try {
this.index = 1
connection = await this.getConnection()
const options: ExecuteOptions = { autoCommit: true }
const bindings: BindParameters = query.bindings || []
const result: Result<T> = await connection.execute<T>(
query.sql,
bindings,
options
)
return result
return await connection.execute<T>(query.sql, bindings, options)
} finally {
if (connection) {
try {

View File

@@ -103,30 +103,11 @@ module PostgresModule {
},
}
async function internalQuery(client: any, query: SqlQuery) {
// need to handle a specific issue with json data types in postgres,
// new lines inside the JSON data will break it
if (query && query.sql) {
const matches = query.sql.match(JSON_REGEX)
if (matches && matches.length > 0) {
for (let match of matches) {
const escaped = escapeDangerousCharacters(match)
query.sql = query.sql.replace(match, escaped)
}
}
}
try {
return await client.query(query.sql, query.bindings || [])
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
class PostgresIntegration extends Sql implements DatasourcePlus {
static pool: any
private readonly client: any
private readonly config: PostgresConfig
private index: number = 1
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
@@ -163,6 +144,32 @@ module PostgresModule {
this.setSchema()
}
getBindingIdentifier(): string {
return `$${this.index++}`
}
async internalQuery(query: SqlQuery) {
const client = this.client
this.index = 1
// need to handle a specific issue with json data types in postgres,
// new lines inside the JSON data will break it
if (query && query.sql) {
const matches = query.sql.match(JSON_REGEX)
if (matches && matches.length > 0) {
for (let match of matches) {
const escaped = escapeDangerousCharacters(match)
query.sql = query.sql.replace(match, escaped)
}
}
}
try {
return await client.query(query.sql, query.bindings || [])
} catch (err) {
// @ts-ignore
throw new Error(err)
}
}
setSchema() { setSchema() {
if (!this.config.schema) { if (!this.config.schema) {
this.config.schema = "public" this.config.schema = "public"
@ -241,22 +248,22 @@ module PostgresModule {
} }
async create(query: SqlQuery | string) { async create(query: SqlQuery | string) {
const response = await internalQuery(this.client, getSqlQuery(query)) const response = await this.internalQuery(getSqlQuery(query))
return response.rows.length ? response.rows : [{ created: true }] return response.rows.length ? response.rows : [{ created: true }]
} }
async read(query: SqlQuery | string) { async read(query: SqlQuery | string) {
const response = await internalQuery(this.client, getSqlQuery(query)) const response = await this.internalQuery(getSqlQuery(query))
return response.rows return response.rows
} }
async update(query: SqlQuery | string) { async update(query: SqlQuery | string) {
const response = await internalQuery(this.client, getSqlQuery(query)) const response = await this.internalQuery(getSqlQuery(query))
return response.rows.length ? response.rows : [{ updated: true }] return response.rows.length ? response.rows : [{ updated: true }]
} }
async delete(query: SqlQuery | string) { async delete(query: SqlQuery | string) {
const response = await internalQuery(this.client, getSqlQuery(query)) const response = await this.internalQuery(getSqlQuery(query))
return response.rows.length ? response.rows : [{ deleted: true }] return response.rows.length ? response.rows : [{ deleted: true }]
} }
@ -266,11 +273,11 @@ module PostgresModule {
if (Array.isArray(input)) { if (Array.isArray(input)) {
const responses = [] const responses = []
for (let query of input) { for (let query of input) {
responses.push(await internalQuery(this.client, query)) responses.push(await this.internalQuery(query))
} }
return responses return responses
} else { } else {
const response = await internalQuery(this.client, input) const response = await this.internalQuery(input)
return response.rows.length ? response.rows : [{ [operation]: true }] return response.rows.length ? response.rows : [{ [operation]: true }]
} }
} }
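Moving internalQuery onto the class gives it access to the new this.index counter, and `$${this.index++}` lines up with node-postgres's numbered placeholder syntax, where $1, $2, ... in the SQL text are filled positionally from the values array passed to query. A short sketch under that assumption (the connection string is a placeholder):

const { Client } = require("pg")

// mirrors the integration's counter-based identifier generation
let index = 1
const getBindingIdentifier = () => `$${index++}`

async function example() {
  const client = new Client({ connectionString: "postgres://localhost/app" })
  await client.connect()
  const sql = `SELECT * FROM users WHERE name = ${getBindingIdentifier()} AND age > ${getBindingIdentifier()}`
  // sql is now: SELECT * FROM users WHERE name = $1 AND age > $2
  const result = await client.query(sql, ["Bob", 30])
  console.log(result.rows)
  await client.end()
}

Resetting the counter at the top of each internalQuery call keeps the identifiers starting from $1 for every statement.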

View File

@ -19,7 +19,7 @@ describe("MySQL Integration", () => {
await config.integration.create({ await config.integration.create({
sql sql
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function)) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
it("calls the read method with the correct params", async () => { it("calls the read method with the correct params", async () => {
@ -27,7 +27,7 @@ describe("MySQL Integration", () => {
await config.integration.read({ await config.integration.read({
sql sql
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function)) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
it("calls the update method with the correct params", async () => { it("calls the update method with the correct params", async () => {
@ -35,7 +35,7 @@ describe("MySQL Integration", () => {
await config.integration.update({ await config.integration.update({
sql sql
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function)) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
it("calls the delete method with the correct params", async () => { it("calls the delete method with the correct params", async () => {
@ -43,7 +43,7 @@ describe("MySQL Integration", () => {
await config.integration.delete({ await config.integration.delete({
sql sql
}) })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, {}, expect.any(Function)) expect(config.integration.client.query).toHaveBeenCalledWith(sql, [])
}) })
describe("no rows returned", () => { describe("no rows returned", () => {

View File

@ -6,41 +6,69 @@ import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g const ROW_ID_REGEX = /^\[.*]$/g
const SQL_TYPE_MAP = { const SQL_NUMBER_TYPE_MAP = {
text: FieldTypes.LONGFORM,
varchar: FieldTypes.STRING,
integer: FieldTypes.NUMBER, integer: FieldTypes.NUMBER,
int: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER, bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER, decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER, smallint: FieldTypes.NUMBER,
real: FieldTypes.NUMBER, real: FieldTypes.NUMBER,
"double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
date: FieldTypes.DATETIME,
blob: FieldTypes.LONGFORM,
enum: FieldTypes.STRING,
float: FieldTypes.NUMBER, float: FieldTypes.NUMBER,
int: FieldTypes.NUMBER,
numeric: FieldTypes.NUMBER, numeric: FieldTypes.NUMBER,
mediumint: FieldTypes.NUMBER, mediumint: FieldTypes.NUMBER,
dec: FieldTypes.NUMBER, dec: FieldTypes.NUMBER,
double: FieldTypes.NUMBER, double: FieldTypes.NUMBER,
fixed: FieldTypes.NUMBER, fixed: FieldTypes.NUMBER,
datetime: FieldTypes.DATETIME, "double precision": FieldTypes.NUMBER,
tinyint: FieldTypes.BOOLEAN,
long: FieldTypes.LONGFORM,
number: FieldTypes.NUMBER, number: FieldTypes.NUMBER,
binary_float: FieldTypes.NUMBER, binary_float: FieldTypes.NUMBER,
binary_double: FieldTypes.NUMBER, binary_double: FieldTypes.NUMBER,
money: FieldTypes.NUMBER,
smallmoney: FieldTypes.NUMBER,
}
const SQL_DATE_TYPE_MAP = {
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
datetime: FieldTypes.DATETIME,
smalldatetime: FieldTypes.DATETIME,
date: FieldTypes.DATETIME,
}
const SQL_STRING_TYPE_MAP = {
varchar: FieldTypes.STRING,
char: FieldTypes.STRING,
nchar: FieldTypes.STRING,
nvarchar: FieldTypes.STRING,
ntext: FieldTypes.STRING,
enum: FieldTypes.STRING,
blob: FieldTypes.LONGFORM,
long: FieldTypes.LONGFORM,
text: FieldTypes.LONGFORM,
}
const SQL_BOOLEAN_TYPE_MAP = {
boolean: FieldTypes.BOOLEAN,
bit: FieldTypes.BOOLEAN,
tinyint: FieldTypes.BOOLEAN,
}
const SQL_MISC_TYPE_MAP = {
json: FieldTypes.JSON,
}
const SQL_TYPE_MAP = {
...SQL_NUMBER_TYPE_MAP,
...SQL_DATE_TYPE_MAP,
...SQL_STRING_TYPE_MAP,
...SQL_BOOLEAN_TYPE_MAP,
...SQL_MISC_TYPE_MAP,
} }
export enum SqlClients { export enum SqlClients {
MS_SQL = "mssql", MS_SQL = "mssql",
POSTGRES = "pg", POSTGRES = "pg",
MY_SQL = "mysql", MY_SQL = "mysql2",
ORACLE = "oracledb", ORACLE = "oracledb",
} }
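Grouping the map by family makes it easier to audit which external column types resolve to which internal FieldTypes value, while the spread into SQL_TYPE_MAP preserves the original flat-lookup behaviour. A quick illustrative lookup (the FieldTypes string values here are assumptions, reconstructed only so the snippet stands alone):

// hypothetical standalone reconstruction of the flat lookup
const FieldTypes = { STRING: "string", NUMBER: "number" }
const SQL_NUMBER_TYPE_MAP = { money: FieldTypes.NUMBER, smallmoney: FieldTypes.NUMBER }
const SQL_STRING_TYPE_MAP = { nvarchar: FieldTypes.STRING }
const SQL_TYPE_MAP = { ...SQL_NUMBER_TYPE_MAP, ...SQL_STRING_TYPE_MAP }

console.log(SQL_TYPE_MAP["money"]) // "number"
console.log(SQL_TYPE_MAP["unknown"]) // undefined -- presumably left for the caller to handle

The new money, smallmoney, nchar, nvarchar, ntext, bit and smalldatetime entries round out coverage for MSSQL column types.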

View File

@ -2,8 +2,12 @@ const threadUtils = require("./utils")
threadUtils.threadSetup() threadUtils.threadSetup()
const ScriptRunner = require("../utilities/scriptRunner") const ScriptRunner = require("../utilities/scriptRunner")
const { integrations } = require("../integrations") const { integrations } = require("../integrations")
const { processStringSync } = require("@budibase/string-templates") const {
processStringSync,
findHBSBlocks,
} = require("@budibase/string-templates")
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { isSQL } = require("../integrations/utils")
class QueryRunner { class QueryRunner {
constructor(input, flags = { noRecursiveQuery: false }) { constructor(input, flags = { noRecursiveQuery: false }) {
@ -23,23 +27,47 @@ class QueryRunner {
this.hasRerun = false this.hasRerun = false
} }
interpolateSQL(fields, parameters, integration) {
let sql = fields.sql
if (!sql) {
return fields
}
const bindings = findHBSBlocks(sql)
let variables = []
for (let binding of bindings) {
let variable = integration.getBindingIdentifier()
variables.push(binding)
sql = sql.replace(binding, variable)
}
// replicate the knex structure
fields.sql = sql
fields.bindings = this.enrichQueryFields(variables, parameters)
return fields
}
async execute() { async execute() {
let { datasource, fields, queryVerb, transformer } = this let { datasource, fields, queryVerb, transformer } = this
// pre-query, make sure datasource variables are added to parameters
const parameters = await this.addDatasourceVariables()
let query = this.enrichQueryFields(fields, parameters)
// Add pagination values for REST queries
if (this.pagination) {
query.paginationValues = this.pagination
}
const Integration = integrations[datasource.source] const Integration = integrations[datasource.source]
if (!Integration) { if (!Integration) {
throw "Integration type does not exist." throw "Integration type does not exist."
} }
const integration = new Integration(datasource.config) const integration = new Integration(datasource.config)
// pre-query, make sure datasource variables are added to parameters
const parameters = await this.addDatasourceVariables()
let query
// handle SQL injections by interpolating the variables
if (isSQL(datasource)) {
query = this.interpolateSQL(fields, parameters, integration)
} else {
query = this.enrichQueryFields(fields, parameters)
}
// Add pagination values for REST queries
if (this.pagination) {
query.paginationValues = this.pagination
}
let output = threadUtils.formatResponse(await integration[queryVerb](query)) let output = threadUtils.formatResponse(await integration[queryVerb](query))
let rows = output, let rows = output,
info = undefined, info = undefined,
@ -179,7 +207,7 @@ class QueryRunner {
} }
enrichQueryFields(fields, parameters = {}) { enrichQueryFields(fields, parameters = {}) {
const enrichedQuery = {} const enrichedQuery = Array.isArray(fields) ? [] : {}
// enrich the fields with dynamic parameters // enrich the fields with dynamic parameters
for (let key of Object.keys(fields)) { for (let key of Object.keys(fields)) {
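To make interpolateSQL concrete: every {{ ... }} block found in the SQL is swapped for the integration's next bind marker, and the original block text is kept aside so enrichQueryFields can resolve it into the bound value, meaning user input never lands in the SQL string itself. A hedged walk-through against a Postgres-style integration (the integration object here is a stand-in):

// stand-in for a SQL integration with numbered placeholders
const integration = {
  index: 1,
  getBindingIdentifier() {
    return `$${this.index++}`
  },
}

let sql = "SELECT * FROM users WHERE name = {{ username }} AND age > {{ minAge }}"
const bindings = ["{{ username }}", "{{ minAge }}"] // what findHBSBlocks would return
const variables = []
for (let binding of bindings) {
  const variable = integration.getBindingIdentifier()
  variables.push(binding)
  sql = sql.replace(binding, variable)
}
console.log(sql) // SELECT * FROM users WHERE name = $1 AND age > $2
// variables still holds the original blocks, ready to be enriched into the
// real values and handed to the driver as the bindings array

This is also why enrichQueryFields now seeds an array when given one: the variables list passed to it from interpolateSQL is an array rather than a fields object.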

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{ {
"name": "@budibase/string-templates", "name": "@budibase/string-templates",
"version": "1.0.79-alpha.7", "version": "1.0.80-alpha.2",
"description": "Handlebars wrapper for Budibase templating.", "description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs", "main": "src/index.cjs",
"module": "dist/bundle.mjs", "module": "dist/bundle.mjs",

View File

@ -18,6 +18,7 @@ module.exports.processObject = templates.processObject
module.exports.doesContainStrings = templates.doesContainStrings module.exports.doesContainStrings = templates.doesContainStrings
module.exports.doesContainString = templates.doesContainString module.exports.doesContainString = templates.doesContainString
module.exports.disableEscaping = templates.disableEscaping module.exports.disableEscaping = templates.disableEscaping
module.exports.findHBSBlocks = templates.findHBSBlocks
/** /**
* Use vm2 to run JS scripts in a node env * Use vm2 to run JS scripts in a node env

View File

@ -3,7 +3,11 @@ const { registerAll, registerMinimum } = require("./helpers/index")
const processors = require("./processors") const processors = require("./processors")
const { atob, btoa } = require("./utilities") const { atob, btoa } = require("./utilities")
const manifest = require("../manifest.json") const manifest = require("../manifest.json")
const { FIND_HBS_REGEX, findDoubleHbsInstances } = require("./utilities") const {
FIND_HBS_REGEX,
FIND_ANY_HBS_REGEX,
findDoubleHbsInstances,
} = require("./utilities")
const hbsInstance = handlebars.create() const hbsInstance = handlebars.create()
registerAll(hbsInstance) registerAll(hbsInstance)
@ -310,6 +314,21 @@ module.exports.doesContainStrings = (template, strings) => {
return false return false
} }
/**
* Given a string, this will return any {{ binding }} or {{{ binding }}} type
* statements.
* @param {string} string The string to search within.
* @return {string[]} The found HBS blocks.
*/
module.exports.findHBSBlocks = string => {
let regexp = new RegExp(FIND_ANY_HBS_REGEX)
let matches = string.match(regexp)
if (matches == null) {
return []
}
return matches
}
/** /**
* This function looks in the supplied template for handlebars instances, if they contain * This function looks in the supplied template for handlebars instances, if they contain
* JS the JS will be decoded and then the supplied string will be looked for. For example * JS the JS will be decoded and then the supplied string will be looked for. For example

View File

@ -18,6 +18,7 @@ export const processObject = templates.processObject
export const doesContainStrings = templates.doesContainStrings export const doesContainStrings = templates.doesContainStrings
export const doesContainString = templates.doesContainString export const doesContainString = templates.doesContainString
export const disableEscaping = templates.disableEscaping export const disableEscaping = templates.disableEscaping
export const findHBSBlocks = templates.findHBSBlocks
/** /**
* Use polyfilled vm to run JS scripts in a browser Env * Use polyfilled vm to run JS scripts in a browser Env

View File

@ -1,6 +1,7 @@
const ALPHA_NUMERIC_REGEX = /^[A-Za-z0-9]+$/g const ALPHA_NUMERIC_REGEX = /^[A-Za-z0-9]+$/g
module.exports.FIND_HBS_REGEX = /{{([^{].*?)}}/g module.exports.FIND_HBS_REGEX = /{{([^{].*?)}}/g
module.exports.FIND_ANY_HBS_REGEX = /{?{{([^{].*?)}}}?/g
module.exports.FIND_TRIPLE_HBS_REGEX = /{{{([^{].*?)}}}/g module.exports.FIND_TRIPLE_HBS_REGEX = /{{{([^{].*?)}}}/g
// originally this could be done with a single regex using look behinds // originally this could be done with a single regex using look behinds
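The optional outer braces in FIND_ANY_HBS_REGEX let a single pass capture both double- and triple-brace blocks intact, where FIND_HBS_REGEX would trim a brace off each side of a triple-brace block. A quick check of the difference:

const FIND_HBS_REGEX = /{{([^{].*?)}}/g
const FIND_ANY_HBS_REGEX = /{?{{([^{].*?)}}}?/g

const input = "{{ hello }} there {{{ name }}}"
console.log(input.match(FIND_HBS_REGEX)) // [ '{{ hello }}', '{{ name }}' ] -- outer braces lost
console.log(input.match(FIND_ANY_HBS_REGEX)) // [ '{{ hello }}', '{{{ name }}}' ]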

View File

@ -7,6 +7,7 @@ const {
encodeJSBinding, encodeJSBinding,
doesContainString, doesContainString,
disableEscaping, disableEscaping,
findHBSBlocks,
} = require("../src/index.cjs") } = require("../src/index.cjs")
describe("Test that the string processing works correctly", () => { describe("Test that the string processing works correctly", () => {
@ -200,3 +201,13 @@ describe("check that disabling escaping function works", () => {
}) })
}) })
describe("check find hbs blocks function", () => {
it("should find none", () => {
expect(findHBSBlocks("hello there")).toEqual([])
})
it("should find two", () => {
expect(findHBSBlocks("{{ hello }} there {{{ name }}}")).toEqual(["{{ hello }}", "{{{ name }}}"])
})
})

View File

@ -1,7 +1,7 @@
{ {
"name": "@budibase/worker", "name": "@budibase/worker",
"email": "hi@budibase.com", "email": "hi@budibase.com",
"version": "1.0.79-alpha.7", "version": "1.0.80-alpha.2",
"description": "Budibase background service", "description": "Budibase background service",
"main": "src/index.ts", "main": "src/index.ts",
"repository": { "repository": {
@ -34,8 +34,8 @@
"author": "Budibase", "author": "Budibase",
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"@budibase/backend-core": "^1.0.79-alpha.7", "@budibase/backend-core": "^1.0.80-alpha.2",
"@budibase/string-templates": "^1.0.79-alpha.7", "@budibase/string-templates": "^1.0.80-alpha.2",
"@koa/router": "^8.0.0", "@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0", "@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0", "@techpass/passport-openidconnect": "^0.3.0",

View File

@ -1,30 +1,63 @@
#!/usr/bin/env node #!/usr/bin/env node
const path = require("path") const path = require("path")
const fs = require("fs") const fs = require("fs")
const { processStringSync } = require("@budibase/string-templates")
function processStringSync(string, env) {
let output = ""
// process if statements
let removal = false
for (let line of string.split("\n")) {
if (new RegExp(`{{\/if}}`, "g").test(line)) {
removal = false
continue
}
if (!removal) {
const match = line.match(new RegExp(`{{#if (.*)}}`))
if (match) {
const key = match[1]
// check the if statement is true
if (!env[key]) {
removal = true
}
continue
}
output += line + "\n"
}
}
for (let key in env) {
// replace variables
const rgx = new RegExp(`{{\\s*${key}\\s*}}`, "g")
output = output.replace(rgx, env[key])
}
return output
}
const Configs = { const Configs = {
prod: { prod: {
k8s: true,
apps: "app-service.budibase.svc.cluster.local", apps: "app-service.budibase.svc.cluster.local",
worker: "worker-service.budibase.svc.cluster.local", worker: "worker-service.budibase.svc.cluster.local",
minio: "minio-service.budibase.svc.cluster.local", minio: "minio-service.budibase.svc.cluster.local",
couchdb: "budibase-prod-svc-couchdb", couchdb: "budibase-prod-svc-couchdb",
resolver: "kube-dns.kube-system.svc.cluster.local"
}, },
preprod: { preprod: {
k8s: true,
apps: "app-service.budibase.svc.cluster.local", apps: "app-service.budibase.svc.cluster.local",
worker: "worker-service.budibase.svc.cluster.local", worker: "worker-service.budibase.svc.cluster.local",
minio: "minio-service.budibase.svc.cluster.local", minio: "minio-service.budibase.svc.cluster.local",
couchdb: "budibase-preprod-svc-couchdb", couchdb: "budibase-preprod-svc-couchdb",
resolver: "kube-dns.kube-system.svc.cluster.local"
}, },
compose: { compose: {
compose: true,
apps: "app-service", apps: "app-service",
worker: "worker-service", worker: "worker-service",
minio: "minio-service", minio: "minio-service",
couchdb: "couchdb-service", couchdb: "couchdb-service",
watchtower: "watchtower-service", watchtower: "watchtower-service",
resolver: "127.0.0.11"
}, },
} }
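With the hand-rolled templater replaced by the library import above, each selected Configs entry becomes the handlebars context for the nginx template, so both the {{#if ...}} guards and the plain {{ key }} substitutions are handled by processStringSync. An illustrative substitution (this template line is a stand-in, not taken from the real nginx.prod.conf.hbs):

const { processStringSync } = require("@budibase/string-templates")

const composeConfig = {
  compose: true,
  apps: "app-service",
  couchdb: "couchdb-service",
  resolver: "127.0.0.11",
}

// a stand-in template line using the same config keys
const rendered = processStringSync("proxy_pass http://{{ apps }}:4002;", composeConfig)
console.log(rendered) // proxy_pass http://app-service:4002;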
@ -36,7 +69,7 @@ const Commands = {
async function init(managementCommand) { async function init(managementCommand) {
const config = Configs[managementCommand] const config = Configs[managementCommand]
const hostingPath = path.join(process.cwd(), "..", "..", "hosting") const hostingPath = path.join(process.cwd(), "hosting")
const nginxHbsPath = path.join(hostingPath, "nginx.prod.conf.hbs") const nginxHbsPath = path.join(hostingPath, "nginx.prod.conf.hbs")
const nginxOutputPath = path.join( const nginxOutputPath = path.join(
hostingPath, hostingPath,