commit 8f5fd32f74

    scripting block
@@ -83,4 +83,7 @@ typings/
 .DS_Store
 
 # Nova Editor
 .nova
+
+# swap files (linux)
+*.swp
@@ -1,16 +0,0 @@
-version: "3"
-
-services:
-  app-service:
-    build: ./server
-    volumes:
-      - ./server:/app
-    environment:
-      SELF_HOSTED: 1
-      PORT: 4002
-
-  worker-service:
-    build: ./worker
-    environment:
-      SELF_HOSTED: 1,
-      PORT: 4003
@@ -1 +0,0 @@
-../../packages/server/
@@ -1 +0,0 @@
-../../packages/worker/
@@ -0,0 +1,76 @@
+version: "3"
+
+# optional ports are specified throughout for more advanced use cases.
+
+services:
+  minio-service:
+    container_name: budi-minio-dev
+    restart: always
+    image: minio/minio
+    volumes:
+      - minio_data:/data
+    ports:
+      - "${MINIO_PORT}:9000"
+    environment:
+      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
+      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
+      MINIO_BROWSER: "off"
+    command: server /data
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
+      interval: 30s
+      timeout: 20s
+      retries: 3
+
+  proxy-service:
+    container_name: budi-envoy-dev
+    restart: always
+    image: envoyproxy/envoy:v1.16-latest
+    volumes:
+      - ./envoy.dev.yaml:/etc/envoy/envoy.yaml
+    ports:
+      - "${MAIN_PORT}:10000"
+      #- "9901:9901"
+    depends_on:
+      - minio-service
+      - couchdb-service
+
+  couchdb-service:
+    container_name: budi-couchdb-dev
+    restart: always
+    image: apache/couchdb:3.0
+    environment:
+      - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
+      - COUCHDB_USER=${COUCH_DB_USER}
+    ports:
+      - "${COUCH_DB_PORT}:5984"
+      #- "4369:4369"
+      #- "9100:9100"
+    volumes:
+      - couchdb_data:/opt/couchdb/data
+
+  couch-init:
+    image: curlimages/curl
+    environment:
+      PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
+    depends_on:
+      - couchdb-service
+    command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
+
+  redis-service:
+    container_name: budi-redis-dev
+    restart: always
+    image: redis
+    ports:
+      - "${REDIS_PORT}:6379"
+    volumes:
+      - redis_data:/data
+
+
+volumes:
+  couchdb_data:
+    driver: local
+  minio_data:
+    driver: local
+  redis_data:
+    driver: local
@@ -11,13 +11,18 @@ services:
       - "${APP_PORT}:4002"
     environment:
       SELF_HOSTED: 1
-      CLOUD: 1
       COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
       WORKER_URL: http://worker-service:4003
+      MINIO_URL: http://minio-service:9000
+      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
+      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
       HOSTING_KEY: ${HOSTING_KEY}
       BUDIBASE_ENVIRONMENT: ${BUDIBASE_ENVIRONMENT}
       PORT: 4002
       JWT_SECRET: ${JWT_SECRET}
+      LOG_LEVEL: info
+      SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
+      ENABLE_ANALYTICS: true
     depends_on:
       - worker-service
 

@@ -28,7 +33,7 @@ services:
     ports:
       - "${WORKER_PORT}:4003"
     environment:
-      SELF_HOSTED: 1,
+      SELF_HOSTED: 1
       PORT: 4003
       MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
       MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}

@@ -93,9 +98,19 @@ services:
     depends_on:
       - couchdb-service
     command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
 
+  redis-service:
+    restart: always
+    image: redis
+    ports:
+      - "${REDIS_PORT}:6379"
+    volumes:
+      - redis_data:/data
+
 volumes:
   couchdb_data:
     driver: local
   minio_data:
     driver: local
+  redis_data:
+    driver: local
@@ -0,0 +1,79 @@
+static_resources:
+  listeners:
+  - name: main_listener
+    address:
+      socket_address: { address: 0.0.0.0, port_value: 10000 }
+    filter_chains:
+    - filters:
+      - name: envoy.filters.network.http_connection_manager
+        typed_config:
+          "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
+          stat_prefix: ingress
+          codec_type: auto
+          route_config:
+            name: local_route
+            virtual_hosts:
+            - name: local_services
+              domains: ["*"]
+              routes:
+              - match: { prefix: "/db/" }
+                route:
+                  cluster: couchdb-service
+                  prefix_rewrite: "/"
+
+              - match: { prefix: "/cache/" }
+                route:
+                  cluster: redis-service
+                  prefix_rewrite: "/"
+
+              # minio is on the default route because this works
+              # best, minio + AWS SDK doesn't handle path proxy
+              - match: { prefix: "/" }
+                route:
+                  cluster: minio-service
+
+          http_filters:
+          - name: envoy.filters.http.router
+
+  clusters:
+  - name: minio-service
+    connect_timeout: 0.25s
+    type: strict_dns
+    lb_policy: round_robin
+    load_assignment:
+      cluster_name: minio-service
+      endpoints:
+      - lb_endpoints:
+        - endpoint:
+            address:
+              socket_address:
+                address: minio-service
+                port_value: 9000
+
+  - name: couchdb-service
+    connect_timeout: 0.25s
+    type: strict_dns
+    lb_policy: round_robin
+    load_assignment:
+      cluster_name: couchdb-service
+      endpoints:
+      - lb_endpoints:
+        - endpoint:
+            address:
+              socket_address:
+                address: couchdb-service
+                port_value: 5984
+
+  - name: redis-service
+    connect_timeout: 0.25s
+    type: strict_dns
+    lb_policy: round_robin
+    load_assignment:
+      cluster_name: redis-service
+      endpoints:
+      - lb_endpoints:
+        - endpoint:
+            address:
+              socket_address:
+                address: redis-service
+                port_value: 6379
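Note: the dev proxy above multiplexes the whole stack behind a single listener on port 10000: /db/ is rewritten to couchdb-service:5984, /cache/ to redis-service:6379, and everything else falls through to minio-service:9000. A minimal sketch of the resulting local URLs, assuming MAIN_PORT is mapped to 10000 and the default credentials that the generated dev .env further down in this diff uses:

    // sketch only — port and credentials are the dev defaults, not required values
    const MAIN_PORT = process.env.MAIN_PORT || 10000
    const couchDbUrl = `http://budibase:budibase@localhost:${MAIN_PORT}/db/` // -> couchdb-service:5984
    const minioUrl = `http://localhost:${MAIN_PORT}/`                        // -> minio-service:9000
    const redisUrl = `http://localhost:${MAIN_PORT}/cache/`                  // -> redis-service:6379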
@@ -36,6 +36,11 @@ static_resources:
                   cluster: worker-service
                   prefix_rewrite: "/"
 
+              - match: { prefix: "/cache/" }
+                route:
+                  cluster: redis-service
+                  prefix_rewrite: "/"
+
               - match: { prefix: "/db/" }
                 route:
                   cluster: couchdb-service

@@ -107,3 +112,18 @@ static_resources:
                 address: couchdb-service
                 port_value: 5984
+
+  - name: redis-service
+    connect_timeout: 0.25s
+    type: strict_dns
+    lb_policy: round_robin
+    load_assignment:
+      cluster_name: redis-service
+      endpoints:
+      - lb_endpoints:
+        - endpoint:
+            address:
+              socket_address:
+                address: redis-service
+                port_value: 6379
+
@@ -18,4 +18,5 @@ APP_PORT=4002
 WORKER_PORT=4003
 MINIO_PORT=4004
 COUCH_DB_PORT=4005
+REDIS_PORT=6379
 BUDIBASE_ENVIRONMENT=PRODUCTION
@@ -35,5 +35,6 @@
     "test:e2e:ci": "lerna run cy:ci",
     "build:docker": "cd hosting/scripts/linux/ && ./release-to-docker-hub.sh && cd -",
     "build:docker:staging": "cd hosting/scripts/linux/ && ./release-to-docker-hub.sh staging && cd -"
-  }
+  },
+  "dependencies": {}
 }
@@ -3,6 +3,7 @@
   "video": true,
   "projectId": "bmbemn",
   "env": {
-    "PORT": "4001"
+    "PORT": "4001",
+    "JWT_SECRET": "test"
   }
 }
@@ -1,25 +1,26 @@
-// What this script does:
-// 1. Removes the old test folder if it exists (.budibase)
-// 2. Initialises using `.budibase`
-// 3. Runs the server using said folder
-
-const { join, resolve } = require("path")
-const initialiseBudibase = require("../../server/src/utilities/initialiseBudibase")
 const cypressConfig = require("../cypress.json")
+const path = require("path")
 
-const homedir = join(require("os").homedir(), ".budibase")
+const tmpdir = path.join(require("os").tmpdir(), ".budibase")
 
 process.env.BUDIBASE_API_KEY = "6BE826CB-6B30-4AEC-8777-2E90464633DE"
 process.env.NODE_ENV = "cypress"
 process.env.ENABLE_ANALYTICS = "false"
 process.env.PORT = cypressConfig.env.PORT
+process.env.JWT_SECRET = cypressConfig.env.JWT_SECRET
+process.env.COUCH_URL = `leveldb://${tmpdir}/.data/`
+process.env.SELF_HOSTED = 1
+process.env.MINIO_URL = "http://localhost:10000/"
+process.env.MINIO_ACCESS_KEY = "budibase"
+process.env.MINIO_SECRET_KEY = "budibase"
+process.env.COUCH_DB_USER = "budibase"
+process.env.COUCH_DB_PASSWORD = "budibase"
 
 // Stop info logs polluting test outputs
 process.env.LOG_LEVEL = "error"
 
-async function run(dir) {
-  process.env.BUDIBASE_DIR = resolve(dir)
-  require("dotenv").config({ path: resolve(dir, ".env") })
+async function run() {
+  // require("dotenv").config({ path: resolve(dir, ".env") })
 
   // dont make this a variable or top level require
   // it will cause environment module to be loaded prematurely

@@ -27,12 +28,15 @@ async function run(dir) {
   server.on("close", () => console.log("Server Closed"))
 }
 
-initialiseBudibase({ dir: homedir, clientId: "cypress-test" })
-  .then(() => {
-    delete require.cache[require.resolve("../../server/src/environment")]
-    const xPlatHomeDir = homedir.startsWith("~")
-      ? join(homedir(), homedir.substring(1))
-      : homedir
-    run(xPlatHomeDir)
-  })
-  .catch(e => console.error(e))
+run()
+// TODO: ensure that this still works
+// initialiseBudibase({ dir: homedir, clientId: "cypress-test" })
+// .then(() => {
+// delete require.cache[require.resolve("../../server/src/environment")]
+// const xPlatHomeDir = homedir.startsWith("~")
+// ? join(homedir(), homedir.substring(1))
+// : homedir
+// run(xPlatHomeDir)
+// })
+// .catch(e => console.error(e))
@@ -7,6 +7,8 @@
   import WebhookDisplay from "../Shared/WebhookDisplay.svelte"
   import DrawerBindableInput from "../../common/DrawerBindableInput.svelte"
   import AutomationBindingPanel from "./AutomationBindingPanel.svelte"
+  import Editor from "components/integration/QueryEditor.svelte"
+  import CodeEditorModal from "./CodeEditorModal.svelte"
 
   export let block
   export let webhookModal

@@ -80,6 +82,17 @@
         <WebhookDisplay value={block.inputs[key]} />
       {:else if value.customType === 'triggerSchema'}
         <SchemaSetup bind:value={block.inputs[key]} />
+      {:else if value.customType === 'code'}
+        <CodeEditorModal>
+          <pre>{JSON.stringify(bindings, null, 2)}</pre>
+          <Editor
+            mode="javascript"
+            on:change={e => {
+              block.inputs[key] = e.detail.value
+            }}
+            value={block.inputs[key]}
+          />
+        </CodeEditorModal>
       {:else if value.type === 'string' || value.type === 'number'}
         <DrawerBindableInput
           panel={AutomationBindingPanel}
@@ -0,0 +1,34 @@
+<script>
+  import { Button, Modal, ModalContent } from "@budibase/bbui"
+
+  let modal
+
+  export const show = () => {
+    modal.show()
+  }
+  export const hide = () => {
+    modal.hide()
+  }
+</script>
+
+<Modal bind:this={modal} width="60%">
+  <ModalContent
+    title="Edit Code"
+    showConfirmButton={false}
+    showCancelButton={false}>
+    <div class="container">
+      <slot />
+    </div>
+  </ModalContent>
+</Modal>
+<Button primary on:click={show}>Edit Code</Button>
+
+<style>
+  .container :global(section > header) {
+    /* Fix margin defined in BBUI as L rather than XL */
+    margin-bottom: var(--spacing-xl);
+  }
+  .container :global(textarea) {
+    min-height: 60px;
+  }
+</style>
@@ -27,7 +27,7 @@
     notifier.success("Datasource deleted")
     // navigate to first index page if the source you are deleting is selected
     if (wasSelectedSource === datasource._id) {
-      $goto('./datasource')
+      $goto("./datasource")
     }
     hideEditor()
   }
@@ -37,13 +37,13 @@
   }
 
   async function deleteTable() {
     const wasSelectedTable = $backendUiStore.selectedTable
     await backendUiStore.actions.tables.delete(table)
     store.actions.screens.delete(templateScreens)
     await backendUiStore.actions.tables.fetch()
     notifier.success("Table deleted")
     if (wasSelectedTable._id === table._id) {
-      $goto('./table')
+      $goto("./table")
     }
     hideEditor()
   }
@@ -132,6 +132,10 @@
 
     if (destroyed) return
 
+    CodeMirror.commands.autocomplete = function(cm) {
+      CodeMirror.showHint(cm, CodeMirror.hint.javascript)
+    }
+
     editor = CodeMirror.fromTextArea(refs.editor, opts)
 
     editor.on("change", instance => {
@@ -26,6 +26,7 @@ APP_PORT=4002
 WORKER_PORT=4003
 MINIO_PORT=4004
 COUCH_DB_PORT=4005
+REDIS_PORT=6379
 BUDIBASE_ENVIRONMENT=PRODUCTION`
 }
 
@@ -1,17 +0,0 @@
-# url of couch db, including username and password
-# http://admin:password@localhost:5984
-COUCH_DB_URL={{couchDbUrl}}
-
-# identifies a client database - i.e. group of apps
-CLIENT_ID={{clientId}}
-
-# used to create cookie hashes
-JWT_SECRET={{cookieKey1}}
-
-# error level for koa-pino
-LOG_LEVEL=info
-
-DEPLOYMENT_CREDENTIALS_URL="https://dt4mpwwap8.execute-api.eu-west-1.amazonaws.com/prod/"
-DEPLOYMENT_DB_URL="https://couchdb.budi.live:5984"
-SENTRY_DSN=https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
-ENABLE_ANALYTICS="true"
@@ -2,7 +2,6 @@ FROM node:12-alpine
 
 WORKDIR /app
 
-ENV CLOUD=1
 ENV PORT=4001
 ENV COUCH_DB_URL=https://couchdb.budi.live:5984
 ENV BUDIBASE_ENVIRONMENT=PRODUCTION
@@ -36,11 +36,13 @@
     "test:integration": "jest --coverage --detectOpenHandles",
     "test:watch": "jest --watch",
     "run:docker": "node src/index",
-    "dev:builder": "cross-env PORT=4001 nodemon src/index.js",
+    "dev:stack:up": "node scripts/dev/manage.js up",
+    "dev:stack:down": "node scripts/dev/manage.js down",
+    "dev:stack:nuke": "node scripts/dev/manage.js nuke",
+    "dev:builder": "npm run dev:stack:up && nodemon src/index.js",
     "electron": "electron src/electron.js",
     "build:electron": "electron-builder --dir",
     "publish:electron": "electron-builder -mwl --publish always",
-    "postinstall": "electron-builder install-app-deps",
     "lint": "eslint --fix src/",
     "initialise": "node scripts/initialise.js"
   },

@@ -63,7 +65,7 @@
     "!src/tests/**/*",
     "!src/automations/tests/**/*",
     "!src/utilities/fileProcessor.js",
-    "!src/utilities/initialiseBudibase.js"
+    "!src/utilities/fileSystem/**/*"
   ],
   "coverageReporters": [
     "lcov",

@@ -87,6 +89,8 @@
     "arangojs": "7.2.0",
     "aws-sdk": "^2.767.0",
     "bcryptjs": "2.4.3",
+    "bull": "^3.22.0",
+    "bull-board": "^1.5.1",
     "chmodr": "1.2.0",
     "csvtojson": "2.0.10",
     "dotenv": "8.2.0",

@@ -104,6 +108,7 @@
     "koa": "2.7.0",
     "koa-body": "4.2.0",
     "koa-compress": "4.0.1",
+    "koa-connect": "^2.1.0",
     "koa-pino-logger": "3.0.0",
     "koa-send": "5.0.0",
     "koa-session": "5.12.0",

@@ -134,7 +139,7 @@
   "devDependencies": {
     "@budibase/standard-components": "^0.8.9",
     "@jest/test-sequencer": "^24.8.0",
-    "cross-env": "^7.0.3",
+    "docker-compose": "^0.23.6",
     "electron": "10.1.3",
     "electron-builder": "^22.9.1",
     "electron-builder-notarize": "^1.1.2",
@@ -0,0 +1,97 @@
+#!/usr/bin/env node
+const compose = require("docker-compose")
+const path = require("path")
+const fs = require("fs")
+
+// This script wraps docker-compose allowing you to manage your dev infrastructure with simple commands.
+const CONFIG = {
+  cwd: path.resolve(process.cwd(), "../../hosting"),
+  config: "docker-compose.dev.yaml",
+  log: true,
+}
+
+const Commands = {
+  Up: "up",
+  Down: "down",
+  Nuke: "nuke",
+}
+
+async function init() {
+  const envFilePath = path.join(process.cwd(), ".env")
+  if (fs.existsSync(envFilePath)) {
+    return
+  }
+  const envFileJson = {
+    PORT: 4001,
+    MINIO_URL: "http://localhost:10000/",
+    COUCH_DB_URL: "http://budibase:budibase@localhost:10000/db/",
+    // REDIS_URL: "http://localhost:10000/cache/",
+    WORKER_URL: "http://localhost:4002",
+    JWT_SECRET: "testsecret",
+    MINIO_ACCESS_KEY: "budibase",
+    MINIO_SECRET_KEY: "budibase",
+    COUCH_DB_PASSWORD: "budibase",
+    COUCH_DB_USER: "budibase",
+    SELF_HOSTED: 1,
+  }
+  let envFile = ""
+  Object.keys(envFileJson).forEach(key => {
+    envFile += `${key}=${envFileJson[key]}\n`
+  })
+  fs.writeFileSync(envFilePath, envFile)
+}
+
+async function up() {
+  console.log("Spinning up your budibase dev environment... 🔧✨")
+  await init()
+  await compose.upAll(CONFIG)
+}
+
+async function down() {
+  console.log("Spinning down your budibase dev environment... 🌇")
+  await compose.stop(CONFIG)
+}
+
+async function nuke() {
+  console.log(
+    "Clearing down your budibase dev environment, including all containers and volumes... 💥"
+  )
+  await compose.down(CONFIG)
+}
+
+const managementCommand = process.argv.slice(2)[0]
+
+if (
+  !managementCommand ||
+  !Object.values(Commands).some(command => managementCommand === command)
+) {
+  throw new Error(
+    "You must supply either an 'up', 'down' or 'nuke' commmand to manage the budibase development environment."
+  )
+}
+
+let command
+switch (managementCommand) {
+  case Commands.Up:
+    command = up
+    break
+  case Commands.Down:
+    command = down
+    break
+  case Commands.Nuke:
+    command = nuke
+    break
+  default:
+    command = up
+}
+
+command()
+  .then(() => {
+    console.log("Done! 🎉")
+  })
+  .catch(err => {
+    console.error(
+      "Something went wrong while managing budibase dev environment:",
+      err.message
+    )
+  })
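The script above is wired into the server package.json later in this diff as dev:stack:up, dev:stack:down and dev:stack:nuke, and dev:builder now runs dev:stack:up before starting nodemon. Run directly it takes one of the three commands as its only argument — a usage sketch, presumably run from packages/server given the ../../hosting path in CONFIG:

    node scripts/dev/manage.js up    # writes a default .env if missing, then compose.upAll
    node scripts/dev/manage.js down  # compose.stop
    node scripts/dev/manage.js nuke  # compose.down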
@@ -1,6 +1,9 @@
 #!/usr/bin/env node
-const { exportTemplateFromApp } = require("../src/utilities/templates")
 const yargs = require("yargs")
+const fs = require("fs")
+const { join } = require("path")
+const CouchDB = require("../src/db")
+const { budibaseAppsDir } = require("../src/utilities/budibaseDir")
 
 // Script to export a chosen budibase app into a package
 // Usage: ./scripts/exportAppTemplate.js export --name=Funky --appId=appId

@@ -22,18 +25,22 @@ yargs
       },
     },
     async args => {
+      const name = args.name,
+        appId = args.appId
       console.log("Exporting app..")
-      if (args.name == null || args.appId == null) {
+      if (name == null || appId == null) {
         console.error(
           "Unable to export without a name and app ID being specified, check help for more info."
         )
         return
       }
-      const exportPath = await exportTemplateFromApp({
-        templateName: args.name,
-        appId: args.appId,
-      })
-      console.log(`Template ${args.name} exported to ${exportPath}`)
+      const exportPath = join(budibaseAppsDir(), "templates", "app", name, "db")
+      fs.ensureDirSync(exportPath)
+      const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
+      // perform couch dump
+      const instanceDb = new CouchDB(appId)
+      await instanceDb.dump(writeStream, {})
+      console.log(`Template ${name} exported to ${exportPath}`)
     }
   )
   .help()
@@ -1,17 +0,0 @@
-const { join } = require("path")
-const { homedir } = require("os")
-
-const initialiseBudibase = require("../src/utilities/initialiseBudibase")
-const DIRECTORY = "~/.budibase"
-
-function run() {
-  let opts = {}
-  let dir = DIRECTORY
-  opts.quiet = true
-  opts.dir = dir.startsWith("~") ? join(homedir(), dir.substring(1)) : dir
-  return initialiseBudibase(opts)
-}
-
-run().then(() => {
-  console.log("Init complete.")
-})
@@ -1,56 +1,32 @@
-const fs = require("fs")
-const { join } = require("../../utilities/centralPath")
-const readline = require("readline")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
-const env = require("../../environment")
-const ENV_FILE_PATH = "/.env"
+const builderDB = require("../../db/builder")
 
 exports.fetch = async function(ctx) {
-  ctx.status = 200
-  ctx.body = {
-    budibase: env.BUDIBASE_API_KEY,
-    userId: env.USERID_API_KEY,
+  try {
+    const mainDoc = await builderDB.getBuilderMainDoc()
+    ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {}
+  } catch (err) {
+    /* istanbul ignore next */
+    ctx.throw(400, err)
   }
 }
 
 exports.update = async function(ctx) {
-  const key = `${ctx.params.key.toUpperCase()}_API_KEY`
+  const key = ctx.params.key
   const value = ctx.request.body.value
 
-  // set environment variables
-  env._set(key, value)
-
-  // Write to file
-  await updateValues([key, value])
-
-  ctx.status = 200
-  ctx.message = `Updated ${ctx.params.key} API key succesfully.`
-  ctx.body = { [ctx.params.key]: ctx.request.body.value }
-}
-
-async function updateValues([key, value]) {
-  let newContent = ""
-  let keyExists = false
-  let envPath = join(budibaseAppsDir(), ENV_FILE_PATH)
-  const readInterface = readline.createInterface({
-    input: fs.createReadStream(envPath),
-    output: process.stdout,
-    console: false,
-  })
-  readInterface.on("line", function(line) {
-    // Mutate lines and change API Key
-    if (line.startsWith(key)) {
-      line = `${key}=${value}`
-      keyExists = true
+  try {
+    const mainDoc = await builderDB.getBuilderMainDoc()
+    if (mainDoc.apiKeys == null) {
+      mainDoc.apiKeys = {}
     }
-    newContent = `${newContent}\n${line}`
-  })
-  readInterface.on("close", function() {
-    // Write file here
-    if (!keyExists) {
-      // Add API Key if it doesn't exist in the file at all
-      newContent = `${newContent}\n${key}=${value}`
+    mainDoc.apiKeys[key] = value
+    const resp = await builderDB.setBuilderMainDoc(mainDoc)
+    ctx.body = {
+      _id: resp.id,
+      _rev: resp.rev,
     }
-    fs.writeFileSync(envPath, newContent)
-  })
+  } catch (err) {
+    /* istanbul ignore next */
+    ctx.throw(400, err)
+  }
 }
@@ -1,15 +1,14 @@
 const CouchDB = require("../../db")
-const compileStaticAssets = require("../../utilities/builder/compileStaticAssets")
 const env = require("../../environment")
-const { existsSync } = require("fs-extra")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
 const setBuilderToken = require("../../utilities/builder/setBuilderToken")
-const fs = require("fs-extra")
-const { join, resolve } = require("../../utilities/centralPath")
 const packageJson = require("../../../package.json")
 const { createLinkView } = require("../../db/linkedRows")
 const { createRoutingView } = require("../../utilities/routing")
-const { downloadTemplate } = require("../../utilities/templates")
+const {
+  getTemplateStream,
+  createApp,
+  deleteApp,
+} = require("../../utilities/fileSystem")
 const {
   generateAppID,
   getLayoutParams,

@@ -20,9 +19,6 @@ const {
   BUILTIN_ROLE_IDS,
   AccessController,
 } = require("../../utilities/security/roles")
-const {
-  downloadExtractComponentLibraries,
-} = require("../../utilities/createAppPackage")
 const { BASE_LAYOUTS } = require("../../constants/layouts")
 const {
   createHomeScreen,

@@ -32,11 +28,7 @@ const { cloneDeep } = require("lodash/fp")
 const { processObject } = require("@budibase/string-templates")
 const { getAllApps } = require("../../utilities")
 const { USERS_TABLE_SCHEMA } = require("../../constants")
-const {
-  getDeployedApps,
-  getHostingInfo,
-  HostingTypes,
-} = require("../../utilities/builder/hosting")
+const { getDeployedApps } = require("../../utilities/builder/hosting")
 
 const URL_REGEX_SLASH = /\/|\\/g
 

@@ -75,8 +67,7 @@ async function getAppUrlIfNotInUse(ctx) {
     url = encodeURI(`${ctx.request.body.name}`)
   }
   url = `/${url.replace(URL_REGEX_SLASH, "")}`.toLowerCase()
-  const hostingInfo = await getHostingInfo()
-  if (hostingInfo.type === HostingTypes.CLOUD) {
+  if (!env.SELF_HOSTED) {
     return url
   }
   const deployedApps = await getDeployedApps()

@@ -106,16 +97,7 @@ async function createInstance(template) {
   // this is currently very hard to test, downloading and importing template files
   /* istanbul ignore next */
   if (template && template.useTemplate === "true") {
-    let dbDumpReadStream
-    if (template.file) {
-      dbDumpReadStream = fs.createReadStream(template.file.path)
-    } else {
-      const templatePath = await downloadTemplate(...template.key.split("/"))
-      dbDumpReadStream = fs.createReadStream(
-        join(templatePath, "db", "dump.txt")
-      )
-    }
-    const { ok } = await db.load(dbDumpReadStream)
+    const { ok } = await db.load(await getTemplateStream(template))
     if (!ok) {
       throw "Error loading database dump from template."
     }

@@ -190,10 +172,10 @@ exports.create = async function(ctx) {
   const instanceDb = new CouchDB(appId)
   await instanceDb.put(newApplication)
 
-  const newAppFolder = await createEmptyAppPackage(ctx, newApplication)
+  await createEmptyAppPackage(ctx, newApplication)
   /* istanbul ignore next */
   if (env.NODE_ENV !== "jest") {
-    await downloadExtractComponentLibraries(newAppFolder)
+    await createApp(appId)
   }
 
   await setBuilderToken(ctx, appId, version)

@@ -223,10 +205,9 @@ exports.delete = async function(ctx) {
   const app = await db.get(ctx.params.appId)
   const result = await db.destroy()
 
-  // remove top level directory
-  await fs.rmdir(join(budibaseAppsDir(), ctx.params.appId), {
-    recursive: true,
-  })
+  if (env.NODE_ENV !== "jest") {
+    await deleteApp(ctx.params.appId)
+  }
 
   ctx.status = 200
   ctx.message = `Application ${app.name} deleted successfully.`

@@ -234,17 +215,8 @@ exports.delete = async function(ctx) {
 }
 
 const createEmptyAppPackage = async (ctx, app) => {
-  const appsFolder = budibaseAppsDir()
-  const newAppFolder = resolve(appsFolder, app._id)
-
   const db = new CouchDB(app._id)
 
-  if (existsSync(newAppFolder)) {
-    ctx.throw(400, "App folder already exists for this application")
-  }
-
-  fs.mkdirpSync(newAppFolder)
-
   let screensAndLayouts = []
   for (let layout of BASE_LAYOUTS) {
     const cloned = cloneDeep(layout)

@@ -260,6 +232,4 @@ const createEmptyAppPackage = async (ctx, app) => {
   screensAndLayouts.push(loginScreen)
 
   await db.bulkDocs(screensAndLayouts)
-  await compileStaticAssets(app._id)
-  return newAppFolder
 }
@@ -45,9 +45,9 @@ exports.authenticate = async ctx => {
     roleId: dbUser.roleId,
     version: app.version,
   }
-  // if in cloud add the user api key, unless self hosted
+  // if in prod add the user api key, unless self hosted
   /* istanbul ignore next */
-  if (env.CLOUD && !env.SELF_HOSTED) {
+  if (env.isProd() && !env.SELF_HOSTED) {
     const { apiKey } = await getAPIKey(ctx.user.appId)
     payload.apiKey = apiKey
   }
@@ -1,28 +1,10 @@
-const { performDump } = require("../../utilities/templates")
-const path = require("path")
-const os = require("os")
-const fs = require("fs-extra")
+const { performBackup } = require("../../utilities/fileSystem")
 
 exports.exportAppDump = async function(ctx) {
   const { appId } = ctx.query
-
   const appname = decodeURI(ctx.query.appname)
-
-  const backupsDir = path.join(os.homedir(), ".budibase", "backups")
-  fs.ensureDirSync(backupsDir)
-
   const backupIdentifier = `${appname}Backup${new Date().getTime()}.txt`
 
-  await performDump({
-    dir: backupsDir,
-    appId,
-    name: backupIdentifier,
-  })
-
-  ctx.status = 200
-
-  const backupFile = path.join(backupsDir, backupIdentifier)
-
   ctx.attachment(backupIdentifier)
-  ctx.body = fs.createReadStream(backupFile)
+  ctx.body = await performBackup(appId, backupIdentifier)
 }
@@ -1,44 +1,37 @@
 const CouchDB = require("../../db")
-const { resolve, join } = require("../../utilities/centralPath")
-const {
-  budibaseTempDir,
-  budibaseAppsDir,
-} = require("../../utilities/budibaseDir")
+const { join } = require("../../utilities/centralPath")
+const { budibaseTempDir } = require("../../utilities/budibaseDir")
+const fileSystem = require("../../utilities/fileSystem")
+const env = require("../../environment")
 
 exports.fetchAppComponentDefinitions = async function(ctx) {
   const appId = ctx.params.appId || ctx.appId
   const db = new CouchDB(appId)
   const app = await db.get(appId)
 
-  ctx.body = app.componentLibraries.reduce((acc, componentLibrary) => {
-    let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules")
-    if (ctx.isDev) {
-      appDirectory = budibaseTempDir()
-    }
-
-    const componentJson = require(join(
-      appDirectory,
-      componentLibrary,
-      ctx.isDev ? "" : "package",
-      "manifest.json"
-    ))
-
-    const result = {}
-    // map over the components.json and add the library identifier as a key
-    // button -> @budibase/standard-components/button
-    for (let key of Object.keys(componentJson)) {
-      const fullComponentName = `${componentLibrary}/${key}`.toLowerCase()
-      result[fullComponentName] = {
+  let componentManifests = await Promise.all(
+    app.componentLibraries.map(async library => {
+      let manifest
+      if (env.isDev()) {
+        manifest = require(join(budibaseTempDir(), library, "manifest.json"))
+      } else {
+        manifest = await fileSystem.getComponentLibraryManifest(appId, library)
+      }
+      return {
+        manifest,
+        library,
+      }
+    })
+  )
+  const definitions = {}
+  for (let { manifest, library } of componentManifests) {
+    for (let key of Object.keys(manifest)) {
+      const fullComponentName = `${library}/${key}`.toLowerCase()
+      definitions[fullComponentName] = {
         component: fullComponentName,
-        ...componentJson[key],
+        ...manifest[key],
       }
     }
-    return {
-      ...acc,
-      ...result,
-    }
-  }, {})
+  }
+  ctx.body = definitions
 }
@@ -66,12 +66,7 @@ exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const { bucket, accountId } = deployment.getVerification()
   const metadata = { accountId }
-  const s3Client = new AWS.S3({
-    params: {
-      Bucket: bucket,
-    },
-  })
-  await deployToObjectStore(appId, s3Client, metadata)
+  await deployToObjectStore(appId, bucket, metadata)
 }
 
 exports.replicateDb = async function(deployment) {
@@ -7,7 +7,6 @@ const {
 const {
   getWorkerUrl,
   getCouchUrl,
-  getMinioUrl,
   getSelfHostKey,
 } = require("../../../utilities/builder/hosting")
 

@@ -45,17 +44,9 @@ exports.postDeployment = async function() {
 exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const verification = deployment.getVerification()
-  const objClient = new AWS.S3({
-    endpoint: await getMinioUrl(),
-    s3ForcePathStyle: true, // needed with minio?
-    signatureVersion: "v4",
-    params: {
-      Bucket: verification.bucket,
-    },
-  })
   // no metadata, aws has account ID in metadata
   const metadata = {}
-  await deployToObjectStore(appId, objClient, metadata)
+  await deployToObjectStore(appId, verification.bucket, metadata)
 }
 
 exports.replicateDb = async function(deployment) {
@@ -1,16 +1,24 @@
-const fs = require("fs")
-const sanitize = require("sanitize-s3-objectkey")
-const { walkDir } = require("../../../utilities")
 const { join } = require("../../../utilities/centralPath")
+const fs = require("fs")
 const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
 const fetch = require("node-fetch")
 const PouchDB = require("../../../db")
 const CouchDB = require("pouchdb")
+const { upload } = require("../../../utilities/fileSystem")
 
-const CONTENT_TYPE_MAP = {
-  html: "text/html",
-  css: "text/css",
-  js: "application/javascript",
+// TODO: everything in this file is to be removed
+
+function walkDir(dirPath, callback) {
+  for (let filename of fs.readdirSync(dirPath)) {
+    const filePath = `${dirPath}/${filename}`
+    const stat = fs.lstatSync(filePath)
+
+    if (stat.isFile()) {
+      callback(filePath)
+    } else {
+      walkDir(filePath, callback)
+    }
+  }
 }
 
 exports.fetchCredentials = async function(url, body) {

@@ -34,30 +42,25 @@ exports.fetchCredentials = async function(url, body) {
   return json
 }
 
-exports.prepareUpload = async function({ s3Key, metadata, client, file }) {
-  const extension = [...file.name.split(".")].pop()
-  const fileBytes = fs.readFileSync(file.path)
-
-  const upload = await client
-    .upload({
-      // windows file paths need to be converted to forward slashes for s3
-      Key: sanitize(s3Key).replace(/\\/g, "/"),
-      Body: fileBytes,
-      ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
-      Metadata: metadata,
-    })
-    .promise()
+exports.prepareUpload = async function({ s3Key, bucket, metadata, file }) {
+  const response = await upload({
+    bucket,
+    metadata,
+    filename: s3Key,
+    path: file.path,
+    type: file.type,
+  })
 
   return {
     size: file.size,
     name: file.name,
-    extension,
-    url: upload.Location,
-    key: upload.Key,
+    extension: [...file.name.split(".")].pop(),
+    url: response.Location,
+    key: response.Key,
   }
 }
 
-exports.deployToObjectStore = async function(appId, objectClient, metadata) {
+exports.deployToObjectStore = async function(appId, bucket, metadata) {
   const appAssetsPath = join(budibaseAppsDir(), appId, "public")
 
   let uploads = []

@@ -66,12 +69,12 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
   walkDir(appAssetsPath, function(filePath) {
     const filePathParts = filePath.split("/")
     const appAssetUpload = exports.prepareUpload({
+      bucket,
       file: {
        path: filePath,
        name: filePathParts.pop(),
      },
      s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
-      client: objectClient,
      metadata,
    })
    uploads.push(appAssetUpload)

@@ -92,7 +95,7 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
     const attachmentUpload = exports.prepareUpload({
       file,
       s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-      client: objectClient,
+      bucket,
       metadata,
     })
 
@@ -1,11 +1,11 @@
 const CouchDB = require("../../db")
-const { BUILDER_CONFIG_DB, HOSTING_DOC } = require("../../constants")
 const {
   getHostingInfo,
   getDeployedApps,
   HostingTypes,
   getAppUrl,
 } = require("../../utilities/builder/hosting")
+const { StaticDatabases } = require("../../db/utils")
 
 exports.fetchInfo = async ctx => {
   ctx.body = {

@@ -14,17 +14,17 @@ exports.fetchInfo = async ctx => {
 }
 
 exports.save = async ctx => {
-  const db = new CouchDB(BUILDER_CONFIG_DB)
+  const db = new CouchDB(StaticDatabases.BUILDER_HOSTING.name)
   const { type } = ctx.request.body
   if (type === HostingTypes.CLOUD && ctx.request.body._rev) {
     ctx.body = await db.remove({
       ...ctx.request.body,
-      _id: HOSTING_DOC,
+      _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
     })
   } else {
     ctx.body = await db.put({
       ...ctx.request.body,
-      _id: HOSTING_DOC,
+      _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
     })
   }
 }
@@ -93,7 +93,7 @@ exports.find = async function(ctx) {
   const db = new CouchDB(ctx.user.appId)
   const query = enrichQueries(await db.get(ctx.params.queryId))
   // remove properties that could be dangerous in real app
-  if (env.CLOUD) {
+  if (env.isProd()) {
     delete query.fields
     delete query.parameters
     delete query.schema
@@ -1,23 +1,21 @@
-const CouchDB = require("../../db")
+const fetch = require("node-fetch")
 const vm = require("vm")
 
 class ScriptExecutor {
-  constructor(script) {
-    this.script = script
+  constructor(body) {
+    this.script = new vm.Script(body.script)
+    this.context = vm.createContext(body.context)
+    this.context.fetch = fetch
   }
 
   execute() {
-    vm.runInNewContext(this.script, {
-      require: require,
-      console: console,
-    })
+    this.script.runInContext(this.context)
+    return this.context
   }
 }
 
 exports.execute = async function(ctx) {
-  const appId = ctx.user.appId
-
-  const executor = new ScriptExecutor(ctx.request.body.script)
-
+  const executor = new ScriptExecutor(ctx.request.body)
   const result = executor.execute()
 
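With this change the executor compiles the posted script once, runs it inside a vm context built from the request body's context object (with fetch injected), and returns the mutated context instead of evaluating the raw string with require/console injected. A minimal sketch of the body shape the new constructor expects — the field values below are made up for illustration, not part of the commit:

    // sketch only — example values
    const body = {
      script: "result = input * 2",
      context: { input: 21, result: null },
    }
    // new ScriptExecutor(body).execute() returns the contextified object,
    // roughly { input: 21, result: 42, fetch: [Function] }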
@@ -3,9 +3,7 @@ require("svelte/register")
 const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const fetch = require("node-fetch")
-const fs = require("fs-extra")
 const uuid = require("uuid")
-const AWS = require("aws-sdk")
 const { prepareUpload } = require("../deploy/utils")
 const { processString } = require("@budibase/string-templates")
 const {

@@ -15,9 +13,10 @@ const {
 const { getDeployedApps } = require("../../../utilities/builder/hosting")
 const CouchDB = require("../../../db")
 const setBuilderToken = require("../../../utilities/builder/setBuilderToken")
-const fileProcessor = require("../../../utilities/fileProcessor")
+const { loadHandlebarsFile } = require("../../../utilities/fileSystem")
 const env = require("../../../environment")
 const { OBJ_STORE_DIRECTORY } = require("../../../constants")
+const fileProcessor = require("../../../utilities/fileSystem/processor")
 
 function objectStoreUrl() {
   if (env.SELF_HOSTED) {

@@ -50,106 +49,31 @@ exports.serveBuilder = async function(ctx) {
   await send(ctx, ctx.file, { root: ctx.devPath || builderPath })
 }
 
-exports.serveSelfHostPage = async function(ctx) {
-  const logo = fs.readFileSync(resolve(__dirname, "selfhost/logo.svg"), "utf8")
-  const hostingHbs = fs.readFileSync(
-    resolve(__dirname, "selfhost/index.hbs"),
-    "utf8"
-  )
-  ctx.body = await processString(hostingHbs, {
-    logo,
-  })
-}
-
 exports.uploadFile = async function(ctx) {
-  let files
-  files =
+  let files =
     ctx.request.files.file.length > 1
       ? Array.from(ctx.request.files.file)
       : [ctx.request.files.file]
 
-  const attachmentsPath = resolve(
-    budibaseAppsDir(),
-    ctx.user.appId,
-    "attachments"
-  )
-
-  if (env.CLOUD) {
-    // remote upload
-    const s3 = new AWS.S3({
-      params: {
-        Bucket: "prod-budi-app-assets",
-      },
-    })
-
-    const uploads = files.map(file => {
-      const fileExtension = [...file.name.split(".")].pop()
-      const processedFileName = `${uuid.v4()}.${fileExtension}`
-
-      return prepareUpload({
-        file,
-        s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
-        s3,
-      })
-    })
-
-    ctx.body = await Promise.all(uploads)
-    return
-  }
-
-  ctx.body = await processLocalFileUploads({
-    files,
-    outputPath: attachmentsPath,
-    appId: ctx.user.appId,
-  })
-}
-
-async function processLocalFileUploads({ files, outputPath, appId }) {
-  // create attachments dir if it doesnt exist
-  !fs.existsSync(outputPath) && fs.mkdirSync(outputPath, { recursive: true })
-
-  const filesToProcess = files.map(file => {
+  const uploads = files.map(async file => {
     const fileExtension = [...file.name.split(".")].pop()
     // filenames converted to UUIDs so they are unique
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 
-    return {
-      name: file.name,
-      path: file.path,
-      size: file.size,
-      type: file.type,
-      processedFileName,
+    // need to handle image processing
+    await fileProcessor.process({
+      ...file,
       extension: fileExtension,
-      outputPath: join(outputPath, processedFileName),
-      url: join("/attachments", processedFileName),
-    }
+    })
+
+    return prepareUpload({
+      file,
+      s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
+      bucket: "prod-budi-app-assets",
+    })
   })
 
-  const fileProcessOperations = filesToProcess.map(fileProcessor.process)
-
-  const processedFiles = await Promise.all(fileProcessOperations)
-
-  let pendingFileUploads
-  // local document used to track which files need to be uploaded
-  // db.get throws an error if the document doesn't exist
-  // need to use a promise to default
-  const db = new CouchDB(appId)
-  await db
-    .get("_local/fileuploads")
-    .then(data => {
-      pendingFileUploads = data
-    })
-    .catch(() => {
-      pendingFileUploads = { _id: "_local/fileuploads", uploads: [] }
-    })
-
-  pendingFileUploads.uploads = [
-    ...processedFiles,
-    ...pendingFileUploads.uploads,
-  ]
-  await db.put(pendingFileUploads)
-
-  return processedFiles
+  ctx.body = await Promise.all(uploads)
 }
 
 exports.serveApp = async function(ctx) {

@@ -163,12 +87,12 @@ exports.serveApp = async function(ctx) {
 
   const { head, html, css } = App.render({
     title: appInfo.name,
-    production: env.CLOUD,
+    production: env.isProd(),
     appId,
     objectStoreUrl: objectStoreUrl(),
   })
 
-  const appHbs = fs.readFileSync(`${__dirname}/templates/app.hbs`, "utf8")
+  const appHbs = loadHandlebarsFile(`${__dirname}/templates/app.hbs`)
   ctx.body = await processString(appHbs, {
     head,
     body: html,

@@ -182,7 +106,7 @@ exports.serveAttachment = async function(ctx) {
   const attachmentsPath = resolve(budibaseAppsDir(), appId, "attachments")
 
   // Serve from object store
-  if (env.CLOUD) {
+  if (env.isProd()) {
     const S3_URL = join(objectStoreUrl(), appId, "attachments", ctx.file)
     const response = await fetch(S3_URL)
     const body = await response.text()

@@ -213,15 +137,13 @@ exports.serveComponentLibrary = async function(ctx) {
     "dist"
   )
 
-  if (ctx.isDev) {
+  if (env.isDev()) {
     componentLibraryPath = join(
       budibaseTempDir(),
       decodeURI(ctx.query.library),
       "dist"
     )
-  }
-
-  if (env.CLOUD) {
+  } else {
     let componentLib = "componentlibrary"
     if (ctx.user.version) {
|
||||||
componentLib += `-${ctx.user.version}`
|
componentLib += `-${ctx.user.version}`
|
||||||
|
|
|
@@ -1,173 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-  <meta charset="UTF-8">
-  <title>Budibase self hosting️</title>
-  <style>
-    body {
-      font-family: Inter, -apple-system, BlinkMacSystemFont, Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
-      height: 100%;
-      width: 100%;
-      margin: 0;
-      padding: 0;
-      background: #fafafa;
-    }
-
-    .main {
-      padding: 0 20px;
-      margin: 30px auto;
-      width: 60%;
-    }
-
-    h2 {
-      font-size: clamp(24px, 1.5vw, 30px);
-      text-align: center;
-      line-height: 1.3;
-      font-weight: bold;
-    }
-
-    .card-grid {
-      display: grid;
-      grid-template-columns: 1fr 1fr;
-      gap: 3rem;
-    }
-
-    .card {
-      display: grid;
-      background-color: #222222;
-      grid-template-columns: 1fr;
-      align-items: center;
-      padding: 2.5rem 1.75rem;
-      border-radius: 12px;
-      color: white;
-    }
-
-    .card h3 {
-      margin: 0;
-      font-size: 24px;
-      font-family: sans-serif;
-      font-weight: 600;
-    }
-
-    .card h3 b {
-      text-wrap: normal;
-      font-size: 36px;
-      padding-right: 14px;
-    }
-
-    .card p {
-      color: #ffffff;
-      opacity: 0.8;
-      font-size: 18px;
-      text-align: left;
-      line-height: 1.3;
-      margin-top: 1rem;
-    }
-
-    .logo {
-      width: 60px;
-      height: 60px;
-      margin: auto;
-    }
-
-    .top-text {
-      text-align: center;
-      color: #707070;
-      margin: 0 0 1.5rem 0;
-    }
-
-    .button {
-      cursor: pointer;
-      display: block;
-      background: #4285f4;
-      color: white;
-      padding: 12px 16px;
-      font-size: 16px;
-      font-weight: 600;
-      border-radius: 6px;
-      max-width: 120px;
-      text-align: center;
-      transition: 200ms background ease;
-      text-decoration: none;
-    }
-
-    .info {
-      background: #f5f5f5;
-      padding: 1rem 1rem 1rem 1rem;
-      border: #ccc 1px solid;
-      border-radius: 6px;
-      margin-top: 40px;
-      display: flex;
-      align-items: center;
-    }
-
-    .info p {
-      margin-left: 20px;
-      color: #222222;
-      font-family: sans-serif;
-    }
-
-    .info p {
-      margin-right: 20px;
-    }
-
-    .info svg {
-      margin-left: 20px;
-    }
-
-    .info a {
-      color: #4285f4;
-    }
-  </style>
-</head>
-<body>
-  <div class="main">
-    <div class="logo">
-      {{logo}}
-    </div>
-    <h2>Get started with Budibase Self Hosting</h2>
-    <p class="top-text">Use the address <b id="url"></b> in your Builder</p>
-    <div class="card-grid">
-      <div class="card">
-        <h3><b>📚</b>Documentation</h3>
-        <p>
-          Find out more about your self hosted platform.
-        </p>
-        <a class="button"
-          href="https://docs.budibase.com/self-hosting/introduction-to-self-hosting">
-          Documentation
-        </a>
-      </div>
-      <div class="card">
-        <h3><b>💻</b>Next steps</h3>
-        <p>
-          Find out how to make use of your self hosted Budibase platform.
-        </p>
-        <a class="button"
-          href="https://docs.budibase.com/self-hosting/builder-settings">
-          Next steps
-        </a>
-      </div>
-    </div>
-    <div class="info">
-      <svg preserveAspectRatio="xMidYMid meet" height="28px" width="28px" fill="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"
-        xmlns:xlink="http://www.w3.org/1999/xlink" stroke="none" class="icon-7f6730be--text-3f89f380">
-        <g>
-          <path d="M12.2 8.98c.06-.01.12-.03.18-.06…" fill-rule="evenodd">
-          </path>
-        </g>
-      </svg>
-      <p>A <b>Hosting Key</b> will also be required, this can be found in your hosting properties, info found <a href="https://docs.budibase.com/self-hosting/hosting-settings">here</a>.</p>
-    </div>
-  </div>
-  <script>
-    window.addEventListener("load", () => {
-      let url = document.URL.split("//")[1]
-      if (url.substring(url.length - 1) === "/") {
-        url = url.substring(0, url.length - 1)
-      }
-      document.getElementById("url").innerHTML = url
-    })
-  </script>
-</body>
-</html>
@@ -1,17 +0,0 @@
-<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" x="0" y="0" viewBox="0 0 48 48" xml:space="preserve">
-  <style>
-    .st0{fill:#393c44}.st1{fill:#fff}
-  </style>
-  <path class="st0" d="M-152.17-24.17H-56V72h-96.17z"/>
-  <path class="st1" d="M-83.19 48h-41.79c-1.76 0-3.19-1.43-3.19-3.19V3.02c0-1.76 1.43-3.19 3.19-3.19h41.79c1.76 0 3.19 1.43 3.19 3.19v41.79c0 1.76-1.43 3.19-3.19 3.19z"/>
-  <path class="st0" d="M-99.62 12.57v9.94…"/>
-  <path class="st0" d="M-114.76 12.57v9.94…"/>
-  <path d="M44.81 159H3.02c-1.76 0-3.19-1.43-3.19-3.19v-41.79…" fill="#4285f4"/>
-  <path class="st1" d="M28.38 123.57v9.94…"/>
-  <path class="st1" d="M13.24 123.57v9.94…"/>
-  <g>
-    <path class="st0" d="M44 48H4c-2.21 0-4-1.79-4-4V4c0-2.21 1.79-4 4-4h40c2.21 0 4 1.79 4 4v40c0 2.21-1.79 4-4 4z"/>
-    <path class="st1" d="M28.48 12v10.44…"/>
-    <path class="st1" d="M13 12v10.44…"/>
-  </g>
-</svg>
Before Width: | Height: | Size: 4.6 KiB
@@ -1,10 +1,5 @@
 const fetch = require("node-fetch")
-const {
-  downloadTemplate,
-  exportTemplateFromApp,
-  getLocalTemplates,
-} = require("../../utilities/templates")
-const env = require("../../environment")
+const { downloadTemplate } = require("../../utilities/fileSystem")
 
 // development flag, can be used to test against templates exported locally
 const DEFAULT_TEMPLATES_BUCKET =
@@ -12,16 +7,11 @@ const DEFAULT_TEMPLATES_BUCKET =
 
 exports.fetch = async function(ctx) {
   const { type = "app" } = ctx.query
-
-  if (env.LOCAL_TEMPLATES) {
-    ctx.body = Object.values(getLocalTemplates()[type])
-  } else {
-    const response = await fetch(
-      `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
-    )
-    const json = await response.json()
-    ctx.body = Object.values(json.templates[type])
-  }
+  const response = await fetch(
+    `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
+  )
+  const json = await response.json()
+  ctx.body = Object.values(json.templates[type])
 }
 
 // can't currently test this, have to ignore from coverage
@@ -29,26 +19,9 @@ exports.fetch = async function(ctx) {
 exports.downloadTemplate = async function(ctx) {
   const { type, name } = ctx.params
-
-  if (!env.LOCAL_TEMPLATES) {
-    await downloadTemplate(type, name)
-  }
+  await downloadTemplate(type, name)
 
   ctx.body = {
     message: `template ${type}:${name} downloaded successfully.`,
   }
 }
-
-exports.exportTemplateFromApp = async function(ctx) {
-  const { appId } = ctx.user
-  const { templateName } = ctx.request.body
-
-  await exportTemplateFromApp({
-    appId,
-    templateName,
-  })
-
-  ctx.status = 200
-  ctx.body = {
-    message: `Created template: ${templateName}`,
-  }
-}
@@ -1,8 +1,6 @@
 const CouchDB = require("../../../db")
 const viewTemplate = require("./viewBuilder")
-const fs = require("fs")
-const { join } = require("../../../utilities/centralPath")
-const os = require("os")
+const { apiFileReturn } = require("../../../utilities/fileSystem")
 const exporters = require("./exporters")
 const { fetchView } = require("../row")
 const { ViewNames } = require("../../../db/utils")
@@ -120,12 +118,10 @@ const controller = {
     // Export part
     let headers = Object.keys(schema)
     const exporter = exporters[format]
-    const exportedFile = exporter(headers, ctx.body)
     const filename = `${viewName}.${format}`
-    fs.writeFileSync(join(os.tmpdir(), filename), exportedFile)
+    // send down the file
 
     ctx.attachment(filename)
-    ctx.body = fs.createReadStream(join(os.tmpdir(), filename))
+    ctx.body = apiFileReturn(exporter(headers, ctx.body))
   },
 }
@@ -3,9 +3,12 @@ const authenticated = require("../middleware/authenticated")
 const compress = require("koa-compress")
 const zlib = require("zlib")
 const { budibaseAppsDir } = require("../utilities/budibaseDir")
-const { isDev } = require("../utilities")
 const { mainRoutes, authRoutes, staticRoutes } = require("./routes")
 const pkg = require("../../package.json")
+const bullboard = require("bull-board")
+const expressApp = require("express")()
+
+expressApp.use("/bulladmin", bullboard.router)
+
 const router = new Router()
 const env = require("../environment")
@@ -29,9 +32,13 @@ router
     jwtSecret: env.JWT_SECRET,
     useAppRootPath: true,
   }
-  ctx.isDev = isDev()
   await next()
 })
+  .use("/bulladmin", ctx => {
+    ctx.status = 200
+    ctx.respond = false
+    expressApp(ctx.req, ctx.res)
+  })
   .use("/health", ctx => (ctx.status = 200))
   .use("/version", ctx => (ctx.body = pkg.version))
   .use(authenticated)
@@ -68,8 +75,6 @@ for (let route of mainRoutes) {
 router.use(staticRoutes.routes())
 router.use(staticRoutes.allowedMethods())
 
-if (!env.SELF_HOSTED && !env.CLOUD) {
-  router.redirect("/", "/_builder")
-}
+router.redirect("/", "/_builder")
 
 module.exports = router
@@ -0,0 +1,4 @@
+const app = require("express")()
+const { router } = require("bull-board")
+
+app.use("/admin/queues", router)
@@ -0,0 +1,14 @@
+const Router = require("@koa/router")
+const controller = require("../controllers/hosting")
+const authorized = require("../../middleware/authorized")
+const selfhost = require("../../middleware/selfhost")
+const { BUILDER } = require("../../utilities/security/permissions")
+
+const router = Router()
+
+router
+  .post("/api/script", authorized(BUILDER), controller.save)
+  // this isn't risky, doesn't return anything about apps other than names and URLs
+  .get("/api/hosting/apps", selfhost, controller.getDeployedApps)
+
+module.exports = router
@@ -1,10 +1,10 @@
 const Router = require("@koa/router")
 const controller = require("../controllers/static")
 const { budibaseTempDir } = require("../../utilities/budibaseDir")
-const env = require("../../environment")
 const authorized = require("../../middleware/authorized")
 const { BUILDER } = require("../../utilities/security/permissions")
 const usage = require("../../middleware/usageQuota")
+const env = require("../../environment")
 
 const router = Router()
 
@@ -13,22 +13,16 @@ router.param("file", async (file, ctx, next) => {
   ctx.file = file && file.includes(".") ? file : "index.html"
 
   // Serving the client library from your local dir in dev
-  if (ctx.isDev && ctx.file.startsWith("budibase-client")) {
+  if (env.isDev() && ctx.file.startsWith("budibase-client")) {
     ctx.devPath = budibaseTempDir()
   }
 
   await next()
 })
 
-if (env.NODE_ENV !== "production") {
-  router.get("/_builder/:file*", controller.serveBuilder)
-}
-
-if (env.SELF_HOSTED) {
-  router.get("/", controller.serveSelfHostPage)
-}
-
 router
+  // TODO: for now this _builder endpoint is not authorized/secured, will need to be
+  .get("/_builder/:file*", controller.serveBuilder)
   .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
   .post("/api/attachments/upload", usage, controller.uploadFile)
   .get("/componentlibrary", controller.serveComponentLibrary)
@@ -12,6 +12,5 @@ router
     authorized(BUILDER),
     controller.downloadTemplate
   )
-  .post("/api/templates", authorized(BUILDER), controller.exportTemplateFromApp)
 
 module.exports = router
@@ -1,8 +1,5 @@
 const setup = require("./utilities")
 const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const fs = require("fs")
-const path = require("path")
 
 describe("/api/keys", () => {
   let request = setup.getRequest()
@@ -16,12 +13,14 @@ describe("/api/keys", () => {
 
   describe("fetch", () => {
     it("should allow fetching", async () => {
+      await setup.switchToSelfHosted(async () => {
       const res = await request
         .get(`/api/keys`)
         .set(config.defaultHeaders())
         .expect("Content-Type", /json/)
         .expect(200)
       expect(res.body).toBeDefined()
+      })
     })
 
     it("should check authorization for builder", async () => {
@@ -35,17 +34,18 @@ describe("/api/keys", () => {
 
   describe("update", () => {
     it("should allow updating a value", async () => {
-      fs.writeFileSync(path.join(budibaseAppsDir(), ".env"), "TEST_API_KEY=thing")
+      await setup.switchToSelfHosted(async () => {
       const res = await request
         .put(`/api/keys/TEST`)
         .send({
          value: "test"
        })
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
-      expect(res.body["TEST"]).toEqual("test")
-      expect(process.env.TEST_API_KEY).toEqual("test")
+        expect(res.body._id).toBeDefined()
+        expect(res.body._rev).toBeDefined()
+      })
     })
 
     it("should check authorization for builder", async () => {
@@ -1,4 +1,3 @@
-const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const setup = require("./utilities")
 
 describe("/authenticate", () => {
@@ -1,6 +1,8 @@
 const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const setup = require("./utilities")
 
+jest.mock("../../../utilities/fileSystem/utilities")
+
 describe("/backups", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
@@ -14,7 +16,7 @@ describe("/backups", () => {
   describe("exportAppDump", () => {
     it("should be able to export app", async () => {
       const res = await request
-        .get(`/api/backups/export?appId=${config.getAppId()}`)
+        .get(`/api/backups/export?appId=${config.getAppId()}&appname=test`)
         .set(config.defaultHeaders())
         .expect(200)
       expect(res.text).toBeDefined()
@@ -1,16 +0,0 @@
-const setup = require("./utilities")
-
-describe("test things in the Cloud/Self hosted", () => {
-  describe("test self hosted static page", () => {
-    it("should be able to load the static page", async () => {
-      await setup.switchToCloudForFunction(async () => {
-        let request = setup.getRequest()
-        let config = setup.getConfig()
-        await config.init()
-        const res = await request.get(`/`).expect(200)
-        expect(res.text.includes("<title>Budibase self hosting️</title>")).toEqual(true)
-        setup.afterAll()
-      })
-    })
-  })
-})
@@ -1,8 +1,15 @@
 const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const setup = require("./utilities")
-const fs = require("fs")
-const { resolve, join } = require("path")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
+
+jest.mock("../../../utilities/fileSystem/utilities", () => ({
+  ...jest.requireActual("../../../utilities/fileSystem/utilities"),
+  retrieve: () => {
+    const { join } = require("path")
+    const library = join("@budibase", "standard-components")
+    const path = require.resolve(library).split(join("dist", "index.js"))[0] + "manifest.json"
+    return JSON.stringify(require(path))
+  }
+}))
 
 describe("/component", () => {
   let request = setup.getRequest()
@@ -14,23 +21,8 @@ describe("/component", () => {
     await config.init()
   })
 
-  function mock() {
-    const manifestFile = "manifest.json"
-    const appId = config.getAppId()
-    const libraries = [join("@budibase", "standard-components")]
-    for (let library of libraries) {
-      let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules", library, "package")
-      fs.mkdirSync(appDirectory, { recursive: true })
-
-      const file = require.resolve(library).split(join("dist", "index.js"))[0] + manifestFile
-      fs.copyFileSync(file, join(appDirectory, manifestFile))
-    }
-  }
-
   describe("fetch definitions", () => {
     it("should be able to fetch definitions", async () => {
-      // have to "mock" the files required
-      mock()
       const res = await request
         .get(`/${config.getAppId()}/components/definitions`)
         .set(config.defaultHeaders())
@@ -107,17 +107,16 @@ describe("/hosting", () => {
   })
 
   describe("getDeployedApps", () => {
-    it("should get apps when in builder", async () => {
-      const res = await request
+    it("should fail when not self hosted", async () => {
+      await request
         .get(`/api/hosting/apps`)
         .set(config.defaultHeaders())
         .expect("Content-Type", /json/)
-        .expect(200)
-      expect(res.body.app1).toEqual({url: "/app1"})
+        .expect(400)
     })
 
     it("should get apps when in cloud", async () => {
-      await setup.switchToCloudForFunction(async () => {
+      await setup.switchToSelfHosted(async () => {
         const res = await request
           .get(`/api/hosting/apps`)
           .set(config.defaultHeaders())
@@ -89,7 +89,7 @@ describe("/queries", () => {
   })
 
   it("should find a query in cloud", async () => {
-    await setup.switchToCloudForFunction(async () => {
+    await setup.switchToSelfHosted(async () => {
       const query = await config.createQuery()
       const res = await request
         .get(`/api/queries/${query._id}`)
@@ -410,7 +410,7 @@ describe("/rows", () => {
       tableId: table._id,
     })
     // the environment needs configured for this
-    await setup.switchToCloudForFunction(async () => {
+    await setup.switchToSelfHosted(async () => {
       const enriched = await outputProcessing(config.getAppId(), table, [row])
       expect(enriched[0].attachment[0].url).toBe(`/app-assets/assets/${config.getAppId()}/test/thing`)
     })
@@ -1,7 +1,4 @@
 const setup = require("./utilities")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const fs = require("fs")
-const { join } = require("path")
 
 describe("/templates", () => {
   let request = setup.getRequest()
@@ -24,26 +21,4 @@ describe("/templates", () => {
       expect(Array.isArray(res.body)).toEqual(true)
     })
   })
-
-  describe("export", () => {
-    it("should be able to export the basic app", async () => {
-      const res = await request
-        .post(`/api/templates`)
-        .send({
-          templateName: "test",
-        })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      expect(res.body.message).toEqual("Created template: test")
-      const dir = join(
-        budibaseAppsDir(),
-        "templates",
-        "app",
-        "test",
-        "db"
-      )
-      expect(fs.existsSync(dir)).toEqual(true)
-    })
-  })
 })
@@ -35,18 +35,18 @@ exports.getConfig = () => {
   return config
 }
 
-exports.switchToCloudForFunction = async func => {
+exports.switchToSelfHosted = async func => {
   // self hosted stops any attempts to Dynamo
-  env.CLOUD = true
-  env.SELF_HOSTED = true
+  env._set("NODE_ENV", "production")
+  env._set("SELF_HOSTED", true)
   let error
   try {
     await func()
   } catch (err) {
     error = err
   }
-  env.CLOUD = false
-  env.SELF_HOSTED = false
+  env._set("NODE_ENV", "jest")
+  env._set("SELF_HOSTED", false)
   // don't throw error until after reset
   if (error) {
     throw error
@@ -9,6 +9,7 @@ const env = require("./environment")
 const eventEmitter = require("./events")
 const automations = require("./automations/index")
 const Sentry = require("@sentry/node")
+const fileSystem = require("./utilities/fileSystem")
 
 const app = new Koa()
 
@@ -65,6 +66,7 @@ module.exports = server.listen(env.PORT || 0, async () => {
   console.log(`Budibase running on ${JSON.stringify(server.address())}`)
   env._set("PORT", server.address().port)
   eventEmitter.emitPort(env.PORT)
+  fileSystem.init()
   await automations.init()
 })
@@ -3,19 +3,16 @@ const createRow = require("./steps/createRow")
 const updateRow = require("./steps/updateRow")
 const deleteRow = require("./steps/deleteRow")
 const createUser = require("./steps/createUser")
+const executeScript = require("./steps/executeScript")
+const executeQuery = require("./steps/executeQuery")
 const outgoingWebhook = require("./steps/outgoingWebhook")
 const env = require("../environment")
-const download = require("download")
-const fetch = require("node-fetch")
-const { join } = require("../utilities/centralPath")
-const os = require("os")
-const fs = require("fs")
 const Sentry = require("@sentry/node")
+const {
+  automationInit,
+  getExternalAutomationStep,
+} = require("../utilities/fileSystem")
 
-const DEFAULT_BUCKET =
-  "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
-const DEFAULT_DIRECTORY = ".budibase-automations"
-const AUTOMATION_MANIFEST = "manifest.json"
 const BUILTIN_ACTIONS = {
   SEND_EMAIL: sendEmail.run,
   CREATE_ROW: createRow.run,
@@ -23,6 +20,8 @@ const BUILTIN_ACTIONS = {
   DELETE_ROW: deleteRow.run,
   CREATE_USER: createUser.run,
   OUTGOING_WEBHOOK: outgoingWebhook.run,
+  EXECUTE_SCRIPT: executeScript.run,
+  EXECUTE_QUERY: executeQuery.run,
 }
 const BUILTIN_DEFINITIONS = {
   SEND_EMAIL: sendEmail.definition,
@@ -31,10 +30,10 @@ const BUILTIN_DEFINITIONS = {
   DELETE_ROW: deleteRow.definition,
   CREATE_USER: createUser.definition,
   OUTGOING_WEBHOOK: outgoingWebhook.definition,
+  EXECUTE_SCRIPT: executeScript.definition,
+  EXECUTE_QUERY: executeQuery.definition,
 }
 
-let AUTOMATION_BUCKET = env.AUTOMATION_BUCKET
-let AUTOMATION_DIRECTORY = env.AUTOMATION_DIRECTORY
 let MANIFEST = null
 
 /* istanbul ignore next */
@@ -42,22 +41,13 @@ function buildBundleName(pkgName, version) {
   return `${pkgName}@${version}.min.js`
 }
 
-/* istanbul ignore next */
-async function downloadPackage(name, version, bundleName) {
-  await download(
-    `${AUTOMATION_BUCKET}/${name}/${version}/${bundleName}`,
-    AUTOMATION_DIRECTORY
-  )
-  return require(join(AUTOMATION_DIRECTORY, bundleName))
-}
-
 /* istanbul ignore next */
 module.exports.getAction = async function(actionName) {
   if (BUILTIN_ACTIONS[actionName] != null) {
     return BUILTIN_ACTIONS[actionName]
   }
   // worker pools means that a worker may not have manifest
-  if (env.CLOUD && MANIFEST == null) {
+  if (env.isProd() && MANIFEST == null) {
     MANIFEST = await module.exports.init()
   }
   // env setup to get async packages
@@ -66,28 +56,12 @@ module.exports.getAction = async function(actionName) {
   }
   const pkg = MANIFEST.packages[actionName]
   const bundleName = buildBundleName(pkg.stepId, pkg.version)
-  try {
-    return require(join(AUTOMATION_DIRECTORY, bundleName))
-  } catch (err) {
-    return downloadPackage(pkg.stepId, pkg.version, bundleName)
-  }
+  return getExternalAutomationStep(pkg.stepId, pkg.version, bundleName)
 }
 
 module.exports.init = async function() {
-  // set defaults
-  if (!AUTOMATION_DIRECTORY) {
-    AUTOMATION_DIRECTORY = join(os.homedir(), DEFAULT_DIRECTORY)
-  }
-  if (!AUTOMATION_BUCKET) {
-    AUTOMATION_BUCKET = DEFAULT_BUCKET
-  }
-  if (!fs.existsSync(AUTOMATION_DIRECTORY)) {
-    fs.mkdirSync(AUTOMATION_DIRECTORY, { recursive: true })
-  }
-  // env setup to get async packages
   try {
-    let response = await fetch(`${AUTOMATION_BUCKET}/${AUTOMATION_MANIFEST}`)
-    MANIFEST = await response.json()
+    MANIFEST = await automationInit()
     module.exports.DEFINITIONS =
       MANIFEST && MANIFEST.packages
         ? Object.assign(MANIFEST.packages, BUILTIN_DEFINITIONS)
@@ -34,10 +34,10 @@ module.exports.init = async function() {
   await actions.init()
   triggers.automationQueue.process(async job => {
     try {
-      if (env.CLOUD && job.data.automation && !env.SELF_HOSTED) {
+      if (env.USE_QUOTAS) {
         job.data.automation.apiKey = await updateQuota(job.data.automation)
       }
-      if (env.BUDIBASE_ENVIRONMENT === "PRODUCTION") {
+      if (env.isProd()) {
         await runWorker(job)
       } else {
         await singleThread(job)
@@ -85,7 +85,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
     inputs.row.tableId,
     inputs.row
   )
-  if (env.CLOUD) {
+  if (env.isProd()) {
     await usage.update(apiKey, usage.Properties.ROW, 1)
   }
   await rowController.save(ctx)
@@ -72,7 +72,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
   }
 
   try {
-    if (env.CLOUD) {
+    if (env.isProd()) {
       await usage.update(apiKey, usage.Properties.USER, 1)
     }
     await userController.create(ctx)
@@ -70,7 +70,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
   }
 
   try {
-    if (env.CLOUD) {
+    if (env.isProd()) {
       await usage.update(apiKey, usage.Properties.ROW, -1)
     }
     await rowController.destroy(ctx)
@@ -0,0 +1,78 @@
+const queryController = require("../../api/controllers/query")
+
+module.exports.definition = {
+  name: "External Data Connector",
+  tagline: "Execute Data Connector",
+  icon: "ri-database-2-line",
+  description: "Execute a query in an external data connector",
+  type: "ACTION",
+  stepId: "EXECUTE_QUERY",
+  inputs: {},
+  schema: {
+    inputs: {
+      properties: {
+        query: {
+          type: "string",
+          customType: "query",
+          title: "Query",
+        },
+        parameters: {
+          title: "Query Parameters",
+          type: "object",
+          customType: "query",
+        },
+      },
+    },
+    outputs: {
+      properties: {
+        response: {
+          type: "object",
+          description: "The response from the datasource execution",
+        },
+        success: {
+          type: "boolean",
+          description: "Whether the action was successful",
+        },
+      },
+    },
+    required: ["response", "success"],
+  },
+}
+
+module.exports.run = async function({ inputs, appId, emitter }) {
+  if (inputs.query == null) {
+    return {
+      success: false,
+      response: {
+        message: "Invalid inputs",
+      },
+    }
+  }
+  const ctx = {
+    params: {
+      queryId: inputs.query,
+    },
+    request: {
+      body: {
+        parameters: inputs.parameters,
+      },
+    },
+    user: { appId },
+    eventEmitter: emitter,
+  }
+
+  await queryController.execute(ctx)
+
+  try {
+    return {
+      response: ctx.body,
+      success: ctx.status === 200,
+    }
+  } catch (err) {
+    console.error(err)
+    return {
+      success: false,
+      response: err,
+    }
+  }
+}
@@ -1,6 +1,4 @@
 const scriptController = require("../../api/controllers/script")
-const automationUtils = require("../automationUtils")
-const env = require("../../environment")
 
 module.exports.definition = {
   name: "Scripting",
@@ -14,7 +12,8 @@ module.exports.definition = {
     inputs: {
       properties: {
         code: {
-          type: "code",
+          type: "string",
+          customType: "code",
           title: "Code",
         },
       },
@@ -22,34 +21,17 @@ module.exports.definition = {
     },
     outputs: {
       properties: {
-        row: {
-          type: "object",
-          customType: "row",
-          description: "The new row",
-        },
-        response: {
-          type: "object",
-          description: "The response from the table",
-        },
         success: {
           type: "boolean",
           description: "Whether the action was successful",
         },
-        id: {
-          type: "string",
-          description: "The identifier of the new row",
-        },
-        revision: {
-          type: "string",
-          description: "The revision of the new row",
-        },
       },
-      required: ["success", "id", "revision"],
     },
+    required: ["success"],
   },
 }
 
-module.exports.run = async function({ inputs, appId, emitter }) {
+module.exports.run = async function({ inputs, appId, context, emitter }) {
   if (inputs.code == null) {
     return {
       success: false,
@@ -60,12 +42,10 @@ module.exports.run = async function({ inputs, appId, emitter }) {
   }
 
   const ctx = {
-    params: {
-      tableId: inputs.row.tableId,
-    },
     request: {
       body: {
-        script: inputs.script,
+        script: inputs.code,
+        context,
       },
     },
     user: { appId },
@@ -73,13 +53,8 @@ module.exports.run = async function({ inputs, appId, emitter }) {
   }
 
   try {
-    // inputs.row = await automationUtils.cleanUpRow(appId, inputs.script)
     await scriptController.execute(ctx)
     return {
-      // row: inputs.row,
-      response: ctx.body,
-      // id: ctx.body._id,
-      // revision: ctx.body._rev,
       success: ctx.status === 200,
     }
   } catch (err) {
@@ -47,27 +47,23 @@ describe("Run through some parts of the automations system", () => {
     expect(thread).toHaveBeenCalled()
   })
 
-  it("should be able to init in cloud", async () => {
-    env.CLOUD = true
-    env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
-    await triggers.externalTrigger(basicAutomation(), { a: 1 })
-    await wait(100)
-    // haven't added a mock implementation so getAPIKey of usageQuota just returns undefined
-    expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
-    expect(workerJob).toBeDefined()
-    env.BUDIBASE_ENVIRONMENT = "JEST"
-    env.CLOUD = false
+  it("should be able to init in prod", async () => {
+    await setup.runInProd(async () => {
+      await triggers.externalTrigger(basicAutomation(), { a: 1 })
+      await wait(100)
+      // haven't added a mock implementation so getAPIKey of usageQuota just returns undefined
+      expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
+      expect(workerJob).toBeDefined()
+    })
   })
 
   it("try error scenario", async () => {
-    env.CLOUD = true
-    env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
-    // the second call will throw an error
-    await triggers.externalTrigger(basicAutomation(), { a: 1 })
-    await wait(100)
-    expect(console.error).toHaveBeenCalled()
-    env.BUDIBASE_ENVIRONMENT = "JEST"
-    env.CLOUD = false
+    await setup.runInProd(async () => {
+      // the second call will throw an error
+      await triggers.externalTrigger(basicAutomation(), { a: 1 })
+      await wait(100)
+      expect(console.error).toHaveBeenCalled()
+    })
   })
 
   it("should be able to check triggering row filling", async () => {
@@ -42,12 +42,12 @@ describe("test the create row action", () => {
   })
 
   it("check usage quota attempts", async () => {
-    env.CLOUD = true
-    await setup.runStep(setup.actions.CREATE_ROW.stepId, {
-      row
-    })
-    expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
-    env.CLOUD = false
+    await setup.runInProd(async () => {
+      await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+        row
+      })
+      expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
+    })
   })
 
   it("should check invalid inputs return an error", async () => {
@@ -35,9 +35,9 @@ describe("test the create user action", () => {
   })
 
   it("check usage quota attempts", async () => {
-    env.CLOUD = true
-    await setup.runStep(setup.actions.CREATE_USER.stepId, user)
-    expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "users", 1)
-    env.CLOUD = false
+    await setup.runInProd(async () => {
+      await setup.runStep(setup.actions.CREATE_USER.stepId, user)
+      expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "users", 1)
+    })
   })
 })
@@ -36,10 +36,10 @@ describe("test the delete row action", () => {
   })
 
   it("check usage quota attempts", async () => {
-    env.CLOUD = true
-    await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
-    expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
-    env.CLOUD = false
+    await setup.runInProd(async () => {
+      await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+      expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+    })
   })
 
   it("should check invalid inputs return an error", async () => {
@@ -2,6 +2,7 @@ const TestConfig = require("../../../tests/utilities/TestConfiguration")
 const actions = require("../../actions")
 const logic = require("../../logic")
 const emitter = require("../../../events/index")
+const env = require("../../../environment")
 
 let config
 
@@ -16,6 +17,22 @@ exports.afterAll = () => {
   config.end()
 }
 
+exports.runInProd = async fn => {
+  env._set("NODE_ENV", "production")
+  env._set("USE_QUOTAS", 1)
+  let error
+  try {
+    await fn()
+  } catch (err) {
+    error = err
+  }
+  env._set("NODE_ENV", "jest")
+  env._set("USE_QUOTAS", null)
+  if (error) {
+    throw error
+  }
+}
+
 exports.runStep = async function runStep(stepId, inputs) {
   let step
   if (
@@ -56,6 +56,7 @@ class Orchestrator {
         appId: this._appId,
         apiKey: automation.apiKey,
         emitter: this._emitter,
+        context: this._context,
       })
       if (step.stepId === FILTER_STEP_ID && !outputs.success) {
         break
@@ -1,10 +1,15 @@
 const CouchDB = require("../db")
 const emitter = require("../events/index")
-const InMemoryQueue = require("../utilities/queue/inMemoryQueue")
+// const InMemoryQueue = require("../utilities/queue/inMemoryQueue")
+const Queue = require("bull")
+const { setQueues, BullAdapter } = require("bull-board")
 const { getAutomationParams } = require("../db/utils")
 const { coerce } = require("../utilities/rowProcessor")
 
-let automationQueue = new InMemoryQueue("automationQueue")
+let automationQueue = new Queue("automationQueue")
+
+// Set up queues for bull board admin
+setQueues([new BullAdapter(automationQueue)])
 
 const FAKE_STRING = "TEST"
 const FAKE_BOOL = false
@@ -80,8 +80,6 @@ exports.AutoFieldSubTypes = {
   AUTO_ID: "autoID",
 }
 
-exports.BUILDER_CONFIG_DB = "builder-config-db"
-exports.HOSTING_DOC = "hosting-doc"
 exports.OBJ_STORE_DIRECTORY = "/app-assets/assets"
 exports.BaseQueryVerbs = {
   CREATE: "create",
@@ -89,3 +87,9 @@ exports.BaseQueryVerbs = {
   UPDATE: "update",
   DELETE: "delete",
 }
+
+exports.ObjectStoreBuckets = {
+  BACKUPS: "backups",
+  APPS: "prod-budi-app-assets",
+  TEMPLATES: "templates",
+}
@@ -0,0 +1,38 @@
+const CouchDB = require("./index")
+const { StaticDatabases } = require("./utils")
+const env = require("../environment")
+
+const SELF_HOST_ERR = "Unable to access builder DB/doc - not self hosted."
+const BUILDER_DB = StaticDatabases.BUILDER
+
+/**
+ * This is the builder database, right now this is a single, static database
+ * that is present across the whole system and determines some core functionality
+ * for the builder (e.g. storage of API keys). This has been limited to self hosting
+ * as it doesn't make as much sense against the currently design Cloud system.
+ */
+
+exports.getBuilderMainDoc = async () => {
+  if (!env.SELF_HOSTED) {
+    throw SELF_HOST_ERR
+  }
+  const db = new CouchDB(BUILDER_DB.name)
+  try {
+    return await db.get(BUILDER_DB.baseDoc)
+  } catch (err) {
+    // doesn't exist yet, nothing to get
+    return {
+      _id: BUILDER_DB.baseDoc,
+    }
+  }
+}
+
+exports.setBuilderMainDoc = async doc => {
+  if (!env.SELF_HOSTED) {
+    throw SELF_HOST_ERR
+  }
+  // make sure to override the ID
+  doc._id = BUILDER_DB.baseDoc
+  const db = new CouchDB(BUILDER_DB.name)
+  return db.put(doc)
+}
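A sketch of how the new builder DB helpers might be used for a read-modify-write in a self hosted install; the apiKeys field is illustrative and not defined by this file:

const builderDb = require("./db/builder")

async function storeApiKey(userId, apiKey) {
  // returns { _id: "builder-doc" } if the doc has never been written
  const doc = await builderDb.getBuilderMainDoc()
  doc.apiKeys = { ...(doc.apiKeys || {}), [userId]: apiKey }
  // setBuilderMainDoc forces _id back to the base doc before putting
  return builderDb.setBuilderMainDoc(doc)
}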
@@ -1,12 +1,10 @@
const PouchDB = require("pouchdb")
const replicationStream = require("pouchdb-replication-stream")
const allDbs = require("pouchdb-all-dbs")
-const { budibaseAppsDir } = require("../utilities/budibaseDir")
const find = require("pouchdb-find")
const env = require("../environment")

-const COUCH_DB_URL = env.COUCH_DB_URL || `leveldb://${budibaseAppsDir()}/.data/`
-const isInMemory = env.NODE_ENV === "jest"
+const COUCH_DB_URL = env.COUCH_DB_URL || "http://localhost:10000/db/"

PouchDB.plugin(replicationStream.plugin)
PouchDB.plugin(find)
@@ -14,10 +12,10 @@ PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)

let POUCH_DB_DEFAULTS = {
  prefix: COUCH_DB_URL,
-  skip_setup: !!env.CLOUD,
+  skip_setup: env.isProd(),
}

-if (isInMemory) {
+if (env.isTest()) {
  PouchDB.plugin(require("pouchdb-adapter-memory"))
  POUCH_DB_DEFAULTS = {
    prefix: undefined,
@@ -1,4 +1,4 @@
-let _ = require("lodash")
+let { merge } = require("lodash")
let env = require("../environment")

const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
@@ -38,7 +38,7 @@ class Table {
      params.Key[this._sort] = sort
    }
    if (otherProps) {
-     params = _.merge(params, otherProps)
+     params = merge(params, otherProps)
    }
    let response = await docClient.get(params).promise()
    return response.Item
@@ -77,7 +77,7 @@ class Table {
      params.ConditionExpression += "attribute_exists(#PRIMARY)"
    }
    if (otherProps) {
-     params = _.merge(params, otherProps)
+     params = merge(params, otherProps)
    }
    return docClient.update(params).promise()
  }
@@ -94,7 +94,7 @@ class Table {
      Item: item,
    }
    if (otherProps) {
-     params = _.merge(params, otherProps)
+     params = merge(params, otherProps)
    }
    return docClient.put(params).promise()
  }
@@ -119,7 +119,7 @@ exports.init = endpoint => {
exports.apiKeyTable = new Table(TableInfo.API_KEYS)
exports.userTable = new Table(TableInfo.USERS)

-if (env.CLOUD) {
+if (env.isProd()) {
  exports.init(`https://dynamodb.${AWS_REGION}.amazonaws.com`)
} else {
  env._set("AWS_ACCESS_KEY_ID", "KEY_ID")
@@ -3,6 +3,18 @@ const newid = require("./newid")
const UNICODE_MAX = "\ufff0"
const SEPARATOR = "_"

+const StaticDatabases = {
+  BUILDER: {
+    name: "builder-db",
+    baseDoc: "builder-doc",
+  },
+  // TODO: needs removed
+  BUILDER_HOSTING: {
+    name: "builder-config-db",
+    baseDoc: "hosting-doc",
+  },
+}
+
const DocumentTypes = {
  TABLE: "ta",
  ROW: "ro",
@@ -25,6 +37,7 @@ const ViewNames = {
  USERS: "ta_users",
}

+exports.StaticDatabases = StaticDatabases
exports.ViewNames = ViewNames
exports.DocumentTypes = DocumentTypes
exports.SEPARATOR = SEPARATOR
@@ -1,45 +1,64 @@
-const { resolve, join } = require("./utilities/centralPath")
-const { homedir } = require("os")
-const { app } = require("electron")
+function isTest() {
+  return (
+    process.env.NODE_ENV === "jest" ||
+    process.env.NODE_ENV === "cypress" ||
+    process.env.JEST_WORKER_ID != null
+  )
+}
+
+function isDev() {
+  return (
+    process.env.NODE_ENV !== "production" &&
+    process.env.BUDIBASE_ENVIRONMENT !== "production"
+  )
+}

let LOADED = false
-if (!LOADED) {
-  const homeDir = app ? app.getPath("home") : homedir()
-  const budibaseDir = join(homeDir, ".budibase")
-  process.env.BUDIBASE_DIR = budibaseDir
-  require("dotenv").config({ path: resolve(budibaseDir, ".env") })
+if (!LOADED && isDev() && !isTest()) {
+  require("dotenv").config()
  LOADED = true
}

module.exports = {
-  CLIENT_ID: process.env.CLIENT_ID,
-  NODE_ENV: process.env.NODE_ENV,
-  JWT_SECRET: process.env.JWT_SECRET,
-  BUDIBASE_DIR: process.env.BUDIBASE_DIR,
+  // important
  PORT: process.env.PORT,
+  JWT_SECRET: process.env.JWT_SECRET,
  COUCH_DB_URL: process.env.COUCH_DB_URL,
+  MINIO_URL: process.env.MINIO_URL,
+  WORKER_URL: process.env.WORKER_URL,
+  SELF_HOSTED: process.env.SELF_HOSTED,
+  AWS_REGION: process.env.AWS_REGION,
+  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
+  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
+  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+  USE_QUOTAS: process.env.USE_QUOTAS,
+  // environment
+  NODE_ENV: process.env.NODE_ENV,
+  JEST_WORKER_ID: process.env.JEST_WORKER_ID,
+  BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
+  // minor
  SALT_ROUNDS: process.env.SALT_ROUNDS,
  LOGGER: process.env.LOGGER,
  LOG_LEVEL: process.env.LOG_LEVEL,
  AUTOMATION_DIRECTORY: process.env.AUTOMATION_DIRECTORY,
  AUTOMATION_BUCKET: process.env.AUTOMATION_BUCKET,
-  BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
  SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
-  CLOUD: process.env.CLOUD,
-  SELF_HOSTED: process.env.SELF_HOSTED,
-  WORKER_URL: process.env.WORKER_URL,
-  HOSTING_KEY: process.env.HOSTING_KEY,
  DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
-  AWS_REGION: process.env.AWS_REGION,
-  DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
+  // old - to remove
+  CLIENT_ID: process.env.CLIENT_ID,
+  BUDIBASE_DIR: process.env.BUDIBASE_DIR,
+  DEPLOYMENT_DB_URL: process.env.DEPLOYMENT_DB_URL,
  BUDIBASE_API_KEY: process.env.BUDIBASE_API_KEY,
  USERID_API_KEY: process.env.USERID_API_KEY,
-  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
-  DEPLOYMENT_DB_URL: process.env.DEPLOYMENT_DB_URL,
-  LOCAL_TEMPLATES: process.env.LOCAL_TEMPLATES,
+  DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
+  HOSTING_KEY: process.env.HOSTING_KEY,
  _set(key, value) {
    process.env[key] = value
    module.exports[key] = value
  },
+  isTest,
+  isDev,
+  isProd: () => {
+    return !isDev()
+  },
}
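The practical effect of the new helpers is that call sites stop checking an env.CLOUD flag and ask the environment module directly; a minimal sketch, mirroring the gating used by the usage quota middleware later in this commit:

const env = require("../environment")

function quotasEnabled() {
  // dev and test runs never enforce quotas; a self hosted install opts out too
  return env.isProd() && !env.SELF_HOSTED
}

// tests can flip the mode through _set, which also updates process.env
env._set("NODE_ENV", "production")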
@@ -1,17 +1,10 @@
-const { budibaseTempDir } = require("./utilities/budibaseDir")
-const { isDev } = require("./utilities")
-
const fixPath = require("fix-path")
-const fs = require("fs")
+const { checkDevelopmentEnvironment } = require("./utilities/fileSystem")

-async function runServer() {
-  if (isDev() && !fs.existsSync(budibaseTempDir())) {
-    console.error(
-      "Please run a build before attempting to run server independently to fill 'tmp' directory."
-    )
-    process.exit(-1)
-  }
-
+function runServer() {
+  // this will shutdown the system if development environment not ready
+  // will print an error explaining what to do
+  checkDevelopmentEnvironment()
  fixPath()
  require("./app")
}
@@ -59,7 +59,6 @@ module.exports = async (ctx, next) => {
      role: await getRole(appId, jwtPayload.roleId),
    }
  } catch (err) {
-   console.log(err)
    if (authType === AuthTypes.BUILDER) {
      clearCookie(ctx)
      ctx.status = 200
@@ -13,19 +13,12 @@ const { AuthTypes } = require("../constants")

const ADMIN_ROLES = [BUILTIN_ROLE_IDS.ADMIN, BUILTIN_ROLE_IDS.BUILDER]

-const LOCAL_PASS = new RegExp(["webhooks/trigger"].join("|"))
-
function hasResource(ctx) {
  return ctx.resourceId != null
}

module.exports = (permType, permLevel = null) => async (ctx, next) => {
-  // webhooks can pass locally
-  if (!env.CLOUD && LOCAL_PASS.test(ctx.request.url)) {
-    return next()
-  }
-
-  if (env.CLOUD && ctx.headers["x-api-key"] && ctx.headers["x-instanceid"]) {
+  if (env.isProd() && ctx.headers["x-api-key"] && ctx.headers["x-instanceid"]) {
    // api key header passed by external webhook
    if (await isAPIKeyValid(ctx.headers["x-api-key"])) {
      ctx.auth = {
@@ -41,20 +34,23 @@ module.exports = (permType, permLevel = null) => async (ctx, next) => {
    return ctx.throw(403, "API key invalid")
  }

-  // don't expose builder endpoints in the cloud
-  if (env.CLOUD && permType === PermissionTypes.BUILDER) return
-
  if (!ctx.user) {
    return ctx.throw(403, "No user info found")
  }

  const role = ctx.user.role
+  const isBuilder = role._id === BUILTIN_ROLE_IDS.BUILDER
+  const isAdmin = ADMIN_ROLES.includes(role._id)
+  const isAuthed = ctx.auth.authenticated
+
+  if (permType === PermissionTypes.BUILDER && isBuilder) {
+    return next()
+  }
+
  const { basePermissions, permissions } = await getUserPermissions(
    ctx.appId,
    role._id
  )
-  const isAdmin = ADMIN_ROLES.includes(role._id)
-  const isAuthed = ctx.auth.authenticated

  // this may need to change in the future, right now only admins
  // can have access to builder features, this is hard coded into
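For reference, this middleware is a factory, so a route would typically mount it with a permission type and level; the router library, route path, and permission names below are only illustrative and not taken from this commit:

const Router = require("koa-router")
const authorized = require("./middleware/authorized")
const {
  PermissionTypes,
  PermissionLevels,
} = require("./utilities/security/permissions")

const router = new Router()
// only users whose role grants WRITE on this resource (or admins/builders) get through
router.post(
  "/api/:tableId/rows",
  authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
  ctx => {
    ctx.body = { ok: true }
  }
)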
@@ -1,14 +1,8 @@
const env = require("../environment")
-const hosting = require("../utilities/builder/hosting")
// if added as a middleware will stop requests unless builder is in self host mode
// or cloud is in self host
module.exports = async (ctx, next) => {
-  if (env.CLOUD && env.SELF_HOSTED) {
-    await next()
-    return
-  }
-  const hostingInfo = await hosting.getHostingInfo()
-  if (hostingInfo.type === hosting.HostingTypes.SELF) {
+  if (env.SELF_HOSTED) {
    await next()
    return
  }
@@ -3,8 +3,15 @@ const env = require("../../environment")
const apiKey = require("../../utilities/security/apikey")
const { AuthTypes } = require("../../constants")
const { PermissionTypes, PermissionLevels } = require("../../utilities/security/permissions")
-const { Test } = require("supertest")
-jest.mock("../../environment")
+jest.mock("../../environment", () => ({
+    prod: false,
+    isTest: () => true,
+    isProd: () => this.prod,
+    _set: (key, value) => {
+      this.prod = value === "production"
+    }
+  })
+)
jest.mock("../../utilities/security/apikey")

class TestConfiguration {
@@ -47,8 +54,8 @@ class TestConfiguration {
    this.ctx.request.url = url
  }

-  setCloudEnv(isCloud) {
-    env.CLOUD = isCloud
+  setEnvironment(isProd) {
+    env._set("NODE_ENV", isProd ? "production" : "jest")
  }

  setRequestHeaders(headers) {
@@ -71,12 +78,6 @@ describe("Authorization middleware", () => {

  beforeEach(() => {
    config = new TestConfiguration()
-  })
-
-  it("passes the middleware for local webhooks", async () => {
-    config.setRequestUrl("https://something/webhooks/trigger")
-    await config.executeMiddleware()
-    expect(config.next).toHaveBeenCalled()
  })

  describe("external web hook call", () => {
@@ -85,7 +86,7 @@ describe("Authorization middleware", () => {

    beforeEach(() => {
      config = new TestConfiguration()
-     config.setCloudEnv(true)
+     config.setEnvironment(true)
      config.setRequestHeaders({
        "x-api-key": "abc123",
        "x-instanceid": "instance123",
@@ -121,7 +122,7 @@ describe("Authorization middleware", () => {

    beforeEach(() => {
      config = new TestConfiguration()
-     config.setCloudEnv(true)
+     config.setEnvironment(true)
      config.setAuthenticated(true)
    })

@@ -144,7 +145,7 @@ describe("Authorization middleware", () => {
    })

    it("throws if the user has only builder permissions", async () => {
-     config.setCloudEnv(false)
+     config.setEnvironment(false)
      config.setMiddlewareRequiredPermission(PermissionTypes.BUILDER)
      config.setUser({
        role: {
@@ -1,6 +1,5 @@
-const selfHostMiddleware = require("../selfhost");
+const selfHostMiddleware = require("../selfhost")
const env = require("../../environment")
-const hosting = require("../../utilities/builder/hosting");
jest.mock("../../environment")
jest.mock("../../utilities/builder/hosting")

@@ -20,16 +19,6 @@ class TestConfiguration {
    return this.middleware(this.ctx, this.next)
  }

-  setCloudHosted() {
-    env.CLOUD = 1
-    env.SELF_HOSTED = 0
-  }
-
-  setSelfHosted() {
-    env.CLOUD = 0
-    env.SELF_HOSTED = 1
-  }
-
  afterEach() {
    jest.clearAllMocks()
  }
@@ -46,30 +35,10 @@ describe("Self host middleware", () => {
    config.afterEach()
  })

-  it("calls next() when CLOUD and SELF_HOSTED env vars are set", async () => {
-    env.CLOUD = 1
+  it("calls next() when SELF_HOSTED env var is set", async () => {
    env.SELF_HOSTED = 1

    await config.executeMiddleware()
    expect(config.next).toHaveBeenCalled()
  })
-
-  it("throws when hostingInfo type is cloud", async () => {
-    config.setSelfHosted()
-
-    hosting.getHostingInfo.mockImplementationOnce(() => ({ type: hosting.HostingTypes.CLOUD }))
-
-    await config.executeMiddleware()
-    expect(config.throw).toHaveBeenCalledWith(400, "Endpoint unavailable in cloud hosting.")
-    expect(config.next).not.toHaveBeenCalled()
-  })
-
-  it("calls the self hosting middleware to pass through to next() when the hostingInfo type is self", async () => {
-    config.setSelfHosted()
-
-    hosting.getHostingInfo.mockImplementationOnce(() => ({ type: hosting.HostingTypes.SELF }))
-
-    await config.executeMiddleware()
-    expect(config.next).toHaveBeenCalled()
-  })
})
@@ -5,7 +5,12 @@ const env = require("../../environment")

jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
-jest.mock("../../environment")
+jest.mock("../../environment", () => ({
+  isTest: () => true,
+  isProd: () => false,
+  isDev: () => true,
+  _set: () => {},
+}))

class TestConfiguration {
  constructor() {
@@ -32,12 +37,14 @@ class TestConfiguration {
    return this.middleware(this.ctx, this.next)
  }

-  cloudHosted(bool) {
+  setProd(bool) {
    if (bool) {
-     env.CLOUD = 1
+     env.isDev = () => false
+     env.isProd = () => true
      this.ctx.auth = { apiKey: "test" }
    } else {
-     env.CLOUD = 0
+     env.isDev = () => true
+     env.isProd = () => false
    }
  }

@@ -102,7 +109,7 @@ describe("usageQuota middleware", () => {

  it("calculates and persists the correct usage quota for the relevant action", async () => {
    config.setUrl("/rows")
-   config.cloudHosted(true)
+   config.setProd(true)

    await config.executeMiddleware()

@@ -112,7 +119,7 @@ describe("usageQuota middleware", () => {

  it("calculates the correct file size from a file upload call and adds it to quota", async () => {
    config.setUrl("/upload")
-   config.cloudHosted(true)
+   config.setProd(true)
    config.setFiles([
      {
        size: 100
@@ -44,8 +44,8 @@ module.exports = async (ctx, next) => {
    }
  }

-  // if running in builder or a self hosted cloud usage quotas should not be executed
-  if (!env.CLOUD || env.SELF_HOSTED) {
+  // if in development or a self hosted cloud usage quotas should not be executed
+  if (env.isDev() || env.SELF_HOSTED) {
    return next()
  }
  // update usage for uploads to be the total size
@@ -14,9 +14,6 @@ const {
} = require("./structures")
const controllers = require("./controllers")
const supertest = require("supertest")
-const fs = require("fs")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
-const { join } = require("path")

const EMAIL = "babs@babs.com"
const PASSWORD = "babs_password"
@@ -66,13 +63,6 @@ class TestConfiguration {
    if (this.server) {
      this.server.close()
    }
-   const appDir = budibaseAppsDir()
-   const files = fs.readdirSync(appDir)
-   for (let file of files) {
-     if (this.allApps.some(app => file.includes(app._id))) {
-       fs.rmdirSync(join(appDir, file), { recursive: true })
-     }
-   }
  }

  defaultHeaders() {
@@ -81,9 +71,11 @@ class TestConfiguration {
      roleId: BUILTIN_ROLE_IDS.BUILDER,
    }
    const builderToken = jwt.sign(builderUser, env.JWT_SECRET)
+   // can be "production" for test case
+   const type = env.isProd() ? "cloud" : "local"
    const headers = {
      Accept: "application/json",
-     Cookie: [`budibase:builder:local=${builderToken}`],
+     Cookie: [`budibase:builder:${type}=${builderToken}`],
    }
    if (this.appId) {
      headers["x-budibase-app-id"] = this.appId
@@ -1,35 +0,0 @@
-const { ensureDir, constants, copyFile } = require("fs-extra")
-const { join } = require("../centralPath")
-const { budibaseAppsDir } = require("../budibaseDir")
-
-/**
- * Compile all the non-db static web assets that are required for the running of
- * a budibase application. This includes the JSON structure of the DOM and
- * the client library, a script responsible for reading the JSON structure
- * and rendering the application.
- * @param {string} appId id of the application we want to compile static assets for
- */
-module.exports = async appId => {
-  const publicPath = join(budibaseAppsDir(), appId, "public")
-  await ensureDir(publicPath)
-  await copyClientLib(publicPath)
-}
-
-/**
- * Copy the budibase client library and sourcemap from NPM to <appId>/public/.
- * The client library is then served as a static asset when the budibase application
- * is running in preview or prod
- * @param {String} publicPath - path to write the client library to
- */
-const copyClientLib = async publicPath => {
-  const sourcepath = require.resolve("@budibase/client")
-  const destPath = join(publicPath, "budibase-client.js")
-
-  await copyFile(sourcepath, destPath, constants.COPYFILE_FICLONE)
-
-  await copyFile(
-    sourcepath + ".map",
-    destPath + ".map",
-    constants.COPYFILE_FICLONE
-  )
-}
@@ -1,5 +1,5 @@
const CouchDB = require("../../db")
-const { BUILDER_CONFIG_DB, HOSTING_DOC } = require("../../constants")
+const { StaticDatabases } = require("../../db/utils")
const fetch = require("node-fetch")
const env = require("../../environment")

@@ -23,16 +23,16 @@ exports.HostingTypes = {
}

exports.getHostingInfo = async () => {
-  const db = new CouchDB(BUILDER_CONFIG_DB)
+  const db = new CouchDB(StaticDatabases.BUILDER_HOSTING.name)
  let doc
  try {
-   doc = await db.get(HOSTING_DOC)
+   doc = await db.get(StaticDatabases.BUILDER_HOSTING.baseDoc)
  } catch (err) {
    // don't write this doc, want to be able to update these default props
    // for our servers with a new release without needing to worry about state of
    // PouchDB in peoples installations
    doc = {
-     _id: HOSTING_DOC,
+     _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
      type: exports.HostingTypes.CLOUD,
      hostingUrl: PROD_HOSTING_URL,
      selfHostKey: "",
@@ -85,15 +85,11 @@ exports.getTemplatesUrl = async (appId, type, name) => {
}

exports.getDeployedApps = async () => {
-  const hostingInfo = await exports.getHostingInfo()
-  if (
-    (!env.CLOUD && hostingInfo.type === exports.HostingTypes.CLOUD) ||
-    (env.CLOUD && !env.SELF_HOSTED)
-  ) {
+  if (!env.SELF_HOSTED) {
    throw "Can only check apps for self hosted environments"
  }
-  const workerUrl = !env.CLOUD ? await exports.getWorkerUrl() : env.WORKER_URL
-  const hostingKey = !env.CLOUD ? hostingInfo.selfHostKey : env.HOSTING_KEY
+  const workerUrl = env.WORKER_URL
+  const hostingKey = env.HOSTING_KEY
  try {
    const response = await fetch(`${workerUrl}/api/apps`, {
      method: "GET",
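A hedged sketch of calling getDeployedApps now that it is gated purely on SELF_HOSTED; note it throws a plain string rather than an Error, and the shape of the worker response (an object keyed by app name) is assumed here rather than stated by this hunk:

const hosting = require("./utilities/builder/hosting")

async function listDeployedAppNames() {
  try {
    const apps = await hosting.getDeployedApps()
    return Object.keys(apps)
  } catch (err) {
    // "Can only check apps for self hosted environments" on cloud installs
    return []
  }
}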
@@ -1,29 +0,0 @@
-const stream = require("stream")
-const fetch = require("node-fetch")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const packageJson = require("../../package.json")
-
-const streamPipeline = promisify(stream.pipeline)
-
-// can't really test this due to the downloading nature of it, wouldn't be a great test case
-/* istanbul ignore next */
-exports.downloadExtractComponentLibraries = async appFolder => {
-  const LIBRARIES = ["standard-components"]
-
-  // Need to download tarballs directly from NPM as our users may not have node on their machine
-  for (let lib of LIBRARIES) {
-    // download tarball
-    const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
-    const response = await fetch(registryUrl)
-    if (!response.ok)
-      throw new Error(`unexpected response ${response.statusText}`)
-
-    await streamPipeline(
-      response.body,
-      zlib.Unzip(),
-      tar.extract(`${appFolder}/node_modules/@budibase/${lib}`)
-    )
-  }
-}
@@ -0,0 +1,204 @@
+const { budibaseTempDir } = require("../budibaseDir")
+const { isDev } = require("../index")
+const fs = require("fs")
+const { join } = require("path")
+const uuid = require("uuid/v4")
+const CouchDB = require("../../db")
+const { ObjectStoreBuckets } = require("../../constants")
+const {
+  upload,
+  retrieve,
+  streamUpload,
+  deleteFolder,
+  downloadTarball,
+} = require("./utilities")
+const { downloadLibraries, newAppPublicPath } = require("./newApp")
+const download = require("download")
+const env = require("../../environment")
+const { homedir } = require("os")
+const fetch = require("node-fetch")
+
+const DEFAULT_AUTOMATION_BUCKET =
+  "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
+const DEFAULT_AUTOMATION_DIRECTORY = ".budibase-automations"
+
+/**
+ * The single stack system (Cloud and Builder) should not make use of the file system where possible,
+ * this file handles all of the file access for the system with the intention of limiting it all to one
+ * place. Keeping all of this logic in one place means that when we need to do file system access (like
+ * downloading a package or opening a temporary file) in can be done in way that we can confirm it shouldn't
+ * be done through an object store instead.
+ */
+
+/**
+ * Upon first startup of instance there may not be everything we need in tmp directory, set it up.
+ */
+exports.init = () => {
+  const tempDir = budibaseTempDir()
+  if (!fs.existsSync(tempDir)) {
+    fs.mkdirSync(tempDir)
+  }
+}
+
+/**
+ * Checks if the system is currently in development mode and if it is makes sure
+ * everything required to function is ready.
+ */
+exports.checkDevelopmentEnvironment = () => {
+  if (!isDev()) {
+    return
+  }
+  let error
+  if (!fs.existsSync(budibaseTempDir())) {
+    error =
+      "Please run a build before attempting to run server independently to fill 'tmp' directory."
+  }
+  if (!fs.existsSync(join(process.cwd(), ".env"))) {
+    error = "Must run via yarn once to generate environment."
+  }
+  if (error) {
+    console.error(error)
+    process.exit(-1)
+  }
+}
+
+/**
+ * This function manages temporary template files which are stored by Koa.
+ * @param {Object} template The template object retrieved from the Koa context object.
+ * @returns {Object} Returns an fs read stream which can be loaded into the database.
+ */
+exports.getTemplateStream = async template => {
+  if (template.file) {
+    return fs.createReadStream(template.file.path)
+  } else {
+    const tmpPath = await exports.downloadTemplate(...template.key.split("/"))
+    return fs.createReadStream(join(tmpPath, "db", "dump.txt"))
+  }
+}
+
+/**
+ * Used to retrieve a handlebars file from the system which will be used as a template.
+ * This is allowable as the template handlebars files should be static and identical across
+ * the cluster.
+ * @param {string} path The path to the handlebars file which is to be loaded.
+ * @returns {string} The loaded handlebars file as a string - loaded as utf8.
+ */
+exports.loadHandlebarsFile = path => {
+  return fs.readFileSync(path, "utf8")
+}
+
+/**
+ * When return a file from the API need to write the file to the system temporarily so we
+ * can create a read stream to send.
+ * @param {string} contents the contents of the file which is to be returned from the API.
+ * @return {Object} the read stream which can be put into the koa context body.
+ */
+exports.apiFileReturn = contents => {
+  const path = join(budibaseTempDir(), uuid())
+  fs.writeFileSync(path, contents)
+  return fs.createReadStream(path)
+}
+
+/**
+ * Takes a copy of the database state for an app to the object store.
+ * @param {string} appId The ID of the app which is to be backed up.
+ * @param {string} backupName The name of the backup located in the object store.
+ * @return The backup has been completed when this promise completes and returns a file stream
+ * to the temporary backup file (to return via API if required).
+ */
+exports.performBackup = async (appId, backupName) => {
+  const path = join(budibaseTempDir(), backupName)
+  const writeStream = fs.createWriteStream(path)
+  // perform couch dump
+  const instanceDb = new CouchDB(appId)
+  await instanceDb.dump(writeStream, {})
+  // write the file to the object store
+  await streamUpload(
+    ObjectStoreBuckets.BACKUPS,
+    join(appId, backupName),
+    fs.createReadStream(path)
+  )
+  return fs.createReadStream(path)
+}
+
+/**
+ * Downloads required libraries and creates a new path in the object store.
+ * @param {string} appId The ID of the app which is being created.
+ * @return {Promise<void>} once promise completes app resources should be ready in object store.
+ */
+exports.createApp = async appId => {
+  await downloadLibraries(appId)
+  await newAppPublicPath(appId)
+}
+
+/**
+ * Removes all of the assets created for an app in the object store.
+ * @param {string} appId The ID of the app which is being deleted.
+ * @return {Promise<void>} once promise completes the app resources will be removed from object store.
+ */
+exports.deleteApp = async appId => {
+  await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
+}
+
+/**
+ * Retrieves a template and pipes it to minio as well as making it available temporarily.
+ * @param {string} type The type of template which is to be retrieved.
+ * @param name
+ * @return {Promise<*>}
+ */
+exports.downloadTemplate = async (type, name) => {
+  const DEFAULT_TEMPLATES_BUCKET =
+    "prod-budi-templates.s3-eu-west-1.amazonaws.com"
+  const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
+  return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
+}
+
+/**
+ * Retrieves component libraries from object store (or tmp symlink if in local)
+ */
+exports.getComponentLibraryManifest = async (appId, library) => {
+  const path = join(appId, "node_modules", library, "package", "manifest.json")
+  let resp = await retrieve(ObjectStoreBuckets.APPS, path)
+  if (typeof resp !== "string") {
+    resp = resp.toString("utf8")
+  }
+  return JSON.parse(resp)
+}
+
+exports.automationInit = async () => {
+  const directory =
+    env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
+  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
+  if (!fs.existsSync(directory)) {
+    fs.mkdirSync(directory, { recursive: true })
+  }
+  // env setup to get async packages
+  let response = await fetch(`${bucket}/manifest.json`)
+  return response.json()
+}
+
+exports.getExternalAutomationStep = async (name, version, bundleName) => {
+  const directory =
+    env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
+  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
+  try {
+    return require(join(directory, bundleName))
+  } catch (err) {
+    await download(`${bucket}/${name}/${version}/${bundleName}`, directory)
+    return require(join(directory, bundleName))
+  }
+}
+
+/**
+ * All file reads come through here just to make sure all of them make sense
+ * allows a centralised location to check logic is all good.
+ */
+exports.readFileSync = (filepath, options = "utf8") => {
+  return fs.readFileSync(filepath, options)
+}
+
+/**
+ * Full function definition for below can be found in the utilities.
+ */
+exports.upload = upload
+exports.retrieve = retrieve
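As an illustration of how these helpers could be wired into a Koa controller (the controller name and route are assumed, not part of this commit; performBackup returning a read stream is taken from the file above):

const { performBackup } = require("./utilities/fileSystem")

// hypothetical controller: stream a CouchDB dump of the app back to the caller
exports.exportAppDump = async ctx => {
  const appId = ctx.params.appId
  const backupName = `${appId}-backup-${Date.now()}.txt`
  ctx.attachment(backupName)
  ctx.body = await performBackup(appId, backupName)
}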
@@ -0,0 +1,34 @@
+const packageJson = require("../../../package.json")
+const { join } = require("path")
+const { ObjectStoreBuckets } = require("../../constants")
+const { streamUpload, downloadTarball } = require("./utilities")
+const fs = require("fs")
+
+const BUCKET_NAME = ObjectStoreBuckets.APPS
+
+// can't really test this due to the downloading nature of it, wouldn't be a great test case
+/* istanbul ignore next */
+exports.downloadLibraries = async appId => {
+  const LIBRARIES = ["standard-components"]
+
+  // Need to download tarballs directly from NPM as our users may not have node on their machine
+  for (let lib of LIBRARIES) {
+    // download tarball
+    const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
+    const path = join(appId, "node_modules", "@budibase", lib)
+    await downloadTarball(registryUrl, BUCKET_NAME, path)
+  }
+}
+
+exports.newAppPublicPath = async appId => {
+  const path = join(appId, "public")
+  const sourcepath = require.resolve("@budibase/client")
+  const destPath = join(path, "budibase-client.js")
+
+  await streamUpload(BUCKET_NAME, destPath, fs.createReadStream(sourcepath))
+  await streamUpload(
+    BUCKET_NAME,
+    destPath + ".map",
+    fs.createReadStream(sourcepath + ".map")
+  )
+}
@@ -1,25 +1,20 @@
-const fs = require("fs")
const jimp = require("jimp")
-const fsPromises = fs.promises

const FORMATS = {
  IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
}

function processImage(file) {
+  // this will overwrite the temp file
  return jimp.read(file.path).then(img => {
-   return img.resize(300, jimp.AUTO).write(file.outputPath)
+   return img.resize(300, jimp.AUTO).write(file.path)
  })
}

async function process(file) {
  if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
    await processImage(file)
-   return file
  }
-
-  // No processing required
-  await fsPromises.copyFile(file.path, file.outputPath)
  return file
}
@@ -0,0 +1,221 @@
+const sanitize = require("sanitize-s3-objectkey")
+const AWS = require("aws-sdk")
+const stream = require("stream")
+const fetch = require("node-fetch")
+const tar = require("tar-fs")
+const zlib = require("zlib")
+const { promisify } = require("util")
+const { join } = require("path")
+const fs = require("fs")
+const { budibaseTempDir } = require("../budibaseDir")
+const env = require("../../environment")
+const { ObjectStoreBuckets } = require("../../constants")
+
+const streamPipeline = promisify(stream.pipeline)
+
+const CONTENT_TYPE_MAP = {
+  html: "text/html",
+  css: "text/css",
+  js: "application/javascript",
+}
+const STRING_CONTENT_TYPES = [
+  CONTENT_TYPE_MAP.html,
+  CONTENT_TYPE_MAP.css,
+  CONTENT_TYPE_MAP.js,
+]
+
+function publicPolicy(bucketName) {
+  return {
+    Version: "2012-10-17",
+    Statement: [
+      {
+        Effect: "Allow",
+        Principal: {
+          AWS: ["*"],
+        },
+        Action: "s3:GetObject",
+        Resource: [`arn:aws:s3:::${bucketName}/*`],
+      },
+    ],
+  }
+}
+
+const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS]
+
+/**
+ * Gets a connection to the object store using the S3 SDK.
+ * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
+ * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
+ * @constructor
+ */
+exports.ObjectStore = bucket => {
+  if (env.SELF_HOSTED) {
+    AWS.config.update({
+      accessKeyId: env.MINIO_ACCESS_KEY,
+      secretAccessKey: env.MINIO_SECRET_KEY,
+    })
+  }
+  const config = {
+    s3ForcePathStyle: true,
+    signatureVersion: "v4",
+    params: {
+      Bucket: bucket,
+    },
+  }
+  if (env.MINIO_URL) {
+    config.endpoint = env.MINIO_URL
+  }
+  return new AWS.S3(config)
+}
+
+/**
+ * Given an object store and a bucket name this will make sure the bucket exists,
+ * if it does not exist then it will create it.
+ */
+exports.makeSureBucketExists = async (client, bucketName) => {
+  try {
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
+  } catch (err) {
+    // bucket doesn't exist create it
+    if (err.statusCode === 404) {
+      await client
+        .createBucket({
+          Bucket: bucketName,
+        })
+        .promise()
+      // public buckets are quite hidden in the system, make sure
+      // no bucket is set accidentally
+      if (PUBLIC_BUCKETS.includes(bucketName)) {
+        await client
+          .putBucketPolicy({
+            Bucket: bucketName,
+            Policy: JSON.stringify(publicPolicy(bucketName)),
+          })
+          .promise()
+      }
+    } else {
+      throw err
+    }
+  }
+}
+
+/**
+ * Uploads the contents of a file given the required parameters, useful when
+ * temp files in use (for example file uploaded as an attachment).
+ */
+exports.upload = async ({ bucket, filename, path, type, metadata }) => {
+  const extension = [...filename.split(".")].pop()
+  const fileBytes = fs.readFileSync(path)
+
+  const objectStore = exports.ObjectStore(bucket)
+  await exports.makeSureBucketExists(objectStore, bucket)
+
+  const config = {
+    // windows file paths need to be converted to forward slashes for s3
+    Key: sanitize(filename).replace(/\\/g, "/"),
+    Body: fileBytes,
+    ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
+  }
+  if (metadata) {
+    config.Metadata = metadata
+  }
+  return objectStore.upload(config).promise()
+}
+
+/**
+ * Similar to the upload function but can be used to send a file stream
+ * through to the object store.
+ */
+exports.streamUpload = async (bucket, filename, stream) => {
+  const objectStore = exports.ObjectStore(bucket)
+  await exports.makeSureBucketExists(objectStore, bucket)
+
+  const params = {
+    Bucket: bucket,
+    Key: sanitize(filename).replace(/\\/g, "/"),
+    Body: stream,
+  }
+  return objectStore.upload(params).promise()
+}
+
+/**
+ * retrieves the contents of a file from the object store, if it is a known content type it
+ * will be converted, otherwise it will be returned as a buffer stream.
+ */
+exports.retrieve = async (bucket, filename) => {
+  const objectStore = exports.ObjectStore(bucket)
+  const params = {
+    Bucket: bucket,
+    Key: sanitize(filename).replace(/\\/g, "/"),
+  }
+  const response = await objectStore.getObject(params).promise()
+  // currently these are all strings
+  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
+    return response.Body.toString("utf8")
+  } else {
+    return response.Body
+  }
+}
+
+exports.deleteFolder = async (bucket, folder) => {
+  const client = exports.ObjectStore(bucket)
+  const listParams = {
+    Bucket: bucket,
+    Prefix: folder,
+  }
+
+  let response = await client.listObjects(listParams).promise()
+  if (response.Contents.length === 0) {
+    return
+  }
+  const deleteParams = {
+    Bucket: bucket,
+    Delete: {
+      Objects: [],
+    },
+  }
+
+  response.Contents.forEach(content => {
+    deleteParams.Delete.Objects.push({ Key: content.Key })
+  })
+
+  response = await client.deleteObjects(deleteParams).promise()
+  // can only empty 1000 items at once
+  if (response.Deleted.length === 1000) {
+    return exports.deleteFolder(bucket, folder)
+  }
+}
+
+exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
+  let uploads = []
+  const files = fs.readdirSync(localPath, { withFileTypes: true })
+  for (let file of files) {
+    const path = join(bucketPath, file.name)
+    const local = join(localPath, file.name)
+    if (file.isDirectory()) {
+      uploads.push(exports.uploadDirectory(bucket, local, path))
+    } else {
+      uploads.push(
+        exports.streamUpload(bucket, path, fs.createReadStream(local))
+      )
+    }
+  }
+  await Promise.all(uploads)
+}
+
+exports.downloadTarball = async (url, bucket, path) => {
+  const response = await fetch(url)
+  if (!response.ok) {
+    throw new Error(`unexpected response ${response.statusText}`)
+  }
+
+  const tmpPath = join(budibaseTempDir(), path)
+  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
+  await exports.uploadDirectory(bucket, tmpPath, path)
+  // return the temporary path incase there is a use for it
+  return tmpPath
+}
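A small usage sketch of the upload/retrieve pair defined above, using the bucket constants introduced earlier in this commit; the object key and local file path are illustrative:

const { upload, retrieve } = require("./utilities/fileSystem/utilities")
const { ObjectStoreBuckets } = require("./constants")

async function roundTrip() {
  await upload({
    bucket: ObjectStoreBuckets.APPS,
    filename: "app_123/attachments/logo.png",
    path: "/tmp/logo.png",
    type: "image/png",
  })
  // non-text content types come back as a Buffer rather than a string
  const body = await retrieve(ObjectStoreBuckets.APPS, "app_123/attachments/logo.png")
  return body.length
}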
@@ -1,6 +1,5 @@
const env = require("../environment")
const { DocumentTypes, SEPARATOR } = require("../db/utils")
-const fs = require("fs")
const CouchDB = require("../db")

const APP_PREFIX = DocumentTypes.APP + SEPARATOR
@@ -13,14 +12,7 @@ function confirmAppId(possibleAppId) {

exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))

-exports.isDev = () => {
-  return (
-    !env.CLOUD &&
-    env.NODE_ENV !== "production" &&
-    env.NODE_ENV !== "jest" &&
-    env.NODE_ENV !== "cypress"
-  )
-}
+exports.isDev = env.isDev

/**
 * Given a request tries to find the appId, which can be located in various places
@@ -28,10 +20,18 @@ exports.isDev = env.isDev
 * @returns {string|undefined} If an appId was found it will be returned.
 */
exports.getAppId = ctx => {
-  let appId = confirmAppId(ctx.headers["x-budibase-app-id"])
-  if (!appId) {
-    appId = confirmAppId(env.CLOUD ? ctx.subdomains[1] : ctx.params.appId)
+  const options = [ctx.headers["x-budibase-app-id"], ctx.params.appId]
+  if (ctx.subdomains) {
+    options.push(ctx.subdomains[1])
  }
+  let appId
+  for (let option of options) {
+    appId = confirmAppId(option)
+    if (appId) {
+      break
+    }
+  }

  // look in body if can't find it in subdomain
  if (!appId && ctx.request.body && ctx.request.body.appId) {
    appId = confirmAppId(ctx.request.body.appId)
@@ -51,7 +51,7 @@ exports.getAppId = ctx => {
 * @returns {string} The name of the token trying to find
 */
exports.getCookieName = (name = "builder") => {
-  let environment = env.CLOUD ? "cloud" : "local"
+  let environment = env.isProd() ? "cloud" : "local"
  return `budibase:${name}:${environment}`
}

@@ -89,24 +89,6 @@ exports.isClient = ctx => {
  return ctx.headers["x-budibase-type"] === "client"
}

-/**
- * Recursively walk a directory tree and execute a callback on all files.
- * @param {String} dirPath - Directory to traverse
- * @param {Function} callback - callback to execute on files
- */
-exports.walkDir = (dirPath, callback) => {
-  for (let filename of fs.readdirSync(dirPath)) {
-    const filePath = `${dirPath}/${filename}`
-    const stat = fs.lstatSync(filePath)
-
-    if (stat.isFile()) {
-      callback(filePath)
-    } else {
-      exports.walkDir(filePath, callback)
-    }
-  }
-}
-
exports.getLogoUrl = () => {
  return "https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg"
}
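A short sketch of the cookie-name behaviour after this change, which now follows the runtime environment rather than a CLOUD flag (values shown assume no BUDIBASE_ENVIRONMENT override):

const env = require("./environment")
const { getCookieName } = require("./utilities")

// in a jest/dev run this yields "budibase:builder:local"
console.log(getCookieName())

env._set("NODE_ENV", "production")
// with no dev override it now yields "budibase:builder:cloud"
console.log(getCookieName())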
@@ -1,37 +0,0 @@
-const { existsSync, readFile, writeFile, ensureDir } = require("fs-extra")
-const { join, resolve } = require("./centralPath")
-const { processString } = require("@budibase/string-templates")
-const uuid = require("uuid")
-
-module.exports = async opts => {
-  await ensureDir(opts.dir)
-  await setCouchDbUrl(opts)
-
-  // need an env file
-  await createDevEnvFile(opts)
-}
-
-const setCouchDbUrl = async opts => {
-  if (!opts.couchDbUrl) {
-    const dataDir = join(opts.dir, ".data")
-    await ensureDir(dataDir)
-    opts.couchDbUrl =
-      dataDir + (dataDir.endsWith("/") || dataDir.endsWith("\\") ? "" : "/")
-  }
-}
-
-const createDevEnvFile = async opts => {
-  const destConfigFile = join(opts.dir, "./.env")
-  let createConfig = !existsSync(destConfigFile) || opts.quiet
-  if (createConfig) {
-    const template = await readFile(
-      resolve(__dirname, "..", "..", ".env.template"),
-      {
-        encoding: "utf8",
-      }
-    )
-    opts.cookieKey1 = opts.cookieKey1 || uuid.v4()
-    const config = await processString(template, opts)
-    await writeFile(destConfigFile, config, { flag: "w+" })
-  }
-}
@@ -180,7 +180,7 @@ exports.outputProcessing = async (appId, table, rows) => {
     rows
   )
   // update the attachments URL depending on hosting
-  if (env.CLOUD && env.SELF_HOSTED) {
+  if (env.isProd() && env.SELF_HOSTED) {
     for (let [property, column] of Object.entries(table.schema)) {
       if (column.type === FieldTypes.ATTACHMENT) {
        for (let row of outputRows) {
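The guarded branch above adjusts attachment URLs when a production, self-hosted instance serves files from its own object store. A hedged sketch of the kind of rewrite it performs (the attachment "key" field, the column shape, and the object-store URL parameter are assumptions for illustration, e.g. the MinIO service from the compose setup):

// Illustrative only: point each attachment at the locally hosted object store
// instead of a cloud CDN.
const rewriteAttachmentUrls = (row, columnName, objectStoreUrl) => {
  row[columnName] = (row[columnName] || []).map(attachment => ({
    ...attachment,
    url: `${objectStoreUrl}/${attachment.key}`,
  }))
}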
@@ -1,81 +0,0 @@
-const fs = require("fs-extra")
-const { join } = require("./centralPath")
-const os = require("os")
-const fetch = require("node-fetch")
-const stream = require("stream")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const streamPipeline = promisify(stream.pipeline)
-const { budibaseAppsDir } = require("./budibaseDir")
-const env = require("../environment")
-const CouchDB = require("../db")
-
-const DEFAULT_TEMPLATES_BUCKET =
-  "prod-budi-templates.s3-eu-west-1.amazonaws.com"
-
-exports.getLocalTemplates = function() {
-  const templatesDir = join(os.homedir(), ".budibase", "templates", "app")
-  const templateObj = { app: {} }
-  fs.ensureDirSync(templatesDir)
-  const templateNames = fs.readdirSync(templatesDir)
-  for (let name of templateNames) {
-    templateObj.app[name] = {
-      name,
-      category: "local",
-      description: "local template",
-      type: "app",
-      key: `app/${name}`,
-    }
-  }
-  return templateObj
-}
-
-// can't really test this, downloading is just not something we should do in a behavioural test
-/* istanbul ignore next */
-exports.downloadTemplate = async function(type, name) {
-  const dirName = join(budibaseAppsDir(), "templates", type, name)
-  if (env.LOCAL_TEMPLATES) {
-    return dirName
-  }
-  const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
-  const response = await fetch(templateUrl)
-
-  if (!response.ok) {
-    throw new Error(
-      `Error downloading template ${type}:${name}: ${response.statusText}`
-    )
-  }
-
-  // stream the response, unzip and extract
-  await streamPipeline(
-    response.body,
-    zlib.Unzip(),
-    tar.extract(join(budibaseAppsDir(), "templates", type))
-  )
-
-  return dirName
-}
-
-async function performDump({ dir, appId, name = "dump.txt" }) {
-  const writeStream = fs.createWriteStream(join(dir, name))
-  // perform couch dump
-  const instanceDb = new CouchDB(appId)
-  await instanceDb.dump(writeStream, {})
-}
-
-exports.performDump = performDump
-
-exports.exportTemplateFromApp = async function({ templateName, appId }) {
-  // Copy frontend files
-  const templatesDir = join(
-    budibaseAppsDir(),
-    "templates",
-    "app",
-    templateName,
-    "db"
-  )
-  fs.ensureDirSync(templatesDir)
-  await performDump({ dir: templatesDir, appId })
-  return templatesDir
-}
@@ -1,4 +1,4 @@
-const fs = require("fs")
+const { readFileSync } = require("../fileSystem")
 const csvParser = require("../csvParser")
 
 const CSV_PATH = __dirname + "/test.csv"
@@ -33,7 +33,7 @@ const SCHEMAS = {
 }
 
 describe("CSV Parser", () => {
-  const csvString = fs.readFileSync(CSV_PATH, "utf8")
+  const csvString = readFileSync(CSV_PATH, "utf8")
 
   describe("parsing", () => {
    it("returns status and types for a valid CSV transformation", async () => {
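The test now pulls readFileSync from a shared fileSystem module rather than requiring Node's fs directly. One plausible shape for such a wrapper, offered as a sketch (the real module likely exposes more than this):

// fileSystem module (hypothetical sketch): funnel synchronous reads through one
// shared entry point so tests and utilities stop depending on "fs" directly.
const fs = require("fs")

exports.readFileSync = (filepath, encoding = "utf8") =>
  fs.readFileSync(filepath, encoding)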
@@ -50,7 +50,7 @@ exports.Properties = {
 }
 
 exports.getAPIKey = async appId => {
-  if (env.SELF_HOSTED) {
+  if (!env.USE_QUOTAS) {
     return { apiKey: null }
   }
   return apiKeyTable.get({ primary: appId })
@@ -65,8 +65,7 @@ exports.getAPIKey = async appId => {
  * also been reset after this call.
  */
 exports.update = async (apiKey, property, usage) => {
-  // don't try validate in builder
-  if (!env.CLOUD || env.SELF_HOSTED) {
+  if (!env.USE_QUOTAS) {
     return
   }
   try {
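Both quota entry points now key off a single USE_QUOTAS flag instead of combining CLOUD and SELF_HOSTED. A minimal sketch of how that flag might be derived from the environment (the flag name appears in this diff; the parsing below is an assumption):

// environment.js (hypothetical excerpt): quota tracking becomes an explicit opt-in.
module.exports = {
  USE_QUOTAS:
    process.env.USE_QUOTAS === "1" || process.env.USE_QUOTAS === "true",
}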
File diff suppressed because it is too large
@@ -33,6 +33,5 @@
     "pouchdb": "^7.2.2",
     "pouchdb-all-dbs": "^1.0.2",
     "server-destroy": "^1.0.1"
-  },
-  "gitHead": "1b95326b20d1352d36305910259228b96a683dc7"
+  }
 }