+ import { params } from "@sveltech/routify"
+ import { backendUiStore } from "builderStore"
+
+ if ($params.query) {
+ const query = $backendUiStore.queries.find(m => m._id === $params.query)
+ if (query) {
+ backendUiStore.actions.queries.select(query)
+ }
+ }
+
+
+
diff --git a/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/_layout.svelte b/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/_layout.svelte
index 48e3c35daf..6401fbc435 100644
--- a/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/_layout.svelte
+++ b/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/_layout.svelte
@@ -2,12 +2,12 @@
import { params } from "@sveltech/routify"
import { backendUiStore } from "builderStore"
- if ($params.selectedDatasourceId) {
+ if ($params.selectedDatasource) {
const datasource = $backendUiStore.datasources.find(
m => m._id === $params.selectedDatasource
)
if (datasource) {
- backendUiStore.actions.datasources.select(datasource)
+ backendUiStore.actions.datasources.select(datasource._id)
}
}
diff --git a/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/index.svelte b/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/index.svelte
index f859e8d397..46ae11bd07 100644
--- a/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/index.svelte
+++ b/packages/builder/src/pages/[application]/data/datasource/[selectedDatasource]/index.svelte
@@ -1,6 +1,6 @@
+
+{#if $backendUiStore.tables.length === 0}
+ Connect your first datasource to start building.
+{:else}Select a datasource to edit{/if}
+
+
diff --git a/packages/builder/src/pages/[application]/data/table/index.svelte b/packages/builder/src/pages/[application]/data/table/index.svelte
index 6c661bca59..134d9e510f 100644
--- a/packages/builder/src/pages/[application]/data/table/index.svelte
+++ b/packages/builder/src/pages/[application]/data/table/index.svelte
@@ -1,23 +1,11 @@
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 5c58c80560..f369111372 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,9 +1,11 @@
{
- "name": "cli",
- "version": "0.8.7",
+ "name": "@budibase/cli",
+ "version": "0.8.10",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
- "bin": "src/index.js",
+ "bin": {
+ "budi": "src/index.js"
+ },
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"scripts": {
diff --git a/packages/cli/src/constants.js b/packages/cli/src/constants.js
index a601909232..c8da56fa5c 100644
--- a/packages/cli/src/constants.js
+++ b/packages/cli/src/constants.js
@@ -2,3 +2,8 @@ exports.CommandWords = {
HOSTING: "hosting",
HELP: "help",
}
+
+exports.InitTypes = {
+ QUICK: "quick",
+ DIGITAL_OCEAN: "do",
+}
diff --git a/packages/cli/src/hosting/index.js b/packages/cli/src/hosting/index.js
index a8e77f49d7..710397f301 100644
--- a/packages/cli/src/hosting/index.js
+++ b/packages/cli/src/hosting/index.js
@@ -1,11 +1,18 @@
const Command = require("../structures/Command")
-const { CommandWords } = require("../constants")
+const { CommandWords, InitTypes } = require("../constants")
const { lookpath } = require("lookpath")
-const { downloadFile, logErrorToFile, success, info } = require("../utils")
+const {
+ downloadFile,
+ logErrorToFile,
+ success,
+ info,
+ parseEnv,
+} = require("../utils")
const { confirmation } = require("../questions")
const fs = require("fs")
const compose = require("docker-compose")
-const envFile = require("./makeEnv")
+const makeEnv = require("./makeEnv")
+const axios = require("axios")
const BUDIBASE_SERVICES = ["app-service", "worker-service"]
const ERROR_FILE = "docker-error.log"
@@ -13,6 +20,7 @@ const FILE_URLS = [
"https://raw.githubusercontent.com/Budibase/budibase/master/hosting/docker-compose.yaml",
"https://raw.githubusercontent.com/Budibase/budibase/master/hosting/envoy.yaml",
]
+const DO_USER_DATA_URL = "http://169.254.169.254/metadata/v1/user-data"
async function downloadFiles() {
const promises = []
@@ -34,7 +42,7 @@ async function checkDockerConfigured() {
}
function checkInitComplete() {
- if (!fs.existsSync(envFile.filePath)) {
+ if (!fs.existsSync(makeEnv.filePath)) {
throw "Please run the hosting --init command before any other hosting command."
}
}
@@ -50,24 +58,41 @@ async function handleError(func) {
}
}
-async function init() {
+async function init(type) {
+ const isQuick = type === InitTypes.QUICK || type === InitTypes.DIGITAL_OCEAN
await checkDockerConfigured()
- const shouldContinue = await confirmation(
- "This will create multiple files in current directory, should continue?"
- )
- if (!shouldContinue) {
- console.log("Stopping.")
- return
+ if (!isQuick) {
+ const shouldContinue = await confirmation(
+ "This will create multiple files in current directory, should continue?"
+ )
+ if (!shouldContinue) {
+ console.log("Stopping.")
+ return
+ }
}
await downloadFiles()
- await envFile.make()
+ const config = isQuick ? makeEnv.QUICK_CONFIG : {}
+ if (type === InitTypes.DIGITAL_OCEAN) {
+ try {
+ const output = await axios.get(DO_USER_DATA_URL)
+ const response = parseEnv(output.data)
+ for (let [key, value] of Object.entries(makeEnv.ConfigMap)) {
+ if (response[key]) {
+ config[value] = response[key]
+ }
+ }
+ } catch (err) {
+ // don't need to handle error, just don't do anything
+ }
+ }
+ await makeEnv.make(config)
}
async function start() {
await checkDockerConfigured()
checkInitComplete()
console.log(info("Starting services, this may take a moment."))
- const port = envFile.get("MAIN_PORT")
+ const port = makeEnv.get("MAIN_PORT")
await handleError(async () => {
await compose.upAll({ cwd: "./", log: false })
})
@@ -128,8 +153,8 @@ async function update() {
const command = new Command(`${CommandWords.HOSTING}`)
.addHelp("Controls self hosting on the Budibase platform.")
.addSubOption(
- "--init",
- "Configure a self hosted platform in current directory.",
+ "--init [type]",
+ "Configure a self hosted platform in current directory, type can be unspecified or 'quick'.",
init
)
.addSubOption(
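
How the DigitalOcean branch folds droplet user-data into the env config, as a minimal standalone sketch (not part of the patch) assuming the user-data arrives in KEY=value form; the helper names come from the diff above, the values are illustrative:

const { parseEnv } = require("../utils")
const makeEnv = require("./makeEnv")

// pretend this came back from the droplet metadata endpoint
const userData = "HOSTING_KEY=secret123\nMAIN_PORT=10000"
const response = parseEnv(userData)

// start from the quick-init defaults, then overlay any recognised keys
const config = { ...makeEnv.QUICK_CONFIG }
for (let [key, value] of Object.entries(makeEnv.ConfigMap)) {
  if (response[key]) {
    config[value] = response[key]
  }
}
console.log(config) // { key: 'secret123', port: '10000' }
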
diff --git a/packages/cli/src/hosting/makeEnv.js b/packages/cli/src/hosting/makeEnv.js
index b7d3b9e849..c8359dd5e4 100644
--- a/packages/cli/src/hosting/makeEnv.js
+++ b/packages/cli/src/hosting/makeEnv.js
@@ -30,15 +30,27 @@ BUDIBASE_ENVIRONMENT=PRODUCTION`
}
module.exports.filePath = FILE_PATH
+module.exports.ConfigMap = {
+ HOSTING_KEY: "key",
+ MAIN_PORT: "port",
+}
+module.exports.QUICK_CONFIG = {
+ key: "budibase",
+ port: 10000,
+}
-module.exports.make = async () => {
- const hostingKey = await string(
- "Please input the password you'd like to use as your hosting key: "
- )
- const hostingPort = await number(
- "Please enter the port on which you want your installation to run: ",
- 10000
- )
+module.exports.make = async (inputs = {}) => {
+ const hostingKey =
+ inputs.key ||
+ (await string(
+ "Please input the password you'd like to use as your hosting key: "
+ ))
+ const hostingPort =
+ inputs.port ||
+ (await number(
+ "Please enter the port on which you want your installation to run: ",
+ 10000
+ ))
const fileContents = getContents(hostingPort, hostingKey)
fs.writeFileSync(FILE_PATH, fileContents)
console.log(
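
The reworked make() only prompts for whatever is missing from its inputs; a small usage sketch (assumed, not part of the patch):

const makeEnv = require("./makeEnv")

async function quickSetup() {
  // QUICK_CONFIG supplies both the key ("budibase") and the port (10000),
  // so neither interactive prompt fires and the env file is written directly
  await makeEnv.make(makeEnv.QUICK_CONFIG)

  // calling with no inputs (the default {}) falls back to the original prompts
  // await makeEnv.make()
}

quickSetup()
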
diff --git a/packages/cli/src/index.js b/packages/cli/src/index.js
index 526f259fc4..6693446b39 100644
--- a/packages/cli/src/index.js
+++ b/packages/cli/src/index.js
@@ -1,3 +1,4 @@
+#!/usr/bin/env node
const { getCommands } = require("./options")
const { Command } = require("commander")
const { getHelpDescription } = require("./utils")
diff --git a/packages/cli/src/structures/Command.js b/packages/cli/src/structures/Command.js
index 5ca0ce67ee..a8d24566be 100644
--- a/packages/cli/src/structures/Command.js
+++ b/packages/cli/src/structures/Command.js
@@ -13,8 +13,8 @@ class Command {
return this
}
- addSubOption(command, help, func) {
- this.opts.push({ command, help, func })
+ addSubOption(command, help, func, extras = []) {
+ this.opts.push({ command, help, func, extras })
return this
}
@@ -37,13 +37,10 @@ class Command {
command.action(async options => {
try {
let executed = false
- if (thisCmd.func) {
- await thisCmd.func(options)
- executed = true
- }
for (let opt of thisCmd.opts) {
- if (options[opt.command.replace("--", "")]) {
- await opt.func(options)
+ const lookup = opt.command.split(" ")[0].replace("--", "")
+ if (options[lookup]) {
+ await opt.func(options[lookup])
executed = true
}
}
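
Because sub-options can now carry an argument hint such as "--init [type]", only the flag name before the space is used to look the value up on commander's options object, and that value (rather than the whole options object) is forwarded to the handler. A tiny illustration of the lookup, assuming commander parses "--init quick" into options.init === "quick":

const opt = { command: "--init [type]" }
const options = { init: "quick" }

const lookup = opt.command.split(" ")[0].replace("--", "") // "init"
if (options[lookup]) {
  // opt.func now receives the parsed value, e.g. "quick"
  console.log(`opt.func would be called with "${options[lookup]}"`)
}
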
diff --git a/packages/cli/src/utils.js b/packages/cli/src/utils.js
index 1605c277c0..05bb8d4991 100644
--- a/packages/cli/src/utils.js
+++ b/packages/cli/src/utils.js
@@ -44,3 +44,15 @@ exports.info = info => {
exports.logErrorToFile = (file, error) => {
fs.writeFileSync(path.resolve(`./${file}`), `Budiase Error\n${error}`)
}
+
+exports.parseEnv = env => {
+ const lines = env.toString().split("\n")
+ let result = {}
+ for (const line of lines) {
+ const match = line.match(/^([^=:#]+?)[=:](.*)/)
+ if (match) {
+ result[match[1].trim()] = match[2].trim()
+ }
+ }
+ return result
+}
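
parseEnv accepts both KEY=value and KEY: value lines and silently skips comments or anything it cannot parse; a quick illustration with made-up input:

const { parseEnv } = require("./utils")

const userData = [
  "HOSTING_KEY=secret123",
  "MAIN_PORT: 10000",
  "# comment lines and unparseable lines are ignored",
].join("\n")

console.log(parseEnv(userData))
// { HOSTING_KEY: 'secret123', MAIN_PORT: '10000' }
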
diff --git a/packages/cli/yarn.lock b/packages/cli/yarn.lock
index c03030576c..298fb78061 100644
--- a/packages/cli/yarn.lock
+++ b/packages/cli/yarn.lock
@@ -15,30 +15,30 @@
integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==
"@babel/highlight@^7.10.4":
- version "7.12.13"
- resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.12.13.tgz#8ab538393e00370b26271b01fa08f7f27f2e795c"
- integrity sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==
+ version "7.13.10"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.13.10.tgz#a8b2a66148f5b27d666b15d81774347a731d52d1"
+ integrity sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==
dependencies:
"@babel/helper-validator-identifier" "^7.12.11"
chalk "^2.0.0"
js-tokens "^4.0.0"
"@babel/parser@^7.9.4":
- version "7.13.4"
- resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.4.tgz#340211b0da94a351a6f10e63671fa727333d13ab"
- integrity sha512-uvoOulWHhI+0+1f9L4BoozY7U5cIkZ9PgJqvb041d6vypgUmtVPG4vmGm4pSggjl8BELzvHyUeJSUyEMY6b+qA==
+ version "7.13.11"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.13.11.tgz#f93ebfc99d21c1772afbbaa153f47e7ce2f50b88"
+ integrity sha512-PhuoqeHoO9fc4ffMEVk4qb/w/s2iOSWohvbHxLtxui0eBg3Lg5gN1U8wp1V1u61hOWkPQJJyJzGH6Y+grwkq8Q==
"@babel/runtime@^7.9.2":
- version "7.13.7"
- resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.7.tgz#d494e39d198ee9ca04f4dcb76d25d9d7a1dc961a"
- integrity sha512-h+ilqoX998mRVM5FtB5ijRuHUDVt5l3yfoOi2uh18Z/O3hvyaHQ39NpxVkCIG5yFs+mLq/ewFp8Bss6zmWv6ZA==
+ version "7.13.10"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.10.tgz#47d42a57b6095f4468da440388fdbad8bebf0d7d"
+ integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw==
dependencies:
regenerator-runtime "^0.13.4"
-"@eslint/eslintrc@^0.3.0":
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.3.0.tgz#d736d6963d7003b6514e6324bec9c602ac340318"
- integrity sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg==
+"@eslint/eslintrc@^0.4.0":
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.0.tgz#99cc0a0584d72f1df38b900fb062ba995f395547"
+ integrity sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog==
dependencies:
ajv "^6.12.4"
debug "^4.1.1"
@@ -47,7 +47,6 @@
ignore "^4.0.6"
import-fresh "^3.2.1"
js-yaml "^3.13.1"
- lodash "^4.17.20"
minimatch "^3.0.4"
strip-json-comments "^3.1.1"
@@ -93,9 +92,9 @@ ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4:
uri-js "^4.2.2"
ajv@^7.0.2:
- version "7.1.1"
- resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.1.1.tgz#1e6b37a454021fa9941713f38b952fc1c8d32a84"
- integrity sha512-ga/aqDYnUy/o7vbsRTFhhTsNeXiYb5JWDIcRIeZfwRNCefwjNTVYCGdGSUrEmiu3yDK3vFvNbgJxvrQW4JXrYQ==
+ version "7.2.1"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.2.1.tgz#a5ac226171912447683524fa2f1248fcf8bac83d"
+ integrity sha512-+nu0HDv7kNSOua9apAVc979qd932rrZeb3WOvoiD31A/p1mIE5/9bN2027pE2rOPYEdS3UHzsvof4hY+lM9/WQ==
dependencies:
fast-deep-equal "^3.1.1"
json-schema-traverse "^1.0.0"
@@ -434,12 +433,12 @@ eslint-visitor-keys@^2.0.0:
integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==
eslint@^7.20.0:
- version "7.20.0"
- resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.20.0.tgz#db07c4ca4eda2e2316e7aa57ac7fc91ec550bdc7"
- integrity sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw==
+ version "7.22.0"
+ resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.22.0.tgz#07ecc61052fec63661a2cab6bd507127c07adc6f"
+ integrity sha512-3VawOtjSJUQiiqac8MQc+w457iGLfuNGLFn8JmF051tTKbh5/x/0vlcEj8OgDCaw7Ysa2Jn8paGshV7x2abKXg==
dependencies:
"@babel/code-frame" "7.12.11"
- "@eslint/eslintrc" "^0.3.0"
+ "@eslint/eslintrc" "^0.4.0"
ajv "^6.10.0"
chalk "^4.0.0"
cross-spawn "^7.0.2"
@@ -452,10 +451,10 @@ eslint@^7.20.0:
espree "^7.3.1"
esquery "^1.4.0"
esutils "^2.0.2"
- file-entry-cache "^6.0.0"
+ file-entry-cache "^6.0.1"
functional-red-black-tree "^1.0.1"
glob-parent "^5.0.0"
- globals "^12.1.0"
+ globals "^13.6.0"
ignore "^4.0.6"
import-fresh "^3.0.0"
imurmurhash "^0.1.4"
@@ -463,7 +462,7 @@ eslint@^7.20.0:
js-yaml "^3.13.1"
json-stable-stringify-without-jsonify "^1.0.1"
levn "^0.4.1"
- lodash "^4.17.20"
+ lodash "^4.17.21"
minimatch "^3.0.4"
natural-compare "^1.4.0"
optionator "^0.9.1"
@@ -589,7 +588,7 @@ figures@^3.0.0:
dependencies:
escape-string-regexp "^1.0.5"
-file-entry-cache@^6.0.0:
+file-entry-cache@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
@@ -617,9 +616,9 @@ flatted@^3.1.0:
integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==
follow-redirects@^1.10.0:
- version "1.13.2"
- resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.2.tgz#dd73c8effc12728ba5cf4259d760ea5fb83e3147"
- integrity sha512-6mPTgLxYm3r6Bkkg0vNM0HTjfGrOEtsfbhagQvbxDEsEkpNhw582upBaoRZylzen6krEmxXJgt9Ju6HiI4O7BA==
+ version "1.13.3"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.3.tgz#e5598ad50174c1bc4e872301e82ac2cd97f90267"
+ integrity sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==
forever-agent@~0.6.1:
version "0.6.1"
@@ -675,9 +674,9 @@ getpass@^0.1.1:
assert-plus "^1.0.0"
glob-parent@^5.0.0, glob-parent@^5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
- integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+ integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
dependencies:
is-glob "^4.0.1"
@@ -700,6 +699,13 @@ globals@^12.1.0:
dependencies:
type-fest "^0.8.1"
+globals@^13.6.0:
+ version "13.7.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-13.7.0.tgz#aed3bcefd80ad3ec0f0be2cf0c895110c0591795"
+ integrity sha512-Aipsz6ZKRxa/xQkZhNg0qIWXT6x6rD46f6x/PCnBomlttdIyAPak4YD9jTmKpZ72uROSMU87qJtcgpgHaVchiA==
+ dependencies:
+ type-fest "^0.20.2"
+
globby@^11.0.0:
version "11.0.2"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.2.tgz#1af538b766a3b540ebfb58a32b2e2d5897321d83"
@@ -957,9 +963,9 @@ lodash@^4.17.20, lodash@^4.17.21:
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
lookpath@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/lookpath/-/lookpath-1.1.0.tgz#932d68371a2f0b4a5644f03d6a2b4728edba96d2"
- integrity sha512-B9NM7XpVfkyWqfOBI/UW0kVhGw7pJztsduch+1wkbYDi90mYK6/InFul3lG0hYko/VEcVMARVBJ5daFRc5aKCw==
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/lookpath/-/lookpath-1.2.0.tgz#5fccf91497acec085e66d98cb12446c21fe665ae"
+ integrity sha512-cUl+R2bGJcSJiHLVKzGHRTYTBhudbHIgd7s63gfGHteaz0BBKEEz2yw2rgbxZAFze92KlbkiWzL1ylYOmqIPVA==
lru-cache@^6.0.0:
version "6.0.0"
@@ -1417,9 +1423,9 @@ stream-meter@^1.0.4:
readable-stream "^2.1.4"
string-width@^4.1.0, string-width@^4.2.0:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.1.tgz#1933ce1f470973d224368009bd1316cad81d5f4f"
- integrity sha512-LL0OLyN6AnfV9xqGQpDBwedT2Rt63737LxvsRxbcwpa2aIeynBApG2Sm//F3TaLHIR1aJBN52DWklc06b94o5Q==
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5"
+ integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==
dependencies:
emoji-regex "^8.0.0"
is-fullwidth-code-point "^3.0.0"
@@ -1541,6 +1547,11 @@ type-fest@^0.11.0:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1"
integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==
+type-fest@^0.20.2:
+ version "0.20.2"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
+ integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
+
type-fest@^0.8.1:
version "0.8.1"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
@@ -1583,9 +1594,9 @@ uuid@^3.3.2:
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
v8-compile-cache@^2.0.3:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz#9471efa3ef9128d2f7c6a7ca39c4dd6b5055b132"
- integrity sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee"
+ integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==
verror@1.10.0:
version "1.10.0"
diff --git a/packages/server/__mocks__/@elastic/elasticsearch.js b/packages/server/__mocks__/@elastic/elasticsearch.js
new file mode 100644
index 0000000000..c6b2bad48a
--- /dev/null
+++ b/packages/server/__mocks__/@elastic/elasticsearch.js
@@ -0,0 +1,24 @@
+const elastic = {}
+
+elastic.Client = function() {
+ this.index = jest.fn().mockResolvedValue({ body: [] })
+ this.search = jest.fn().mockResolvedValue({
+ body: {
+ hits: {
+ hits: [
+ {
+ _source: {
+ name: "test",
+ },
+ },
+ ],
+ },
+ },
+ })
+ this.update = jest.fn().mockResolvedValue({ body: [] })
+ this.delete = jest.fn().mockResolvedValue({ body: [] })
+
+ this.close = jest.fn()
+}
+
+module.exports = elastic
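
A hypothetical test against the canned search response, assuming jest resolves this manual mock for @elastic/elasticsearch (the test name and index are illustrative):

const { Client } = require("@elastic/elasticsearch")

test("search returns the stubbed hit", async () => {
  const client = new Client()
  const { body } = await client.search({ index: "anything" })
  expect(body.hits.hits[0]._source.name).toEqual("test")
})
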
diff --git a/packages/server/__mocks__/@sendgrid/mail.js b/packages/server/__mocks__/@sendgrid/mail.js
new file mode 100644
index 0000000000..e162237ff4
--- /dev/null
+++ b/packages/server/__mocks__/@sendgrid/mail.js
@@ -0,0 +1,18 @@
+class Email {
+ constructor() {
+ this.apiKey = null
+ }
+
+ setApiKey(apiKey) {
+ this.apiKey = apiKey
+ }
+
+ async send(msg) {
+ if (msg.to === "invalid@test.com") {
+ throw "Invalid"
+ }
+ return msg
+ }
+}
+
+module.exports = new Email()
diff --git a/packages/server/__mocks__/airtable.js b/packages/server/__mocks__/airtable.js
new file mode 100644
index 0000000000..f9b3a1b35d
--- /dev/null
+++ b/packages/server/__mocks__/airtable.js
@@ -0,0 +1,5 @@
+function Airtable() {
+ this.base = jest.fn()
+}
+
+module.exports = Airtable
diff --git a/packages/server/__mocks__/arangojs.js b/packages/server/__mocks__/arangojs.js
new file mode 100644
index 0000000000..1a40529ca0
--- /dev/null
+++ b/packages/server/__mocks__/arangojs.js
@@ -0,0 +1,21 @@
+const arangodb = {}
+
+arangodb.Database = function() {
+ this.query = jest.fn(() => ({
+ all: jest.fn(),
+ }))
+ this.collection = jest.fn(() => "collection")
+ this.close = jest.fn()
+}
+
+arangodb.aql = (strings, ...args) => {
+ let str = strings.join("{}")
+
+ for (let arg of args) {
+ str = str.replace("{}", arg)
+ }
+
+ return str
+}
+
+module.exports = arangodb
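
The aql mock just re-interpolates template arguments into the query string, which is enough for tests that only assert on the generated text; for example, in a test file that resolves this manual mock:

const { aql } = require("arangojs")

test("aql mock flattens the template", () => {
  const collection = "users"
  // the template chunks are joined with "{}" placeholders, then the args are substituted in order
  expect(aql`FOR doc IN ${collection} RETURN doc`).toEqual(
    "FOR doc IN users RETURN doc"
  )
})
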
diff --git a/packages/server/__mocks__/aws-sdk.js b/packages/server/__mocks__/aws-sdk.js
new file mode 100644
index 0000000000..503d098256
--- /dev/null
+++ b/packages/server/__mocks__/aws-sdk.js
@@ -0,0 +1,38 @@
+const aws = {}
+
+const response = body => () => ({ promise: () => body })
+
+function DocumentClient() {
+ this.put = jest.fn(response({}))
+ this.query = jest.fn(
+ response({
+ Items: [],
+ })
+ )
+ this.scan = jest.fn(
+ response({
+ Items: [
+ {
+ Name: "test",
+ },
+ ],
+ })
+ )
+ this.get = jest.fn(response({}))
+ this.update = jest.fn(response({}))
+ this.delete = jest.fn(response({}))
+}
+
+function S3() {
+ this.listObjects = jest.fn(
+ response({
+ Contents: {},
+ })
+ )
+}
+
+aws.DynamoDB = { DocumentClient }
+aws.S3 = S3
+aws.config = { update: jest.fn() }
+
+module.exports = aws
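
The response helper mimics the AWS SDK's request objects: every stub returns { promise: () => body }, so integration code written as await client.scan(params).promise() resolves to the canned body. A hypothetical test (parameters illustrative):

const AWS = require("aws-sdk")

test("scan resolves to the canned items", async () => {
  const client = new AWS.DynamoDB.DocumentClient()
  const { Items } = await client.scan({ TableName: "test" }).promise()
  expect(Items[0].Name).toEqual("test")
})
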
diff --git a/packages/server/__mocks__/mongodb.js b/packages/server/__mocks__/mongodb.js
new file mode 100644
index 0000000000..160ca89ebe
--- /dev/null
+++ b/packages/server/__mocks__/mongodb.js
@@ -0,0 +1,19 @@
+const mongodb = {}
+
+mongodb.MongoClient = function() {
+ this.connect = jest.fn()
+ this.close = jest.fn()
+ this.insertOne = jest.fn()
+ this.find = jest.fn(() => ({ toArray: () => [] }))
+
+ this.collection = jest.fn(() => ({
+ insertOne: this.insertOne,
+ find: this.find,
+ }))
+
+ this.db = () => ({
+ collection: this.collection,
+ })
+}
+
+module.exports = mongodb
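
The MongoClient mock shares its insertOne/find stubs between the instance and the object returned by collection(), so tests can assert on how an integration drove the driver; a hypothetical usage sketch:

const { MongoClient } = require("mongodb")

test("collection stubs are shared with the client instance", async () => {
  const client = new MongoClient()
  await client.connect()
  const collection = client.db("test").collection("docs")
  await collection.insertOne({ name: "test" })
  expect(client.insertOne).toHaveBeenCalledWith({ name: "test" })
  expect(await collection.find().toArray()).toEqual([])
})
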
diff --git a/packages/server/__mocks__/mssql.js b/packages/server/__mocks__/mssql.js
new file mode 100644
index 0000000000..6119c014da
--- /dev/null
+++ b/packages/server/__mocks__/mssql.js
@@ -0,0 +1,14 @@
+const mssql = {}
+
+mssql.query = jest.fn(() => ({
+ recordset: [
+ {
+ a: "string",
+ b: 1,
+ },
+ ],
+}))
+
+mssql.connect = jest.fn(() => ({ recordset: [] }))
+
+module.exports = mssql
diff --git a/packages/server/__mocks__/mysql.js b/packages/server/__mocks__/mysql.js
new file mode 100644
index 0000000000..2b4df3e44b
--- /dev/null
+++ b/packages/server/__mocks__/mysql.js
@@ -0,0 +1,10 @@
+const mysql = {}
+
+const client = {
+ connect: jest.fn(),
+ query: jest.fn(),
+}
+
+mysql.createConnection = jest.fn(() => client)
+
+module.exports = mysql
diff --git a/packages/server/__mocks__/node-fetch.js b/packages/server/__mocks__/node-fetch.js
index 1113791ec2..3cc412b1c6 100644
--- a/packages/server/__mocks__/node-fetch.js
+++ b/packages/server/__mocks__/node-fetch.js
@@ -1,17 +1,35 @@
const fetch = jest.requireActual("node-fetch")
module.exports = async (url, opts) => {
- // mocked data based on url
- if (url.includes("api/apps")) {
+ function json(body, status = 200) {
return {
+ status,
json: async () => {
- return {
- app1: {
- url: "/app1",
- },
- }
+ return body
},
}
}
+
+ // mocked data based on url
+ if (url.includes("api/apps")) {
+ return json({
+ app1: {
+ url: "/app1",
+ },
+ })
+ } else if (url.includes("test.com")) {
+ return json({
+ body: opts.body,
+ url,
+ method: opts.method,
+ })
+ } else if (url.includes("invalid.com")) {
+ return json(
+ {
+ invalid: true,
+ },
+ 404
+ )
+ }
return fetch(url, opts)
}
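
With the json() helper the mock now returns both a status code and a body, and unknown hosts still fall through to the real node-fetch. A hypothetical test against the echoed test.com response (URL and payload are illustrative):

const fetch = require("node-fetch")

test("test.com requests are echoed back", async () => {
  const res = await fetch("https://test.com/api", {
    method: "POST",
    body: JSON.stringify({ a: 1 }),
  })
  expect(res.status).toEqual(200)
  const data = await res.json()
  expect(data.method).toEqual("POST")
  expect(data.url).toEqual("https://test.com/api")
})
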
diff --git a/packages/server/__mocks__/pg.js b/packages/server/__mocks__/pg.js
index 2bda8afad0..0d8b8cc26a 100644
--- a/packages/server/__mocks__/pg.js
+++ b/packages/server/__mocks__/pg.js
@@ -3,18 +3,16 @@ const pg = {}
// constructor
function Client() {}
-Client.prototype.query = async function() {
- return {
- rows: [
- {
- a: "string",
- b: 1,
- },
- ],
- }
-}
+Client.prototype.query = jest.fn(() => ({
+ rows: [
+ {
+ a: "string",
+ b: 1,
+ },
+ ],
+}))
-Client.prototype.connect = async function() {}
+Client.prototype.connect = jest.fn()
pg.Client = Client
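
Moving the pg stubs onto jest.fn() keeps the same canned rows but also lets tests assert on the SQL that was issued; a hypothetical check, using the explicit jest.mock("pg") call the query tests already rely on:

jest.mock("pg")
const { Client } = require("pg")

test("query stub records the issued SQL", async () => {
  const client = new Client()
  const { rows } = await client.query("SELECT * FROM test")
  expect(rows[0].a).toEqual("string")
  expect(Client.prototype.query).toHaveBeenCalledWith("SELECT * FROM test")
})
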
diff --git a/packages/server/build/pouchdb.js b/packages/server/build/pouchdb.js
new file mode 100644
index 0000000000..16942a7a86
--- /dev/null
+++ b/packages/server/build/pouchdb.js
@@ -0,0 +1,9 @@
+function CouchDB() {
+ this.post = jest.fn()
+ this.allDocs = jest.fn(() => ({ rows: [] }))
+ this.put = jest.fn()
+ this.remove = jest.fn()
+ this.plugin = jest.fn()
+}
+
+module.exports = CouchDB
diff --git a/packages/server/package.json b/packages/server/package.json
index 0a53aa8f55..6746b01c89 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -33,7 +33,7 @@
},
"scripts": {
"test": "jest --testPathIgnorePatterns=routes && npm run test:integration",
- "test:integration": "jest --runInBand --coverage",
+ "test:integration": "jest --coverage --detectOpenHandles",
"test:watch": "jest --watch",
"run:docker": "node src/index",
"dev:builder": "cross-env PORT=4001 nodemon src/index.js",
@@ -53,11 +53,17 @@
"src/**/*.js",
"!**/node_modules/**",
"!src/db/views/*.js",
- "!src/api/routes/tests/**/*.js",
"!src/api/controllers/deploy/**/*.js",
- "!src/api/controllers/static/templates/**/*",
- "!src/api/controllers/static/selfhost/**/*",
- "!src/*.js"
+ "!src/*.js",
+ "!src/api/controllers/static/**/*",
+ "!src/db/dynamoClient.js",
+ "!src/utilities/usageQuota.js",
+ "!src/api/routes/tests/**/*",
+ "!src/db/tests/**/*",
+ "!src/tests/**/*",
+ "!src/automations/tests/**/*",
+ "!src/utilities/fileProcessor.js",
+ "!src/utilities/initialiseBudibase.js"
],
"coverageReporters": [
"lcov",
diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js
index cba3e6455a..9e50319d5d 100644
--- a/packages/server/src/api/controllers/application.js
+++ b/packages/server/src/api/controllers/application.js
@@ -91,7 +91,6 @@ async function getAppUrlIfNotInUse(ctx) {
async function createInstance(template) {
const appId = generateAppID()
-
const db = new CouchDB(appId)
await db.put({
_id: "_design/database",
@@ -106,10 +105,10 @@ async function createInstance(template) {
// replicate the template data to the instance DB
// this is currently very hard to test, downloading and importing template files
/* istanbul ignore next */
- if (template) {
+ if (template && template.useTemplate === "true") {
let dbDumpReadStream
- if (template.fileImportPath) {
- dbDumpReadStream = fs.createReadStream(template.fileImportPath)
+ if (template.file) {
+ dbDumpReadStream = fs.createReadStream(template.file.path)
} else {
const templatePath = await downloadTemplate(...template.key.split("/"))
dbDumpReadStream = fs.createReadStream(
@@ -162,8 +161,17 @@ exports.fetchAppPackage = async function(ctx) {
}
exports.create = async function(ctx) {
+ const { useTemplate, templateKey } = ctx.request.body
+ const instanceConfig = {
+ useTemplate,
+ key: templateKey,
+ }
+ if (ctx.request.files && ctx.request.files.templateFile) {
+ instanceConfig.file = ctx.request.files.templateFile
+ }
+ const instance = await createInstance(instanceConfig)
+
const url = await getAppUrlIfNotInUse(ctx)
- const instance = await createInstance(ctx.request.body.template)
const appId = instance._id
const version = packageJson.version
const newApplication = {
diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js
index bf01314795..ff1ecf5b2e 100644
--- a/packages/server/src/api/controllers/static/index.js
+++ b/packages/server/src/api/controllers/static/index.js
@@ -152,26 +152,6 @@ async function processLocalFileUploads({ files, outputPath, appId }) {
return processedFiles
}
-exports.performLocalFileProcessing = async function(ctx) {
- const { files } = ctx.request.body
-
- const processedFileOutputPath = resolve(
- budibaseAppsDir(),
- ctx.user.appId,
- "attachments"
- )
-
- try {
- ctx.body = await processLocalFileUploads({
- files,
- outputPath: processedFileOutputPath,
- appId: ctx.user.appId,
- })
- } catch (err) {
- ctx.throw(500, err)
- }
-}
-
exports.serveApp = async function(ctx) {
let appId = ctx.params.appId
if (env.SELF_HOSTED) {
diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js
index 93f4ec9f94..4cb1d16146 100644
--- a/packages/server/src/api/controllers/table/index.js
+++ b/packages/server/src/api/controllers/table/index.js
@@ -65,12 +65,14 @@ exports.save = async function(ctx) {
// Don't rename if the name is the same
let { _rename } = tableToSave
+ /* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}
// rename row fields when table column is renamed
+ /* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
} else if (_rename && tableToSave.primaryDisplay === _rename.old) {
@@ -159,7 +161,7 @@ exports.destroy = async function(ctx) {
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
ctx.status = 200
- ctx.message = `Table ${ctx.params.tableId} deleted.`
+ ctx.body = { message: `Table ${ctx.params.tableId} deleted.` }
}
exports.validateCSVSchema = async function(ctx) {
diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js
index 73e6e60551..66b3651ccf 100644
--- a/packages/server/src/api/controllers/table/utils.js
+++ b/packages/server/src/api/controllers/table/utils.js
@@ -90,7 +90,8 @@ exports.handleDataImport = async (user, table, dataImport) => {
return table
}
-exports.handleSearchIndexes = async (db, table) => {
+exports.handleSearchIndexes = async (appId, table) => {
+ const db = new CouchDB(appId)
// create relevant search indexes
if (table.indexes && table.indexes.length > 0) {
const currentIndexes = await db.getIndexes()
@@ -150,6 +151,9 @@ class TableSaveFunctions {
constructor({ db, ctx, oldTable, dataImport }) {
this.db = db
this.ctx = ctx
+ if (this.ctx && this.ctx.user) {
+ this.appId = this.ctx.user.appId
+ }
this.oldTable = oldTable
this.dataImport = dataImport
// any rows that need updated
@@ -178,7 +182,7 @@ class TableSaveFunctions {
// after saving
async after(table) {
- table = await exports.handleSearchIndexes(this.db, table)
+ table = await exports.handleSearchIndexes(this.appId, table)
table = await exports.handleDataImport(
this.ctx.user,
table,
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index 05dc299754..f482f3f2a6 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -29,11 +29,13 @@ const controller = {
save: async ctx => {
const db = new CouchDB(ctx.user.appId)
const { originalName, ...viewToSave } = ctx.request.body
-
const designDoc = await db.get("_design/database")
-
const view = viewTemplate(viewToSave)
+ if (!viewToSave.name) {
+ ctx.throw(400, "Cannot create view without a name")
+ }
+
designDoc.views = {
...designDoc.views,
[viewToSave.name]: view,
@@ -60,17 +62,16 @@ const controller = {
await db.put(table)
- ctx.body = table.views[viewToSave.name]
- ctx.message = `View ${viewToSave.name} saved successfully.`
+ ctx.body = {
+ ...table.views[viewToSave.name],
+ name: viewToSave.name,
+ }
},
destroy: async ctx => {
const db = new CouchDB(ctx.user.appId)
const designDoc = await db.get("_design/database")
-
const viewName = decodeURI(ctx.params.viewName)
-
const view = designDoc.views[viewName]
-
delete designDoc.views[viewName]
await db.put(designDoc)
@@ -80,16 +81,17 @@ const controller = {
await db.put(table)
ctx.body = view
- ctx.message = `View ${ctx.params.viewName} saved successfully.`
},
exportView: async ctx => {
const db = new CouchDB(ctx.user.appId)
const designDoc = await db.get("_design/database")
-
const viewName = decodeURI(ctx.query.view)
const view = designDoc.views[viewName]
const format = ctx.query.format
+ if (!format) {
+ ctx.throw(400, "Format must be specified, either csv or json")
+ }
if (view) {
ctx.params.viewName = viewName
@@ -102,6 +104,7 @@ const controller = {
}
} else {
// table all_ view
+ /* istanbul ignore next */
ctx.params.viewName = viewName
}
diff --git a/packages/server/src/api/routes/static.js b/packages/server/src/api/routes/static.js
index c812c4d3b1..30701d578b 100644
--- a/packages/server/src/api/routes/static.js
+++ b/packages/server/src/api/routes/static.js
@@ -29,11 +29,7 @@ if (env.SELF_HOSTED) {
}
router
- .post(
- "/api/attachments/process",
- authorized(BUILDER),
- controller.performLocalFileProcessing
- )
+ .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
.post("/api/attachments/upload", usage, controller.uploadFile)
.get("/componentlibrary", controller.serveComponentLibrary)
.get("/assets/:file*", controller.serveAppAsset)
diff --git a/packages/server/src/api/routes/tests/automation.spec.js b/packages/server/src/api/routes/tests/automation.spec.js
index 9d11219506..5654c14c17 100644
--- a/packages/server/src/api/routes/tests/automation.spec.js
+++ b/packages/server/src/api/routes/tests/automation.spec.js
@@ -3,8 +3,8 @@ const {
getAllTableRows,
clearAllAutomations,
} = require("./utilities/TestFunctions")
-const { basicAutomation } = require("./utilities/structures")
const setup = require("./utilities")
+const { basicAutomation } = setup.structures
const MAX_RETRIES = 4
diff --git a/packages/server/src/api/routes/tests/datasource.spec.js b/packages/server/src/api/routes/tests/datasource.spec.js
index ee1a1c47f5..c1448894b1 100644
--- a/packages/server/src/api/routes/tests/datasource.spec.js
+++ b/packages/server/src/api/routes/tests/datasource.spec.js
@@ -1,6 +1,6 @@
-let {basicDatasource} = require("./utilities/structures")
-let {checkBuilderEndpoint} = require("./utilities/TestFunctions")
let setup = require("./utilities")
+let { basicDatasource } = setup.structures
+let { checkBuilderEndpoint } = require("./utilities/TestFunctions")
describe("/datasources", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/layout.spec.js b/packages/server/src/api/routes/tests/layout.spec.js
index 6b21554d71..4842b2cc8e 100644
--- a/packages/server/src/api/routes/tests/layout.spec.js
+++ b/packages/server/src/api/routes/tests/layout.spec.js
@@ -1,6 +1,6 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
-const { basicLayout } = require("./utilities/structures")
+const { basicLayout } = setup.structures
describe("/layouts", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/misc.spec.js b/packages/server/src/api/routes/tests/misc.spec.js
index 3d3b6047e2..2957e42d90 100644
--- a/packages/server/src/api/routes/tests/misc.spec.js
+++ b/packages/server/src/api/routes/tests/misc.spec.js
@@ -1,6 +1,7 @@
const setup = require("./utilities")
+const tableUtils = require("../../controllers/table/utils")
-describe("/analytics", () => {
+describe("run misc tests", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@@ -10,29 +11,44 @@ describe("/analytics", () => {
await config.init()
})
- describe("isEnabled", () => {
- it("check if analytics enabled", async () => {
- const res = await request
- .get(`/api/analytics`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(typeof res.body.enabled).toEqual("boolean")
+ describe("/analytics", () => {
+ it("check if analytics enabled", async () => {
+ const res = await request
+ .get(`/api/analytics`)
+ .set(config.defaultHeaders())
+ .expect("Content-Type", /json/)
+ .expect(200)
+ expect(typeof res.body.enabled).toEqual("boolean")
+ })
+ })
+
+ describe("/health", () => {
+ it("should confirm healthy", async () => {
+ await request.get("/health").expect(200)
})
})
-})
-describe("/health", () => {
- it("should confirm healthy", async () => {
- let config = setup.getConfig()
- await config.getRequest().get("/health").expect(200)
+ describe("/version", () => {
+ it("should confirm version", async () => {
+ const res = await request.get("/version").expect(200)
+ expect(res.text.split(".").length).toEqual(3)
+ })
})
-})
-describe("/version", () => {
- it("should confirm version", async () => {
- const config = setup.getConfig()
- const res = await config.getRequest().get("/version").expect(200)
- expect(res.text.split(".").length).toEqual(3)
+ describe("test table utilities", () => {
+ it("should be able to import a CSV", async () => {
+ const table = await config.createTable()
+ const dataImport = {
+ csvString: "a,b,c,d\n1,2,3,4"
+ }
+ await tableUtils.handleDataImport({
+ appId: config.getAppId(),
+ userId: "test",
+ }, table, dataImport)
+ const rows = await config.getRows()
+ expect(rows[0].a).toEqual("1")
+ expect(rows[0].b).toEqual("2")
+ expect(rows[0].c).toEqual("3")
+ })
})
})
\ No newline at end of file
diff --git a/packages/server/src/api/routes/tests/permissions.spec.js b/packages/server/src/api/routes/tests/permissions.spec.js
index b24fac57c0..aab5567881 100644
--- a/packages/server/src/api/routes/tests/permissions.spec.js
+++ b/packages/server/src/api/routes/tests/permissions.spec.js
@@ -1,6 +1,6 @@
const { BUILTIN_ROLE_IDS } = require("../../../utilities/security/roles")
const setup = require("./utilities")
-const { basicRow } = require("./utilities/structures")
+const { basicRow } = setup.structures
const HIGHER_ROLE_ID = BUILTIN_ROLE_IDS.BASIC
const STD_ROLE_ID = BUILTIN_ROLE_IDS.PUBLIC
diff --git a/packages/server/src/api/routes/tests/query.spec.js b/packages/server/src/api/routes/tests/query.spec.js
index aa0e5428c5..87938c6a37 100644
--- a/packages/server/src/api/routes/tests/query.spec.js
+++ b/packages/server/src/api/routes/tests/query.spec.js
@@ -1,9 +1,9 @@
// mock out postgres for this
jest.mock("pg")
-const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
-const { basicQuery, basicDatasource } = require("./utilities/structures")
const setup = require("./utilities")
+const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+const { basicQuery, basicDatasource } = setup.structures
describe("/queries", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js
index 9bb38b295a..062450cf63 100644
--- a/packages/server/src/api/routes/tests/role.spec.js
+++ b/packages/server/src/api/routes/tests/role.spec.js
@@ -2,8 +2,8 @@ const { BUILTIN_ROLE_IDS } = require("../../../utilities/security/roles")
const {
BUILTIN_PERMISSION_IDS,
} = require("../../../utilities/security/permissions")
-const { basicRole } = require("./utilities/structures")
const setup = require("./utilities")
+const { basicRole } = setup.structures
describe("/roles", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/routing.spec.js b/packages/server/src/api/routes/tests/routing.spec.js
index 70d1632bf3..beb1659b2a 100644
--- a/packages/server/src/api/routes/tests/routing.spec.js
+++ b/packages/server/src/api/routes/tests/routing.spec.js
@@ -1,5 +1,5 @@
const setup = require("./utilities")
-const { basicScreen } = require("./utilities/structures")
+const { basicScreen } = setup.structures
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const { BUILTIN_ROLE_IDS } = require("../../../utilities/security/roles")
diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js
index 1442e4eb75..652a17366d 100644
--- a/packages/server/src/api/routes/tests/row.spec.js
+++ b/packages/server/src/api/routes/tests/row.spec.js
@@ -1,7 +1,6 @@
const { outputProcessing } = require("../../../utilities/rowProcessor")
-const env = require("../../../environment")
-const { basicRow } = require("./utilities/structures")
const setup = require("./utilities")
+const { basicRow } = setup.structures
describe("/rows", () => {
let request = setup.getRequest()
@@ -349,7 +348,7 @@ describe("/rows", () => {
const view = await config.createView()
const row = await config.createRow()
const res = await request
- .get(`/api/views/${view._id}`)
+ .get(`/api/views/${view.name}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
diff --git a/packages/server/src/api/routes/tests/screen.spec.js b/packages/server/src/api/routes/tests/screen.spec.js
index ae30afd29c..5533bc5e59 100644
--- a/packages/server/src/api/routes/tests/screen.spec.js
+++ b/packages/server/src/api/routes/tests/screen.spec.js
@@ -1,6 +1,6 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
-const { basicScreen } = require("./utilities/structures")
+const { basicScreen } = setup.structures
describe("/screens", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/table.spec.js b/packages/server/src/api/routes/tests/table.spec.js
index 1a2df624f1..df28eed0c2 100644
--- a/packages/server/src/api/routes/tests/table.spec.js
+++ b/packages/server/src/api/routes/tests/table.spec.js
@@ -1,5 +1,6 @@
-const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+const { checkBuilderEndpoint, getDB } = require("./utilities/TestFunctions")
const setup = require("./utilities")
+const { basicTable } = setup.structures
describe("/tables", () => {
let request = setup.getRequest()
@@ -12,25 +13,22 @@ describe("/tables", () => {
})
describe("create", () => {
- it("returns a success message when the table is successfully created", done => {
- request
+ it("returns a success message when the table is successfully created", async () => {
+ const res = await request
.post(`/api/tables`)
- .send({
+ .send({
name: "TestTable",
key: "name",
schema: {
- name: { type: "string" }
+ name: {type: "string"}
}
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
- .end(async (err, res) => {
- expect(res.res.statusMessage).toEqual("Table TestTable saved successfully.")
- expect(res.body.name).toEqual("TestTable")
- done()
- })
- })
+ expect(res.res.statusMessage).toEqual("Table TestTable saved successfully.")
+ expect(res.body.name).toEqual("TestTable")
+ })
it("renames all the row fields for a table when a schema key is renamed", async () => {
const testTable = await config.createTable()
@@ -46,7 +44,7 @@ describe("/tables", () => {
const updatedTable = await request
.post(`/api/tables`)
- .send({
+ .send({
_id: testTable._id,
_rev: testTable._rev,
name: "TestTable",
@@ -56,41 +54,40 @@ describe("/tables", () => {
updated: "updatedName"
},
schema: {
- updatedName: { type: "string" }
+ updatedName: {type: "string"}
}
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
+ expect(updatedTable.res.statusMessage).toEqual("Table TestTable saved successfully.")
+ expect(updatedTable.body.name).toEqual("TestTable")
- expect(updatedTable.res.statusMessage).toEqual("Table TestTable saved successfully.")
- expect(updatedTable.body.name).toEqual("TestTable")
+ const res = await request
+ .get(`/api/${testTable._id}/rows/${testRow.body._id}`)
+ .set(config.defaultHeaders())
+ .expect('Content-Type', /json/)
+ .expect(200)
- const res = await request
- .get(`/api/${testTable._id}/rows/${testRow.body._id}`)
- .set(config.defaultHeaders())
- .expect('Content-Type', /json/)
- .expect(200)
+ expect(res.body.updatedName).toEqual("test")
+ expect(res.body.name).toBeUndefined()
+ })
- expect(res.body.updatedName).toEqual("test")
- expect(res.body.name).toBeUndefined()
- })
-
- it("should apply authorization to endpoint", async () => {
- await checkBuilderEndpoint({
- config,
- method: "POST",
- url: `/api/tables`,
- body: {
- name: "TestTable",
- key: "name",
- schema: {
- name: { type: "string" }
- }
+ it("should apply authorization to endpoint", async () => {
+ await checkBuilderEndpoint({
+ config,
+ method: "POST",
+ url: `/api/tables`,
+ body: {
+ name: "TestTable",
+ key: "name",
+ schema: {
+ name: {type: "string"}
}
- })
+ }
})
})
+ })
describe("fetch", () => {
let testTable
@@ -103,28 +100,91 @@ describe("/tables", () => {
delete testTable._rev
})
- it("returns all the tables for that instance in the response body", done => {
- request
+ it("returns all the tables for that instance in the response body", async () => {
+ const res = await request
.get(`/api/tables`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
- .end(async (_, res) => {
- const fetchedTable = res.body[0]
- expect(fetchedTable.name).toEqual(testTable.name)
- expect(fetchedTable.type).toEqual("table")
- done()
- })
+ const fetchedTable = res.body[0]
+ expect(fetchedTable.name).toEqual(testTable.name)
+ expect(fetchedTable.type).toEqual("table")
})
it("should apply authorization to endpoint", async () => {
- await checkBuilderEndpoint({
- config,
- method: "GET",
- url: `/api/tables`,
- })
+ await checkBuilderEndpoint({
+ config,
+ method: "GET",
+ url: `/api/tables`,
})
})
+ })
+
+ describe("indexing", () => {
+ it("should be able to create a table with indexes", async () => {
+ const db = getDB(config)
+ const indexCount = (await db.getIndexes()).total_rows
+ const table = basicTable()
+ table.indexes = ["name"]
+ const res = await request
+ .post(`/api/tables`)
+ .send(table)
+ .set(config.defaultHeaders())
+ .expect('Content-Type', /json/)
+ .expect(200)
+ expect(res.body._id).toBeDefined()
+ expect(res.body._rev).toBeDefined()
+ expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1)
+ // update index to see what happens
+ table.indexes = ["name", "description"]
+ await request
+ .post(`/api/tables`)
+ .send({
+ ...table,
+ _id: res.body._id,
+ _rev: res.body._rev,
+ })
+ .set(config.defaultHeaders())
+ .expect('Content-Type', /json/)
+ .expect(200)
+ // shouldn't have created a new index
+ expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1)
+ })
+ })
+
+ describe("updating user table", () => {
+ it("should add roleId and email field when adjusting user table schema", async () => {
+ const res = await request
+ .post(`/api/tables`)
+ .send({
+ ...basicTable(),
+ _id: "ta_users",
+ })
+ .set(config.defaultHeaders())
+ .expect('Content-Type', /json/)
+ .expect(200)
+ expect(res.body.schema.email).toBeDefined()
+ expect(res.body.schema.roleId).toBeDefined()
+ })
+ })
+
+ describe("validate csv", () => {
+ it("should be able to validate a CSV layout", async () => {
+ const res = await request
+ .post(`/api/tables/csv/validate`)
+ .send({
+ csvString: "a,b,c,d\n1,2,3,4"
+ })
+ .set(config.defaultHeaders())
+ .expect('Content-Type', /json/)
+ .expect(200)
+ expect(res.body.schema).toBeDefined()
+ expect(res.body.schema.a).toEqual({
+ type: "string",
+ success: true,
+ })
+ })
+ })
describe("destroy", () => {
let testTable
@@ -137,19 +197,16 @@ describe("/tables", () => {
delete testTable._rev
})
- it("returns a success response when a table is deleted.", async done => {
- request
+ it("returns a success response when a table is deleted.", async () => {
+ const res = await request
.delete(`/api/tables/${testTable._id}/${testTable._rev}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
- .end(async (_, res) => {
- expect(res.res.statusMessage).toEqual(`Table ${testTable._id} deleted.`)
- done()
- })
- })
+ expect(res.body.message).toEqual(`Table ${testTable._id} deleted.`)
+ })
- it("deletes linked references to the table after deletion", async done => {
+ it("deletes linked references to the table after deletion", async () => {
const linkedTable = await config.createTable({
name: "LinkedTable",
type: "table",
@@ -171,18 +228,15 @@ describe("/tables", () => {
},
})
- request
+ const res = await request
.delete(`/api/tables/${testTable._id}/${testTable._rev}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
- .end(async (_, res) => {
- expect(res.res.statusMessage).toEqual(`Table ${testTable._id} deleted.`)
- const dependentTable = await config.getTable(linkedTable._id)
- expect(dependentTable.schema.TestTable).not.toBeDefined()
- done()
- })
- })
+ expect(res.body.message).toEqual(`Table ${testTable._id} deleted.`)
+ const dependentTable = await config.getTable(linkedTable._id)
+ expect(dependentTable.schema.TestTable).not.toBeDefined()
+ })
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({
@@ -191,6 +245,5 @@ describe("/tables", () => {
url: `/api/tables/${testTable._id}/${testTable._rev}`,
})
})
-
})
})
diff --git a/packages/server/src/api/routes/tests/user.spec.js b/packages/server/src/api/routes/tests/user.spec.js
index 5e7ec9e9d4..808f1a2622 100644
--- a/packages/server/src/api/routes/tests/user.spec.js
+++ b/packages/server/src/api/routes/tests/user.spec.js
@@ -1,7 +1,7 @@
const { BUILTIN_ROLE_IDS } = require("../../../utilities/security/roles")
const { checkPermissionsEndpoint } = require("./utilities/TestFunctions")
-const { basicUser } = require("./utilities/structures")
const setup = require("./utilities")
+const { basicUser } = setup.structures
describe("/users", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.js b/packages/server/src/api/routes/tests/utilities/TestFunctions.js
index 534119d279..313b9e63a8 100644
--- a/packages/server/src/api/routes/tests/utilities/TestFunctions.js
+++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.js
@@ -1,5 +1,6 @@
const rowController = require("../../../controllers/row")
const appController = require("../../../controllers/application")
+const CouchDB = require("../../../../db")
function Request(appId, params) {
this.user = { appId }
@@ -77,3 +78,7 @@ exports.checkPermissionsEndpoint = async ({
.set(failHeader)
.expect(403)
}
+
+exports.getDB = config => {
+ return new CouchDB(config.getAppId())
+}
diff --git a/packages/server/src/api/routes/tests/utilities/controllers.js b/packages/server/src/api/routes/tests/utilities/controllers.js
deleted file mode 100644
index a4eb9ac9de..0000000000
--- a/packages/server/src/api/routes/tests/utilities/controllers.js
+++ /dev/null
@@ -1,15 +0,0 @@
-module.exports = {
- table: require("../../../controllers/table"),
- row: require("../../../controllers/row"),
- role: require("../../../controllers/role"),
- perms: require("../../../controllers/permission"),
- view: require("../../../controllers/view"),
- app: require("../../../controllers/application"),
- user: require("../../../controllers/user"),
- automation: require("../../../controllers/automation"),
- datasource: require("../../../controllers/datasource"),
- query: require("../../../controllers/query"),
- screen: require("../../../controllers/screen"),
- webhook: require("../../../controllers/webhook"),
- layout: require("../../../controllers/layout"),
-}
diff --git a/packages/server/src/api/routes/tests/utilities/index.js b/packages/server/src/api/routes/tests/utilities/index.js
index 7126f141e2..ed5c98cc48 100644
--- a/packages/server/src/api/routes/tests/utilities/index.js
+++ b/packages/server/src/api/routes/tests/utilities/index.js
@@ -1,4 +1,5 @@
-const TestConfig = require("./TestConfiguration")
+const TestConfig = require("../../../../tests/utilities/TestConfiguration")
+const structures = require("../../../../tests/utilities/structures")
const env = require("../../../../environment")
exports.delay = ms => new Promise(resolve => setTimeout(resolve, ms))
@@ -51,3 +52,5 @@ exports.switchToCloudForFunction = async func => {
throw error
}
}
+
+exports.structures = structures
diff --git a/packages/server/src/api/routes/tests/view.spec.js b/packages/server/src/api/routes/tests/view.spec.js
index a80b09d3a0..3bfbacccbe 100644
--- a/packages/server/src/api/routes/tests/view.spec.js
+++ b/packages/server/src/api/routes/tests/view.spec.js
@@ -29,9 +29,7 @@ describe("/views", () => {
.expect("Content-Type", /json/)
.expect(200)
- expect(res.res.statusMessage).toEqual(
- "View TestView saved successfully."
- )
+ expect(res.body.tableId).toBe(table._id)
})
it("updates the table row with the new view metadata", async () => {
@@ -46,10 +44,8 @@ describe("/views", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
+ expect(res.body.tableId).toBe(table._id)
- expect(res.res.statusMessage).toEqual(
- "View TestView saved successfully."
- )
const updatedTable = await config.getTable(table._id)
expect(updatedTable.views).toEqual({
TestView: {
@@ -173,4 +169,49 @@ describe("/views", () => {
expect(res.body).toMatchSnapshot()
})
})
+
+ describe("destroy", () => {
+ it("should be able to delete a view", async () => {
+ const table = await config.createTable()
+ const view = await config.createView()
+ const res = await request
+ .delete(`/api/views/${view.name}`)
+ .set(config.defaultHeaders())
+ .expect("Content-Type", /json/)
+ .expect(200)
+ expect(res.body.map).toBeDefined()
+ expect(res.body.meta.tableId).toEqual(table._id)
+ })
+ })
+
+ describe("exportView", () => {
+ it("should be able to delete a view", async () => {
+ await config.createTable()
+ await config.createRow()
+ const view = await config.createView()
+ let res = await request
+ .get(`/api/views/export?view=${view.name}&format=json`)
+ .set(config.defaultHeaders())
+ .expect(200)
+ let error
+ try {
+ const obj = JSON.parse(res.text)
+ expect(obj.length).toBe(1)
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeUndefined()
+ res = await request
+ .get(`/api/views/export?view=${view.name}&format=csv`)
+ .set(config.defaultHeaders())
+ .expect(200)
+ // this shouldn't be JSON
+ try {
+ JSON.parse(res.text)
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeDefined()
+ })
+ })
})
diff --git a/packages/server/src/api/routes/tests/webhook.spec.js b/packages/server/src/api/routes/tests/webhook.spec.js
index 2bf5445a09..7fb7a26fc1 100644
--- a/packages/server/src/api/routes/tests/webhook.spec.js
+++ b/packages/server/src/api/routes/tests/webhook.spec.js
@@ -1,6 +1,6 @@
const setup = require("./utilities")
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
-const { basicWebhook, basicAutomation } = require("./utilities/structures")
+const { basicWebhook, basicAutomation } = setup.structures
describe("/webhooks", () => {
let request = setup.getRequest()
diff --git a/packages/server/src/app.js b/packages/server/src/app.js
index 15e996cfe6..8bbea00474 100644
--- a/packages/server/src/app.js
+++ b/packages/server/src/app.js
@@ -9,7 +9,6 @@ const env = require("./environment")
const eventEmitter = require("./events")
const automations = require("./automations/index")
const Sentry = require("@sentry/node")
-const selfhost = require("./selfhost")
const app = new Koa()
@@ -66,11 +65,7 @@ module.exports = server.listen(env.PORT || 0, async () => {
console.log(`Budibase running on ${JSON.stringify(server.address())}`)
env._set("PORT", server.address().port)
eventEmitter.emitPort(env.PORT)
- automations.init()
- // only init the self hosting DB info in the Pouch, not needed in self hosting prod
- if (!env.CLOUD) {
- await selfhost.init()
- }
+ await automations.init()
})
process.on("uncaughtException", err => {
diff --git a/packages/server/src/automations/actions.js b/packages/server/src/automations/actions.js
index ea88c2d1d6..ee57f5a109 100644
--- a/packages/server/src/automations/actions.js
+++ b/packages/server/src/automations/actions.js
@@ -37,10 +37,12 @@ let AUTOMATION_BUCKET = env.AUTOMATION_BUCKET
let AUTOMATION_DIRECTORY = env.AUTOMATION_DIRECTORY
let MANIFEST = null
+/* istanbul ignore next */
function buildBundleName(pkgName, version) {
return `${pkgName}@${version}.min.js`
}
+/* istanbul ignore next */
async function downloadPackage(name, version, bundleName) {
await download(
`${AUTOMATION_BUCKET}/${name}/${version}/${bundleName}`,
@@ -49,6 +51,7 @@ async function downloadPackage(name, version, bundleName) {
return require(join(AUTOMATION_DIRECTORY, bundleName))
}
+/* istanbul ignore next */
module.exports.getAction = async function(actionName) {
if (BUILTIN_ACTIONS[actionName] != null) {
return BUILTIN_ACTIONS[actionName]
@@ -96,5 +99,6 @@ module.exports.init = async function() {
return MANIFEST
}
+// definitions will have downloaded ones added to it, while builtin won't
module.exports.DEFINITIONS = BUILTIN_DEFINITIONS
module.exports.BUILTIN_DEFINITIONS = BUILTIN_DEFINITIONS
diff --git a/packages/server/src/automations/index.js b/packages/server/src/automations/index.js
index a983495fb5..9aba399133 100644
--- a/packages/server/src/automations/index.js
+++ b/packages/server/src/automations/index.js
@@ -30,23 +30,22 @@ async function updateQuota(automation) {
/**
* This module is built purely to kick off the worker farm and manage the inputs/outputs
*/
-module.exports.init = function() {
- actions.init().then(() => {
- triggers.automationQueue.process(async job => {
- try {
- if (env.CLOUD && job.data.automation && !env.SELF_HOSTED) {
- job.data.automation.apiKey = await updateQuota(job.data.automation)
- }
- if (env.BUDIBASE_ENVIRONMENT === "PRODUCTION") {
- await runWorker(job)
- } else {
- await singleThread(job)
- }
- } catch (err) {
- console.error(
- `${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
- )
+module.exports.init = async function() {
+ await actions.init()
+ triggers.automationQueue.process(async job => {
+ try {
+ if (env.CLOUD && job.data.automation && !env.SELF_HOSTED) {
+ job.data.automation.apiKey = await updateQuota(job.data.automation)
}
- })
+ if (env.BUDIBASE_ENVIRONMENT === "PRODUCTION") {
+ await runWorker(job)
+ } else {
+ await singleThread(job)
+ }
+ } catch (err) {
+ console.error(
+ `${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
+ )
+ }
})
}
diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js
index aeb75958f6..ef136e1131 100644
--- a/packages/server/src/automations/steps/createRow.js
+++ b/packages/server/src/automations/steps/createRow.js
@@ -59,15 +59,14 @@ module.exports.definition = {
}
module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
- // TODO: better logging of when actions are missed due to missing parameters
if (inputs.row == null || inputs.row.tableId == null) {
- return
+ return {
+ success: false,
+ response: {
+ message: "Invalid inputs",
+ },
+ }
}
- inputs.row = await automationUtils.cleanUpRow(
- appId,
- inputs.row.tableId,
- inputs.row
- )
// have to clean up the row, remove the table from it
const ctx = {
params: {
@@ -81,6 +80,11 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
}
try {
+ inputs.row = await automationUtils.cleanUpRow(
+ appId,
+ inputs.row.tableId,
+ inputs.row
+ )
if (env.CLOUD) {
await usage.update(apiKey, usage.Properties.ROW, 1)
}
diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js
index 8edee38dee..ea4d60a04e 100644
--- a/packages/server/src/automations/steps/deleteRow.js
+++ b/packages/server/src/automations/steps/deleteRow.js
@@ -51,9 +51,13 @@ module.exports.definition = {
}
module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
- // TODO: better logging of when actions are missed due to missing parameters
if (inputs.id == null || inputs.revision == null) {
- return
+ return {
+ success: false,
+ response: {
+ message: "Invalid inputs",
+ },
+ }
}
let ctx = {
params: {
diff --git a/packages/server/src/automations/steps/filter.js b/packages/server/src/automations/steps/filter.js
index 4286cd44e8..586e424cc4 100644
--- a/packages/server/src/automations/steps/filter.js
+++ b/packages/server/src/automations/steps/filter.js
@@ -12,6 +12,9 @@ const PrettyLogicConditions = {
[LogicConditions.LESS_THAN]: "Less than",
}
+module.exports.LogicConditions = LogicConditions
+module.exports.PrettyLogicConditions = PrettyLogicConditions
+
module.exports.definition = {
name: "Filter",
tagline: "{{inputs.field}} {{inputs.condition}} {{inputs.value}}",
@@ -64,7 +67,7 @@ module.exports.run = async function filter({ inputs }) {
value = Date.parse(value)
field = Date.parse(field)
}
- let success
+ let success = false
if (typeof field !== "object" && typeof value !== "object") {
switch (condition) {
case LogicConditions.EQUAL:
@@ -79,8 +82,6 @@ module.exports.run = async function filter({ inputs }) {
case LogicConditions.LESS_THAN:
success = field < value
break
- default:
- return
}
} else {
success = false
diff --git a/packages/server/src/automations/steps/outgoingWebhook.js b/packages/server/src/automations/steps/outgoingWebhook.js
index 817ec424b2..ab8c747c58 100644
--- a/packages/server/src/automations/steps/outgoingWebhook.js
+++ b/packages/server/src/automations/steps/outgoingWebhook.js
@@ -87,6 +87,7 @@ module.exports.run = async function({ inputs }) {
success: response.status === 200,
}
} catch (err) {
+ /* istanbul ignore next */
return {
success: false,
response: err,
diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js
index 3b83f961f5..a545662cf8 100644
--- a/packages/server/src/automations/steps/updateRow.js
+++ b/packages/server/src/automations/steps/updateRow.js
@@ -55,14 +55,14 @@ module.exports.definition = {
module.exports.run = async function({ inputs, appId, emitter }) {
if (inputs.rowId == null || inputs.row == null) {
- return
+ return {
+ success: false,
+ response: {
+ message: "Invalid inputs",
+ },
+ }
}
- inputs.row = await automationUtils.cleanUpRowById(
- appId,
- inputs.rowId,
- inputs.row
- )
// clear any falsy properties so that they aren't updated
for (let propKey of Object.keys(inputs.row)) {
if (!inputs.row[propKey] || inputs.row[propKey] === "") {
@@ -73,7 +73,7 @@ module.exports.run = async function({ inputs, appId, emitter }) {
// have to clean up the row, remove the table from it
const ctx = {
params: {
- id: inputs.rowId,
+ rowId: inputs.rowId,
},
request: {
body: inputs.row,
@@ -83,6 +83,11 @@ module.exports.run = async function({ inputs, appId, emitter }) {
}
try {
+ inputs.row = await automationUtils.cleanUpRowById(
+ appId,
+ inputs.rowId,
+ inputs.row
+ )
await rowController.patch(ctx)
return {
row: ctx.body,
diff --git a/packages/server/src/automations/tests/automation.spec.js b/packages/server/src/automations/tests/automation.spec.js
new file mode 100644
index 0000000000..f4d3b4c865
--- /dev/null
+++ b/packages/server/src/automations/tests/automation.spec.js
@@ -0,0 +1,152 @@
+const automation = require("../index")
+const usageQuota = require("../../utilities/usageQuota")
+const thread = require("../thread")
+const triggers = require("../triggers")
+const { basicAutomation, basicTable } = require("../../tests/utilities/structures")
+const { wait } = require("../../utilities")
+const env = require("../../environment")
+const { makePartial } = require("../../tests/utilities")
+const { cleanInputValues } = require("../automationUtils")
+const setup = require("./utilities")
+
+let workerJob
+
+jest.mock("../../utilities/usageQuota")
+usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
+jest.mock("../thread")
+jest.spyOn(global.console, "error")
+jest.mock("worker-farm", () => {
+ return () => {
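+ // the first queued job invokes its callback with no error, the second with "Error"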
+ const value = jest
+ .fn()
+ .mockReturnValueOnce(undefined)
+ .mockReturnValueOnce("Error")
+ return (input, callback) => {
+ workerJob = input
+ if (callback) {
+ callback(value())
+ }
+ }
+ }
+})
+
+describe("Run through some parts of the automations system", () => {
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await automation.init()
+ await config.init()
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to init in builder", async () => {
+ await triggers.externalTrigger(basicAutomation(), { a: 1 })
+ await wait(100)
+ expect(workerJob).toBeUndefined()
+ expect(thread).toHaveBeenCalled()
+ })
+
+ it("should be able to init in cloud", async () => {
+ env.CLOUD = true
+ env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
+ await triggers.externalTrigger(basicAutomation(), { a: 1 })
+ await wait(100)
+ // getAPIKey is mocked above to return { apiKey: "test" }, so the quota update should be called with that key
+ expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
+ expect(workerJob).toBeDefined()
+ env.BUDIBASE_ENVIRONMENT = "JEST"
+ env.CLOUD = false
+ })
+
+ it("try error scenario", async () => {
+ env.CLOUD = true
+ env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
+ // the second call will throw an error
+ await triggers.externalTrigger(basicAutomation(), { a: 1 })
+ await wait(100)
+ expect(console.error).toHaveBeenCalled()
+ env.BUDIBASE_ENVIRONMENT = "JEST"
+ env.CLOUD = false
+ })
+
+ it("should be able to check triggering row filling", async () => {
+ const automation = basicAutomation()
+ let table = basicTable()
+ table.schema.boolean = {
+ type: "boolean",
+ constraints: {
+ type: "boolean",
+ },
+ }
+ table.schema.number = {
+ type: "number",
+ constraints: {
+ type: "number",
+ },
+ }
+ table.schema.datetime = {
+ type: "datetime",
+ constraints: {
+ type: "datetime",
+ },
+ }
+ table = await config.createTable(table)
+ automation.definition.trigger.inputs.tableId = table._id
+ const params = await triggers.fillRowOutput(automation, { appId: config.getAppId() })
+ expect(params.row).toBeDefined()
+ const date = new Date(params.row.datetime)
+ expect(typeof params.row.name).toBe("string")
+ expect(typeof params.row.boolean).toBe("boolean")
+ expect(typeof params.row.number).toBe("number")
+ expect(date.getFullYear()).toBe(1970)
+ })
+
+ it("should check coercion", async () => {
+ const table = await config.createTable()
+ const automation = basicAutomation()
+ automation.definition.trigger.inputs.tableId = table._id
+ automation.definition.trigger.stepId = "APP"
+ automation.definition.trigger.inputs.fields = { a: "number" }
+ await triggers.externalTrigger(automation, {
+ appId: config.getAppId(),
+ fields: {
+ a: "1"
+ }
+ })
+ await wait(100)
+ expect(thread).toHaveBeenCalledWith(makePartial({
+ data: {
+ event: {
+ fields: {
+ a: 1
+ }
+ }
+ }
+ }))
+ })
+
+ it("should be able to clean inputs with the utilities", () => {
+ // can't clean without a schema
+ let output = cleanInputValues({a: "1"})
+ expect(output.a).toBe("1")
+ output = cleanInputValues({a: "1", b: "true", c: "false", d: 1, e: "help"}, {
+ properties: {
+ a: {
+ type: "number",
+ },
+ b: {
+ type: "boolean",
+ },
+ c: {
+ type: "boolean",
+ }
+ }
+ })
+ expect(output.a).toBe(1)
+ expect(output.b).toBe(true)
+ expect(output.c).toBe(false)
+ expect(output.d).toBe(1)
+ expect(output.e).toBe("help")
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/automations/tests/createRow.spec.js b/packages/server/src/automations/tests/createRow.spec.js
new file mode 100644
index 0000000000..0be2803e47
--- /dev/null
+++ b/packages/server/src/automations/tests/createRow.spec.js
@@ -0,0 +1,57 @@
+const usageQuota = require("../../utilities/usageQuota")
+const env = require("../../environment")
+const setup = require("./utilities")
+
+jest.mock("../../utilities/usageQuota")
+
+describe("test the create row action", () => {
+ let table, row
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await config.init()
+ table = await config.createTable()
+ row = {
+ tableId: table._id,
+ name: "test",
+ description: "test",
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+ row,
+ })
+ expect(res.id).toBeDefined()
+ expect(res.revision).toBeDefined()
+ const gottenRow = await config.getRow(table._id, res.id)
+ expect(gottenRow.name).toEqual("test")
+ expect(gottenRow.description).toEqual("test")
+ })
+
+ it("should return an error (not throw) when bad info provided", async () => {
+ const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+ row: {
+ tableId: "invalid",
+ invalid: "invalid",
+ }
+ })
+ expect(res.success).toEqual(false)
+ })
+
+ it("check usage quota attempts", async () => {
+ env.CLOUD = true
+ await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+ row
+ })
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
+ env.CLOUD = false
+ })
+
+ it("should check invalid inputs return an error", async () => {
+ const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
+ expect(res.success).toEqual(false)
+ })
+})
diff --git a/packages/server/src/automations/tests/createUser.spec.js b/packages/server/src/automations/tests/createUser.spec.js
new file mode 100644
index 0000000000..5f65e260a9
--- /dev/null
+++ b/packages/server/src/automations/tests/createUser.spec.js
@@ -0,0 +1,43 @@
+const usageQuota = require("../../utilities/usageQuota")
+const env = require("../../environment")
+const setup = require("./utilities")
+const { BUILTIN_ROLE_IDS } = require("../../utilities/security/roles")
+const { ViewNames } = require("../../db/utils")
+
+jest.mock("../../utilities/usageQuota")
+
+describe("test the create user action", () => {
+ let config = setup.getConfig()
+ let user
+
+ beforeEach(async () => {
+ await config.init()
+ user = {
+ email: "test@test.com",
+ password: "password",
+ roleId: BUILTIN_ROLE_IDS.POWER
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.CREATE_USER.stepId, user)
+ expect(res.id).toBeDefined()
+ expect(res.revision).toBeDefined()
+ const userDoc = await config.getRow(ViewNames.USERS, res.id)
+ expect(userDoc.email).toEqual(user.email)
+ })
+
+ it("should return an error if no inputs provided", async () => {
+ const res = await setup.runStep(setup.actions.CREATE_USER.stepId, {})
+ expect(res.success).toEqual(false)
+ })
+
+ it("check usage quota attempts", async () => {
+ env.CLOUD = true
+ await setup.runStep(setup.actions.CREATE_USER.stepId, user)
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "users", 1)
+ env.CLOUD = false
+ })
+})
diff --git a/packages/server/src/automations/tests/delay.spec.js b/packages/server/src/automations/tests/delay.spec.js
new file mode 100644
index 0000000000..99046e8171
--- /dev/null
+++ b/packages/server/src/automations/tests/delay.spec.js
@@ -0,0 +1,12 @@
+const setup = require("./utilities")
+
+describe("test the delay logic", () => {
+ it("should be able to run the delay", async () => {
+ const time = 100
+ const before = Date.now()
+ await setup.runStep(setup.logic.DELAY.stepId, { time: time })
+ const now = Date.now()
+ // divide by two so the test always passes as long as there was some sort of delay
+ expect(now - before).toBeGreaterThanOrEqual(time / 2)
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/automations/tests/deleteRow.spec.js b/packages/server/src/automations/tests/deleteRow.spec.js
new file mode 100644
index 0000000000..0d5ff47ed8
--- /dev/null
+++ b/packages/server/src/automations/tests/deleteRow.spec.js
@@ -0,0 +1,58 @@
+const usageQuota = require("../../utilities/usageQuota")
+const env = require("../../environment")
+const setup = require("./utilities")
+
+jest.mock("../../utilities/usageQuota")
+
+describe("test the delete row action", () => {
+ let table, row, inputs
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await config.init()
+ table = await config.createTable()
+ row = await config.createRow()
+ inputs = {
+ tableId: table._id,
+ id: row._id,
+ revision: row._rev,
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+ expect(res.success).toEqual(true)
+ expect(res.response).toBeDefined()
+ expect(res.row._id).toEqual(row._id)
+ let error
+ try {
+ await config.getRow(table._id, res.id)
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeDefined()
+ })
+
+ it("check usage quota attempts", async () => {
+ env.CLOUD = true
+ await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+ env.CLOUD = false
+ })
+
+ it("should check invalid inputs return an error", async () => {
+ const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {})
+ expect(res.success).toEqual(false)
+ })
+
+ it("should return an error when table doesn't exist", async () => {
+ const res = await setup.runStep(setup.actions.DELETE_ROW.stepId, {
+ tableId: "invalid",
+ id: "invalid",
+ revision: "invalid",
+ })
+ expect(res.success).toEqual(false)
+ })
+})
diff --git a/packages/server/src/automations/tests/filter.spec.js b/packages/server/src/automations/tests/filter.spec.js
new file mode 100644
index 0000000000..05361f43ed
--- /dev/null
+++ b/packages/server/src/automations/tests/filter.spec.js
@@ -0,0 +1,48 @@
+const setup = require("./utilities")
+const { LogicConditions } = require("../steps/filter")
+
+describe("test the filter logic", () => {
+ async function checkFilter(field, condition, value, pass = true) {
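+ // helper: run the FILTER logic step with the given inputs and assert whether it reports success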
+ let res = await setup.runStep(setup.logic.FILTER.stepId,
+ { field, condition, value }
+ )
+ expect(res.success).toEqual(pass)
+ }
+
+ it("should be able test equality", async () => {
+ await checkFilter("hello", LogicConditions.EQUAL, "hello", true)
+ await checkFilter("hello", LogicConditions.EQUAL, "no", false)
+ })
+
+ it("should be able to test greater than", async () => {
+ await checkFilter(10, LogicConditions.GREATER_THAN, 5, true)
+ await checkFilter(10, LogicConditions.GREATER_THAN, 15, false)
+ })
+
+ it("should be able to test less than", async () => {
+ await checkFilter(5, LogicConditions.LESS_THAN, 10, true)
+ await checkFilter(15, LogicConditions.LESS_THAN, 10, false)
+ })
+
+ it("should be able to in-equality", async () => {
+ await checkFilter("hello", LogicConditions.NOT_EQUAL, "no", true)
+ await checkFilter(10, LogicConditions.NOT_EQUAL, 10, false)
+ })
+
+ it("check number coercion", async () => {
+ await checkFilter("10", LogicConditions.GREATER_THAN, "5", true)
+ })
+
+ it("check date coercion", async () => {
+ await checkFilter(
+ (new Date()).toISOString(),
+ LogicConditions.GREATER_THAN,
+ (new Date(-10000)).toISOString(),
+ true
+ )
+ })
+
+ it("check objects always false", async () => {
+ await checkFilter({}, LogicConditions.EQUAL, {}, false)
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/automations/tests/outgoingWebhook.spec.js b/packages/server/src/automations/tests/outgoingWebhook.spec.js
new file mode 100644
index 0000000000..f1d8d25ba8
--- /dev/null
+++ b/packages/server/src/automations/tests/outgoingWebhook.spec.js
@@ -0,0 +1,39 @@
+const setup = require("./utilities")
+const fetch = require("node-fetch")
+
+jest.mock("node-fetch")
+
+describe("test the outgoing webhook action", () => {
+ let inputs
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await config.init()
+ inputs = {
+ requestMethod: "POST",
+ url: "www.test.com",
+ requestBody: JSON.stringify({
+ a: 1,
+ }),
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.OUTGOING_WEBHOOK.stepId, inputs)
+ expect(res.success).toEqual(true)
+ expect(res.response.url).toEqual("http://www.test.com")
+ expect(res.response.method).toEqual("POST")
+ expect(res.response.body.a).toEqual(1)
+ })
+
+ it("should return an error if something goes wrong in fetch", async () => {
+ const res = await setup.runStep(setup.actions.OUTGOING_WEBHOOK.stepId, {
+ requestMethod: "GET",
+ url: "www.invalid.com"
+ })
+ expect(res.success).toEqual(false)
+ })
+
+})
diff --git a/packages/server/src/automations/tests/sendEmail.spec.js b/packages/server/src/automations/tests/sendEmail.spec.js
new file mode 100644
index 0000000000..5f3aabfff8
--- /dev/null
+++ b/packages/server/src/automations/tests/sendEmail.spec.js
@@ -0,0 +1,36 @@
+const setup = require("./utilities")
+
+jest.mock("@sendgrid/mail")
+
+describe("test the send email action", () => {
+ let inputs
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await config.init()
+ inputs = {
+ to: "me@test.com",
+ from: "budibase@test.com",
+ subject: "Testing",
+ text: "Email contents",
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.SEND_EMAIL.stepId, inputs)
+ expect(res.success).toEqual(true)
+ // the mocked module echoes the input back in the response
+ expect(res.response.to).toEqual("me@test.com")
+ })
+
+ it("should return an error if input an invalid email address", async () => {
+ const res = await setup.runStep(setup.actions.SEND_EMAIL.stepId, {
+ ...inputs,
+ to: "invalid@test.com",
+ })
+ expect(res.success).toEqual(false)
+ })
+
+})
diff --git a/packages/server/src/automations/tests/updateRow.spec.js b/packages/server/src/automations/tests/updateRow.spec.js
new file mode 100644
index 0000000000..79c998459b
--- /dev/null
+++ b/packages/server/src/automations/tests/updateRow.spec.js
@@ -0,0 +1,45 @@
+const env = require("../../environment")
+const setup = require("./utilities")
+
+describe("test the update row action", () => {
+ let table, row, inputs
+ let config = setup.getConfig()
+
+ beforeEach(async () => {
+ await config.init()
+ table = await config.createTable()
+ row = await config.createRow()
+ inputs = {
+ rowId: row._id,
+ row: {
+ ...row,
+ name: "Updated name",
+ // put a falsy property in so that it gets removed
+ description: "",
+ }
+ }
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to run the action", async () => {
+ const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
+ expect(res.success).toEqual(true)
+ const updatedRow = await config.getRow(table._id, res.id)
+ expect(updatedRow.name).toEqual("Updated name")
+ expect(updatedRow.description).not.toEqual("")
+ })
+
+ it("should check invalid inputs return an error", async () => {
+ const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {})
+ expect(res.success).toEqual(false)
+ })
+
+ it("should return an error when table doesn't exist", async () => {
+ const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
+ row: { _id: "invalid" },
+ rowId: "invalid",
+ })
+ expect(res.success).toEqual(false)
+ })
+})
diff --git a/packages/server/src/automations/tests/utilities/index.js b/packages/server/src/automations/tests/utilities/index.js
new file mode 100644
index 0000000000..ad149d6bde
--- /dev/null
+++ b/packages/server/src/automations/tests/utilities/index.js
@@ -0,0 +1,43 @@
+const TestConfig = require("../../../tests/utilities/TestConfiguration")
+const actions = require("../../actions")
+const logic = require("../../logic")
+const emitter = require("../../../events/index")
+
+let config
+
+exports.getConfig = () => {
+ if (!config) {
+ config = new TestConfig(false)
+ }
+ return config
+}
+
+exports.afterAll = () => {
+ config.end()
+}
+
+exports.runStep = async function runStep(stepId, inputs) {
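+ // look the step up in the built-in action definitions first, otherwise treat it as a logic step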
+ let step
+ if (
+ Object.values(exports.actions)
+ .map(action => action.stepId)
+ .includes(stepId)
+ ) {
+ step = await actions.getAction(stepId)
+ } else {
+ step = logic.getLogic(stepId)
+ }
+ expect(step).toBeDefined()
+ return step({
+ inputs,
+ appId: config ? config.getAppId() : null,
+ // an API key isn't really needed, the usage quota is mocked out and isn't being tested here
+ apiKey: exports.apiKey,
+ emitter,
+ })
+}
+
+exports.apiKey = "test"
+
+exports.actions = actions.BUILTIN_DEFINITIONS
+exports.logic = logic.BUILTIN_DEFINITIONS
diff --git a/packages/server/src/automations/triggers.js b/packages/server/src/automations/triggers.js
index 73ce9edeed..7e50e5ee74 100644
--- a/packages/server/src/automations/triggers.js
+++ b/packages/server/src/automations/triggers.js
@@ -225,6 +225,7 @@ async function queueRelevantRowAutomations(event, eventType) {
}
emitter.on("row:save", async function(event) {
+ /* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
@@ -232,6 +233,7 @@ emitter.on("row:save", async function(event) {
})
emitter.on("row:update", async function(event) {
+ /* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
@@ -239,6 +241,7 @@ emitter.on("row:update", async function(event) {
})
emitter.on("row:delete", async function(event) {
+ /* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return
}
@@ -272,6 +275,7 @@ async function fillRowOutput(automation, params) {
}
params.row = row
} catch (err) {
+ /* istanbul ignore next */
throw "Failed to find table for trigger"
}
return params
@@ -297,6 +301,7 @@ module.exports.externalTrigger = async function(automation, params) {
automationQueue.add({ automation, event: params })
}
+module.exports.fillRowOutput = fillRowOutput
module.exports.automationQueue = automationQueue
module.exports.BUILTIN_DEFINITIONS = BUILTIN_DEFINITIONS
diff --git a/packages/server/src/db/client.js b/packages/server/src/db/client.js
index b5edb1e877..f6dea33a40 100644
--- a/packages/server/src/db/client.js
+++ b/packages/server/src/db/client.js
@@ -30,6 +30,7 @@ const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
allDbs(Pouch)
// replicate your local levelDB pouch to a running HTTP compliant couch or pouchdb server.
+/* istanbul ignore next */
// eslint-disable-next-line no-unused-vars
function replicateLocal() {
Pouch.allDbs().then(dbs => {
diff --git a/packages/server/src/db/linkedRows/LinkController.js b/packages/server/src/db/linkedRows/LinkController.js
index 433bf57ad4..53ce8e45ad 100644
--- a/packages/server/src/db/linkedRows/LinkController.js
+++ b/packages/server/src/db/linkedRows/LinkController.js
@@ -133,12 +133,19 @@ class LinkController {
}
/**
- * Returns whether the two schemas are equal (in the important parts, not a pure equality check)
+ * Returns whether the two link schemas are equal (in the important parts, not a pure equality check)
*/
- areSchemasEqual(schema1, schema2) {
- const compareFields = ["name", "type", "tableId", "fieldName", "autocolumn"]
+ areLinkSchemasEqual(linkSchema1, linkSchema2) {
+ const compareFields = [
+ "name",
+ "type",
+ "tableId",
+ "fieldName",
+ "autocolumn",
+ "relationshipType",
+ ]
for (let field of compareFields) {
- if (schema1[field] !== schema2[field]) {
+ if (linkSchema1[field] !== linkSchema2[field]) {
return false
}
}
@@ -146,24 +153,24 @@ class LinkController {
}
/**
- * Given two the field of this table, and the field of the linked table, this makes sure
+ * Given the link field of this table, and the link field of the linked table, this makes sure
* the state of relationship type is accurate on both.
*/
- handleRelationshipType(field, linkedField) {
+ handleRelationshipType(linkerField, linkedField) {
if (
- !field.relationshipType ||
- field.relationshipType === RelationshipTypes.MANY_TO_MANY
+ !linkerField.relationshipType ||
+ linkerField.relationshipType === RelationshipTypes.MANY_TO_MANY
) {
linkedField.relationshipType = RelationshipTypes.MANY_TO_MANY
// make sure by default all are many to many (if not specified)
- field.relationshipType = RelationshipTypes.MANY_TO_MANY
- } else if (field.relationshipType === RelationshipTypes.MANY_TO_ONE) {
+ linkerField.relationshipType = RelationshipTypes.MANY_TO_MANY
+ } else if (linkerField.relationshipType === RelationshipTypes.MANY_TO_ONE) {
// Ensure that the other side of the relationship is locked to one record
linkedField.relationshipType = RelationshipTypes.ONE_TO_MANY
- } else if (field.relationshipType === RelationshipTypes.ONE_TO_MANY) {
+ } else if (linkerField.relationshipType === RelationshipTypes.ONE_TO_MANY) {
linkedField.relationshipType = RelationshipTypes.MANY_TO_ONE
}
- return { field, linkedField }
+ return { linkerField, linkedField }
}
// all operations here will assume that the table
@@ -336,6 +343,7 @@ class LinkController {
try {
linkedTable = await this._db.get(field.tableId)
} catch (err) {
+ /* istanbul ignore next */
continue
}
const fields = this.handleRelationshipType(field, {
@@ -347,7 +355,7 @@ class LinkController {
})
// update table schema after checking relationship types
- schema[fieldName] = fields.field
+ schema[fieldName] = fields.linkerField
const linkedField = fields.linkedField
if (field.autocolumn) {
@@ -358,7 +366,7 @@ class LinkController {
const existingSchema = linkedTable.schema[field.fieldName]
if (
existingSchema != null &&
- !this.areSchemasEqual(existingSchema, linkedField)
+ !this.areLinkSchemasEqual(existingSchema, linkedField)
) {
throw new Error("Cannot overwrite existing column.")
}
@@ -416,6 +424,7 @@ class LinkController {
await this._db.put(linkedTable)
}
} catch (err) {
+ /* istanbul ignore next */
Sentry.captureException(err)
}
}
diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.js
index 7193e59465..9200a05b98 100644
--- a/packages/server/src/db/linkedRows/linkUtils.js
+++ b/packages/server/src/db/linkedRows/linkUtils.js
@@ -75,6 +75,7 @@ exports.getLinkDocuments = async function({
await exports.createLinkView(appId)
return exports.getLinkDocuments(arguments[0])
} else {
+ /* istanbul ignore next */
Sentry.captureException(err)
}
}
diff --git a/packages/server/src/db/tests/linkController.spec.js b/packages/server/src/db/tests/linkController.spec.js
new file mode 100644
index 0000000000..d45bd99ea2
--- /dev/null
+++ b/packages/server/src/db/tests/linkController.spec.js
@@ -0,0 +1,218 @@
+const TestConfig = require("../../tests/utilities/TestConfiguration")
+const { basicRow, basicLinkedRow, basicTable } = require("../../tests/utilities/structures")
+const LinkController = require("../linkedRows/LinkController")
+const { RelationshipTypes } = require("../../constants")
+const { cloneDeep } = require("lodash/fp")
+
+describe("test the link controller", () => {
+ let config = new TestConfig(false)
+ let table1, table2
+
+ beforeEach(async () => {
+ await config.init()
+ const { _id } = await config.createTable()
+ table2 = await config.createLinkedTable(RelationshipTypes.MANY_TO_MANY, ["link", "link2"])
+ // update table after creating link
+ table1 = await config.getTable(_id)
+ })
+
+ afterAll(config.end)
+
+ function createLinkController(table, row = null, oldTable = null) {
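+ // helper: build a LinkController for the table, optionally with a row and/or the previous table version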
+ const linkConfig = {
+ appId: config.getAppId(),
+ tableId: table._id,
+ table,
+ }
+ if (row) {
+ linkConfig.row = row
+ }
+ if (oldTable) {
+ linkConfig.oldTable = oldTable
+ }
+ return new LinkController(linkConfig)
+ }
+
+ async function createLinkedRow(linkField = "link", t1 = table1, t2 = table2) {
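+ // helper: create a row in the linked table, link it from the linker table, then return the refreshed linker row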
+ const row = await config.createRow(basicRow(t2._id))
+ const { _id } = await config.createRow(basicLinkedRow(t1._id, row._id, linkField))
+ return config.getRow(t1._id, _id)
+ }
+
+ it("should be able to confirm if two table schemas are equal", () => {
+ const controller = createLinkController(table1)
+ let equal = controller.areLinkSchemasEqual(table2.schema.link, table2.schema.link)
+ expect(equal).toEqual(true)
+ equal = controller.areLinkSchemasEqual(table1.schema.link, table2.schema.link)
+ expect(equal).toEqual(false)
+ })
+
+ it("should be able to check the relationship types across two fields", () => {
+ const controller = createLinkController(table1)
+ // empty case
+ let output = controller.handleRelationshipType({}, {})
+ expect(output.linkedField.relationshipType).toEqual(RelationshipTypes.MANY_TO_MANY)
+ expect(output.linkerField.relationshipType).toEqual(RelationshipTypes.MANY_TO_MANY)
+ output = controller.handleRelationshipType({ relationshipType: RelationshipTypes.MANY_TO_MANY }, {})
+ expect(output.linkedField.relationshipType).toEqual(RelationshipTypes.MANY_TO_MANY)
+ expect(output.linkerField.relationshipType).toEqual(RelationshipTypes.MANY_TO_MANY)
+ output = controller.handleRelationshipType({ relationshipType: RelationshipTypes.MANY_TO_ONE }, {})
+ expect(output.linkedField.relationshipType).toEqual(RelationshipTypes.ONE_TO_MANY)
+ expect(output.linkerField.relationshipType).toEqual(RelationshipTypes.MANY_TO_ONE)
+ output = controller.handleRelationshipType({ relationshipType: RelationshipTypes.ONE_TO_MANY }, {})
+ expect(output.linkedField.relationshipType).toEqual(RelationshipTypes.MANY_TO_ONE)
+ expect(output.linkerField.relationshipType).toEqual(RelationshipTypes.ONE_TO_MANY)
+ })
+
+ it("should be able to delete a row", async () => {
+ const row = await createLinkedRow()
+ const controller = createLinkController(table1, row)
+ // get initial count
+ const beforeLinks = await controller.getRowLinkDocs(row._id)
+ await controller.rowDeleted()
+ let afterLinks = await controller.getRowLinkDocs(row._id)
+ expect(beforeLinks.length).toEqual(1)
+ expect(afterLinks.length).toEqual(0)
+ })
+
+ it("shouldn't throw an error when deleting a row with no links", async () => {
+ const row = await config.createRow(basicRow(table1._id))
+ const controller = createLinkController(table1, row)
+ let error
+ try {
+ await controller.rowDeleted()
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeUndefined()
+ })
+
+ it("should throw an error when validating a table which is invalid", () => {
+ const controller = createLinkController(table1)
+ const copyTable = {
+ ...table1
+ }
+ copyTable.schema.otherTableLink = {
+ type: "link",
+ fieldName: "link",
+ tableId: table2._id,
+ }
+ let error
+ try {
+ controller.validateTable(copyTable)
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeDefined()
+ expect(error.message).toEqual("Cannot re-use the linked column name for a linked table.")
+ })
+
+ it("should be able to remove a link when saving/updating the row", async () => {
+ const row = await createLinkedRow()
+ // remove the link from the row
+ row.link = []
+ const controller = createLinkController(table1, row)
+ await controller.rowSaved()
+ let links = await controller.getRowLinkDocs(row._id)
+ expect(links.length).toEqual(0)
+ })
+
+ it("should be able to delete a table and have links deleted", async () => {
+ await createLinkedRow()
+ const controller = createLinkController(table1)
+ let before = await controller.getTableLinkDocs()
+ await controller.tableDeleted()
+ let after = await controller.getTableLinkDocs()
+ expect(before.length).toEqual(1)
+ expect(after.length).toEqual(0)
+ })
+
+ it("should be able to remove a linked field from a table", async () => {
+ await createLinkedRow()
+ await createLinkedRow("link2")
+ const controller = createLinkController(table1, null, table1)
+ let before = await controller.getTableLinkDocs()
+ await controller.removeFieldFromTable("link")
+ let after = await controller.getTableLinkDocs()
+ expect(before.length).toEqual(2)
+ // shouldn't delete the other field
+ expect(after.length).toEqual(1)
+ })
+
+ it("should throw an error when overwriting a link column", async () => {
+ const update = cloneDeep(table1)
+ update.schema.link.relationshipType = RelationshipTypes.MANY_TO_ONE
+ let error
+ try {
+ const controller = createLinkController(update)
+ await controller.tableSaved()
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeDefined()
+ })
+
+ it("should be able to remove a field view table update", async () => {
+ await createLinkedRow()
+ await createLinkedRow()
+ const newTable = cloneDeep(table1)
+ delete newTable.schema.link
+ const controller = createLinkController(newTable, null, table1)
+ await controller.tableUpdated()
+ const links = await controller.getTableLinkDocs()
+ expect(links.length).toEqual(0)
+ })
+
+ it("shouldn't allow one to many having many relationships against it", async () => {
+ const firstTable = await config.createTable()
+ const { _id } = await config.createLinkedTable(RelationshipTypes.MANY_TO_ONE, ["link"])
+ const linkTable = await config.getTable(_id)
+ // an initial row to link against
+ const row = await createLinkedRow("link", linkTable, firstTable)
+ let error
+ try {
+ // create another row to initiate the error
+ await config.createRow(basicLinkedRow(row.tableId, row.link[0]))
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeDefined()
+ })
+
+ it("should not error if a link being created doesn't exist", async () => {
+ let error
+ try {
+ await config.createRow(basicLinkedRow(table1._id, "invalid"))
+ } catch (err) {
+ error = err
+ }
+ expect(error).toBeUndefined()
+ })
+
+ it("make sure auto column goes onto other row too", async () => {
+ const table = await config.createTable()
+ const tableCfg = basicTable()
+ tableCfg.schema.link = {
+ type: "link",
+ fieldName: "link",
+ tableId: table._id,
+ name: "link",
+ autocolumn: true,
+ }
+ await config.createTable(tableCfg)
+ const afterTable = await config.getTable(table._id)
+ expect(afterTable.schema.link.autocolumn).toBe(true)
+ })
+
+ it("should be able to link to self", async () => {
+ const table = await config.createTable()
+ table.schema.link = {
+ type: "link",
+ fieldName: "link",
+ tableId: table._id,
+ name: "link",
+ autocolumn: true,
+ }
+ await config.updateTable(table)
+ })
+})
diff --git a/packages/server/src/db/tests/linkTests.spec.js b/packages/server/src/db/tests/linkTests.spec.js
new file mode 100644
index 0000000000..3fed6938b7
--- /dev/null
+++ b/packages/server/src/db/tests/linkTests.spec.js
@@ -0,0 +1,74 @@
+const TestConfig = require("../../tests/utilities/TestConfiguration")
+const { basicTable, basicLinkedRow } = require("../../tests/utilities/structures")
+const linkUtils = require("../linkedRows/linkUtils")
+const links = require("../linkedRows")
+const CouchDB = require("../index")
+
+describe("test link functionality", () => {
+ const config = new TestConfig(false)
+
+ describe("getLinkedTable", () => {
+ let db, table
+ beforeEach(async () => {
+ await config.init()
+ db = new CouchDB(config.getAppId())
+ table = await config.createTable()
+ })
+
+ it("should be able to retrieve a linked table from a list", async () => {
+ const retrieved = await linkUtils.getLinkedTable(db, table._id, [table])
+ expect(retrieved._id).toBe(table._id)
+ })
+
+ it("should be able to retrieve a table from DB and update list", async () => {
+ const tables = []
+ const retrieved = await linkUtils.getLinkedTable(db, table._id, tables)
+ expect(retrieved._id).toBe(table._id)
+ expect(tables[0]).toBeDefined()
+ })
+ })
+
+ describe("getRelatedTableForField", () => {
+ let link = basicTable()
+ link.schema.link = {
+ fieldName: "otherLink",
+ tableId: "tableID",
+ type: "link",
+ }
+
+ it("should get the field from the table directly", () => {
+ expect(linkUtils.getRelatedTableForField(link, "link")).toBe("tableID")
+ })
+
+ it("should get the field from the link", () => {
+ expect(linkUtils.getRelatedTableForField(link, "otherLink")).toBe("tableID")
+ })
+ })
+
+ describe("getLinkDocuments", () => {
+ it("should create the link view when it doesn't exist", async () => {
+ // create the DB and a very basic app design DB
+ const db = new CouchDB("test")
+ await db.put({ _id: "_design/database", views: {} })
+ const output = await linkUtils.getLinkDocuments({
+ appId: "test",
+ tableId: "test",
+ rowId: "test",
+ includeDocs: false,
+ })
+ expect(Array.isArray(output)).toBe(true)
+ })
+ })
+
+ describe("attachLinkIDs", () => {
+ it("should be able to attach linkIDs", async () => {
+ await config.init()
+ await config.createTable()
+ const table = await config.createLinkedTable()
+ const row = await config.createRow()
+ const linkRow = await config.createRow(basicLinkedRow(table._id, row._id))
+ const attached = await links.attachLinkIDs(config.getAppId(), [linkRow])
+ expect(attached[0].link[0]).toBe(row._id)
+ })
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/dynamodb.js b/packages/server/src/integrations/dynamodb.js
index 668e11e263..4897690075 100644
--- a/packages/server/src/integrations/dynamodb.js
+++ b/packages/server/src/integrations/dynamodb.js
@@ -166,7 +166,7 @@ class DynamoDBIntegration {
async update(query) {
const params = {
- TableName: query.Table,
+ TableName: query.table,
...query.json,
}
return this.client.update(params).promise()
diff --git a/packages/server/src/integrations/microsoftSqlServer.js b/packages/server/src/integrations/microsoftSqlServer.js
index eea67a7256..f5e30fd65b 100644
--- a/packages/server/src/integrations/microsoftSqlServer.js
+++ b/packages/server/src/integrations/microsoftSqlServer.js
@@ -65,7 +65,7 @@ class SqlServerIntegration {
try {
await this.connect()
const response = await this.client.query(query.sql)
- return response.recordset
+ return response.recordset || [{ created: true }]
} catch (err) {
console.error("Error querying MS SQL Server", err)
throw err
diff --git a/packages/server/src/integrations/mysql.js b/packages/server/src/integrations/mysql.js
index af1a1baf92..c505c4fc14 100644
--- a/packages/server/src/integrations/mysql.js
+++ b/packages/server/src/integrations/mysql.js
@@ -73,20 +73,23 @@ class MySQLIntegration {
})
}
- create(query) {
- return this.query(query)
+ async create(query) {
+ const results = await this.query(query)
+ return results.length ? results : [{ created: true }]
}
read(query) {
return this.query(query)
}
- update(query) {
- return this.query(query)
+ async update(query) {
+ const results = await this.query(query)
+ return results.length ? results : [{ updated: true }]
}
- delete(query) {
- return this.query(query)
+ async delete(query) {
+ const results = await this.query(query)
+ return results.length ? results : [{ deleted: true }]
}
}
diff --git a/packages/server/src/integrations/tests/TestConfiguration.js b/packages/server/src/integrations/tests/TestConfiguration.js
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/server/src/integrations/tests/airtable.spec.js b/packages/server/src/integrations/tests/airtable.spec.js
new file mode 100644
index 0000000000..e6654f6f71
--- /dev/null
+++ b/packages/server/src/integrations/tests/airtable.spec.js
@@ -0,0 +1,70 @@
+const Airtable = require("airtable")
+const AirtableIntegration = require("../airtable")
+jest.mock("airtable")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new AirtableIntegration.integration(config)
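+ // stub out the Airtable client methods so the integration's calls can be asserted without hitting the API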
+ this.client = {
+ create: jest.fn(),
+ select: jest.fn(),
+ update: jest.fn(),
+ destroy: jest.fn(),
+ }
+ this.integration.client = () => this.client
+ }
+}
+
+describe("Airtable Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const response = await config.integration.create({
+ table: "test",
+ json: {}
+ })
+ expect(config.client.create).toHaveBeenCalledWith([
+ {
+ fields: {}
+ }
+ ])
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const response = await config.integration.read({
+ table: "test",
+ view: "Grid view"
+ })
+ expect(config.client.select).toHaveBeenCalledWith({
+ maxRecords: 10, view: "Grid view"
+ })
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const response = await config.integration.update({
+ table: "test",
+ id: "123",
+ json: {
+ name: "test"
+ }
+ })
+ expect(config.client.update).toHaveBeenCalledWith([
+ {
+ id: "123",
+ fields: { name: "test" }
+ }
+ ])
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const ids = [1,2,3,4]
+ const response = await config.integration.delete({
+ ids
+ })
+ expect(config.client.destroy).toHaveBeenCalledWith(ids)
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/arangodb.spec.js b/packages/server/src/integrations/tests/arangodb.spec.js
new file mode 100644
index 0000000000..437a7fd3ec
--- /dev/null
+++ b/packages/server/src/integrations/tests/arangodb.spec.js
@@ -0,0 +1,35 @@
+const arangodb = require("arangojs")
+const ArangoDBIntegration = require("../arangodb")
+jest.mock("arangojs")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new ArangoDBIntegration.integration(config)
+ }
+}
+
+describe("ArangoDB Integration", () => {
+ let config
+ let indexName = "Users"
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const body = {
+ json: "Hello"
+ }
+
+ const response = await config.integration.create(body)
+ expect(config.integration.client.query).toHaveBeenCalledWith(`INSERT Hello INTO collection RETURN NEW`)
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const query = {
+ json: `test`,
+ }
+ const response = await config.integration.read(query)
+ expect(config.integration.client.query).toHaveBeenCalledWith(query.sql)
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/couchdb.spec.js b/packages/server/src/integrations/tests/couchdb.spec.js
new file mode 100644
index 0000000000..cfe522c617
--- /dev/null
+++ b/packages/server/src/integrations/tests/couchdb.spec.js
@@ -0,0 +1,68 @@
+const PouchDB = require("pouchdb")
+const CouchDBIntegration = require("../couchdb")
+jest.mock("pouchdb", () => function CouchDBMock() {
+ this.post = jest.fn()
+ this.allDocs = jest.fn(() => ({ rows: [] }))
+ this.put = jest.fn()
+ this.remove = jest.fn()
+ this.plugin = jest.fn()
+})
+
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new CouchDBIntegration.integration(config)
+ }
+}
+
+describe("CouchDB Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const doc = {
+ test: 1
+ }
+ const response = await config.integration.create({
+ json: doc
+ })
+ expect(config.integration.client.post).toHaveBeenCalledWith(doc)
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const doc = {
+ name: "search"
+ }
+
+ const response = await config.integration.read({
+ json: doc
+ })
+
+ expect(config.integration.client.allDocs).toHaveBeenCalledWith({
+ include_docs: true,
+ name: "search"
+ })
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const doc = {
+ _id: "1234",
+ name: "search"
+ }
+
+ const response = await config.integration.update({
+ json: doc
+ })
+
+ expect(config.integration.client.put).toHaveBeenCalledWith(doc)
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const id = "1234"
+ const response = await config.integration.delete({ id })
+ expect(config.integration.client.remove).toHaveBeenCalledWith(id)
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/dynamodb.spec.js b/packages/server/src/integrations/tests/dynamodb.spec.js
new file mode 100644
index 0000000000..4c6b931090
--- /dev/null
+++ b/packages/server/src/integrations/tests/dynamodb.spec.js
@@ -0,0 +1,103 @@
+const AWS = require("aws-sdk")
+const DynamoDBIntegration = require("../dynamodb")
+jest.mock("aws-sdk")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new DynamoDBIntegration.integration(config)
+ }
+}
+
+describe("DynamoDB Integration", () => {
+ let config
+ let tableName = "Users"
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const response = await config.integration.create({
+ table: tableName,
+ json: {
+ Name: "John"
+ }
+ })
+ expect(config.integration.client.put).toHaveBeenCalledWith({
+ TableName: tableName,
+ Name: "John"
+ })
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const indexName = "Test"
+
+ const response = await config.integration.read({
+ table: tableName,
+ index: indexName,
+ json: {}
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith({
+ TableName: tableName,
+ IndexName: indexName,
+ })
+ expect(response).toEqual([])
+ })
+
+ it("calls the scan method with the correct params", async () => {
+ const indexName = "Test"
+
+ const response = await config.integration.scan({
+ table: tableName,
+ index: indexName,
+ json: {}
+ })
+ expect(config.integration.client.scan).toHaveBeenCalledWith({
+ TableName: tableName,
+ IndexName: indexName,
+ })
+ expect(response).toEqual([{
+ Name: "test"
+ }])
+ })
+
+ it("calls the get method with the correct params", async () => {
+ const response = await config.integration.get({
+ table: tableName,
+ json: {
+ Id: 123
+ }
+ })
+
+ expect(config.integration.client.get).toHaveBeenCalledWith({
+ TableName: tableName,
+ Id: 123
+ })
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const response = await config.integration.update({
+ table: tableName,
+ json: {
+ Name: "John"
+ }
+ })
+ expect(config.integration.client.update).toHaveBeenCalledWith({
+ TableName: tableName,
+ Name: "John"
+ })
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const response = await config.integration.delete({
+ table: tableName,
+ json: {
+ Name: "John"
+ }
+ })
+ expect(config.integration.client.delete).toHaveBeenCalledWith({
+ TableName: tableName,
+ Name: "John"
+ })
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/elasticsearch.spec.js b/packages/server/src/integrations/tests/elasticsearch.spec.js
new file mode 100644
index 0000000000..fc97e04bcc
--- /dev/null
+++ b/packages/server/src/integrations/tests/elasticsearch.spec.js
@@ -0,0 +1,81 @@
+const elasticsearch = require("@elastic/elasticsearch")
+const ElasticSearchIntegration = require("../elasticsearch")
+jest.mock("@elastic/elasticsearch")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new ElasticSearchIntegration.integration(config)
+ }
+}
+
+describe("Elasticsearch Integration", () => {
+ let config
+ let indexName = "Users"
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const body = {
+ name: "Hello"
+ }
+ const response = await config.integration.create({
+ index: indexName,
+ json: body
+ })
+ expect(config.integration.client.index).toHaveBeenCalledWith({
+ index: indexName,
+ body
+ })
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const body = {
+ query: {
+ term: {
+ name: "kimchy"
+ }
+ }
+ }
+ const response = await config.integration.read({
+ index: indexName,
+ json: body
+ })
+ expect(config.integration.client.search).toHaveBeenCalledWith({
+ index: indexName,
+ body
+ })
+ expect(response).toEqual(expect.any(Array))
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const body = {
+ name: "updated"
+ }
+
+ const response = await config.integration.update({
+ id: "1234",
+ index: indexName,
+ json: body
+ })
+
+ expect(config.integration.client.update).toHaveBeenCalledWith({
+ id: "1234",
+ index: indexName,
+ body
+ })
+ expect(response).toEqual(expect.any(Array))
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const body = {
+ id: "1234"
+ }
+
+ const response = await config.integration.delete(body)
+
+ expect(config.integration.client.delete).toHaveBeenCalledWith(body)
+ expect(response).toEqual(expect.any(Array))
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/microsoftSqlServer.spec.js b/packages/server/src/integrations/tests/microsoftSqlServer.spec.js
new file mode 100644
index 0000000000..29399b840f
--- /dev/null
+++ b/packages/server/src/integrations/tests/microsoftSqlServer.spec.js
@@ -0,0 +1,47 @@
+const sqlServer = require("mssql")
+const MSSQLIntegration = require("../microsoftSqlServer")
+jest.mock("mssql")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new MSSQLIntegration.integration(config)
+ }
+}
+
+describe("MS SQL Server Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const sql = "select * from users;"
+ const response = await config.integration.read({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ describe("no rows returned", () => {
+ beforeEach(() => {
+ config.integration.client.query.mockImplementation(() => ({ rows: [] }))
+ })
+
+ it("returns the correct response when the create response has no rows", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(response).toEqual([{ created: true }])
+ })
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/mongo.spec.js b/packages/server/src/integrations/tests/mongo.spec.js
new file mode 100644
index 0000000000..1e37d5dd70
--- /dev/null
+++ b/packages/server/src/integrations/tests/mongo.spec.js
@@ -0,0 +1,40 @@
+const mongo = require("mongodb")
+const MongoDBIntegration = require("../mongodb")
+jest.mock("mongodb")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new MongoDBIntegration.integration(config)
+ }
+}
+
+describe("MongoDB Integration", () => {
+ let config
+ let indexName = "Users"
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const body = {
+ name: "Hello"
+ }
+ const response = await config.integration.create({
+ index: indexName,
+ json: body
+ })
+ expect(config.integration.client.insertOne).toHaveBeenCalledWith(body)
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const query = {
+ json: {
+ address: "test"
+ }
+ }
+ const response = await config.integration.read(query)
+ expect(config.integration.client.find).toHaveBeenCalledWith(query.json)
+ expect(response).toEqual(expect.any(Array))
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/mysql.spec.js b/packages/server/src/integrations/tests/mysql.spec.js
new file mode 100644
index 0000000000..eca3e523b0
--- /dev/null
+++ b/packages/server/src/integrations/tests/mysql.spec.js
@@ -0,0 +1,83 @@
+const mysql = require("mysql")
+const MySQLIntegration = require("../mysql")
+jest.mock("mysql")
+
+class TestConfiguration {
+ constructor(config = { ssl: {} }) {
+ this.integration = new MySQLIntegration.integration(config)
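+ // replace the integration's low-level query method with a stub so no real MySQL connection is needed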
+ this.query = jest.fn(() => [{ id: 1 }])
+ this.integration.query = this.query
+ }
+}
+
+describe("MySQL Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(config.query).toHaveBeenCalledWith({ sql })
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const sql = "select * from users;"
+ const response = await config.integration.read({
+ sql
+ })
+ expect(config.query).toHaveBeenCalledWith({
+ sql
+ })
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const sql = "update table users set name = 'test';"
+ const response = await config.integration.update({
+ sql
+ })
+ expect(config.query).toHaveBeenCalledWith({ sql })
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const sql = "delete from users where name = 'todelete';"
+ const response = await config.integration.delete({
+ sql
+ })
+ expect(config.query).toHaveBeenCalledWith({ sql })
+ })
+
+ describe("no rows returned", () => {
+ beforeEach(() => {
+ config.query.mockImplementation(() => [])
+ })
+
+ it("returns the correct response when the create response has no rows", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(response).toEqual([{ created: true }])
+ })
+
+ it("returns the correct response when the update response has no rows", async () => {
+ const sql = "update table users set name = 'test';"
+ const response = await config.integration.update({
+ sql
+ })
+ expect(response).toEqual([{ updated: true }])
+ })
+
+ it("returns the correct response when the delete response has no rows", async () => {
+ const sql = "delete from users where name = 'todelete';"
+ const response = await config.integration.delete({
+ sql
+ })
+ expect(response).toEqual([{ deleted: true }])
+ })
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/integrations/tests/postgres.spec.js b/packages/server/src/integrations/tests/postgres.spec.js
new file mode 100644
index 0000000000..8a8876a556
--- /dev/null
+++ b/packages/server/src/integrations/tests/postgres.spec.js
@@ -0,0 +1,79 @@
+const pg = require("pg")
+const PostgresIntegration = require("../postgres")
+jest.mock("pg")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new PostgresIntegration.integration(config)
+ }
+}
+
+describe("Postgres Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const sql = "select * from users;"
+ const response = await config.integration.read({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const sql = "update table users set name = 'test';"
+ const response = await config.integration.update({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const sql = "delete from users where name = 'todelete';"
+ const response = await config.integration.delete({
+ sql
+ })
+ expect(config.integration.client.query).toHaveBeenCalledWith(sql)
+ })
+
+ describe("no rows returned", () => {
+ beforeEach(() => {
+ config.integration.client.query.mockImplementation(() => ({ rows: [] }))
+ })
+
+ it("returns the correct response when the create response has no rows", async () => {
+ const sql = "insert into users (name, age) values ('Joe', 123);"
+ const response = await config.integration.create({
+ sql
+ })
+ expect(response).toEqual([{ created: true }])
+ })
+
+ it("returns the correct response when the update response has no rows", async () => {
+ const sql = "update table users set name = 'test';"
+ const response = await config.integration.update({
+ sql
+ })
+ expect(response).toEqual([{ updated: true }])
+ })
+
+ it("returns the correct response when the delete response has no rows", async () => {
+ const sql = "delete from users where name = 'todelete';"
+ const response = await config.integration.delete({
+ sql
+ })
+ expect(response).toEqual([{ deleted: true }])
+ })
+ })
+})
\ No newline at end of file
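`jest.mock("pg")` automocks the driver, so `client.query` would otherwise resolve to `undefined`; the assertions above suggest a manual mock whose default implementation returns a populated `rows` array. A sketch of such a `__mocks__/pg.js` (an assumption, since the mock file itself is not part of this diff):

```js
// Hypothetical manual mock for the pg module, shaped to satisfy the tests above.
const query = jest.fn(() => ({
  rows: [{ a: "string", b: 1 }],
}))

class Client {
  constructor() {
    this.query = query
    this.connect = jest.fn()
    this.end = jest.fn()
  }
}

module.exports = { Client, queryMock: query }
```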
diff --git a/packages/server/src/integrations/tests/rest.spec.js b/packages/server/src/integrations/tests/rest.spec.js
new file mode 100644
index 0000000000..db8749baa1
--- /dev/null
+++ b/packages/server/src/integrations/tests/rest.spec.js
@@ -0,0 +1,98 @@
+const fetch = require("node-fetch")
+const RestIntegration = require("../rest")
+jest.mock("node-fetch", () => jest.fn(() => ({ json: jest.fn(), text: jest.fn() })))
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new RestIntegration.integration(config)
+ }
+}
+
+describe("REST Integration", () => {
+ const BASE_URL = "https://myapi.com"
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration({
+ url: BASE_URL
+ })
+ })
+
+ it("calls the create method with the correct params", async () => {
+ const query = {
+ path: "/api",
+ queryString: "?test=1",
+ headers: {
+ Accept: "application/json"
+ },
+ json: {
+ name: "test"
+ }
+ }
+ const response = await config.integration.create(query)
+ expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
+ method: "POST",
+ body: "{\"name\":\"test\"}",
+ headers: {
+ Accept: "application/json"
+ }
+ })
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const query = {
+ path: "/api",
+ queryString: "?test=1",
+ headers: {
+ Accept: "text/html"
+ }
+ }
+ const response = await config.integration.read(query)
+ expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
+ headers: {
+ Accept: "text/html"
+ }
+ })
+ })
+
+ it("calls the update method with the correct params", async () => {
+ const query = {
+ path: "/api",
+ queryString: "?test=1",
+ headers: {
+ Accept: "application/json"
+ },
+ json: {
+ name: "test"
+ }
+ }
+ const response = await config.integration.update(query)
+ expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
+ method: "POST",
+ body: "{\"name\":\"test\"}",
+ headers: {
+ Accept: "application/json"
+ }
+ })
+ })
+
+ it("calls the delete method with the correct params", async () => {
+ const query = {
+ path: "/api",
+ queryString: "?test=1",
+ headers: {
+ Accept: "application/json"
+ },
+ json: {
+ name: "test"
+ }
+ }
+ const response = await config.integration.delete(query)
+ expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
+ method: "DELETE",
+ headers: {
+ Accept: "application/json"
+ }
+ })
+ })
+})
\ No newline at end of file
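The `fetch` expectations pin down how the REST integration is assumed to assemble requests: the URL is `config.url + path + queryString`, JSON bodies are stringified, headers pass straight through, and both create and update are asserted to POST. An illustrative sketch of that request construction (not the real rest.js, which also negotiates response parsing):

```js
const fetch = require("node-fetch")

// Illustrative only: shows the request shapes the tests above assert on.
class RestIntegration {
  constructor(config) {
    this.config = config
  }

  async create({ path, queryString = "", headers = {}, json }) {
    const response = await fetch(`${this.config.url}${path}${queryString}`, {
      method: "POST",
      body: JSON.stringify(json),
      headers,
    })
    return response.json()
  }

  async read({ path, queryString = "", headers = {} }) {
    const response = await fetch(`${this.config.url}${path}${queryString}`, {
      headers,
    })
    return response.text()
  }

  async delete({ path, queryString = "", headers = {} }) {
    const response = await fetch(`${this.config.url}${path}${queryString}`, {
      method: "DELETE",
      headers,
    })
    return response.json()
  }
}
```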
diff --git a/packages/server/src/integrations/tests/s3.spec.js b/packages/server/src/integrations/tests/s3.spec.js
new file mode 100644
index 0000000000..7ac403dbd4
--- /dev/null
+++ b/packages/server/src/integrations/tests/s3.spec.js
@@ -0,0 +1,26 @@
+const AWS = require("aws-sdk")
+const S3Integration = require("../s3")
+jest.mock("aws-sdk")
+
+class TestConfiguration {
+ constructor(config = {}) {
+ this.integration = new S3Integration.integration(config)
+ }
+}
+
+describe("S3 Integration", () => {
+ let config
+
+ beforeEach(() => {
+ config = new TestConfiguration()
+ })
+
+ it("calls the read method with the correct params", async () => {
+ const response = await config.integration.read({
+ bucket: "test"
+ })
+ expect(config.integration.client.listObjects).toHaveBeenCalledWith({
+ Bucket: "test"
+ })
+ })
+})
\ No newline at end of file
diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js
index c76acb47d2..395f14c1ed 100644
--- a/packages/server/src/middleware/tests/usageQuota.spec.js
+++ b/packages/server/src/middleware/tests/usageQuota.spec.js
@@ -3,7 +3,7 @@ const usageQuota = require("../../utilities/usageQuota")
const CouchDB = require("../../db")
const env = require("../../environment")
-jest.mock("../../db");
+jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
jest.mock("../../environment")
diff --git a/packages/server/src/selfhost/README.md b/packages/server/src/selfhost/README.md
deleted file mode 100644
index a02743a58c..0000000000
--- a/packages/server/src/selfhost/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-### Self hosting
-This directory contains utilities that are needed for self hosted platforms to operate.
-These will mostly be utilities, necessary to the operation of the server e.g. storing self
-hosting specific options and attributes to CouchDB.
-
-All the internal operations should be exposed through the `index.js` so importing
-the self host directory should give you everything you need.
\ No newline at end of file
diff --git a/packages/server/src/selfhost/index.js b/packages/server/src/selfhost/index.js
deleted file mode 100644
index f77d1f0b6c..0000000000
--- a/packages/server/src/selfhost/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const CouchDB = require("../db")
-const env = require("../environment")
-const newid = require("../db/newid")
-
-const SELF_HOST_DB = "self-host-db"
-const SELF_HOST_DOC = "self-host-info"
-
-async function createSelfHostDB(db) {
- await db.put({
- _id: "_design/database",
- views: {},
- })
- const selfHostInfo = {
- _id: SELF_HOST_DOC,
- apiKeyId: newid(),
- }
- await db.put(selfHostInfo)
- return selfHostInfo
-}
-
-exports.init = async () => {
- if (!env.SELF_HOSTED) {
- return
- }
- const db = new CouchDB(SELF_HOST_DB)
- try {
- await db.get(SELF_HOST_DOC)
- } catch (err) {
- // failed to retrieve
- if (err.status === 404) {
- await createSelfHostDB(db)
- }
- }
-}
-
-exports.getSelfHostInfo = async () => {
- const db = new CouchDB(SELF_HOST_DB)
- return db.get(SELF_HOST_DOC)
-}
-
-exports.getSelfHostAPIKey = async () => {
- const info = await exports.getSelfHostInfo()
- return info ? info.apiKeyId : null
-}
diff --git a/packages/server/src/api/routes/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js
similarity index 82%
rename from packages/server/src/api/routes/tests/utilities/TestConfiguration.js
rename to packages/server/src/tests/utilities/TestConfiguration.js
index 0ff742293d..a12d596534 100644
--- a/packages/server/src/api/routes/tests/utilities/TestConfiguration.js
+++ b/packages/server/src/tests/utilities/TestConfiguration.js
@@ -1,6 +1,6 @@
-const { BUILTIN_ROLE_IDS } = require("../../../../utilities/security/roles")
+const { BUILTIN_ROLE_IDS } = require("../../utilities/security/roles")
const jwt = require("jsonwebtoken")
-const env = require("../../../../environment")
+const env = require("../../environment")
const {
basicTable,
basicRow,
@@ -15,18 +15,20 @@ const {
const controllers = require("./controllers")
const supertest = require("supertest")
const fs = require("fs")
-const { budibaseAppsDir } = require("../../../../utilities/budibaseDir")
+const { budibaseAppsDir } = require("../../utilities/budibaseDir")
const { join } = require("path")
const EMAIL = "babs@babs.com"
const PASSWORD = "babs_password"
class TestConfiguration {
- constructor() {
- env.PORT = 4002
- this.server = require("../../../../app")
- // we need the request for logging in, involves cookies, hard to fake
- this.request = supertest(this.server)
+ constructor(openServer = true) {
+ if (openServer) {
+ env.PORT = 4002
+ this.server = require("../../app")
+ // we need the request for logging in; it involves cookies, which are hard to fake
+ this.request = supertest(this.server)
+ }
this.appId = null
this.allApps = []
}
@@ -61,7 +63,9 @@ class TestConfiguration {
}
end() {
- this.server.close()
+ if (this.server) {
+ this.server.close()
+ }
const appDir = budibaseAppsDir()
const files = fs.readdirSync(appDir)
for (let file of files) {
@@ -131,16 +135,22 @@ class TestConfiguration {
return this._req(null, { id: tableId }, controllers.table.find)
}
- async createLinkedTable() {
+ async createLinkedTable(relationshipType = null, links = ["link"]) {
if (!this.table) {
throw "Must have created a table first."
}
const tableConfig = basicTable()
tableConfig.primaryDisplay = "name"
- tableConfig.schema.link = {
- type: "link",
- fieldName: "link",
- tableId: this.table._id,
+ for (let link of links) {
+ tableConfig.schema[link] = {
+ type: "link",
+ fieldName: link,
+ tableId: this.table._id,
+ name: link,
+ }
+ if (relationshipType) {
+ tableConfig.schema[link].relationshipType = relationshipType
+ }
}
const linkedTable = await this.createTable(tableConfig)
this.linkedTable = linkedTable
@@ -159,8 +169,20 @@ class TestConfiguration {
if (!this.table) {
throw "Test requires table to be configured."
}
- config = config || basicRow(this.table._id)
- return this._req(config, { tableId: this.table._id }, controllers.row.save)
+ const tableId = (config && config.tableId) || this.table._id
+ config = config || basicRow(tableId)
+ return this._req(config, { tableId }, controllers.row.save)
+ }
+
+ async getRow(tableId, rowId) {
+ return this._req(null, { tableId, rowId }, controllers.row.find)
+ }
+
+ async getRows(tableId) {
+ if (!tableId && this.table) {
+ tableId = this.table._id
+ }
+ return this._req(null, { tableId }, controllers.row.fetchTableRows)
}
async createRole(config = null) {
@@ -187,6 +209,7 @@ class TestConfiguration {
const view = config || {
map: "function(doc) { emit(doc[doc.key], doc._id); } ",
tableId: this.table._id,
+ name: "ViewTest",
}
return this._req(view, null, controllers.view.save)
}
@@ -285,6 +308,9 @@ class TestConfiguration {
}
async login(email, password) {
+ if (!this.request) {
+ throw "Server has not been opened, cannot login."
+ }
if (!email || !password) {
await this.createUser()
email = EMAIL
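Taken together, the `openServer` flag and the new row helpers let controller-level suites run without booting the Koa server. A usage sketch (assuming the existing `init`/`createTable` helpers; the relationship type value is illustrative):

```js
const TestConfiguration = require("../../tests/utilities/TestConfiguration")
const { basicLinkedRow } = require("../../tests/utilities/structures")

describe("linked rows", () => {
  // `false` skips opening the server; only controllers are exercised here
  const config = new TestConfiguration(false)

  beforeAll(async () => {
    await config.init()
    await config.createTable()
    // "one-to-many" is illustrative; any supported relationshipType works
    await config.createLinkedTable("one-to-many", ["link"])
  })

  afterAll(() => config.end())

  it("fetches rows through the linked table", async () => {
    const row = await config.createRow()
    await config.createRow(
      basicLinkedRow(config.linkedTable._id, row._id, "link")
    )
    const rows = await config.getRows(config.linkedTable._id)
    expect(rows.length).toEqual(1)
  })
})
```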
diff --git a/packages/server/src/tests/utilities/controllers.js b/packages/server/src/tests/utilities/controllers.js
new file mode 100644
index 0000000000..b07754038f
--- /dev/null
+++ b/packages/server/src/tests/utilities/controllers.js
@@ -0,0 +1,15 @@
+module.exports = {
+ table: require("../../api/controllers/table"),
+ row: require("../../api/controllers/row"),
+ role: require("../../api/controllers/role"),
+ perms: require("../../api/controllers/permission"),
+ view: require("../../api/controllers/view"),
+ app: require("../../api/controllers/application"),
+ user: require("../../api/controllers/user"),
+ automation: require("../../api/controllers/automation"),
+ datasource: require("../../api/controllers/datasource"),
+ query: require("../../api/controllers/query"),
+ screen: require("../../api/controllers/screen"),
+ webhook: require("../../api/controllers/webhook"),
+ layout: require("../../api/controllers/layout"),
+}
diff --git a/packages/server/src/tests/utilities/index.js b/packages/server/src/tests/utilities/index.js
new file mode 100644
index 0000000000..aa8039ce2f
--- /dev/null
+++ b/packages/server/src/tests/utilities/index.js
@@ -0,0 +1,11 @@
+exports.makePartial = obj => {
+ const newObj = {}
+ for (let key of Object.keys(obj)) {
+ if (obj[key] && typeof obj[key] === "object") {
+ newObj[key] = exports.makePartial(obj[key])
+ } else {
+ newObj[key] = obj[key]
+ }
+ }
+ return expect.objectContaining(newObj)
+}
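`makePartial` recursively wraps plain objects in `expect.objectContaining`, so deep assertions stay tolerant of extra fields such as `_id` and `_rev`. For example (values are hypothetical):

```js
const { makePartial } = require("../../tests/utilities")

// passes even though the actual row carries extra metadata
expect({
  name: "Test Row",
  _id: "ro_123",
  _rev: "1-abc",
  meta: { createdBy: "user_1", internal: true },
}).toEqual(
  makePartial({
    name: "Test Row",
    meta: { createdBy: "user_1" },
  })
)
```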
diff --git a/packages/server/src/api/routes/tests/utilities/structures.js b/packages/server/src/tests/utilities/structures.js
similarity index 78%
rename from packages/server/src/api/routes/tests/utilities/structures.js
rename to packages/server/src/tests/utilities/structures.js
index ff3a239211..5e27ff4a3b 100644
--- a/packages/server/src/api/routes/tests/utilities/structures.js
+++ b/packages/server/src/tests/utilities/structures.js
@@ -1,9 +1,9 @@
-const { BUILTIN_ROLE_IDS } = require("../../../../utilities/security/roles")
+const { BUILTIN_ROLE_IDS } = require("../../utilities/security/roles")
const {
BUILTIN_PERMISSION_IDS,
-} = require("../../../../utilities/security/permissions")
-const { createHomeScreen } = require("../../../../constants/screens")
-const { EMPTY_LAYOUT } = require("../../../../constants/layouts")
+} = require("../../utilities/security/permissions")
+const { createHomeScreen } = require("../../constants/screens")
+const { EMPTY_LAYOUT } = require("../../constants/layouts")
const { cloneDeep } = require("lodash/fp")
exports.basicTable = () => {
@@ -53,6 +53,14 @@ exports.basicRow = tableId => {
}
}
+exports.basicLinkedRow = (tableId, linkedRowId, linkField = "link") => {
+ // this is based on the basic linked tables you get from the test configuration
+ return {
+ ...exports.basicRow(tableId),
+ [linkField]: [linkedRowId],
+ }
+}
+
exports.basicRole = () => {
return {
name: "NewRole",
diff --git a/packages/server/src/utilities/createAppPackage.js b/packages/server/src/utilities/createAppPackage.js
index a62e8c96df..9500554227 100644
--- a/packages/server/src/utilities/createAppPackage.js
+++ b/packages/server/src/utilities/createAppPackage.js
@@ -7,6 +7,8 @@ const packageJson = require("../../package.json")
const streamPipeline = promisify(stream.pipeline)
+// not practical to unit test, as this downloads and extracts component libraries at runtime
+/* istanbul ignore next */
exports.downloadExtractComponentLibraries = async appFolder => {
const LIBRARIES = ["standard-components"]
diff --git a/packages/server/src/utilities/exceptions.js b/packages/server/src/utilities/exceptions.js
deleted file mode 100644
index e02c88eec3..0000000000
--- a/packages/server/src/utilities/exceptions.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const statusCodes = require("./statusCodes")
-
-const errorWithStatus = (message, statusCode) => {
- const e = new Error(message)
- e.statusCode = statusCode
- return e
-}
-
-module.exports.unauthorized = message =>
- errorWithStatus(message, statusCodes.UNAUTHORIZED)
-
-module.exports.forbidden = message =>
- errorWithStatus(message, statusCodes.FORBIDDEN)
-
-module.exports.notfound = message =>
- errorWithStatus(message, statusCodes.NOT_FOUND)
diff --git a/packages/server/src/utilities/routing/index.js b/packages/server/src/utilities/routing/index.js
index f4af585dc6..541733dcc4 100644
--- a/packages/server/src/utilities/routing/index.js
+++ b/packages/server/src/utilities/routing/index.js
@@ -12,6 +12,7 @@ exports.getRoutingInfo = async appId => {
return allRouting.rows.map(row => row.value)
} catch (err) {
// check if the view doesn't exist, it should for all new instances
+ /* istanbul ignore next */
if (err != null && err.name === "not_found") {
await createRoutingView(appId)
return exports.getRoutingInfo(appId)
diff --git a/packages/server/src/utilities/security/apikey.js b/packages/server/src/utilities/security/apikey.js
index c8965cee43..3d5f428bb7 100644
--- a/packages/server/src/utilities/security/apikey.js
+++ b/packages/server/src/utilities/security/apikey.js
@@ -1,6 +1,5 @@
const { apiKeyTable } = require("../../db/dynamoClient")
const env = require("../../environment")
-const { getSelfHostAPIKey } = require("../../selfhost")
/**
* This file purely exists so that we can centralise all logic pertaining to API keys, as their usage differs
@@ -8,16 +7,13 @@ const { getSelfHostAPIKey } = require("../../selfhost")
*/
exports.isAPIKeyValid = async apiKeyId => {
- if (env.CLOUD && !env.SELF_HOSTED) {
+ if (!env.SELF_HOSTED) {
let apiKeyInfo = await apiKeyTable.get({
primary: apiKeyId,
})
return apiKeyInfo != null
- }
- if (env.SELF_HOSTED) {
- const selfHostKey = await getSelfHostAPIKey()
+ } else {
// if the api key supplied is correct then return structure similar
- return apiKeyId === selfHostKey ? { pk: apiKeyId } : null
+ return apiKeyId === env.HOSTING_KEY
}
- return false
}
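Since the self-hosted key now comes straight from `env.HOSTING_KEY`, this branch no longer touches CouchDB and can be unit tested with plain module mocks. A hedged test sketch (mock paths assume a spec living in `src/utilities/security/tests/`):

```js
// Sketch of a unit test for the simplified isAPIKeyValid().
jest.mock("../../../environment", () => ({
  SELF_HOSTED: true,
  HOSTING_KEY: "test-hosting-key",
}))
jest.mock("../../../db/dynamoClient", () => ({
  apiKeyTable: { get: jest.fn() },
}))
const { isAPIKeyValid } = require("../apikey")

describe("isAPIKeyValid when self hosted", () => {
  it("accepts only the configured hosting key", async () => {
    expect(await isAPIKeyValid("test-hosting-key")).toBe(true)
    expect(await isAPIKeyValid("not-the-key")).toBe(false)
  })
})
```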
diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json
index 6e0b573656..4f992e6cb4 100644
--- a/packages/string-templates/package.json
+++ b/packages/string-templates/package.json
@@ -25,7 +25,7 @@
"@rollup/plugin-json": "^4.1.0",
"doctrine": "^3.0.0",
"jest": "^26.6.3",
- "marked": "^1.2.8",
+ "marked": "^2.0.0",
"rollup": "^2.36.2",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-globals": "^1.4.0",
diff --git a/packages/string-templates/yarn.lock b/packages/string-templates/yarn.lock
index cb96e4e81c..3fd39762b2 100644
--- a/packages/string-templates/yarn.lock
+++ b/packages/string-templates/yarn.lock
@@ -3424,10 +3424,10 @@ map-visit@^1.0.0:
dependencies:
object-visit "^1.0.0"
-marked@^1.2.8:
- version "1.2.8"
- resolved "https://registry.yarnpkg.com/marked/-/marked-1.2.8.tgz#5008ece15cfa43e653e85845f3525af4beb6bdd4"
- integrity sha512-lzmFjGnzWHkmbk85q/ILZjFoHHJIQGF+SxGEfIdGk/XhiTPhqGs37gbru6Kkd48diJnEyYwnG67nru0Z2gQtuQ==
+marked@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/marked/-/marked-2.0.0.tgz#9662bbcb77ebbded0662a7be66ff929a8611cee5"
+ integrity sha512-NqRSh2+LlN2NInpqTQnS614Y/3NkVMFFU6sJlRFEpxJ/LHuK/qJECH7/fXZjk4VZstPW/Pevjil/VtSONsLc7Q==
md5.js@^1.3.4:
version "1.3.5"