chore: resolved merge conflicts from develop branch
commit 7ffe77c72d

@@ -7,6 +7,15 @@ assignees: ''
---

**Hosting**
<!-- Delete as appropriate -->
- Self
  - Method: <method> <!-- One of: k8s, docker single image, docker compose, digital ocean: -->
  - Budibase Version: <version> <!-- e.g. 1.0.105 -->
  - App Version: <version> <!-- Indicate app version if bug is related to an application -->
- Cloud
  - Tenant ID: <tenantId> <!-- shown in URL as <tenantID>.budibase.app -->

**Describe the bug**
A clear and concise description of what the bug is.
@@ -12,6 +12,11 @@ on:
      - master
      - develop

env:
  BRANCH: ${{ github.event.pull_request.head.ref }}
  BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}

jobs:
  build:
    runs-on: ubuntu-latest
@@ -27,6 +32,10 @@ jobs:
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH

      - run: yarn
      - run: yarn bootstrap
      - run: yarn lint
@ -19,6 +19,7 @@ env:
|
|||
POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
|
||||
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
|
||||
POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
|
||||
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
|
||||
jobs:
|
||||
release:
|
||||
|
@ -29,6 +30,10 @@ jobs:
|
|||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
|
||||
- name: Install Pro
|
||||
run: yarn install:pro develop
|
||||
|
||||
- run: yarn
|
||||
- run: yarn bootstrap
|
||||
- run: yarn lint
|
||||
|
@ -46,9 +51,9 @@ jobs:
|
|||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
run: |
|
||||
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
|
||||
git config user.name "Budibase Staging Release Bot"
|
||||
git config user.email "<>"
|
||||
# setup the username and email.
|
||||
git config --global user.name "Budibase Staging Release Bot"
|
||||
git config --global user.email "<>"
|
||||
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
|
||||
yarn release:develop
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ env:
|
|||
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
|
||||
POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
|
||||
jobs:
|
||||
release:
|
||||
|
@ -30,6 +31,10 @@ jobs:
|
|||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 14.x
|
||||
|
||||
- name: Install Pro
|
||||
run: yarn install:pro master
|
||||
|
||||
- run: yarn
|
||||
- run: yarn bootstrap
|
||||
- run: yarn lint
|
||||
|
|
|
@ -11,7 +11,7 @@ sources:
|
|||
- https://github.com/Budibase/budibase
|
||||
- https://budibase.com
|
||||
type: application
|
||||
version: 0.2.8
|
||||
version: 0.2.9
|
||||
appVersion: 1.0.48
|
||||
dependencies:
|
||||
- name: couchdb
|
||||
|
|
|
@ -98,10 +98,6 @@ spec:
|
|||
value: http://worker-service:{{ .Values.services.worker.port }}
|
||||
- name: PLATFORM_URL
|
||||
value: {{ .Values.globals.platformUrl | quote }}
|
||||
- name: USE_QUOTAS
|
||||
value: {{ .Values.globals.useQuotas | quote }}
|
||||
- name: EXCLUDE_QUOTAS_TENANTS
|
||||
value: {{ .Values.globals.excludeQuotasTenants | quote }}
|
||||
- name: ACCOUNT_PORTAL_URL
|
||||
value: {{ .Values.globals.accountPortalUrl | quote }}
|
||||
- name: ACCOUNT_PORTAL_API_KEY
|
||||
|
@ -114,12 +110,23 @@ spec:
|
|||
value: {{ .Values.globals.google.clientId | quote }}
|
||||
- name: GOOGLE_CLIENT_SECRET
|
||||
value: {{ .Values.globals.google.secret | quote }}
|
||||
- name: AUTOMATION_MAX_ITERATIONS
|
||||
value: {{ .Values.globals.automationMaxIterations | quote }}
|
||||
|
||||
image: budibase/apps:{{ .Values.globals.appVersion }}
|
||||
imagePullPolicy: Always
|
||||
name: bbapps
|
||||
ports:
|
||||
- containerPort: {{ .Values.services.apps.port }}
|
||||
resources: {}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
restartPolicy: Always
|
||||
serviceAccountName: ""
|
||||
status: {}
|
||||
|
|
|
@ -39,5 +39,13 @@ spec:
|
|||
imagePullPolicy: Always
|
||||
name: couchdb-backup
|
||||
resources: {}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
status: {}
|
||||
{{- end }}
|
||||
|
|
|
@ -60,6 +60,14 @@ spec:
|
|||
volumeMounts:
|
||||
- mountPath: /data
|
||||
name: minio-data
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
restartPolicy: Always
|
||||
serviceAccountName: ""
|
||||
volumes:
|
||||
|
|
|
@ -32,6 +32,14 @@ spec:
|
|||
- containerPort: {{ .Values.services.proxy.port }}
|
||||
resources: {}
|
||||
volumeMounts:
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
restartPolicy: Always
|
||||
serviceAccountName: ""
|
||||
volumes:
|
||||
|
|
|
@ -39,6 +39,14 @@ spec:
|
|||
volumeMounts:
|
||||
- mountPath: /data
|
||||
name: redis-data
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
restartPolicy: Always
|
||||
serviceAccountName: ""
|
||||
volumes:
|
||||
|
|
|
@ -121,6 +121,14 @@ spec:
|
|||
ports:
|
||||
- containerPort: {{ .Values.services.worker.port }}
|
||||
resources: {}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
restartPolicy: Always
|
||||
serviceAccountName: ""
|
||||
status: {}
|
||||
|
|
|
@@ -93,16 +93,15 @@ globals:
  logLevel: info
  selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
  multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
  useQuotas: "0"
  excludeQuotasTenants: "" # comma separated list of tenants to exclude from quotas
  accountPortalUrl: ""
  accountPortalApiKey: ""
  cookieDomain: ""
  platformUrl: ""
  httpMigrations: "0"
  google:
    clientId: ""
    clientId: ""
    secret: ""
  automationMaxIterations: "500"

  createSecrets: true # creates an internal API key, JWT secrets and redis password for you
@ -230,6 +229,8 @@ couchdb:
|
|||
## Optional tolerations
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
|
||||
service:
|
||||
# annotations:
|
||||
enabled: true
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"npmClient": "yarn",
|
||||
"packages": [
|
||||
"packages/*"
|
||||
|
|
package.json
@@ -21,18 +21,17 @@
|
|||
},
|
||||
"scripts": {
|
||||
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
|
||||
"bootstrap": "lerna link && lerna bootstrap",
|
||||
"bootstrap": "lerna link && lerna bootstrap && ./scripts/link-dependencies.sh",
|
||||
"build": "lerna run build",
|
||||
"publishdev": "lerna run publishdev",
|
||||
"publishnpm": "yarn build && lerna publish --force-publish",
|
||||
"release": "lerna publish patch --yes --force-publish",
|
||||
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop",
|
||||
"release": "lerna publish patch --yes --force-publish && yarn release:pro",
|
||||
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop && yarn release:pro:develop",
|
||||
"release:pro": "sh scripts/pro/release.sh",
|
||||
"release:pro:develop": "sh scripts/pro/release.sh develop",
|
||||
"restore": "yarn run clean && yarn run bootstrap && yarn run build",
|
||||
"nuke": "yarn run nuke:packages && yarn run nuke:docker",
|
||||
"nuke:packages": "yarn run restore",
|
||||
"nuke:docker": "lerna run --parallel dev:stack:nuke",
|
||||
"clean": "lerna clean",
|
||||
"kill-port": "kill-port 4001",
|
||||
"kill-builder": "kill-port 3000",
|
||||
"kill-server": "kill-port 4001 4002",
|
||||
"kill-all": "yarn run kill-builder && yarn run kill-server",
|
||||
|
@ -74,6 +73,7 @@
|
|||
"mode:cloud": "yarn env:selfhost:disable && yarn env:multi:enable && yarn env:account:disable",
|
||||
"mode:account": "yarn mode:cloud && yarn env:account:enable",
|
||||
"security:audit": "node scripts/audit.js",
|
||||
"postinstall": "husky install"
|
||||
"postinstall": "husky install",
|
||||
"install:pro": "sh ./scripts/pro/install.sh"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/backend-core",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"description": "Budibase backend core libraries used in server and worker",
|
||||
"main": "src/index.js",
|
||||
"author": "Budibase",
|
||||
|
|
|
@ -13,6 +13,7 @@ exports.Cookies = {
|
|||
|
||||
exports.Headers = {
|
||||
API_KEY: "x-budibase-api-key",
|
||||
LICENSE_KEY: "x-budibase-license-key",
|
||||
API_VER: "x-budibase-api-version",
|
||||
APP_ID: "x-budibase-app-id",
|
||||
TYPE: "x-budibase-type",
|
||||
|
|
|
@ -23,6 +23,7 @@ exports.StaticDatabases = {
|
|||
docs: {
|
||||
apiKeys: "apikeys",
|
||||
usageQuota: "usage_quota",
|
||||
licenseInfo: "license_info",
|
||||
},
|
||||
},
|
||||
// contains information about tenancy and so on
|
||||
|
|
|
@ -27,6 +27,7 @@ const UNICODE_MAX = "\ufff0"
|
|||
exports.ViewNames = {
|
||||
USER_BY_EMAIL: "by_email",
|
||||
BY_API_KEY: "by_api_key",
|
||||
USER_BY_BUILDERS: "by_builders",
|
||||
}
|
||||
|
||||
exports.StaticDatabases = StaticDatabases
|
||||
|
@ -429,34 +430,9 @@ async function getScopedConfig(db, params) {
|
|||
return configDoc && configDoc.config ? configDoc.config : configDoc
|
||||
}
|
||||
|
||||
function generateNewUsageQuotaDoc() {
|
||||
return {
|
||||
_id: StaticDatabases.GLOBAL.docs.usageQuota,
|
||||
quotaReset: Date.now() + 2592000000,
|
||||
usageQuota: {
|
||||
automationRuns: 0,
|
||||
rows: 0,
|
||||
storage: 0,
|
||||
apps: 0,
|
||||
users: 0,
|
||||
views: 0,
|
||||
emails: 0,
|
||||
},
|
||||
usageLimits: {
|
||||
automationRuns: 1000,
|
||||
rows: 4000,
|
||||
apps: 4,
|
||||
storage: 1000,
|
||||
users: 10,
|
||||
emails: 50,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
exports.Replication = Replication
|
||||
exports.getScopedConfig = getScopedConfig
|
||||
exports.generateConfigID = generateConfigID
|
||||
exports.getConfigParams = getConfigParams
|
||||
exports.getScopedFullConfig = getScopedFullConfig
|
||||
exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc
|
||||
exports.generateDevInfoID = generateDevInfoID
|
||||
|
|
|
@ -56,10 +56,34 @@ exports.createApiKeyView = async () => {
|
|||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
exports.createUserBuildersView = async () => {
|
||||
const db = getGlobalDB()
|
||||
let designDoc
|
||||
try {
|
||||
designDoc = await db.get("_design/database")
|
||||
} catch (err) {
|
||||
// no design doc, make one
|
||||
designDoc = DesignDoc()
|
||||
}
|
||||
const view = {
|
||||
map: `function(doc) {
|
||||
if (doc.builder && doc.builder.global === true) {
|
||||
emit(doc._id, doc._id)
|
||||
}
|
||||
}`,
|
||||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.USER_BY_BUILDERS]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
exports.queryGlobalView = async (viewName, params, db = null) => {
|
||||
const CreateFuncByName = {
|
||||
[ViewNames.USER_BY_EMAIL]: exports.createUserEmailView,
|
||||
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
|
||||
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
|
||||
}
|
||||
// can pass DB in if working with something specific
|
||||
if (!db) {
|
||||
|
|
|
@ -28,6 +28,7 @@ module.exports = {
|
|||
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
|
||||
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
|
||||
PLATFORM_URL: process.env.PLATFORM_URL,
|
||||
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
|
||||
isTest,
|
||||
_set(key, value) {
|
||||
process.env[key] = value
|
||||
|
|
|
@@ -0,0 +1,11 @@
class BudibaseError extends Error {
  constructor(message, type, code) {
    super(message)
    this.type = type
    this.code = code
  }
}

module.exports = {
  BudibaseError,
}
@@ -0,0 +1,41 @@
const licensing = require("./licensing")

const codes = {
  ...licensing.codes,
}

const types = {
  ...licensing.types,
}

const context = {
  ...licensing.context,
}

const getPublicError = err => {
  let error
  if (err.code || err.type) {
    // add generic error information
    error = {
      code: err.code,
      type: err.type,
    }

    if (err.code && context[err.code]) {
      error = {
        ...error,
        // get any additional context from this error
        ...context[err.code](err),
      }
    }
  }

  return error
}

module.exports = {
  codes,
  types,
  UsageLimitError: licensing.UsageLimitError,
  getPublicError,
}
@@ -0,0 +1,32 @@
const { BudibaseError } = require("./base")

const types = {
  LICENSE_ERROR: "license_error",
}

const codes = {
  USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
}

const context = {
  [codes.USAGE_LIMIT_EXCEEDED]: err => {
    return {
      limitName: err.limitName,
    }
  },
}

class UsageLimitError extends BudibaseError {
  constructor(message, limitName) {
    super(message, types.LICENSE_ERROR, codes.USAGE_LIMIT_EXCEEDED)
    this.limitName = limitName
    this.status = 400
  }
}

module.exports = {
  types,
  codes,
  context,
  UsageLimitError,
}
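For context, the three error modules added above compose roughly as in the sketch below. The require path and the quota numbers are illustrative assumptions, not part of this commit.

const { errors } = require("@budibase/backend-core") // import path assumed from the index change further down

// throw the typed error when a hypothetical quota check fails
function assertRowQuota(used, limit) {
  if (used >= limit) {
    throw new errors.UsageLimitError("Row quota exceeded", "rows")
  }
}

try {
  assertRowQuota(4000, 4000)
} catch (err) {
  // getPublicError keeps only the safe generic fields plus any registered context
  const publicError = errors.getPublicError(err)
  console.log(err.status, publicError)
  // 400 { code: 'usage_limit_exceeded', type: 'license_error', limitName: 'rows' }
}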
@@ -0,0 +1,52 @@
const env = require("../environment")
const tenancy = require("../tenancy")

/**
 * Read the TENANT_FEATURE_FLAGS env var and return an array of feature flags for each tenant.
 * The env var is formatted as:
 * tenant1:feature1:feature2,tenant2:feature1
 */
const getFeatureFlags = () => {
  if (!env.TENANT_FEATURE_FLAGS) {
    return
  }

  const tenantFeatureFlags = {}

  env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
    const [tenantId, ...features] = tenantToFeatures.split(":")

    features.forEach(feature => {
      if (!tenantFeatureFlags[tenantId]) {
        tenantFeatureFlags[tenantId] = []
      }
      tenantFeatureFlags[tenantId].push(feature)
    })
  })

  return tenantFeatureFlags
}

const TENANT_FEATURE_FLAGS = getFeatureFlags()

exports.isEnabled = featureFlag => {
  const tenantId = tenancy.getTenantId()

  return (
    TENANT_FEATURE_FLAGS &&
    TENANT_FEATURE_FLAGS[tenantId] &&
    TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
  )
}

exports.getTenantFeatureFlags = tenantId => {
  if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
    return TENANT_FEATURE_FLAGS[tenantId]
  }

  return []
}

exports.FeatureFlag = {
  LICENSING: "LICENSING",
}
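A quick illustration of the env var format this module parses; the tenant IDs and the second flag name are invented for the example, and the value must be set before the module is first required.

process.env.TENANT_FEATURE_FLAGS = "default:LICENSING,acme:LICENSING:SOME_OTHER_FLAG" // hypothetical value
const featureFlags = require("./featureFlags")

console.log(featureFlags.getTenantFeatureFlags("acme"))    // ["LICENSING", "SOME_OTHER_FLAG"]
console.log(featureFlags.getTenantFeatureFlags("unknown")) // []
// isEnabled("LICENSING") additionally depends on tenancy.getTenantId() resolving
// to one of the tenants named in the env var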
@ -15,4 +15,9 @@ module.exports = {
|
|||
auth: require("../auth"),
|
||||
constants: require("../constants"),
|
||||
migrations: require("../migrations"),
|
||||
errors: require("./errors"),
|
||||
env: require("./environment"),
|
||||
accounts: require("./cloud/accounts"),
|
||||
tenancy: require("./tenancy"),
|
||||
featureFlags: require("./featureFlags"),
|
||||
}
|
||||
|
|
|
@ -2,24 +2,27 @@ const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
|||
|
||||
const { authenticateThirdParty } = require("./third-party-common")
|
||||
|
||||
async function authenticate(accessToken, refreshToken, profile, done) {
|
||||
const thirdPartyUser = {
|
||||
provider: profile.provider, // should always be 'google'
|
||||
providerType: "google",
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: profile._json.email,
|
||||
oauth2: {
|
||||
accessToken: accessToken,
|
||||
refreshToken: refreshToken,
|
||||
},
|
||||
}
|
||||
const buildVerifyFn = async saveUserFn => {
|
||||
return (accessToken, refreshToken, profile, done) => {
|
||||
const thirdPartyUser = {
|
||||
provider: profile.provider, // should always be 'google'
|
||||
providerType: "google",
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: profile._json.email,
|
||||
oauth2: {
|
||||
accessToken: accessToken,
|
||||
refreshToken: refreshToken,
|
||||
},
|
||||
}
|
||||
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
true, // require local accounts to exist
|
||||
done
|
||||
)
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
true, // require local accounts to exist
|
||||
done,
|
||||
saveUserFn
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -27,11 +30,7 @@ async function authenticate(accessToken, refreshToken, profile, done) {
|
|||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||
* @returns Dynamically configured Passport Google Strategy
|
||||
*/
|
||||
exports.strategyFactory = async function (
|
||||
config,
|
||||
callbackUrl,
|
||||
verify = authenticate
|
||||
) {
|
||||
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
|
||||
try {
|
||||
const { clientID, clientSecret } = config
|
||||
|
||||
|
@ -41,6 +40,7 @@ exports.strategyFactory = async function (
|
|||
)
|
||||
}
|
||||
|
||||
const verify = buildVerifyFn(saveUserFn)
|
||||
return new GoogleStrategy(
|
||||
{
|
||||
clientID: config.clientID,
|
||||
|
@ -58,4 +58,4 @@ exports.strategyFactory = async function (
|
|||
}
|
||||
}
|
||||
// expose for testing
|
||||
exports.authenticate = authenticate
|
||||
exports.buildVerifyFn = buildVerifyFn
|
||||
|
|
|
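The Google strategy above now receives a saveUserFn and builds its passport verify callback through buildVerifyFn instead of importing saveUser directly. A rough sketch of the new wiring; the config values, callback URL and save function are placeholders, not code from this commit.

const google = require("./google")

const config = { clientID: "<google-client-id>", clientSecret: "<google-client-secret>" }
const callbackUrl = "https://example.com/auth/google/callback"

// persistence is now injected by the caller rather than hard-wired inside the strategy
const saveUserFn = async user => user

google.strategyFactory(config, callbackUrl, saveUserFn).then(strategy => {
  // strategy is a passport GoogleStrategy whose verify callback came from buildVerifyFn(saveUserFn)
  console.log(strategy.name)
})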
@ -2,46 +2,49 @@ const fetch = require("node-fetch")
|
|||
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||
const { authenticateThirdParty } = require("./third-party-common")
|
||||
|
||||
/**
|
||||
* @param {*} issuer The identity provider base URL
|
||||
* @param {*} sub The user ID
|
||||
* @param {*} profile The user profile information. Created by passport from the /userinfo response
|
||||
* @param {*} jwtClaims The parsed id_token claims
|
||||
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
|
||||
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
|
||||
* @param {*} idToken The id_token - always a JWT
|
||||
* @param {*} params The response body from requesting an access_token
|
||||
* @param {*} done The passport callback: err, user, info
|
||||
*/
|
||||
async function authenticate(
|
||||
issuer,
|
||||
sub,
|
||||
profile,
|
||||
jwtClaims,
|
||||
accessToken,
|
||||
refreshToken,
|
||||
idToken,
|
||||
params,
|
||||
done
|
||||
) {
|
||||
const thirdPartyUser = {
|
||||
// store the issuer info to enable sync in future
|
||||
provider: issuer,
|
||||
providerType: "oidc",
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: getEmail(profile, jwtClaims),
|
||||
oauth2: {
|
||||
accessToken: accessToken,
|
||||
refreshToken: refreshToken,
|
||||
},
|
||||
}
|
||||
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
false, // don't require local accounts to exist
|
||||
const buildVerifyFn = saveUserFn => {
|
||||
/**
|
||||
* @param {*} issuer The identity provider base URL
|
||||
* @param {*} sub The user ID
|
||||
* @param {*} profile The user profile information. Created by passport from the /userinfo response
|
||||
* @param {*} jwtClaims The parsed id_token claims
|
||||
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
|
||||
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
|
||||
* @param {*} idToken The id_token - always a JWT
|
||||
* @param {*} params The response body from requesting an access_token
|
||||
* @param {*} done The passport callback: err, user, info
|
||||
*/
|
||||
return async (
|
||||
issuer,
|
||||
sub,
|
||||
profile,
|
||||
jwtClaims,
|
||||
accessToken,
|
||||
refreshToken,
|
||||
idToken,
|
||||
params,
|
||||
done
|
||||
)
|
||||
) => {
|
||||
const thirdPartyUser = {
|
||||
// store the issuer info to enable sync in future
|
||||
provider: issuer,
|
||||
providerType: "oidc",
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: getEmail(profile, jwtClaims),
|
||||
oauth2: {
|
||||
accessToken: accessToken,
|
||||
refreshToken: refreshToken,
|
||||
},
|
||||
}
|
||||
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
false, // don't require local accounts to exist
|
||||
done,
|
||||
saveUserFn
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -86,7 +89,7 @@ function validEmail(value) {
|
|||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||
* @returns Dynamically configured Passport OIDC Strategy
|
||||
*/
|
||||
exports.strategyFactory = async function (config, callbackUrl) {
|
||||
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
|
||||
try {
|
||||
const { clientID, clientSecret, configUrl } = config
|
||||
|
||||
|
@ -106,6 +109,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
|
|||
|
||||
const body = await response.json()
|
||||
|
||||
const verify = buildVerifyFn(saveUserFn)
|
||||
return new OIDCStrategy(
|
||||
{
|
||||
issuer: body.issuer,
|
||||
|
@ -116,7 +120,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
|
|||
clientSecret: clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
},
|
||||
authenticate
|
||||
verify
|
||||
)
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
|
@ -125,4 +129,4 @@ exports.strategyFactory = async function (config, callbackUrl) {
|
|||
}
|
||||
|
||||
// expose for testing
|
||||
exports.authenticate = authenticate
|
||||
exports.buildVerifyFn = buildVerifyFn
|
||||
|
|
|
@ -58,8 +58,10 @@ describe("google", () => {
|
|||
|
||||
it("delegates authentication to third party common", async () => {
|
||||
const google = require("../google")
|
||||
const mockSaveUserFn = jest.fn()
|
||||
const authenticate = await google.buildVerifyFn(mockSaveUserFn)
|
||||
|
||||
await google.authenticate(
|
||||
await authenticate(
|
||||
data.accessToken,
|
||||
data.refreshToken,
|
||||
profile,
|
||||
|
@ -69,7 +71,8 @@ describe("google", () => {
|
|||
expect(authenticateThirdParty).toHaveBeenCalledWith(
|
||||
user,
|
||||
true,
|
||||
mockDone)
|
||||
mockDone,
|
||||
mockSaveUserFn)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -83,8 +83,10 @@ describe("oidc", () => {
|
|||
|
||||
async function doAuthenticate() {
|
||||
const oidc = require("../oidc")
|
||||
const mockSaveUserFn = jest.fn()
|
||||
const authenticate = await oidc.buildVerifyFn(mockSaveUserFn)
|
||||
|
||||
await oidc.authenticate(
|
||||
await authenticate(
|
||||
issuer,
|
||||
sub,
|
||||
profile,
|
||||
|
|
|
@@ -1,7 +1,6 @@
const env = require("../../environment")
const jwt = require("jsonwebtoken")
const { generateGlobalUserID } = require("../../db/utils")
const { saveUser } = require("../../utils")
const { authError } = require("./utils")
const { newid } = require("../../hashing")
const { createASession } = require("../../security/sessions")
@@ -16,8 +15,11 @@ exports.authenticateThirdParty = async function (
  thirdPartyUser,
  requireLocalAccount = true,
  done,
  saveUserFn = saveUser
  saveUserFn
) {
  if (!saveUserFn) {
    throw new Error("Save user function must be provided")
  }
  if (!thirdPartyUser.provider) {
    return authError(done, "third party user provider required")
  }
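authenticateThirdParty no longer defaults to the core saveUser helper, so every caller must inject one. A minimal sketch of the new contract; the user object and the save function here are placeholders, not code from this commit.

const { authenticateThirdParty } = require("./third-party-common")

const saveUserFn = async user => user // injected persistence, e.g. supplied by the worker
const done = (err, user) => console.log(err ? err.message : user && user.email)

// omitting saveUserFn now throws "Save user function must be provided"
authenticateThirdParty(
  { provider: "google", providerType: "google", userId: "123", email: "user@example.com" },
  false, // don't require a local account to exist
  done,
  saveUserFn
).catch(err => console.error(err))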
@ -17,6 +17,7 @@ exports.Databases = {
|
|||
FLAGS: "flags",
|
||||
APP_METADATA: "appMetadata",
|
||||
QUERY_VARS: "queryVars",
|
||||
LICENSES: "license",
|
||||
}
|
||||
|
||||
exports.SEPARATOR = SEPARATOR
|
||||
|
|
|
@ -176,6 +176,13 @@ exports.getGlobalUserByEmail = async email => {
|
|||
})
|
||||
}
|
||||
|
||||
exports.getBuildersCount = async () => {
|
||||
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
|
||||
include_docs: false,
|
||||
})
|
||||
return builders ? builders.length : 0
|
||||
}
|
||||
|
||||
exports.saveUser = async (
|
||||
user,
|
||||
tenantId,
|
||||
|
@ -289,4 +296,5 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
|
|||
userId,
|
||||
sessions.map(({ sessionId }) => sessionId)
|
||||
)
|
||||
await userCache.invalidateUser(userId)
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/bbui",
|
||||
"description": "A UI solution used in the different Budibase projects.",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"license": "MPL-2.0",
|
||||
"svelte": "src/index.js",
|
||||
"module": "dist/bbui.es.js",
|
||||
|
@ -38,7 +38,7 @@
|
|||
],
|
||||
"dependencies": {
|
||||
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.19",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.29",
|
||||
"@spectrum-css/actionbutton": "^1.0.1",
|
||||
"@spectrum-css/actiongroup": "^1.0.1",
|
||||
"@spectrum-css/avatar": "^3.0.2",
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
<script>
|
||||
export let wide = false
|
||||
export let maxWidth = "80ch"
|
||||
</script>
|
||||
|
||||
<div class:wide>
|
||||
<div style="--max-width: {maxWidth}" class:wide>
|
||||
<slot />
|
||||
</div>
|
||||
|
||||
|
@ -12,7 +13,7 @@
|
|||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
align-items: stretch;
|
||||
max-width: 80ch;
|
||||
max-width: var(--max-width);
|
||||
margin: 0 auto;
|
||||
padding: calc(var(--spacing-xl) * 2);
|
||||
min-height: calc(100% - var(--spacing-xl) * 4);
|
||||
|
|
|
@ -16,11 +16,11 @@
|
|||
easing: easing,
|
||||
})
|
||||
|
||||
$: if (value) $progress = value
|
||||
$: if (value || value === 0) $progress = value
|
||||
</script>
|
||||
|
||||
<div
|
||||
class:spectrum-ProgressBar--indeterminate={!value}
|
||||
class:spectrum-ProgressBar--indeterminate={!value && value !== 0}
|
||||
class:spectrum-ProgressBar--sideLabel={sideLabel}
|
||||
class="spectrum-ProgressBar spectrum-ProgressBar--size{size}"
|
||||
value={$progress}
|
||||
|
@ -28,7 +28,7 @@
|
|||
aria-valuenow={$progress}
|
||||
aria-valuemin="0"
|
||||
aria-valuemax="100"
|
||||
style={width ? `width: ${width}px;` : ""}
|
||||
style={width ? `width: ${width};` : ""}
|
||||
>
|
||||
{#if $$slots}
|
||||
<div
|
||||
|
@ -37,7 +37,7 @@
|
|||
<slot />
|
||||
</div>
|
||||
{/if}
|
||||
{#if value}
|
||||
{#if value || value === 0}
|
||||
<div
|
||||
class="spectrum-FieldLabel spectrum-ProgressBar-percentage spectrum-FieldLabel--size{size}"
|
||||
>
|
||||
|
@ -47,7 +47,7 @@
|
|||
<div class="spectrum-ProgressBar-track">
|
||||
<div
|
||||
class="spectrum-ProgressBar-fill"
|
||||
style={value ? `width: ${$progress}%` : ""}
|
||||
style={value || value === 0 ? `width: ${$progress}%` : ""}
|
||||
/>
|
||||
</div>
|
||||
<div class="spectrum-ProgressBar-label" hidden="" />
|
||||
|
|
|
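The ProgressBar edits above swap bare truthiness checks for `value || value === 0` so that an explicit progress of 0 renders as 0% instead of falling into the indeterminate state. In plain JavaScript terms:

const fillStyle = value => (value || value === 0 ? `width: ${value}%` : "")

fillStyle(0)         // "width: 0%"  -- previously "" because 0 is falsy
fillStyle(42)        // "width: 42%"
fillStyle(undefined) // ""           -- still treated as indeterminate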
@ -5,12 +5,14 @@
|
|||
export let serif = false
|
||||
export let weight = null
|
||||
export let textAlign = null
|
||||
export let color = null
|
||||
</script>
|
||||
|
||||
<p
|
||||
style={`
|
||||
${weight ? `font-weight:${weight};` : ""}
|
||||
${textAlign ? `text-align:${textAlign};` : ""}
|
||||
${color ? `color:${color};` : ""}
|
||||
`}
|
||||
class="spectrum-Body spectrum-Body--size{size}"
|
||||
class:spectrum-Body--serif={serif}
|
||||
|
|
|
@ -5,12 +5,13 @@
|
|||
export let size = "M"
|
||||
export let textAlign
|
||||
export let noPadding = false
|
||||
export let weight = "default" // light, heavy, default
|
||||
</script>
|
||||
|
||||
<h1
|
||||
style={textAlign ? `text-align:${textAlign}` : ``}
|
||||
class:noPadding
|
||||
class="spectrum-Heading spectrum-Heading--size{size}"
|
||||
class="spectrum-Heading spectrum-Heading--size{size} spectrum-Heading--{weight}"
|
||||
>
|
||||
<slot />
|
||||
</h1>
|
||||
|
|
|
@ -20,7 +20,6 @@ filterTests(['smoke', 'all'], () => {
|
|||
})
|
||||
|
||||
// Setup trigger
|
||||
cy.contains("Setup").click()
|
||||
cy.get(".spectrum-Picker-label").click()
|
||||
cy.wait(500)
|
||||
cy.contains("dog").click()
|
||||
|
@ -32,12 +31,11 @@ filterTests(['smoke', 'all'], () => {
|
|||
cy.contains("Create Row").trigger('mouseover').click().click()
|
||||
cy.get(".spectrum-Button--cta").click()
|
||||
})
|
||||
cy.contains("Setup").click()
|
||||
cy.get(".spectrum-Picker-label").eq(1).click()
|
||||
cy.contains("dog").click()
|
||||
cy.get(".spectrum-Textfield-input")
|
||||
.first()
|
||||
.type("{{ trigger.row.name }}", { parseSpecialCharSequences: false })
|
||||
.first()
|
||||
.type("{{ trigger.row.name }}", { parseSpecialCharSequences: false })
|
||||
cy.get(".spectrum-Textfield-input")
|
||||
.eq(1)
|
||||
.type("11")
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/builder",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"license": "GPL-3.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
@ -65,10 +65,10 @@
|
|||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "^1.0.105-alpha.19",
|
||||
"@budibase/client": "^1.0.105-alpha.19",
|
||||
"@budibase/frontend-core": "^1.0.105-alpha.19",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.19",
|
||||
"@budibase/bbui": "^1.0.105-alpha.29",
|
||||
"@budibase/client": "^1.0.105-alpha.29",
|
||||
"@budibase/frontend-core": "^1.0.105-alpha.29",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.29",
|
||||
"@sentry/browser": "5.19.1",
|
||||
"@spectrum-css/page": "^3.0.1",
|
||||
"@spectrum-css/vars": "^3.0.1",
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
if (v.internal) {
|
||||
acc[k] = v
|
||||
}
|
||||
delete acc.LOOP
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
|
|
|
@ -72,7 +72,9 @@
|
|||
animate:flip={{ duration: 500 }}
|
||||
in:fly|local={{ x: 500, duration: 1500 }}
|
||||
>
|
||||
<FlowItem {testDataModal} {block} />
|
||||
{#if block.stepId !== "LOOP"}
|
||||
<FlowItem {testDataModal} {block} />
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
|
|
|
@ -9,8 +9,8 @@
|
|||
Modal,
|
||||
Button,
|
||||
StatusLight,
|
||||
ActionButton,
|
||||
Select,
|
||||
ActionButton,
|
||||
notifications,
|
||||
} from "@budibase/bbui"
|
||||
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
|
||||
|
@ -25,8 +25,8 @@
|
|||
let webhookModal
|
||||
let actionModal
|
||||
let resultsModal
|
||||
let setupToggled
|
||||
let blockComplete
|
||||
let showLooping = false
|
||||
|
||||
$: rowControl = $automationStore.selectedAutomation.automation.rowControl
|
||||
$: showBindingPicker =
|
||||
|
@ -52,8 +52,21 @@
|
|||
block.schema?.inputs?.properties || {}
|
||||
).every(x => block?.inputs[x])
|
||||
|
||||
$: loopingSelected =
|
||||
$automationStore.selectedAutomation?.automation.definition.steps.find(
|
||||
x => x.blockToLoop === block.id
|
||||
)
|
||||
|
||||
async function deleteStep() {
|
||||
let loopBlock =
|
||||
$automationStore.selectedAutomation?.automation.definition.steps.find(
|
||||
x => x.blockToLoop === block.id
|
||||
)
|
||||
|
||||
try {
|
||||
if (loopBlock) {
|
||||
automationStore.actions.deleteAutomationBlock(loopBlock)
|
||||
}
|
||||
automationStore.actions.deleteAutomationBlock(block)
|
||||
await automationStore.actions.save(
|
||||
$automationStore.selectedAutomation?.automation
|
||||
|
@ -76,6 +89,23 @@
|
|||
)
|
||||
}
|
||||
|
||||
async function addLooping() {
|
||||
loopingSelected = true
|
||||
const loopDefinition = $automationStore.blockDefinitions.ACTION.LOOP
|
||||
|
||||
const loopBlock = $automationStore.selectedAutomation.constructBlock(
|
||||
"ACTION",
|
||||
"LOOP",
|
||||
loopDefinition
|
||||
)
|
||||
loopBlock.blockToLoop = block.id
|
||||
block.loopBlock = loopBlock.id
|
||||
automationStore.actions.addBlockToAutomation(loopBlock, blockIdx)
|
||||
await automationStore.actions.save(
|
||||
$automationStore.selectedAutomation?.automation
|
||||
)
|
||||
}
|
||||
|
||||
async function onSelect(block) {
|
||||
await automationStore.update(state => {
|
||||
state.selectedBlock = block
|
||||
|
@ -84,13 +114,68 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
<div
|
||||
class={`block ${block.type} hoverable`}
|
||||
class:selected
|
||||
on:click={() => {
|
||||
onSelect(block)
|
||||
}}
|
||||
>
|
||||
<div class={`block ${block.type} hoverable`} class:selected on:click={() => {}}>
|
||||
{#if loopingSelected}
|
||||
<div class="blockSection">
|
||||
<div
|
||||
on:click={() => {
|
||||
showLooping = !showLooping
|
||||
}}
|
||||
class="splitHeader"
|
||||
>
|
||||
<div class="center-items">
|
||||
<svg
|
||||
width="28px"
|
||||
height="28px"
|
||||
class="spectrum-Icon"
|
||||
style="color:grey;"
|
||||
focusable="false"
|
||||
>
|
||||
<use xlink:href="#spectrum-icon-18-Reuse" />
|
||||
</svg>
|
||||
<div class="iconAlign">
|
||||
<Detail size="S">Looping</Detail>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="blockTitle">
|
||||
<div
|
||||
style="margin-left: 10px;"
|
||||
on:click={() => {
|
||||
onSelect(block)
|
||||
}}
|
||||
>
|
||||
<Icon name={showLooping ? "ChevronDown" : "ChevronUp"} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Divider noMargin />
|
||||
{#if !showLooping}
|
||||
<div class="blockSection">
|
||||
<div class="block-options">
|
||||
<div class="delete-padding" on:click={() => deleteStep()}>
|
||||
<Icon name="DeleteOutline" />
|
||||
</div>
|
||||
</div>
|
||||
<Layout noPadding gap="S">
|
||||
<AutomationBlockSetup
|
||||
schemaProperties={Object.entries(
|
||||
$automationStore.blockDefinitions.ACTION.LOOP.schema.inputs
|
||||
.properties
|
||||
)}
|
||||
block={$automationStore.selectedAutomation?.automation.definition.steps.find(
|
||||
x => x.blockToLoop === block.id
|
||||
)}
|
||||
{webhookModal}
|
||||
/>
|
||||
</Layout>
|
||||
</div>
|
||||
<Divider noMargin />
|
||||
{/if}
|
||||
{/if}
|
||||
|
||||
<div class="blockSection">
|
||||
<div
|
||||
on:click={() => {
|
||||
|
@ -127,65 +212,66 @@
|
|||
<Detail size="S">{block?.name?.toUpperCase() || ""}</Detail>
|
||||
</div>
|
||||
</div>
|
||||
{#if testResult && testResult[0]}
|
||||
<span on:click={() => resultsModal.show()}>
|
||||
<StatusLight
|
||||
positive={isTrigger || testResult[0].outputs?.success}
|
||||
negative={!testResult[0].outputs?.success}
|
||||
><Body size="XS">View response</Body></StatusLight
|
||||
>
|
||||
</span>
|
||||
{/if}
|
||||
<div class="blockTitle">
|
||||
{#if testResult && testResult[0]}
|
||||
<div style="float: right;" on:click={() => resultsModal.show()}>
|
||||
<StatusLight
|
||||
positive={isTrigger || testResult[0].outputs?.success}
|
||||
negative={!testResult[0].outputs?.success}
|
||||
><Body size="XS">View response</Body></StatusLight
|
||||
>
|
||||
</div>
|
||||
{/if}
|
||||
<div
|
||||
style="margin-left: 10px;"
|
||||
on:click={() => {
|
||||
onSelect(block)
|
||||
}}
|
||||
>
|
||||
<Icon name={blockComplete ? "ChevronDown" : "ChevronUp"} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{#if !blockComplete}
|
||||
<Divider noMargin />
|
||||
<div class="blockSection">
|
||||
<Layout noPadding gap="S">
|
||||
<div class="splitHeader">
|
||||
<ActionButton
|
||||
on:click={() => {
|
||||
onSelect(block)
|
||||
setupToggled = !setupToggled
|
||||
}}
|
||||
quiet
|
||||
icon={setupToggled ? "ChevronDown" : "ChevronRight"}
|
||||
>
|
||||
<Detail size="S">Setup</Detail>
|
||||
</ActionButton>
|
||||
{#if !isTrigger}
|
||||
{#if !isTrigger}
|
||||
<div>
|
||||
<div class="block-options">
|
||||
{#if showBindingPicker}
|
||||
<div>
|
||||
<Select
|
||||
on:change={toggleFieldControl}
|
||||
quiet
|
||||
defaultValue="Use values"
|
||||
autoWidth
|
||||
value={rowControl ? "Use bindings" : "Use values"}
|
||||
options={["Use values", "Use bindings"]}
|
||||
placeholder={null}
|
||||
/>
|
||||
</div>
|
||||
{#if !loopingSelected}
|
||||
<ActionButton on:click={() => addLooping()} icon="Reuse"
|
||||
>Add Looping</ActionButton
|
||||
>
|
||||
{/if}
|
||||
<div class="delete-padding" on:click={() => deleteStep()}>
|
||||
<Icon name="DeleteOutline" />
|
||||
</div>
|
||||
{#if showBindingPicker}
|
||||
<Select
|
||||
on:change={toggleFieldControl}
|
||||
defaultValue="Use values"
|
||||
autoWidth
|
||||
value={rowControl ? "Use bindings" : "Use values"}
|
||||
options={["Use values", "Use bindings"]}
|
||||
placeholder={null}
|
||||
/>
|
||||
{/if}
|
||||
<ActionButton
|
||||
on:click={() => deleteStep()}
|
||||
icon="DeleteOutline"
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if setupToggled}
|
||||
<AutomationBlockSetup
|
||||
schemaProperties={Object.entries(block.schema.inputs.properties)}
|
||||
{block}
|
||||
{webhookModal}
|
||||
/>
|
||||
{#if lastStep}
|
||||
<Button on:click={() => testDataModal.show()} cta
|
||||
>Finish and test automation</Button
|
||||
>
|
||||
{/if}
|
||||
<AutomationBlockSetup
|
||||
schemaProperties={Object.entries(block.schema.inputs.properties)}
|
||||
{block}
|
||||
{webhookModal}
|
||||
/>
|
||||
{#if lastStep}
|
||||
<Button on:click={() => testDataModal.show()} cta
|
||||
>Finish and test automation</Button
|
||||
>
|
||||
{/if}
|
||||
</Layout>
|
||||
</div>
|
||||
|
@ -220,8 +306,10 @@
|
|||
padding-left: 30px;
|
||||
}
|
||||
.block-options {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
display: flex;
|
||||
gap: var(--spacing-m);
|
||||
}
|
||||
.center-items {
|
||||
display: flex;
|
||||
|
@ -256,4 +344,9 @@
|
|||
/* center horizontally */
|
||||
align-self: center;
|
||||
}
|
||||
|
||||
.blockTitle {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import { ModalContent, Icon, Detail, TextArea } from "@budibase/bbui"
|
||||
import { ModalContent, Icon, Detail, TextArea, Label } from "@budibase/bbui"
|
||||
|
||||
export let testResult
|
||||
export let isTrigger
|
||||
|
@ -10,7 +10,7 @@
|
|||
<ModalContent
|
||||
showCloseIcon={false}
|
||||
showConfirmButton={false}
|
||||
title="Test Automation"
|
||||
title="Test Results"
|
||||
cancelText="Close"
|
||||
>
|
||||
<div slot="header">
|
||||
|
@ -26,7 +26,18 @@
|
|||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<span>
|
||||
{#if testResult[0].outputs.iterations}
|
||||
<div style="display: flex;">
|
||||
<Icon name="Reuse" />
|
||||
<div style="margin-left: 10px;">
|
||||
<Label>
|
||||
This loop ran {testResult[0].outputs.iterations} times.</Label
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</span>
|
||||
<div
|
||||
on:click={() => {
|
||||
inputToggled = !inputToggled
|
||||
|
|
|
@ -88,36 +88,65 @@
|
|||
if (!block || !automation) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Find previous steps to the selected one
|
||||
let allSteps = [...automation.steps]
|
||||
|
||||
if (automation.trigger) {
|
||||
allSteps = [automation.trigger, ...allSteps]
|
||||
}
|
||||
const blockIdx = allSteps.findIndex(step => step.id === block.id)
|
||||
let blockIdx = allSteps.findIndex(step => step.id === block.id)
|
||||
|
||||
// Extract all outputs from all previous steps as available bindings
|
||||
// Extract all outputs from all previous steps as available bindins
|
||||
let bindings = []
|
||||
for (let idx = 0; idx < blockIdx; idx++) {
|
||||
const outputs = Object.entries(
|
||||
allSteps[idx].schema?.outputs?.properties ?? {}
|
||||
)
|
||||
let wasLoopBlock = allSteps[idx]?.stepId === "LOOP"
|
||||
let isLoopBlock =
|
||||
allSteps[idx]?.stepId === "LOOP" &&
|
||||
allSteps.find(x => x.blockToLoop === block.id)
|
||||
|
||||
// If the previous block was a loop block, decrement the index so the following
|
||||
// steps are in the correct order
|
||||
if (wasLoopBlock) {
|
||||
blockIdx--
|
||||
}
|
||||
|
||||
let schema = allSteps[idx]?.schema?.outputs?.properties ?? {}
|
||||
|
||||
// If its a Loop Block, we need to add this custom schema
|
||||
if (isLoopBlock) {
|
||||
schema = {
|
||||
currentItem: {
|
||||
type: "string",
|
||||
description: "the item currently being executed",
|
||||
},
|
||||
}
|
||||
}
|
||||
const outputs = Object.entries(schema)
|
||||
|
||||
bindings = bindings.concat(
|
||||
outputs.map(([name, value]) => {
|
||||
const stepsLabel = block.name.startsWith("JS")
|
||||
let runtimeName = isLoopBlock
|
||||
? `loop.${name}`
|
||||
: block.name.startsWith("JS")
|
||||
? `steps[${idx}].${name}`
|
||||
: `steps.${idx}.${name}`
|
||||
const runtime = idx === 0 ? `trigger.${name}` : stepsLabel
|
||||
const runtime = idx === 0 ? `trigger.${name}` : runtimeName
|
||||
return {
|
||||
label: runtime,
|
||||
type: value.type,
|
||||
description: value.description,
|
||||
category: idx === 0 ? "Trigger outputs" : `Step ${idx} outputs`,
|
||||
category:
|
||||
idx === 0
|
||||
? "Trigger outputs"
|
||||
: isLoopBlock
|
||||
? "Loop Outputs"
|
||||
: `Step ${idx} outputs`,
|
||||
path: runtime,
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
return bindings
|
||||
}
|
||||
|
||||
|
@ -264,6 +293,14 @@
|
|||
value={inputData[key]}
|
||||
/>
|
||||
</CodeEditorModal>
|
||||
{:else if value.customType === "loopOption"}
|
||||
<Select
|
||||
on:change={e => onChange(e, key)}
|
||||
autoWidth
|
||||
value={inputData[key]}
|
||||
options={["Array", "String"]}
|
||||
defaultValue={"Array"}
|
||||
/>
|
||||
{:else if value.type === "string" || value.type === "number" || value.type === "integer"}
|
||||
{#if isTestModal}
|
||||
<ModalBindableInput
|
||||
|
|
|
@ -34,10 +34,10 @@
|
|||
$: label = meta.name ? capitalise(meta.name) : ""
|
||||
|
||||
const timeStamp = resolveTimeStamp(value)
|
||||
const isTimeStamp = timeStamp ? true : false
|
||||
const isTimeStamp = !!timeStamp
|
||||
</script>
|
||||
|
||||
{#if type === "options"}
|
||||
{#if type === "options" && meta.constraints.inclusion.length !== 0}
|
||||
<Select
|
||||
{label}
|
||||
data-cy="{meta.name}-select"
|
||||
|
@ -51,7 +51,7 @@
|
|||
<Dropzone {label} bind:value />
|
||||
{:else if type === "boolean"}
|
||||
<Toggle text={label} bind:value data-cy="{meta.name}-input" />
|
||||
{:else if type === "array"}
|
||||
{:else if type === "array" && meta.constraints.inclusion.length !== 0}
|
||||
<Multiselect bind:value {label} options={meta.constraints.inclusion} />
|
||||
{:else if type === "link"}
|
||||
<LinkedRowSelector bind:linkedRows={value} schema={meta} />
|
||||
|
|
|
@ -26,14 +26,6 @@
|
|||
on:change={value => (parameters.rowId = value.detail)}
|
||||
/>
|
||||
|
||||
<Label small>Row Rev</Label>
|
||||
<DrawerBindableInput
|
||||
{bindings}
|
||||
title="Row rev to delete"
|
||||
value={parameters.revId}
|
||||
on:change={value => (parameters.revId = value.detail)}
|
||||
/>
|
||||
|
||||
<Label small />
|
||||
<Checkbox text="Require confirmation" bind:value={parameters.confirm} />
|
||||
|
||||
|
|
|
@@ -0,0 +1,14 @@
import { auth } from "../stores/portal"
import { get } from "svelte/store"

export const FEATURE_FLAGS = {
  LICENSING: "LICENSING",
}

export const isEnabled = featureFlag => {
  const user = get(auth).user
  if (user?.featureFlags?.includes(featureFlag)) {
    return true
  }
  return false
}
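This helper is what gates the new Upgrade entry in the settings navigation later in this diff. A minimal usage sketch; the user object is a stand-in for whatever the portal auth store holds after login.

import { isEnabled, FEATURE_FLAGS } from "../helpers/featureFlags"

let menu = []
// with the auth store holding e.g. { user: { featureFlags: ["LICENSING"] } } ...
if (isEnabled(FEATURE_FLAGS.LICENSING)) {
  menu = menu.concat({
    title: "Upgrade",
    href: "/builder/portal/settings/upgrade",
  })
}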
@ -14,7 +14,7 @@
|
|||
notifications.success("Invitation accepted successfully")
|
||||
$goto("../auth/login")
|
||||
} catch (error) {
|
||||
notifications.error("Error accepting invitation")
|
||||
notifications.error(error.message)
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
import ChangePasswordModal from "components/settings/ChangePasswordModal.svelte"
|
||||
import UpdateAPIKeyModal from "components/settings/UpdateAPIKeyModal.svelte"
|
||||
import Logo from "assets/bb-emblem.svg"
|
||||
import { isEnabled, FEATURE_FLAGS } from "../../../helpers/featureFlags"
|
||||
|
||||
let loaded = false
|
||||
let userInfoModal
|
||||
|
@ -54,10 +55,17 @@
|
|||
if (!$adminStore.cloud) {
|
||||
menu = menu.concat([
|
||||
{
|
||||
title: "Updates",
|
||||
title: "Update",
|
||||
href: "/builder/portal/settings/update",
|
||||
},
|
||||
])
|
||||
|
||||
if (isEnabled(FEATURE_FLAGS.LICENSING)) {
|
||||
menu = menu.concat({
|
||||
title: "Upgrade",
|
||||
href: "/builder/portal/settings/upgrade",
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
menu = menu.concat([
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
})
|
||||
notifications.success("Successfully created user")
|
||||
} catch (error) {
|
||||
notifications.error("Error creating user")
|
||||
notifications.error(error.message)
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -0,0 +1,151 @@
|
|||
<script>
|
||||
import {
|
||||
Layout,
|
||||
Heading,
|
||||
Body,
|
||||
Divider,
|
||||
Link,
|
||||
Button,
|
||||
Input,
|
||||
Label,
|
||||
notifications,
|
||||
} from "@budibase/bbui"
|
||||
import { auth, admin } from "stores/portal"
|
||||
import { redirect } from "@roxi/routify"
|
||||
import { processStringSync } from "@budibase/string-templates"
|
||||
import { API } from "api"
|
||||
import { onMount } from "svelte"
|
||||
|
||||
$: license = $auth.user.license
|
||||
$: upgradeUrl = `${$admin.accountPortalUrl}/portal/upgrade`
|
||||
|
||||
$: activateDisabled = !licenseKey || licenseKeyDisabled
|
||||
|
||||
let licenseInfo
|
||||
|
||||
let licenseKeyDisabled = false
|
||||
let licenseKeyType = "text"
|
||||
let licenseKey = ""
|
||||
|
||||
// Make sure page can't be visited directly in cloud
|
||||
$: {
|
||||
if ($admin.cloud) {
|
||||
$redirect("../../portal")
|
||||
}
|
||||
}
|
||||
|
||||
const activate = async () => {
|
||||
await API.activateLicenseKey({ licenseKey })
|
||||
await auth.getSelf()
|
||||
await setLicenseInfo()
|
||||
notifications.success("Successfully activated")
|
||||
}
|
||||
|
||||
const refresh = async () => {
|
||||
try {
|
||||
await API.refreshLicense()
|
||||
await auth.getSelf()
|
||||
notifications.success("Refreshed license")
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
notifications.error("Error refreshing license")
|
||||
}
|
||||
}
|
||||
|
||||
// deactivate the license key field if there is a license key set
|
||||
$: {
|
||||
if (licenseInfo?.licenseKey) {
|
||||
licenseKey = "**********************************************"
|
||||
licenseKeyType = "password"
|
||||
licenseKeyDisabled = true
|
||||
activateDisabled = true
|
||||
}
|
||||
}
|
||||
|
||||
const setLicenseInfo = async () => {
|
||||
licenseInfo = await API.getLicenseInfo()
|
||||
}
|
||||
|
||||
onMount(async () => {
|
||||
await setLicenseInfo()
|
||||
})
|
||||
</script>
|
||||
|
||||
{#if $auth.isAdmin}
|
||||
<Layout noPadding>
|
||||
<Layout gap="XS" noPadding>
|
||||
<Heading size="M">Upgrade</Heading>
|
||||
<Body size="M">
|
||||
{#if license.plan.type === "free"}
|
||||
Upgrade your budibase installation to unlock additional features. To
|
||||
subscribe to a plan visit your <Link size="L" href={upgradeUrl}
|
||||
>Account</Link
|
||||
>.
|
||||
{:else}
|
||||
To manage your plan visit your <Link size="L" href={upgradeUrl}
|
||||
>Account</Link
|
||||
>.
|
||||
{/if}
|
||||
</Body>
|
||||
</Layout>
|
||||
<Divider size="S" />
|
||||
<Layout gap="XS" noPadding>
|
||||
<Heading size="S">Activate</Heading>
|
||||
<Body size="S">Enter your license key below to activate your plan</Body>
|
||||
</Layout>
|
||||
<Layout noPadding>
|
||||
<div class="fields">
|
||||
<div class="field">
|
||||
<Label size="L">License Key</Label>
|
||||
<Input
|
||||
thin
|
||||
bind:value={licenseKey}
|
||||
type={licenseKeyType}
|
||||
disabled={licenseKeyDisabled}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<Button cta on:click={activate} disabled={activateDisabled}
|
||||
>Activate</Button
|
||||
>
|
||||
</div>
|
||||
</Layout>
|
||||
<Divider size="S" />
|
||||
<Layout gap="L" noPadding>
|
||||
<Layout gap="S" noPadding>
|
||||
<Heading size="S">Plan</Heading>
|
||||
<Layout noPadding gap="XXS">
|
||||
<Body size="S">You are currently on the {license.plan.type} plan</Body
|
||||
>
|
||||
<Body size="XS">
|
||||
{processStringSync(
|
||||
"Updated {{ duration time 'millisecond' }} ago",
|
||||
{
|
||||
time:
|
||||
new Date().getTime() -
|
||||
new Date(license.refreshedAt).getTime(),
|
||||
}
|
||||
)}
|
||||
</Body>
|
||||
</Layout>
|
||||
</Layout>
|
||||
<div>
|
||||
<Button secondary on:click={refresh}>Refresh</Button>
|
||||
</div>
|
||||
</Layout>
|
||||
</Layout>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.fields {
|
||||
display: grid;
|
||||
grid-gap: var(--spacing-m);
|
||||
}
|
||||
.field {
|
||||
display: grid;
|
||||
grid-template-columns: 100px 1fr;
|
||||
grid-gap: var(--spacing-l);
|
||||
align-items: center;
|
||||
}
|
||||
</style>
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/cli",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
||||
"main": "src/index.js",
|
||||
"bin": {
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Publish Dev",
|
||||
"program": "${workspaceFolder}/scripts/publishDev.js"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/client",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"license": "MPL-2.0",
|
||||
"module": "dist/budibase-client.js",
|
||||
"main": "dist/budibase-client.js",
|
||||
|
@ -19,9 +19,9 @@
|
|||
"dev:builder": "rollup -cw"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "^1.0.105-alpha.19",
|
||||
"@budibase/frontend-core": "^1.0.105-alpha.19",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.19",
|
||||
"@budibase/bbui": "^1.0.105-alpha.29",
|
||||
"@budibase/frontend-core": "^1.0.105-alpha.29",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.29",
|
||||
"@spectrum-css/button": "^3.0.3",
|
||||
"@spectrum-css/card": "^3.0.3",
|
||||
"@spectrum-css/divider": "^1.0.3",
|
||||
|
|
|
@@ -81,7 +81,7 @@ const duplicateRowHandler = async (action, context) => {

const deleteRowHandler = async action => {
  const { tableId, revId, rowId } = action.parameters
  if (tableId && revId && rowId) {
  if (tableId && rowId) {
    try {
      await API.deleteRow({ tableId, rowId, revId })
      notificationStore.actions.success("Row deleted")
@ -775,9 +775,9 @@ minimatch@^3.0.2, minimatch@^3.0.4:
|
|||
brace-expansion "^1.1.7"
|
||||
|
||||
minimist@^1.2.0:
|
||||
version "1.2.6"
|
||||
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
|
||||
integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
|
||||
version "1.2.5"
|
||||
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
|
||||
integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
|
||||
|
||||
nanoid@^2.1.0:
|
||||
version "2.1.11"
|
||||
|
@ -790,9 +790,9 @@ nanoid@^3.1.30, nanoid@^3.1.32:
|
|||
integrity sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==
|
||||
|
||||
nanoid@^3.3.1:
|
||||
version "3.3.1"
|
||||
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35"
|
||||
integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==
|
||||
version "3.3.2"
|
||||
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.2.tgz#c89622fafb4381cd221421c69ec58547a1eec557"
|
||||
integrity sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==
|
||||
|
||||
node-releases@^2.0.1:
|
||||
version "2.0.1"
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "@budibase/frontend-core",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"version": "1.0.105-alpha.29",
|
||||
"description": "Budibase frontend core libraries used in builder and client",
|
||||
"author": "Budibase",
|
||||
"license": "MPL-2.0",
|
||||
"svelte": "src/index.js",
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "^1.0.105-alpha.19",
|
||||
"@budibase/bbui": "^1.0.105-alpha.29",
|
||||
"lodash": "^4.17.21",
|
||||
"svelte": "^3.46.2"
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ import { buildTemplateEndpoints } from "./templates"
|
|||
import { buildUserEndpoints } from "./user"
|
||||
import { buildSelfEndpoints } from "./self"
|
||||
import { buildViewEndpoints } from "./views"
|
||||
import { buildLicensingEndpoints } from "./licensing"
|
||||
|
||||
const defaultAPIClientConfig = {
|
||||
/**
|
||||
|
@ -233,5 +234,6 @@ export const createAPIClient = config => {
|
|||
...buildUserEndpoints(API),
|
||||
...buildViewEndpoints(API),
|
||||
...buildSelfEndpoints(API),
|
||||
...buildLicensingEndpoints(API),
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,30 @@
export const buildLicensingEndpoints = API => ({
  /**
   * Activates a self hosted license key
   */
  activateLicenseKey: async data => {
    return API.post({
      url: `/api/global/license/activate`,
      body: data,
    })
  },

  /**
   * Get the license info - metadata about the license including the
   * obfuscated license key.
   */
  getLicenseInfo: async () => {
    return API.get({
      url: "/api/global/license/info",
    })
  },

  /**
   * Refreshes the license cache
   */
  refreshLicense: async () => {
    return API.post({
      url: "/api/global/license/refresh",
    })
  },
})
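These endpoints are spread into the shared API client (see the createAPIClient change above), so the builder can call them as sketched below; this mirrors what the new upgrade settings page does, with the license key left as a placeholder.

import { API } from "api"

const activateAndInspect = async licenseKey => {
  // activate a self hosted license key, then read back the stored (obfuscated) info
  await API.activateLicenseKey({ licenseKey })
  const info = await API.getLicenseInfo()
  console.log(Boolean(info?.licenseKey)) // true once a key has been activated

  // force the cached license to be refreshed
  await API.refreshLicense()
}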
@@ -35,7 +35,7 @@ export const buildRowEndpoints = API => ({
   * @param revId the rev of the row to delete
   */
  deleteRow: async ({ tableId, rowId, revId }) => {
    if (!tableId || !rowId || !revId) {
    if (!tableId || !rowId) {
      return
    }
    return await API.delete({
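Paired with the deleteRowHandler change earlier in the diff, a row delete no longer has to supply the CouchDB rev. A sketch of the relaxed call; the table and row IDs are made up for illustration.

import { API } from "api"

// revId is now optional -- previously this returned early without issuing the request
await API.deleteRow({
  tableId: "ta_users",
  rowId: "ro_ta_users_0123456789",
})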
@ -1,5 +1,5 @@
|
|||
{
|
||||
"watch": ["src", "../backend-core"],
|
||||
"watch": ["src", "../backend-core", "../../../budibase-pro/packages/pro"],
|
||||
"ext": "js,ts,json",
|
||||
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"],
|
||||
"exec": "ts-node src/index.ts"
|
||||
|
|
|
@ -1,84 +1,20 @@
|
|||
{
|
||||
"name": "@budibase/server",
|
||||
"email": "hi@budibase.com",
|
||||
"version": "1.0.105-alpha.19",
|
||||
"description": "Budibase Web Server",
|
||||
"main": "src/index.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Budibase/budibase.git"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rimraf dist/ && tsc && mv dist/src/* dist/ && rimraf dist/src/ && yarn postbuild",
|
||||
"postbuild": "copyfiles -u 1 src/**/*.svelte dist/ && copyfiles -u 1 src/**/*.hbs dist/ && copyfiles -u 1 src/**/*.json dist/",
|
||||
"test": "jest --coverage --maxWorkers=2",
|
||||
"test:watch": "jest --watch",
|
||||
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
|
||||
"build:docker": "yarn run predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
|
||||
"build:docs": "node ./scripts/docs/generate.js open",
|
||||
"run:docker": "node dist/index.js",
|
||||
"dev:stack:up": "node scripts/dev/manage.js up",
|
||||
"dev:stack:down": "node scripts/dev/manage.js down",
|
||||
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
|
||||
"dev:builder": "yarn run dev:stack:up && nodemon",
|
||||
"specs": "node specs/generate.js && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
|
||||
"initialise": "node scripts/initialise.js",
|
||||
"env:multi:enable": "node scripts/multiTenancy.js enable",
|
||||
"env:multi:disable": "node scripts/multiTenancy.js disable",
|
||||
"env:selfhost:enable": "node scripts/selfhost.js enable",
|
||||
"env:selfhost:disable": "node scripts/selfhost.js disable",
|
||||
"env:localdomain:enable": "node scripts/localdomain.js enable",
|
||||
"env:localdomain:disable": "node scripts/localdomain.js disable",
|
||||
"env:account:enable": "node scripts/account.js enable",
|
||||
"env:account:disable": "node scripts/account.js disable"
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
"testEnvironment": "node",
|
||||
"setupFiles": [
|
||||
"./scripts/jestSetup.js"
|
||||
],
|
||||
"collectCoverageFrom": [
|
||||
"src/**/*.js",
|
||||
"!**/node_modules/**",
|
||||
"!src/db/views/*.js",
|
||||
"!src/api/controllers/deploy/**/*.js",
|
||||
"!src/*.js",
|
||||
"!src/api/controllers/static/**/*",
|
||||
"!src/db/dynamoClient.js",
|
||||
"!src/utilities/usageQuota.js",
|
||||
"!src/api/routes/tests/**/*",
|
||||
"!src/db/tests/**/*",
|
||||
"!src/tests/**/*",
|
||||
"!src/automations/tests/**/*",
|
||||
"!src/utilities/fileProcessor.js",
|
||||
"!src/utilities/fileSystem/**/*",
|
||||
"!src/utilities/redis.js"
|
||||
],
|
||||
"coverageReporters": [
|
||||
"lcov",
|
||||
"json",
|
||||
"clover"
|
||||
]
|
||||
},
|
||||
"keywords": [
|
||||
"budibase"
|
||||
],
|
||||
"author": "Budibase",
|
||||
"license": "GPL-3.0",
|
||||
"dependencies": {
|
||||
"@apidevtools/swagger-parser": "^10.0.3",
|
||||
"@budibase/backend-core": "^1.0.105-alpha.19",
|
||||
"@budibase/client": "^1.0.105-alpha.19",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.19",
|
||||
"@budibase/backend-core": "^1.0.105-alpha.29",
|
||||
"@budibase/client": "^1.0.105-alpha.29",
|
||||
"@budibase/pro": "^1.0.0",
|
||||
"@budibase/string-templates": "^1.0.105-alpha.29",
|
||||
"@bull-board/api": "^3.7.0",
|
||||
"@bull-board/koa": "^3.7.0",
|
||||
"@elastic/elasticsearch": "7.10.0",
|
||||
"@google-cloud/firestore": "^5.0.2",
|
||||
"@koa/router": "8.0.0",
|
||||
"@sendgrid/mail": "7.1.1",
|
||||
"@sentry/node": "^6.0.0",
|
||||
"@sentry/node": "6.17.7",
|
||||
"@types/global-agent": "^2.1.1",
|
||||
"@types/koa__router": "^8.0.11",
|
||||
"airtable": "0.10.1",
|
||||
"arangojs": "7.2.0",
|
||||
"aws-sdk": "^2.767.0",
|
||||
|
@ -137,6 +73,7 @@
|
|||
"yargs": "13.2.4",
|
||||
"zlib": "1.0.5"
|
||||
},
|
||||
"description": "Budibase Web Server",
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.14.3",
|
||||
"@babel/preset-env": "^7.14.4",
|
||||
|
@ -145,13 +82,14 @@
|
|||
"@types/apidoc": "^0.50.0",
|
||||
"@types/bull": "^3.15.1",
|
||||
"@types/google-spreadsheet": "^3.1.5",
|
||||
"@types/jest": "^26.0.23",
|
||||
"@types/jest": "^27.4.1",
|
||||
"@types/koa": "^2.13.3",
|
||||
"@types/koa-router": "^7.4.2",
|
||||
"@types/lodash": "4.14.180",
|
||||
"@types/node": "^15.12.4",
|
||||
"@types/oracledb": "^5.2.1",
|
||||
"@types/redis": "^4.0.11",
|
||||
"@typescript-eslint/parser": "5.12.0",
|
||||
"apidoc": "^0.50.2",
|
||||
"babel-jest": "^27.0.2",
|
||||
"copyfiles": "^2.4.1",
|
||||
|
@ -170,11 +108,76 @@
|
|||
"swagger-jsdoc": "^6.1.0",
|
||||
"ts-jest": "^27.0.3",
|
||||
"ts-node": "^10.0.0",
|
||||
"typescript": "^4.3.5",
|
||||
"typescript": "^4.5.5",
|
||||
"update-dotenv": "^1.1.1"
|
||||
},
|
||||
"email": "hi@budibase.com",
|
||||
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc",
|
||||
"jest": {
|
||||
"collectCoverageFrom": [
|
||||
"src/**/*.js",
|
||||
"!**/node_modules/**",
|
||||
"!src/db/views/*.js",
|
||||
"!src/api/controllers/deploy/**/*.js",
|
||||
"!src/*.js",
|
||||
"!src/api/controllers/static/**/*",
|
||||
"!src/db/dynamoClient.js",
|
||||
"!src/utilities/usageQuota.js",
|
||||
"!src/api/routes/tests/**/*",
|
||||
"!src/db/tests/**/*",
|
||||
"!src/tests/**/*",
|
||||
"!src/automations/tests/**/*",
|
||||
"!src/utilities/fileProcessor.js",
|
||||
"!src/utilities/fileSystem/**/*",
|
||||
"!src/utilities/redis.js"
|
||||
],
|
||||
"coverageReporters": [
|
||||
"lcov",
|
||||
"json",
|
||||
"clover"
|
||||
],
|
||||
"preset": "ts-jest",
|
||||
"setupFiles": [
|
||||
"./scripts/jestSetup.js"
|
||||
],
|
||||
"testEnvironment": "node"
|
||||
},
|
||||
"keywords": [
|
||||
"budibase"
|
||||
],
|
||||
"license": "GPL-3.0",
|
||||
"main": "src/index.ts",
|
||||
"name": "@budibase/server",
|
||||
"optionalDependencies": {
|
||||
"oracledb": "^5.3.0"
|
||||
},
|
||||
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
|
||||
}
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Budibase/budibase.git"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rimraf dist/ && tsc -p tsconfig.build.json && mv dist/src/* dist/ && rimraf dist/src/ && yarn postbuild",
|
||||
"build:docker": "yarn run predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
|
||||
"build:docs": "node ./scripts/docs/generate.js open",
|
||||
"dev:builder": "yarn run dev:stack:up && nodemon",
|
||||
"dev:stack:down": "node scripts/dev/manage.js down",
|
||||
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
|
||||
"dev:stack:up": "node scripts/dev/manage.js up",
|
||||
"env:account:disable": "node scripts/account.js disable",
|
||||
"env:account:enable": "node scripts/account.js enable",
|
||||
"env:localdomain:disable": "node scripts/localdomain.js disable",
|
||||
"env:localdomain:enable": "node scripts/localdomain.js enable",
|
||||
"env:multi:disable": "node scripts/multiTenancy.js disable",
|
||||
"env:multi:enable": "node scripts/multiTenancy.js enable",
|
||||
"env:selfhost:disable": "node scripts/selfhost.js disable",
|
||||
"env:selfhost:enable": "node scripts/selfhost.js enable",
|
||||
"initialise": "node scripts/initialise.js",
|
||||
"postbuild": "copyfiles -u 1 src/**/*.svelte dist/ && copyfiles -u 1 src/**/*.hbs dist/ && copyfiles -u 1 src/**/*.json dist/",
|
||||
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
|
||||
"run:docker": "node dist/index.js",
|
||||
"specs": "node specs/generate.js && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
|
||||
"test": "jest --coverage --maxWorkers=2",
|
||||
"test:watch": "jest --watch"
|
||||
},
|
||||
"version": "1.0.105-alpha.29"
|
||||
}
|
|
@ -42,6 +42,8 @@ async function init() {
    REDIS_URL: "localhost:6379",
    WORKER_URL: "http://localhost:4002",
    INTERNAL_API_KEY: "budibase",
    ACCOUNT_PORTAL_URL: "http://localhost:10001",
    ACCOUNT_PORTAL_API_KEY: "budibase",
    JWT_SECRET: "testsecret",
    REDIS_PASSWORD: "budibase",
    MINIO_ACCESS_KEY: "budibase",
@ -1,29 +1,29 @@
|
|||
const env = require("../../environment")
|
||||
const packageJson = require("../../../package.json")
|
||||
const {
|
||||
import env from "../../environment"
|
||||
import packageJson from "../../../package.json"
|
||||
import {
|
||||
createLinkView,
|
||||
createRoutingView,
|
||||
createAllSearchIndex,
|
||||
} = require("../../db/views/staticViews")
|
||||
const {
|
||||
} from "../../db/views/staticViews"
|
||||
import {
|
||||
getTemplateStream,
|
||||
createApp,
|
||||
deleteApp,
|
||||
} = require("../../utilities/fileSystem")
|
||||
const {
|
||||
} from "../../utilities/fileSystem"
|
||||
import {
|
||||
generateAppID,
|
||||
getLayoutParams,
|
||||
getScreenParams,
|
||||
generateDevAppID,
|
||||
DocumentTypes,
|
||||
AppStatus,
|
||||
} = require("../../db/utils")
|
||||
} from "../../db/utils"
|
||||
const {
|
||||
BUILTIN_ROLE_IDS,
|
||||
AccessController,
|
||||
} = require("@budibase/backend-core/roles")
|
||||
const { BASE_LAYOUTS } = require("../../constants/layouts")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
import { BASE_LAYOUTS } from "../../constants/layouts"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
const { processObject } = require("@budibase/string-templates")
|
||||
const {
|
||||
getAllApps,
|
||||
|
@ -31,24 +31,27 @@ const {
|
|||
getProdAppID,
|
||||
Replication,
|
||||
} = require("@budibase/backend-core/db")
|
||||
const { USERS_TABLE_SCHEMA } = require("../../constants")
|
||||
const { removeAppFromUserRoles } = require("../../utilities/workerRequests")
|
||||
const { clientLibraryPath, stringToReadStream } = require("../../utilities")
|
||||
const { getAllLocks } = require("../../utilities/redis")
|
||||
const {
|
||||
import { USERS_TABLE_SCHEMA } from "../../constants"
|
||||
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
|
||||
import { clientLibraryPath, stringToReadStream } from "../../utilities"
|
||||
import { getAllLocks } from "../../utilities/redis"
|
||||
import {
|
||||
updateClientLibrary,
|
||||
backupClientLibrary,
|
||||
revertClientLibrary,
|
||||
} = require("../../utilities/fileSystem/clientLibrary")
|
||||
} from "../../utilities/fileSystem/clientLibrary"
|
||||
const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy")
|
||||
const { syncGlobalUsers } = require("./user")
|
||||
import { syncGlobalUsers } from "./user"
|
||||
const { app: appCache } = require("@budibase/backend-core/cache")
|
||||
const { cleanupAutomations } = require("../../automations/utils")
|
||||
import { cleanupAutomations } from "../../automations/utils"
|
||||
const {
|
||||
getAppDB,
|
||||
getProdAppDB,
|
||||
updateAppId,
|
||||
} = require("@budibase/backend-core/context")
|
||||
import { getUniqueRows } from "../../utilities/usageQuota/rows"
|
||||
import { quotas } from "@budibase/pro"
|
||||
import { errors } from "@budibase/backend-core"
|
||||
|
||||
const URL_REGEX_SLASH = /\/|\\/g
|
||||
|
||||
|
@ -61,7 +64,7 @@ async function getLayouts() {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
).rows.map(row => row.doc)
|
||||
).rows.map((row: any) => row.doc)
|
||||
}
|
||||
|
||||
async function getScreens() {
|
||||
|
@ -72,16 +75,16 @@ async function getScreens() {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
).rows.map(row => row.doc)
|
||||
).rows.map((row: any) => row.doc)
|
||||
}
|
||||
|
||||
function getUserRoleId(ctx) {
|
||||
function getUserRoleId(ctx: any) {
|
||||
return !ctx.user.role || !ctx.user.role._id
|
||||
? BUILTIN_ROLE_IDS.PUBLIC
|
||||
: ctx.user.role._id
|
||||
}
|
||||
|
||||
exports.getAppUrl = ctx => {
|
||||
export const getAppUrl = (ctx: any) => {
|
||||
// construct the url
|
||||
let url
|
||||
if (ctx.request.body.url) {
|
||||
|
@ -97,29 +100,34 @@ exports.getAppUrl = ctx => {
|
|||
return url
|
||||
}
|
||||
|
||||
const checkAppUrl = (ctx, apps, url, currentAppId) => {
|
||||
const checkAppUrl = (ctx: any, apps: any, url: any, currentAppId?: string) => {
|
||||
if (currentAppId) {
|
||||
apps = apps.filter(app => app.appId !== currentAppId)
|
||||
apps = apps.filter((app: any) => app.appId !== currentAppId)
|
||||
}
|
||||
if (apps.some(app => app.url === url)) {
|
||||
if (apps.some((app: any) => app.url === url)) {
|
||||
ctx.throw(400, "App URL is already in use.")
|
||||
}
|
||||
}
|
||||
|
||||
const checkAppName = (ctx, apps, name, currentAppId) => {
|
||||
const checkAppName = (
|
||||
ctx: any,
|
||||
apps: any,
|
||||
name: any,
|
||||
currentAppId?: string
|
||||
) => {
|
||||
// TODO: Replace with Joi
|
||||
if (!name) {
|
||||
ctx.throw(400, "Name is required")
|
||||
}
|
||||
if (currentAppId) {
|
||||
apps = apps.filter(app => app.appId !== currentAppId)
|
||||
apps = apps.filter((app: any) => app.appId !== currentAppId)
|
||||
}
|
||||
if (apps.some(app => app.name === name)) {
|
||||
if (apps.some((app: any) => app.name === name)) {
|
||||
ctx.throw(400, "App name is already in use.")
|
||||
}
|
||||
}
|
||||
|
||||
async function createInstance(template) {
|
||||
async function createInstance(template: any) {
|
||||
const tenantId = isMultiTenant() ? getTenantId() : null
|
||||
const baseAppId = generateAppID(tenantId)
|
||||
const appId = generateDevAppID(baseAppId)
|
||||
|
@ -160,7 +168,7 @@ async function createInstance(template) {
|
|||
return { _id: appId }
|
||||
}
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
export const fetch = async (ctx: any) => {
|
||||
const dev = ctx.query && ctx.query.status === AppStatus.DEV
|
||||
const all = ctx.query && ctx.query.status === AppStatus.ALL
|
||||
const apps = await getAllApps({ dev, all })
|
||||
|
@ -172,7 +180,7 @@ exports.fetch = async ctx => {
|
|||
if (app.status !== "development") {
|
||||
continue
|
||||
}
|
||||
const lock = locks.find(lock => lock.appId === app.appId)
|
||||
const lock = locks.find((lock: any) => lock.appId === app.appId)
|
||||
if (lock) {
|
||||
app.lockedBy = lock.user
|
||||
} else {
|
||||
|
@ -185,7 +193,7 @@ exports.fetch = async ctx => {
|
|||
ctx.body = apps
|
||||
}
|
||||
|
||||
exports.fetchAppDefinition = async ctx => {
|
||||
export const fetchAppDefinition = async (ctx: any) => {
|
||||
const layouts = await getLayouts()
|
||||
const userRoleId = getUserRoleId(ctx)
|
||||
const accessController = new AccessController()
|
||||
|
@ -200,7 +208,7 @@ exports.fetchAppDefinition = async ctx => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.fetchAppPackage = async ctx => {
|
||||
export const fetchAppPackage = async (ctx: any) => {
|
||||
const db = getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
const layouts = await getLayouts()
|
||||
|
@ -221,7 +229,7 @@ exports.fetchAppPackage = async ctx => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.create = async ctx => {
|
||||
const performAppCreate = async (ctx: any) => {
|
||||
const apps = await getAllApps({ dev: true })
|
||||
const name = ctx.request.body.name
|
||||
checkAppName(ctx, apps, name)
|
||||
|
@ -229,7 +237,7 @@ exports.create = async ctx => {
|
|||
checkAppUrl(ctx, apps, url)
|
||||
|
||||
const { useTemplate, templateKey, templateString } = ctx.request.body
|
||||
const instanceConfig = {
|
||||
const instanceConfig: any = {
|
||||
useTemplate,
|
||||
key: templateKey,
|
||||
templateString,
|
||||
|
@ -280,13 +288,41 @@ exports.create = async ctx => {
|
|||
}
|
||||
|
||||
await appCache.invalidateAppMetadata(appId, newApplication)
|
||||
ctx.status = 200
|
||||
return newApplication
|
||||
}
|
||||
|
||||
const appPostCreate = async (ctx: any, appId: string) => {
|
||||
// app import & template creation
|
||||
if (ctx.request.body.useTemplate === "true") {
|
||||
const rows = await getUniqueRows([appId])
|
||||
const rowCount = rows ? rows.length : 0
|
||||
if (rowCount) {
|
||||
try {
|
||||
await quotas.addRows(rowCount)
|
||||
} catch (err: any) {
|
||||
if (err.code && err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
|
||||
// this import resulted in row usage exceeding the quota
|
||||
// delete the app
|
||||
// skip pre and post steps as no rows have been added to quotas yet
|
||||
ctx.params.appId = appId
|
||||
await destroyApp(ctx)
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const create = async (ctx: any) => {
|
||||
const newApplication = await quotas.addApp(() => performAppCreate(ctx))
|
||||
await appPostCreate(ctx, newApplication.appId)
|
||||
ctx.body = newApplication
|
||||
ctx.status = 200
|
||||
}
|
||||
|
||||
// This endpoint currently operates as a PATCH rather than a PUT
|
||||
// Thus name and url fields are handled only if present
|
||||
exports.update = async ctx => {
|
||||
export const update = async (ctx: any) => {
|
||||
const apps = await getAllApps({ dev: true })
|
||||
// validation
|
||||
const name = ctx.request.body.name
|
||||
|
@ -304,7 +340,7 @@ exports.update = async ctx => {
|
|||
ctx.body = data
|
||||
}
|
||||
|
||||
exports.updateClient = async ctx => {
|
||||
export const updateClient = async (ctx: any) => {
|
||||
// Get current app version
|
||||
const db = getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
|
@ -326,7 +362,7 @@ exports.updateClient = async ctx => {
|
|||
ctx.body = data
|
||||
}
|
||||
|
||||
exports.revertClient = async ctx => {
|
||||
export const revertClient = async (ctx: any) => {
|
||||
// Check app can be reverted
|
||||
const db = getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
|
@ -349,10 +385,15 @@ exports.revertClient = async ctx => {
|
|||
ctx.body = data
|
||||
}
|
||||
|
||||
exports.delete = async ctx => {
|
||||
const destroyApp = async (ctx: any) => {
|
||||
const db = getAppDB()
|
||||
|
||||
const result = await db.destroy()
|
||||
if (ctx.query?.unpublish) {
|
||||
await quotas.removePublishedApp()
|
||||
} else {
|
||||
await quotas.removeApp()
|
||||
}
|
||||
/* istanbul ignore next */
|
||||
if (!env.isTest() && !ctx.query.unpublish) {
|
||||
await deleteApp(ctx.params.appId)
|
||||
|
@ -363,12 +404,30 @@ exports.delete = async ctx => {
|
|||
// make sure the app/role doesn't stick around after the app has been deleted
|
||||
await removeAppFromUserRoles(ctx, ctx.params.appId)
|
||||
await appCache.invalidateAppMetadata(ctx.params.appId)
|
||||
return result
|
||||
}
|
||||
|
||||
const preDestroyApp = async (ctx: any) => {
|
||||
const rows = await getUniqueRows([ctx.params.appId])
|
||||
ctx.rowCount = rows.length
|
||||
}
|
||||
|
||||
const postDestroyApp = async (ctx: any) => {
|
||||
const rowCount = ctx.rowCount
|
||||
if (rowCount) {
|
||||
await quotas.removeRows(rowCount)
|
||||
}
|
||||
}
|
||||
|
||||
export const destroy = async (ctx: any) => {
|
||||
await preDestroyApp(ctx)
|
||||
const result = await destroyApp(ctx)
|
||||
await postDestroyApp(ctx)
|
||||
ctx.status = 200
|
||||
ctx.body = result
|
||||
}
|
||||
|
||||
exports.sync = async (ctx, next) => {
|
||||
export const sync = async (ctx: any, next: any) => {
|
||||
const appId = ctx.params.appId
|
||||
if (!isDevAppID(appId)) {
|
||||
ctx.throw(400, "This action cannot be performed for production apps")
|
||||
|
@ -398,7 +457,7 @@ exports.sync = async (ctx, next) => {
|
|||
let error
|
||||
try {
|
||||
await replication.replicate({
|
||||
filter: function (doc) {
|
||||
filter: function (doc: any) {
|
||||
return doc._id !== DocumentTypes.APP_METADATA
|
||||
},
|
||||
})
|
||||
|
@ -418,7 +477,7 @@ exports.sync = async (ctx, next) => {
|
|||
}
|
||||
}
|
||||
|
||||
const updateAppPackage = async (appPackage, appId) => {
|
||||
const updateAppPackage = async (appPackage: any, appId: any) => {
|
||||
const db = getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
|
||||
|
@ -437,7 +496,7 @@ const updateAppPackage = async (appPackage, appId) => {
|
|||
return response
|
||||
}
|
||||
|
||||
const createEmptyAppPackage = async (ctx, app) => {
|
||||
const createEmptyAppPackage = async (ctx: any, app: any) => {
|
||||
const db = getAppDB()
|
||||
|
||||
let screensAndLayouts = []
|
|
@ -1,20 +1,18 @@
|
|||
const Deployment = require("./Deployment")
|
||||
const {
|
||||
import Deployment from "./Deployment"
|
||||
import {
|
||||
Replication,
|
||||
getProdAppID,
|
||||
getDevelopmentAppID,
|
||||
} = require("@budibase/backend-core/db")
|
||||
const { DocumentTypes, getAutomationParams } = require("../../../db/utils")
|
||||
const {
|
||||
disableAllCrons,
|
||||
enableCronTrigger,
|
||||
} = require("../../../automations/utils")
|
||||
const { app: appCache } = require("@budibase/backend-core/cache")
|
||||
const {
|
||||
} from "@budibase/backend-core/db"
|
||||
import { DocumentTypes, getAutomationParams } from "../../../db/utils"
|
||||
import { disableAllCrons, enableCronTrigger } from "../../../automations/utils"
|
||||
import { app as appCache } from "@budibase/backend-core/cache"
|
||||
import {
|
||||
getAppId,
|
||||
getAppDB,
|
||||
getProdAppDB,
|
||||
} = require("@budibase/backend-core/context")
|
||||
} from "@budibase/backend-core/context"
|
||||
import { quotas } from "@budibase/pro"
|
||||
|
||||
// the max time we can wait for an invalidation to complete before considering it failed
|
||||
const MAX_PENDING_TIME_MS = 30 * 60000
|
||||
|
@ -25,9 +23,10 @@ const DeploymentStatus = {
|
|||
}
|
||||
|
||||
// checks that deployments are in a good state, any pending will be updated
|
||||
async function checkAllDeployments(deployments) {
|
||||
async function checkAllDeployments(deployments: any) {
|
||||
let updated = false
|
||||
for (let deployment of Object.values(deployments.history)) {
|
||||
let deployment: any
|
||||
for (deployment of Object.values(deployments.history)) {
|
||||
// check that no deployments have crashed etc and are now stuck
|
||||
if (
|
||||
deployment.status === DeploymentStatus.PENDING &&
|
||||
|
@ -41,7 +40,7 @@ async function checkAllDeployments(deployments) {
|
|||
return { updated, deployments }
|
||||
}
|
||||
|
||||
async function storeDeploymentHistory(deployment) {
|
||||
async function storeDeploymentHistory(deployment: any) {
|
||||
const deploymentJSON = deployment.getJSON()
|
||||
const db = getAppDB()
|
||||
|
||||
|
@ -70,7 +69,7 @@ async function storeDeploymentHistory(deployment) {
|
|||
return deployment
|
||||
}
|
||||
|
||||
async function initDeployedApp(prodAppId) {
|
||||
async function initDeployedApp(prodAppId: any) {
|
||||
const db = getProdAppDB()
|
||||
console.log("Reading automation docs")
|
||||
const automations = (
|
||||
|
@ -79,7 +78,7 @@ async function initDeployedApp(prodAppId) {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
).rows.map(row => row.doc)
|
||||
).rows.map((row: any) => row.doc)
|
||||
console.log("You have " + automations.length + " automations")
|
||||
const promises = []
|
||||
console.log("Disabling prod crons..")
|
||||
|
@ -93,16 +92,17 @@ async function initDeployedApp(prodAppId) {
|
|||
console.log("Enabled cron triggers for deployed app..")
|
||||
}
|
||||
|
||||
async function deployApp(deployment) {
|
||||
async function deployApp(deployment: any) {
|
||||
try {
|
||||
const appId = getAppId()
|
||||
const devAppId = getDevelopmentAppID(appId)
|
||||
const productionAppId = getProdAppID(appId)
|
||||
|
||||
const replication = new Replication({
|
||||
const config: any = {
|
||||
source: devAppId,
|
||||
target: productionAppId,
|
||||
})
|
||||
}
|
||||
const replication = new Replication(config)
|
||||
|
||||
console.log("Replication object created")
|
||||
|
||||
|
@ -119,7 +119,7 @@ async function deployApp(deployment) {
|
|||
console.log("Deployed app initialised, setting deployment to successful")
|
||||
deployment.setStatus(DeploymentStatus.SUCCESS)
|
||||
await storeDeploymentHistory(deployment)
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
deployment.setStatus(DeploymentStatus.FAILURE, err.message)
|
||||
await storeDeploymentHistory(deployment)
|
||||
throw {
|
||||
|
@ -129,14 +129,11 @@ async function deployApp(deployment) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.fetchDeployments = async function (ctx) {
|
||||
export async function fetchDeployments(ctx: any) {
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
const { updated, deployments } = await checkAllDeployments(
|
||||
deploymentDoc,
|
||||
ctx.user
|
||||
)
|
||||
const { updated, deployments } = await checkAllDeployments(deploymentDoc)
|
||||
if (updated) {
|
||||
await db.put(deployments)
|
||||
}
|
||||
|
@ -146,7 +143,7 @@ exports.fetchDeployments = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.deploymentProgress = async function (ctx) {
|
||||
export async function deploymentProgress(ctx: any) {
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
|
@ -159,7 +156,20 @@ exports.deploymentProgress = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.deployApp = async function (ctx) {
|
||||
const isFirstDeploy = async () => {
|
||||
try {
|
||||
const db = getProdAppDB()
|
||||
await db.get(DocumentTypes.APP_METADATA)
|
||||
} catch (e: any) {
|
||||
if (e.status === 404) {
|
||||
return true
|
||||
}
|
||||
throw e
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
const _deployApp = async function (ctx: any) {
|
||||
let deployment = new Deployment()
|
||||
console.log("Deployment object created")
|
||||
deployment.setStatus(DeploymentStatus.PENDING)
|
||||
|
@ -168,7 +178,14 @@ exports.deployApp = async function (ctx) {
|
|||
console.log("Stored deployment history")
|
||||
|
||||
console.log("Deploying app...")
|
||||
await deployApp(deployment)
|
||||
|
||||
if (await isFirstDeploy()) {
|
||||
await quotas.addPublishedApp(() => deployApp(deployment))
|
||||
} else {
|
||||
await deployApp(deployment)
|
||||
}
|
||||
|
||||
ctx.body = deployment
|
||||
}
|
||||
|
||||
export { _deployApp as deployApp }
|
|
@ -1,7 +1,7 @@
const { getAllApps } = require("@budibase/backend-core/db")
const { updateAppId } = require("@budibase/backend-core/context")
import { search as stringSearch } from "./utils"
import { default as controller } from "../application"
import * as controller from "../application"
import { Application } from "../../../definitions/common"

function fixAppID(app: Application, params: any) {
@ -59,7 +59,7 @@ export async function destroy(ctx: any, next: any) {
  // get the app before deleting it
  await setResponseApp(ctx)
  const body = ctx.body
  await controller.delete(ctx)
  await controller.destroy(ctx)
  // overwrite the body again
  ctx.body = body
  await next()
@ -1,5 +1,5 @@
import { search as stringSearch } from "./utils"
import { default as queryController } from "../query"
import * as queryController from "../query"

export async function search(ctx: any, next: any) {
  await queryController.fetch(ctx)
@ -1,4 +1,4 @@
import { default as rowController } from "../row"
import * as rowController from "../row"
import { addRev } from "./utils"
import { Row } from "../../../definitions/common"
import { convertBookmark } from "../../../utilities"
@ -1,22 +1,19 @@
|
|||
const {
|
||||
generateQueryID,
|
||||
getQueryParams,
|
||||
isProdAppID,
|
||||
} = require("../../../db/utils")
|
||||
const { BaseQueryVerbs } = require("../../../constants")
|
||||
const { Thread, ThreadType } = require("../../../threads")
|
||||
const { save: saveDatasource } = require("../datasource")
|
||||
const { RestImporter } = require("./import")
|
||||
const { invalidateDynamicVariables } = require("../../../threads/utils")
|
||||
const environment = require("../../../environment")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
import { generateQueryID, getQueryParams, isProdAppID } from "../../../db/utils"
|
||||
import { BaseQueryVerbs } from "../../../constants"
|
||||
import { Thread, ThreadType } from "../../../threads"
|
||||
import { save as saveDatasource } from "../datasource"
|
||||
import { RestImporter } from "./import"
|
||||
import { invalidateDynamicVariables } from "../../../threads/utils"
|
||||
import { QUERY_THREAD_TIMEOUT } from "../../../environment"
|
||||
import { getAppDB } from "@budibase/backend-core/context"
|
||||
import { quotas } from "@budibase/pro"
|
||||
|
||||
const Runner = new Thread(ThreadType.QUERY, {
|
||||
timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000,
|
||||
timeoutMs: QUERY_THREAD_TIMEOUT || 10000,
|
||||
})
|
||||
|
||||
// simple function to append "readable" to all read queries
|
||||
function enrichQueries(input) {
|
||||
function enrichQueries(input: any) {
|
||||
const wasArray = Array.isArray(input)
|
||||
const queries = wasArray ? input : [input]
|
||||
for (let query of queries) {
|
||||
|
@ -27,7 +24,7 @@ function enrichQueries(input) {
|
|||
return wasArray ? queries : queries[0]
|
||||
}
|
||||
|
||||
exports.fetch = async function (ctx) {
|
||||
export async function fetch(ctx: any) {
|
||||
const db = getAppDB()
|
||||
|
||||
const body = await db.allDocs(
|
||||
|
@ -36,10 +33,10 @@ exports.fetch = async function (ctx) {
|
|||
})
|
||||
)
|
||||
|
||||
ctx.body = enrichQueries(body.rows.map(row => row.doc))
|
||||
ctx.body = enrichQueries(body.rows.map((row: any) => row.doc))
|
||||
}
|
||||
|
||||
exports.import = async ctx => {
|
||||
const _import = async (ctx: any) => {
|
||||
const body = ctx.request.body
|
||||
const data = body.data
|
||||
|
||||
|
@ -49,7 +46,7 @@ exports.import = async ctx => {
|
|||
let datasourceId
|
||||
if (!body.datasourceId) {
|
||||
// construct new datasource
|
||||
const info = await importer.getInfo()
|
||||
const info: any = await importer.getInfo()
|
||||
let datasource = {
|
||||
type: "datasource",
|
||||
source: "REST",
|
||||
|
@ -77,8 +74,9 @@ exports.import = async ctx => {
|
|||
}
|
||||
ctx.status = 200
|
||||
}
|
||||
export { _import as import }
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
export async function save(ctx: any) {
|
||||
const db = getAppDB()
|
||||
const query = ctx.request.body
|
||||
|
||||
|
@ -93,7 +91,7 @@ exports.save = async function (ctx) {
|
|||
ctx.message = `Query ${query.name} saved successfully.`
|
||||
}
|
||||
|
||||
exports.find = async function (ctx) {
|
||||
export async function find(ctx: any) {
|
||||
const db = getAppDB()
|
||||
const query = enrichQueries(await db.get(ctx.params.queryId))
|
||||
// remove properties that could be dangerous in real app
|
||||
|
@ -104,7 +102,7 @@ exports.find = async function (ctx) {
|
|||
ctx.body = query
|
||||
}
|
||||
|
||||
exports.preview = async function (ctx) {
|
||||
export async function preview(ctx: any) {
|
||||
const db = getAppDB()
|
||||
|
||||
const datasource = await db.get(ctx.request.body.datasourceId)
|
||||
|
@ -114,16 +112,18 @@ exports.preview = async function (ctx) {
|
|||
ctx.request.body
|
||||
|
||||
try {
|
||||
const { rows, keys, info, extra } = await Runner.run({
|
||||
appId: ctx.appId,
|
||||
datasource,
|
||||
queryVerb,
|
||||
fields,
|
||||
parameters,
|
||||
transformer,
|
||||
queryId,
|
||||
})
|
||||
const runFn = () =>
|
||||
Runner.run({
|
||||
appId: ctx.appId,
|
||||
datasource,
|
||||
queryVerb,
|
||||
fields,
|
||||
parameters,
|
||||
transformer,
|
||||
queryId,
|
||||
})
|
||||
|
||||
const { rows, keys, info, extra } = await quotas.addQuery(runFn)
|
||||
ctx.body = {
|
||||
rows,
|
||||
schemaFields: [...new Set(keys)],
|
||||
|
@ -135,7 +135,7 @@ exports.preview = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
async function execute(ctx, opts = { rowsOnly: false }) {
|
||||
async function execute(ctx: any, opts = { rowsOnly: false }) {
|
||||
const db = getAppDB()
|
||||
|
||||
const query = await db.get(ctx.params.queryId)
|
||||
|
@ -153,16 +153,19 @@ async function execute(ctx, opts = { rowsOnly: false }) {
|
|||
|
||||
// call the relevant CRUD method on the integration class
|
||||
try {
|
||||
const { rows, pagination, extra } = await Runner.run({
|
||||
appId: ctx.appId,
|
||||
datasource,
|
||||
queryVerb: query.queryVerb,
|
||||
fields: query.fields,
|
||||
pagination: ctx.request.body.pagination,
|
||||
parameters: enrichedParameters,
|
||||
transformer: query.transformer,
|
||||
queryId: ctx.params.queryId,
|
||||
})
|
||||
const runFn = () =>
|
||||
Runner.run({
|
||||
appId: ctx.appId,
|
||||
datasource,
|
||||
queryVerb: query.queryVerb,
|
||||
fields: query.fields,
|
||||
pagination: ctx.request.body.pagination,
|
||||
parameters: enrichedParameters,
|
||||
transformer: query.transformer,
|
||||
queryId: ctx.params.queryId,
|
||||
})
|
||||
|
||||
const { rows, pagination, extra } = await quotas.addQuery(runFn)
|
||||
if (opts && opts.rowsOnly) {
|
||||
ctx.body = rows
|
||||
} else {
|
||||
|
@ -173,15 +176,15 @@ async function execute(ctx, opts = { rowsOnly: false }) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.executeV1 = async function (ctx) {
|
||||
export async function executeV1(ctx: any) {
|
||||
return execute(ctx, { rowsOnly: true })
|
||||
}
|
||||
|
||||
exports.executeV2 = async function (ctx) {
|
||||
export async function executeV2(ctx: any) {
|
||||
return execute(ctx, { rowsOnly: false })
|
||||
}
|
||||
|
||||
const removeDynamicVariables = async queryId => {
|
||||
const removeDynamicVariables = async (queryId: any) => {
|
||||
const db = getAppDB()
|
||||
const query = await db.get(queryId)
|
||||
const datasource = await db.get(query.datasourceId)
|
||||
|
@ -190,19 +193,19 @@ const removeDynamicVariables = async queryId => {
|
|||
if (dynamicVariables) {
|
||||
// delete dynamic variables from the datasource
|
||||
datasource.config.dynamicVariables = dynamicVariables.filter(
|
||||
dv => dv.queryId !== queryId
|
||||
(dv: any) => dv.queryId !== queryId
|
||||
)
|
||||
await db.put(datasource)
|
||||
|
||||
// invalidate the deleted variables
|
||||
const variablesToDelete = dynamicVariables.filter(
|
||||
dv => dv.queryId === queryId
|
||||
(dv: any) => dv.queryId === queryId
|
||||
)
|
||||
await invalidateDynamicVariables(variablesToDelete)
|
||||
}
|
||||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
export async function destroy(ctx: any) {
|
||||
const db = getAppDB()
|
||||
await removeDynamicVariables(ctx.params.queryId)
|
||||
await db.remove(ctx.params.queryId, ctx.params.revId)
|
|
@ -52,7 +52,7 @@ interface RunConfig {

module External {
  function buildFilters(
    id: string | undefined,
    id: string | undefined | string[],
    filters: SearchFilters,
    table: Table
  ) {
@ -1,15 +1,16 @@
|
|||
const internal = require("./internal")
|
||||
const external = require("./external")
|
||||
const { isExternalTable } = require("../../../integrations/utils")
|
||||
import { quotas } from "@budibase/pro"
|
||||
import internal from "./internal"
|
||||
import external from "./external"
|
||||
import { isExternalTable } from "../../../integrations/utils"
|
||||
|
||||
function pickApi(tableId) {
|
||||
function pickApi(tableId: any) {
|
||||
if (isExternalTable(tableId)) {
|
||||
return external
|
||||
}
|
||||
return internal
|
||||
}
|
||||
|
||||
function getTableId(ctx) {
|
||||
function getTableId(ctx: any) {
|
||||
if (ctx.request.body && ctx.request.body.tableId) {
|
||||
return ctx.request.body.tableId
|
||||
}
|
||||
|
@ -21,13 +22,13 @@ function getTableId(ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.patch = async ctx => {
|
||||
export async function patch(ctx: any): Promise<any> {
|
||||
const appId = ctx.appId
|
||||
const tableId = getTableId(ctx)
|
||||
const body = ctx.request.body
|
||||
// if it doesn't have an _id then its save
|
||||
if (body && !body._id) {
|
||||
return exports.save(ctx)
|
||||
return save(ctx)
|
||||
}
|
||||
try {
|
||||
const { row, table } = await pickApi(tableId).patch(ctx)
|
||||
|
@ -41,13 +42,13 @@ exports.patch = async ctx => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const saveRow = async (ctx: any) => {
|
||||
const appId = ctx.appId
|
||||
const tableId = getTableId(ctx)
|
||||
const body = ctx.request.body
|
||||
// if it has an ID already then its a patch
|
||||
if (body && body._id) {
|
||||
return exports.patch(ctx)
|
||||
return patch(ctx)
|
||||
}
|
||||
try {
|
||||
const { row, table } = await pickApi(tableId).save(ctx)
|
||||
|
@ -60,7 +61,11 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.fetchView = async function (ctx) {
|
||||
export async function save(ctx: any) {
|
||||
await quotas.addRow(() => saveRow(ctx))
|
||||
}
|
||||
|
||||
export async function fetchView(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).fetchView(ctx)
|
||||
|
@ -69,7 +74,7 @@ exports.fetchView = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.fetch = async function (ctx) {
|
||||
export async function fetch(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).fetch(ctx)
|
||||
|
@ -78,7 +83,7 @@ exports.fetch = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.find = async function (ctx) {
|
||||
export async function find(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).find(ctx)
|
||||
|
@ -87,19 +92,21 @@ exports.find = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
export async function destroy(ctx: any) {
|
||||
const appId = ctx.appId
|
||||
const inputs = ctx.request.body
|
||||
const tableId = getTableId(ctx)
|
||||
let response, row
|
||||
if (inputs.rows) {
|
||||
let { rows } = await pickApi(tableId).bulkDestroy(ctx)
|
||||
await quotas.removeRows(rows.length)
|
||||
response = rows
|
||||
for (let row of rows) {
|
||||
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
|
||||
}
|
||||
} else {
|
||||
let resp = await pickApi(tableId).destroy(ctx)
|
||||
await quotas.removeRow()
|
||||
response = resp.response
|
||||
row = resp.row
|
||||
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
|
||||
|
@ -110,7 +117,7 @@ exports.destroy = async function (ctx) {
|
|||
ctx.body = response
|
||||
}
|
||||
|
||||
exports.search = async ctx => {
|
||||
export async function search(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.status = 200
|
||||
|
@ -120,7 +127,7 @@ exports.search = async ctx => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.validate = async function (ctx) {
|
||||
export async function validate(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).validate(ctx)
|
||||
|
@ -129,7 +136,7 @@ exports.validate = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.fetchEnrichedRow = async function (ctx) {
|
||||
export async function fetchEnrichedRow(ctx: any) {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
|
||||
|
@ -138,7 +145,7 @@ exports.fetchEnrichedRow = async function (ctx) {
|
|||
}
|
||||
}
|
||||
|
||||
exports.export = async function (ctx) {
|
||||
export const exportRows = async (ctx: any) => {
|
||||
const tableId = getTableId(ctx)
|
||||
try {
|
||||
ctx.body = await pickApi(tableId).exportRows(ctx)
|
|
@ -259,8 +259,9 @@ exports.find = async ctx => {

exports.destroy = async function (ctx) {
  const db = getAppDB()
  const { _id, _rev } = ctx.request.body
  const { _id } = ctx.request.body
  let row = await db.get(_id)
  let _rev = ctx.request.body._rev || row._rev

  if (row.tableId !== ctx.params.tableId) {
    throw "Supplied tableId doesn't match the row's tableId"
@ -65,7 +65,10 @@ exports.validate = async ({ tableId, row, table }) => {
  if (type === FieldTypes.ARRAY && row[fieldName]) {
    if (row[fieldName].length) {
      row[fieldName].map(val => {
        if (!constraints.inclusion.includes(val)) {
        if (
          !constraints.inclusion.includes(val) &&
          constraints.inclusion.length !== 0
        ) {
          errors[fieldName] = "Field not in list"
        }
      })
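Restated outside the controller, the check above only rejects array values once an inclusion list has actually been configured; the helper name below is illustrative, not part of this codebase.

// illustrative restatement: an empty inclusion list accepts any value
function violatesInclusion(values, inclusion = []) {
  return inclusion.length !== 0 && values.some(val => !inclusion.includes(val))
}
violatesInclusion(["a"], [])         // false - no options configured
violatesInclusion(["a"], ["a", "b"]) // false - value allowed
violatesInclusion(["c"], ["a", "b"]) // true  - would set "Field not in list"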
@ -1,19 +1,19 @@
|
|||
const linkRows = require("../../../db/linkedRows")
|
||||
const { getRowParams, generateTableID } = require("../../../db/utils")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
const {
|
||||
import { updateLinks, EventType } from "../../../db/linkedRows"
|
||||
import { getRowParams, generateTableID } from "../../../db/utils"
|
||||
import { FieldTypes } from "../../../constants"
|
||||
import {
|
||||
TableSaveFunctions,
|
||||
hasTypeChanged,
|
||||
getTable,
|
||||
handleDataImport,
|
||||
} = require("./utils")
|
||||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
} from "./utils"
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const env = require("../../../environment")
|
||||
const { cleanupAttachments } = require("../../../utilities/rowProcessor")
|
||||
const { runStaticFormulaChecks } = require("./bulkFormula")
|
||||
import { isTest } from "../../../environment"
|
||||
import { cleanupAttachments } from "../../../utilities/rowProcessor"
|
||||
import { runStaticFormulaChecks } from "./bulkFormula"
|
||||
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
export async function save(ctx: any) {
|
||||
const db = getAppDB()
|
||||
const { dataImport, ...rest } = ctx.request.body
|
||||
let tableToSave = {
|
||||
|
@ -80,10 +80,8 @@ exports.save = async function (ctx) {
|
|||
|
||||
// update linked rows
|
||||
try {
|
||||
const linkResp = await linkRows.updateLinks({
|
||||
eventType: oldTable
|
||||
? linkRows.EventType.TABLE_UPDATED
|
||||
: linkRows.EventType.TABLE_SAVE,
|
||||
const linkResp: any = await updateLinks({
|
||||
eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
|
||||
table: tableToSave,
|
||||
oldTable: oldTable,
|
||||
})
|
||||
|
@ -105,11 +103,11 @@ exports.save = async function (ctx) {
|
|||
|
||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(tableToSave, { oldTable })
|
||||
await runStaticFormulaChecks(tableToSave, { oldTable, deletion: null })
|
||||
return tableToSave
|
||||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
export async function destroy(ctx: any) {
|
||||
const db = getAppDB()
|
||||
const tableToDelete = await db.get(ctx.params.tableId)
|
||||
|
||||
|
@ -119,12 +117,14 @@ exports.destroy = async function (ctx) {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
|
||||
await usageQuota.update(usageQuota.Properties.ROW, -rows.rows.length)
|
||||
await db.bulkDocs(
|
||||
rows.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
|
||||
)
|
||||
await quotas.removeRows(rows.rows.length)
|
||||
|
||||
// update linked rows
|
||||
await linkRows.updateLinks({
|
||||
eventType: linkRows.EventType.TABLE_DELETE,
|
||||
await updateLinks({
|
||||
eventType: EventType.TABLE_DELETE,
|
||||
table: tableToDelete,
|
||||
})
|
||||
|
||||
|
@ -132,10 +132,10 @@ exports.destroy = async function (ctx) {
|
|||
await db.remove(tableToDelete)
|
||||
|
||||
// remove table search index
|
||||
if (!env.isTest()) {
|
||||
if (!isTest()) {
|
||||
const currentIndexes = await db.getIndexes()
|
||||
const existingIndex = currentIndexes.indexes.find(
|
||||
existing => existing.name === `search:${ctx.params.tableId}`
|
||||
(existing: any) => existing.name === `search:${ctx.params.tableId}`
|
||||
)
|
||||
if (existingIndex) {
|
||||
await db.deleteIndex(existingIndex)
|
||||
|
@ -143,12 +143,15 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(tableToDelete, { deletion: true })
|
||||
await runStaticFormulaChecks(tableToDelete, {
|
||||
oldTable: null,
|
||||
deletion: true,
|
||||
})
|
||||
await cleanupAttachments(tableToDelete, { rows })
|
||||
return tableToDelete
|
||||
}
|
||||
|
||||
exports.bulkImport = async function (ctx) {
|
||||
export async function bulkImport(ctx: any) {
|
||||
const table = await getTable(ctx.params.tableId)
|
||||
const { dataImport } = ctx.request.body
|
||||
await handleDataImport(ctx.user, table, dataImport)
|
|
@ -1,34 +1,34 @@
|
|||
const csvParser = require("../../../utilities/csvParser")
|
||||
const {
|
||||
import { transform } from "../../../utilities/csvParser"
|
||||
import {
|
||||
getRowParams,
|
||||
generateRowID,
|
||||
InternalTables,
|
||||
getTableParams,
|
||||
BudibaseInternalDB,
|
||||
} = require("../../../db/utils")
|
||||
const { isEqual } = require("lodash")
|
||||
const { AutoFieldSubTypes, FieldTypes } = require("../../../constants")
|
||||
const {
|
||||
} from "../../../db/utils"
|
||||
import { isEqual } from "lodash"
|
||||
import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
|
||||
import {
|
||||
inputProcessing,
|
||||
cleanupAttachments,
|
||||
} = require("../../../utilities/rowProcessor")
|
||||
const {
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import {
|
||||
USERS_TABLE_SCHEMA,
|
||||
SwitchableTypes,
|
||||
CanSwitchTypes,
|
||||
} = require("../../../constants")
|
||||
const {
|
||||
} from "../../../constants"
|
||||
import {
|
||||
isExternalTable,
|
||||
breakExternalTableId,
|
||||
isSQL,
|
||||
} = require("../../../integrations/utils")
|
||||
const { getViews, saveView } = require("../view/utils")
|
||||
const viewTemplate = require("../view/viewBuilder")
|
||||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
} from "../../../integrations/utils"
|
||||
import { getViews, saveView } from "../view/utils"
|
||||
import viewTemplate from "../view/viewBuilder"
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
|
||||
|
||||
exports.clearColumns = async (table, columnNames) => {
|
||||
export async function clearColumns(table: any, columnNames: any) {
|
||||
const db = getAppDB()
|
||||
const rows = await db.allDocs(
|
||||
getRowParams(table._id, null, {
|
||||
|
@ -36,18 +36,18 @@ exports.clearColumns = async (table, columnNames) => {
|
|||
})
|
||||
)
|
||||
return db.bulkDocs(
|
||||
rows.rows.map(({ doc }) => {
|
||||
columnNames.forEach(colName => delete doc[colName])
|
||||
rows.rows.map(({ doc }: any) => {
|
||||
columnNames.forEach((colName: any) => delete doc[colName])
|
||||
return doc
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
||||
export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
|
||||
const db = getAppDB()
|
||||
let updatedRows = []
|
||||
const rename = updatedTable._rename
|
||||
let deletedColumns = []
|
||||
let deletedColumns: any = []
|
||||
if (oldTable && oldTable.schema && updatedTable.schema) {
|
||||
deletedColumns = Object.keys(oldTable.schema).filter(
|
||||
colName => updatedTable.schema[colName] == null
|
||||
|
@ -61,14 +61,14 @@ exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
const rawRows = rows.rows.map(({ doc }) => doc)
|
||||
updatedRows = rawRows.map(row => {
|
||||
const rawRows = rows.rows.map(({ doc }: any) => doc)
|
||||
updatedRows = rawRows.map((row: any) => {
|
||||
row = cloneDeep(row)
|
||||
if (rename) {
|
||||
row[rename.updated] = row[rename.old]
|
||||
delete row[rename.old]
|
||||
} else if (deletedColumns.length !== 0) {
|
||||
deletedColumns.forEach(colName => delete row[colName])
|
||||
deletedColumns.forEach((colName: any) => delete row[colName])
|
||||
}
|
||||
return row
|
||||
})
|
||||
|
@ -76,14 +76,14 @@ exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
|||
// cleanup any attachments from object storage for deleted attachment columns
|
||||
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
|
||||
// Update views
|
||||
await exports.checkForViewUpdates(updatedTable, rename, deletedColumns)
|
||||
await checkForViewUpdates(updatedTable, rename, deletedColumns)
|
||||
delete updatedTable._rename
|
||||
}
|
||||
return { rows: updatedRows, table: updatedTable }
|
||||
}
|
||||
|
||||
// makes sure the passed in table isn't going to reset the auto ID
|
||||
exports.makeSureTableUpToDate = (table, tableToSave) => {
|
||||
export function makeSureTableUpToDate(table: any, tableToSave: any) {
|
||||
if (!table) {
|
||||
return tableToSave
|
||||
}
|
||||
|
@ -91,7 +91,9 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
|
|||
tableToSave._rev = table._rev
|
||||
// make sure auto IDs are always updated - these are internal
|
||||
// so the client may not know they have changed
|
||||
for (let [field, column] of Object.entries(table.schema)) {
|
||||
let field: any
|
||||
let column: any
|
||||
for ([field, column] of Object.entries(table.schema)) {
|
||||
if (
|
||||
column.autocolumn &&
|
||||
column.subtype === AutoFieldSubTypes.AUTO_ID &&
|
||||
|
@ -103,30 +105,32 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
|
|||
return tableToSave
|
||||
}
|
||||
|
||||
exports.handleDataImport = async (user, table, dataImport) => {
|
||||
export async function handleDataImport(user: any, table: any, dataImport: any) {
|
||||
if (!dataImport || !dataImport.csvString) {
|
||||
return table
|
||||
}
|
||||
|
||||
const db = getAppDB()
|
||||
// Populate the table with rows imported from CSV in a bulk update
|
||||
const data = await csvParser.transform({
|
||||
const data = await transform({
|
||||
...dataImport,
|
||||
existingTable: table,
|
||||
})
|
||||
|
||||
let finalData = []
|
||||
let finalData: any = []
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
let row = data[i]
|
||||
row._id = generateRowID(table._id)
|
||||
row.tableId = table._id
|
||||
const processed = inputProcessing(user, table, row, {
|
||||
const processed: any = inputProcessing(user, table, row, {
|
||||
noAutoRelationships: true,
|
||||
})
|
||||
table = processed.table
|
||||
row = processed.row
|
||||
|
||||
for (let [fieldName, schema] of Object.entries(table.schema)) {
|
||||
let fieldName: any
|
||||
let schema: any
|
||||
for ([fieldName, schema] of Object.entries(table.schema)) {
|
||||
// check whether the options need to be updated for inclusion as part of the data import
|
||||
if (
|
||||
schema.type === FieldTypes.OPTIONS &&
|
||||
|
@ -143,17 +147,13 @@ exports.handleDataImport = async (user, table, dataImport) => {
|
|||
finalData.push(row)
|
||||
}
|
||||
|
||||
await usageQuota.update(usageQuota.Properties.ROW, finalData.length, {
|
||||
dryRun: true,
|
||||
})
|
||||
await db.bulkDocs(finalData)
|
||||
await usageQuota.update(usageQuota.Properties.ROW, finalData.length)
|
||||
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData))
|
||||
let response = await db.put(table)
|
||||
table._rev = response._rev
|
||||
return table
|
||||
}
|
||||
|
||||
exports.handleSearchIndexes = async table => {
|
||||
export async function handleSearchIndexes(table: any) {
|
||||
const db = getAppDB()
|
||||
// create relevant search indexes
|
||||
if (table.indexes && table.indexes.length > 0) {
|
||||
|
@ -161,12 +161,12 @@ exports.handleSearchIndexes = async table => {
|
|||
const indexName = `search:${table._id}`
|
||||
|
||||
const existingIndex = currentIndexes.indexes.find(
|
||||
existing => existing.name === indexName
|
||||
(existing: any) => existing.name === indexName
|
||||
)
|
||||
|
||||
if (existingIndex) {
|
||||
const currentFields = existingIndex.def.fields.map(
|
||||
field => Object.keys(field)[0]
|
||||
(field: any) => Object.keys(field)[0]
|
||||
)
|
||||
|
||||
// if index fields have changed, delete the original index
|
||||
|
@ -197,7 +197,7 @@ exports.handleSearchIndexes = async table => {
|
|||
return table
|
||||
}
|
||||
|
||||
exports.checkStaticTables = table => {
|
||||
export function checkStaticTables(table: any) {
|
||||
// check user schema has all required elements
|
||||
if (table._id === InternalTables.USER_METADATA) {
|
||||
for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
|
||||
|
@ -211,7 +211,13 @@ exports.checkStaticTables = table => {
|
|||
}
|
||||
|
||||
class TableSaveFunctions {
|
||||
constructor({ user, oldTable, dataImport }) {
|
||||
db: any
|
||||
user: any
|
||||
oldTable: any
|
||||
dataImport: any
|
||||
rows: any
|
||||
|
||||
constructor({ user, oldTable, dataImport }: any) {
|
||||
this.db = getAppDB()
|
||||
this.user = user
|
||||
this.oldTable = oldTable
|
||||
|
@ -221,25 +227,25 @@ class TableSaveFunctions {
|
|||
}
|
||||
|
||||
// before anything is done
|
||||
async before(table) {
|
||||
async before(table: any) {
|
||||
if (this.oldTable) {
|
||||
table = exports.makeSureTableUpToDate(this.oldTable, table)
|
||||
table = makeSureTableUpToDate(this.oldTable, table)
|
||||
}
|
||||
table = exports.checkStaticTables(table)
|
||||
table = checkStaticTables(table)
|
||||
return table
|
||||
}
|
||||
|
||||
// when confirmed valid
|
||||
async mid(table) {
|
||||
let response = await exports.checkForColumnUpdates(this.oldTable, table)
|
||||
async mid(table: any) {
|
||||
let response = await checkForColumnUpdates(this.oldTable, table)
|
||||
this.rows = this.rows.concat(response.rows)
|
||||
return table
|
||||
}
|
||||
|
||||
// after saving
|
||||
async after(table) {
table = await exports.handleSearchIndexes(table)
table = await exports.handleDataImport(this.user, table, this.dataImport)
async after(table: any) {
table = await handleSearchIndexes(table)
table = await handleDataImport(this.user, table, this.dataImport)
return table
}
@ -248,21 +254,21 @@ class TableSaveFunctions {
}
}

exports.getAllInternalTables = async () => {
export async function getAllInternalTables() {
const db = getAppDB()
const internalTables = await db.allDocs(
getTableParams(null, {
include_docs: true,
})
)
return internalTables.rows.map(tableDoc => ({
return internalTables.rows.map((tableDoc: any) => ({
...tableDoc.doc,
type: "internal",
sourceId: BudibaseInternalDB._id,
}))
}

exports.getAllExternalTables = async datasourceId => {
export async function getAllExternalTables(datasourceId: any) {
const db = getAppDB()
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
@ -271,24 +277,28 @@ exports.getAllExternalTables = async datasourceId => {
return datasource.entities
}

exports.getExternalTable = async (datasourceId, tableName) => {
const entities = await exports.getAllExternalTables(datasourceId)
export async function getExternalTable(datasourceId: any, tableName: any) {
const entities = await getAllExternalTables(datasourceId)
return entities[tableName]
}

exports.getTable = async tableId => {
export async function getTable(tableId: any) {
const db = getAppDB()
if (isExternalTable(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await db.get(datasourceId)
const table = await exports.getExternalTable(datasourceId, tableName)
const table = await getExternalTable(datasourceId, tableName)
return { ...table, sql: isSQL(datasource) }
} else {
return db.get(tableId)
}
}

exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
export async function checkForViewUpdates(
table: any,
rename: any,
deletedColumns: any
) {
const views = await getViews()
const tableViews = views.filter(view => view.meta.tableId === table._id)

@ -312,7 +322,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
// Update filters if required
if (view.meta.filters) {
view.meta.filters.forEach(filter => {
view.meta.filters.forEach((filter: any) => {
if (filter.key === rename.old) {
filter.key = rename.updated
needsUpdated = true
@ -320,7 +330,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
})
}
} else if (deletedColumns) {
deletedColumns.forEach(column => {
deletedColumns.forEach((column: any) => {
// Remove calculation statement if required
if (view.meta.field === column) {
delete view.meta.field
@ -338,7 +348,7 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
// Remove filters referencing deleted field if required
if (view.meta.filters && view.meta.filters.length) {
const initialLength = view.meta.filters.length
view.meta.filters = view.meta.filters.filter(filter => {
view.meta.filters = view.meta.filters.filter((filter: any) => {
return filter.key !== column
})
if (initialLength !== view.meta.filters.length) {
@ -360,16 +370,20 @@ exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
}
}

exports.generateForeignKey = (column, relatedTable) => {
export function generateForeignKey(column: any, relatedTable: any) {
return `fk_${relatedTable.name}_${column.fieldName}`
}

exports.generateJunctionTableName = (column, table, relatedTable) => {
export function generateJunctionTableName(
column: any,
table: any,
relatedTable: any
) {
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}

exports.foreignKeyStructure = (keyName, meta = null) => {
const structure = {
export function foreignKeyStructure(keyName: any, meta = null) {
const structure: any = {
type: FieldTypes.NUMBER,
constraints: {},
name: keyName,
@ -380,7 +394,7 @@ exports.foreignKeyStructure = (keyName, meta = null) => {
return structure
}

exports.areSwitchableTypes = (type1, type2) => {
export function areSwitchableTypes(type1: any, type2: any) {
if (
SwitchableTypes.indexOf(type1) === -1 &&
SwitchableTypes.indexOf(type2) === -1
@ -397,21 +411,24 @@ exports.areSwitchableTypes = (type1, type2) => {
return false
}

exports.hasTypeChanged = (table, oldTable) => {
export function hasTypeChanged(table: any, oldTable: any) {
if (!oldTable) {
return false
}
for (let [key, field] of Object.entries(oldTable.schema)) {
let key: any
let field: any
for ([key, field] of Object.entries(oldTable.schema)) {
const oldType = field.type
if (!table.schema[key]) {
continue
}
const newType = table.schema[key].type
if (oldType !== newType && !exports.areSwitchableTypes(oldType, newType)) {
if (oldType !== newType && !areSwitchableTypes(oldType, newType)) {
return true
}
}
return false
}

exports.TableSaveFunctions = TableSaveFunctions
const _TableSaveFunctions = TableSaveFunctions
export { _TableSaveFunctions as TableSaveFunctions }
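Editor's note: a hedged, self-contained sketch (not part of this commit) of the check that hasTypeChanged performs above — the old schema is walked and a save is flagged whenever a column's type changed to something non-switchable. The SwitchableTypes list and the simplified areSwitchableTypesSketch below are stand-ins for the module's own constants and helper:

type FieldSchema = { type: string }
type Table = { schema: Record<string, FieldSchema> }

// stand-in for the module's real constant
const SwitchableTypes = ["string", "options", "longform"]

function areSwitchableTypesSketch(type1: string, type2: string): boolean {
  // simplified: treat a change as switchable only when both types are in the list
  return SwitchableTypes.includes(type1) && SwitchableTypes.includes(type2)
}

function hasTypeChangedSketch(table: Table, oldTable?: Table): boolean {
  if (!oldTable) {
    return false
  }
  for (const [key, field] of Object.entries(oldTable.schema)) {
    const updated = table.schema[key]
    if (!updated) {
      continue // column was deleted, not retyped
    }
    if (field.type !== updated.type && !areSwitchableTypesSketch(field.type, updated.type)) {
      return true
    }
  }
  return false
}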
@ -4,12 +4,14 @@ const {
auditLog,
buildTenancyMiddleware,
} = require("@budibase/backend-core/auth")
const { errors } = require("@budibase/backend-core")
const currentApp = require("../middleware/currentapp")
const compress = require("koa-compress")
const zlib = require("zlib")
const { mainRoutes, staticRoutes, publicRoutes } = require("./routes")
const pkg = require("../../package.json")
const env = require("../environment")
const { middleware: pro } = require("@budibase/pro")

const router = new Router()

@ -52,6 +54,7 @@ router
})
)
.use(currentApp)
.use(pro.licensing())
.use(auditLog)

// error handling middleware
@ -60,10 +63,12 @@ router.use(async (ctx, next) => {
await next()
} catch (err) {
ctx.status = err.status || err.statusCode || 500
const error = errors.getPublicError(err)
ctx.body = {
message: err.message,
status: ctx.status,
validationErrors: err.validation,
error,
}
if (env.NODE_ENV !== "jest") {
ctx.log.error(err)
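Editor's note: a minimal sketch of the error-handling middleware pattern in the hunk above, assuming only Koa and @koa/router; the real handler additionally attaches the validation errors and the public error object from backend-core, and logs outside of jest:

import Router from "@koa/router"

const sketchRouter = new Router()

// catch anything thrown by downstream middleware/routes and normalise it
sketchRouter.use(async (ctx: any, next: any) => {
  try {
    await next()
  } catch (err: any) {
    ctx.status = err.status || err.statusCode || 500
    ctx.body = {
      message: err.message,
      status: ctx.status,
    }
  }
})

export default sketchRouter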
@ -1,14 +1,13 @@
const Router = require("@koa/router")
const controller = require("../controllers/application")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/backend-core/permissions")
const usage = require("../../middleware/usageQuota")
import Router from "@koa/router"
import * as controller from "../controllers/application"
import authorized from "../../middleware/authorized"
import { BUILDER } from "@budibase/backend-core/permissions"

const router = Router()
const router = new Router()

router
.post("/api/applications/:appId/sync", authorized(BUILDER), controller.sync)
.post("/api/applications", authorized(BUILDER), usage, controller.create)
.post("/api/applications", authorized(BUILDER), controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@ -23,11 +22,6 @@ router
authorized(BUILDER),
controller.revertClient
)
.delete(
"/api/applications/:appId",
authorized(BUILDER),
usage,
controller.delete
)
.delete("/api/applications/:appId", authorized(BUILDER), controller.destroy)

module.exports = router
export default router
@ -1,62 +0,0 @@
const authRoutes = require("./auth")
const layoutRoutes = require("./layout")
const screenRoutes = require("./screen")
const userRoutes = require("./user")
const applicationRoutes = require("./application")
const tableRoutes = require("./table")
const rowRoutes = require("./row")
const viewRoutes = require("./view")
const staticRoutes = require("./static")
const componentRoutes = require("./component")
const automationRoutes = require("./automation")
const webhookRoutes = require("./webhook")
const roleRoutes = require("./role")
const deployRoutes = require("./deploy")
const apiKeysRoutes = require("./apikeys")
const templatesRoutes = require("./templates")
const analyticsRoutes = require("./analytics")
const routingRoutes = require("./routing")
const integrationRoutes = require("./integration")
const permissionRoutes = require("./permission")
const datasourceRoutes = require("./datasource")
const queryRoutes = require("./query")
const backupRoutes = require("./backup")
const metadataRoutes = require("./metadata")
const devRoutes = require("./dev")
const cloudRoutes = require("./cloud")
const migrationRoutes = require("./migrations")
const publicRoutes = require("./public")

exports.mainRoutes = [
authRoutes,
deployRoutes,
layoutRoutes,
screenRoutes,
userRoutes,
applicationRoutes,
automationRoutes,
viewRoutes,
componentRoutes,
roleRoutes,
apiKeysRoutes,
templatesRoutes,
analyticsRoutes,
webhookRoutes,
routingRoutes,
integrationRoutes,
permissionRoutes,
datasourceRoutes,
queryRoutes,
backupRoutes,
metadataRoutes,
devRoutes,
cloudRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
rowRoutes,
migrationRoutes,
]

exports.publicRoutes = publicRoutes
exports.staticRoutes = staticRoutes
@ -0,0 +1,60 @@
import authRoutes from "./auth"
import layoutRoutes from "./layout"
import screenRoutes from "./screen"
import userRoutes from "./user"
import applicationRoutes from "./application"
import tableRoutes from "./table"
import rowRoutes from "./row"
import viewRoutes from "./view"
import componentRoutes from "./component"
import automationRoutes from "./automation"
import webhookRoutes from "./webhook"
import roleRoutes from "./role"
import deployRoutes from "./deploy"
import apiKeysRoutes from "./apikeys"
import templatesRoutes from "./templates"
import analyticsRoutes from "./analytics"
import routingRoutes from "./routing"
import integrationRoutes from "./integration"
import permissionRoutes from "./permission"
import datasourceRoutes from "./datasource"
import queryRoutes from "./query"
import backupRoutes from "./backup"
import metadataRoutes from "./metadata"
import devRoutes from "./dev"
import cloudRoutes from "./cloud"
import migrationRoutes from "./migrations"

export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"

export const mainRoutes = [
authRoutes,
deployRoutes,
layoutRoutes,
screenRoutes,
userRoutes,
applicationRoutes,
automationRoutes,
viewRoutes,
componentRoutes,
roleRoutes,
apiKeysRoutes,
templatesRoutes,
analyticsRoutes,
webhookRoutes,
routingRoutes,
integrationRoutes,
permissionRoutes,
datasourceRoutes,
queryRoutes,
backupRoutes,
metadataRoutes,
devRoutes,
cloudRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
rowRoutes,
migrationRoutes,
]
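Editor's note: each entry in mainRoutes is a @koa/router instance, so a typical way to consume an index like this (a sketch, not the commit's own wiring) is to mount every router on the Koa app:

import Koa from "koa"
import { mainRoutes, staticRoutes, publicRoutes } from "./routes"

const app = new Koa()

// mainRoutes first, then the public and static routers
for (const routes of [...mainRoutes, publicRoutes, staticRoutes]) {
  app.use(routes.routes())
  app.use(routes.allowedMethods())
}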
@ -3,7 +3,6 @@ import queryEndpoints from "./queries"
import tableEndpoints from "./tables"
import rowEndpoints from "./rows"
import userEndpoints from "./users"
import usage from "../../../middleware/usageQuota"
import authorized from "../../../middleware/authorized"
import publicApi from "../../../middleware/publicApi"
import { paramResource, paramSubResource } from "../../../middleware/resourceId"
@ -114,8 +113,6 @@ function applyRoutes(
// add the authorization middleware, using the correct perm type
addMiddleware(endpoints.read, authorized(permType, PermissionLevels.READ))
addMiddleware(endpoints.write, authorized(permType, PermissionLevels.WRITE))
// add the usage quota middleware
addMiddleware(endpoints.write, usage)
// add the output mapper middleware
addMiddleware(endpoints.read, mapperMiddleware, { output: true })
addMiddleware(endpoints.write, mapperMiddleware, { output: true })
@ -130,4 +127,4 @@ applyRoutes(queryEndpoints, PermissionTypes.QUERY, "queryId")
// needs to be applied last for routing purposes, don't override other endpoints
applyRoutes(rowEndpoints, PermissionTypes.TABLE, "tableId", "rowId")

module.exports = publicRouter
export default publicRouter
@ -1,18 +1,14 @@
const Router = require("@koa/router")
const rowController = require("../controllers/row")
const authorized = require("../../middleware/authorized")
const usage = require("../../middleware/usageQuota")
const {
paramResource,
paramSubResource,
} = require("../../middleware/resourceId")
import Router from "@koa/router"
import * as rowController from "../controllers/row"
import authorized from "../../middleware/authorized"
import { paramResource, paramSubResource } from "../../middleware/resourceId"
const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/backend-core/permissions")
const { internalSearchValidator } = require("./utils/validators")

const router = Router()
const router = new Router()

router
/**
@ -180,7 +176,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.save
)
/**
@ -195,7 +190,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.patch
)
/**
@ -248,7 +242,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.destroy
)

@ -269,8 +262,7 @@ router
"/api/:tableId/rows/exportRows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.export
rowController.exportRows
)

module.exports = router
export default router
@ -1,19 +1,19 @@
const Router = require("@koa/router")
const controller = require("../controllers/static")
const { budibaseTempDir } = require("../../utilities/budibaseDir")
const authorized = require("../../middleware/authorized")
const {
import Router from "@koa/router"
import * as controller from "../controllers/static"
import { budibaseTempDir } from "../../utilities/budibaseDir"
import authorized from "../../middleware/authorized"
import {
BUILDER,
PermissionTypes,
PermissionLevels,
} = require("@budibase/backend-core/permissions")
const env = require("../../environment")
const { paramResource } = require("../../middleware/resourceId")
} from "@budibase/backend-core/permissions"
import * as env from "../../environment"
import { paramResource } from "../../middleware/resourceId"

const router = Router()
const router = new Router()

/* istanbul ignore next */
router.param("file", async (file, ctx, next) => {
router.param("file", async (file: any, ctx: any, next: any) => {
ctx.file = file && file.includes(".") ? file : "index.html"
if (!ctx.file.startsWith("budibase-client")) {
return next()
@ -52,4 +52,4 @@ router
controller.getSignedUploadURL
)

module.exports = router
export default router
@ -1,31 +1,38 @@
const rowController = require("../../../controllers/row")
const appController = require("../../../controllers/application")
const { AppStatus } = require("../../../../db/utils")
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
const { TENANT_ID } = require("../../../../tests/utilities/structures")
const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
const env = require("../../../../environment")
import * as rowController from "../../../controllers/row"
import * as appController from "../../../controllers/application"
import { AppStatus } from "../../../../db/utils"
import { BUILTIN_ROLE_IDS } from "@budibase/backend-core/roles"
import { TENANT_ID } from "../../../../tests/utilities/structures"
import { getAppDB, doInAppContext } from "@budibase/backend-core/context"
import * as env from "../../../../environment"

function Request(appId, params) {
this.appId = appId
this.params = params
this.request = {}
class Request {
appId: any
params: any
request: any
body: any

constructor(appId: any, params: any) {
this.appId = appId
this.params = params
this.request = {}
}
}

function runRequest(appId, controlFunc, request) {
function runRequest(appId: any, controlFunc: any, request?: any) {
return doInAppContext(appId, async () => {
return controlFunc(request)
})
}

exports.getAllTableRows = async config => {
export const getAllTableRows = async (config: any) => {
const req = new Request(config.appId, { tableId: config.table._id })
await runRequest(config.appId, rowController.fetch, req)
return req.body
}

exports.clearAllApps = async (tenantId = TENANT_ID) => {
const req = { query: { status: AppStatus.DEV }, user: { tenantId } }
export const clearAllApps = async (tenantId = TENANT_ID) => {
const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
const apps = req.body
if (!apps || apps.length <= 0) {
@ -34,11 +41,11 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => {
for (let app of apps) {
const { appId } = app
const req = new Request(null, { appId })
await runRequest(appId, appController.delete, req)
await runRequest(appId, appController.destroy, req)
}
}

exports.clearAllAutomations = async config => {
export const clearAllAutomations = async (config: any) => {
const automations = await config.getAllAutomations()
for (let auto of automations) {
await doInAppContext(config.appId, async () => {
@ -47,7 +54,12 @@ exports.clearAllAutomations = async config => {
}
}

exports.createRequest = (request, method, url, body) => {
export const createRequest = (
request: any,
method: any,
url: any,
body: any
) => {
let req

if (method === "POST") req = request.post(url).send(body)
@ -59,7 +71,12 @@ exports.createRequest = (request, method, url, body) => {
return req
}

exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
export const checkBuilderEndpoint = async ({
config,
method,
url,
body,
}: any) => {
const headers = await config.login({
userId: "us_fail",
builder: false,
@ -71,14 +88,14 @@ exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
.expect(403)
}

exports.checkPermissionsEndpoint = async ({
export const checkPermissionsEndpoint = async ({
config,
method,
url,
body,
passRole,
failRole,
}) => {
}: any) => {
const passHeader = await config.login({
roleId: passRole,
prodApp: true,
@ -106,11 +123,11 @@ exports.checkPermissionsEndpoint = async ({
.expect(403)
}

exports.getDB = () => {
export const getDB = () => {
return getAppDB()
}

exports.testAutomation = async (config, automation) => {
export const testAutomation = async (config: any, automation: any) => {
return runRequest(automation.appId, async () => {
return await config.request
.post(`/api/automations/${automation._id}/test`)
@ -126,7 +143,7 @@ exports.testAutomation = async (config, automation) => {
})
}

exports.runInProd = async func => {
export const runInProd = async (func: any) => {
const nodeEnv = env.NODE_ENV
const workerId = env.JEST_WORKER_ID
env._set("NODE_ENV", "PRODUCTION")
@ -1,6 +1,5 @@
// need to load environment first
import { ExtendableContext } from "koa"

import * as env from "./environment"
const CouchDB = require("./db")
require("@budibase/backend-core").init(CouchDB)
@ -15,7 +14,7 @@ const automations = require("./automations/index")
const Sentry = require("@sentry/node")
const fileSystem = require("./utilities/fileSystem")
const bullboard = require("./automations/bullboard")
const redis = require("./utilities/redis")
import redis from "./utilities/redis"
import * as migrations from "./migrations"

const app = new Koa()
@ -13,6 +13,7 @@ const integromat = require("./steps/integromat")
let filter = require("./steps/filter")
let delay = require("./steps/delay")
let queryRow = require("./steps/queryRows")
let loop = require("./steps/loop")
const env = require("../environment")

const ACTION_IMPLS = {
@ -27,6 +28,7 @@ const ACTION_IMPLS = {
DELAY: delay.run,
FILTER: filter.run,
QUERY_ROWS: queryRow.run,
LOOP: loop.run,
// these used to be lowercase step IDs, maintain for backwards compat
discord: discord.run,
slack: slack.run,
@ -45,6 +47,7 @@ const ACTION_DEFINITIONS = {
DELAY: delay.definition,
FILTER: filter.definition,
QUERY_ROWS: queryRow.definition,
LOOP: loop.definition,
// these used to be lowercase step IDs, maintain for backwards compat
discord: discord.definition,
slack: slack.definition,
@ -1,4 +1,5 @@
const { getTable } = require("../api/controllers/table/utils")
const { findHBSBlocks } = require("@budibase/string-templates")

/**
* When values are input to the system generally they will be of type string as this is required for template strings.
@ -74,3 +75,14 @@ exports.getError = err => {
}
return typeof err !== "string" ? err.toString() : err
}

exports.substituteLoopStep = (hbsString, substitute) => {
let blocks = findHBSBlocks(hbsString)
for (let block of blocks) {
let oldBlock = block
block = block.replace(/loop/, substitute)
hbsString = hbsString.replace(new RegExp(oldBlock, "g"), block)
}

return hbsString
}
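Editor's note: a usage sketch of the new substituteLoopStep helper (hypothetical binding names, not from the commit): findHBSBlocks returns each {{ ... }} block, and the first `loop` reference inside each block is rewritten to point at the given step.

// assuming the helper above is imported as substituteLoopStep
const hbs = "{{ loop.currentItem.name }} ({{ loop.index }})"
const substituted = substituteLoopStep(hbs, "steps.1")
// substituted === "{{ steps.1.currentItem.name }} ({{ steps.1.index }})"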
@ -1,9 +1,8 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
import { save } from "../../api/controllers/row"
import { cleanUpRow, getError } from "../automationUtils"
import { buildCtx } from "./utils"

exports.definition = {
export const definition = {
name: "Create Row",
tagline: "Create a {{inputs.enriched.table.name}} row",
icon: "TableRowAddBottom",
@ -59,7 +58,7 @@ exports.definition = {
},
}

exports.run = async function ({ inputs, appId, emitter }) {
export async function run({ inputs, appId, emitter }: any) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@ -69,7 +68,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}
// have to clean up the row, remove the table from it
const ctx = buildCtx(appId, emitter, {
const ctx: any = buildCtx(appId, emitter, {
body: inputs.row,
params: {
tableId: inputs.row.tableId,
@ -77,13 +76,8 @@ exports.run = async function ({ inputs, appId, emitter }) {
})

try {
inputs.row = await automationUtils.cleanUpRow(
inputs.row.tableId,
inputs.row
)
await usage.update(usage.Properties.ROW, 1, { dryRun: true })
await rowController.save(ctx)
await usage.update(usage.Properties.ROW, 1)
inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
await save(ctx)
return {
row: inputs.row,
response: ctx.body,
@ -94,7 +88,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
response: automationUtils.getError(err),
response: getError(err),
}
}
}
@ -1,9 +1,8 @@
const rowController = require("../../api/controllers/row")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
const automationUtils = require("../automationUtils")
import { destroy } from "../../api/controllers/row"
import { buildCtx } from "./utils"
import { getError } from "../automationUtils"

exports.definition = {
export const definition = {
description: "Delete a row from your database",
icon: "TableRowRemoveCenter",
name: "Delete Row",
@ -24,12 +23,8 @@ exports.definition = {
type: "string",
title: "Row ID",
},
revision: {
type: "string",
title: "Row Revision",
},
},
required: ["tableId", "id", "revision"],
required: ["tableId", "id"],
},
outputs: {
properties: {
@ -52,8 +47,8 @@ exports.definition = {
},
}

exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.id == null || inputs.revision == null) {
export async function run({ inputs, appId, emitter }: any) {
if (inputs.id == null) {
return {
success: false,
response: {
@ -62,7 +57,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}

let ctx = buildCtx(appId, emitter, {
let ctx: any = buildCtx(appId, emitter, {
body: {
_id: inputs.id,
_rev: inputs.revision,
@ -73,8 +68,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
})

try {
await usage.update(usage.Properties.ROW, -1)
await rowController.destroy(ctx)
await destroy(ctx)
return {
response: ctx.body,
row: ctx.row,
@ -83,7 +77,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
response: automationUtils.getError(err),
response: getError(err),
}
}
}
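Editor's note: a hedged invocation sketch (placeholder IDs, hypothetical import path, not from the commit) showing the slimmed-down inputs — after this change the step only needs the table and row IDs, the revision input is gone:

import { run } from "./deleteRow"

async function example() {
  return run({
    inputs: { tableId: "<tableId>", id: "<rowId>" }, // no revision required any more
    appId: "<appId>",
    emitter: null,
  })
}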
@ -0,0 +1,50 @@
exports.definition = {
name: "Looping",
icon: "Reuse",
tagline: "Loop the block",
description: "Loop",
stepId: "LOOP",
internal: true,
inputs: {},
schema: {
inputs: {
properties: {
option: {
customType: "loopOption",
title: "Input type",
},
binding: {
type: "string",
title: "Binding / Value",
},
iterations: {
type: "number",
title: "Max loop iterations",
},
failure: {
type: "string",
title: "Failure Condition",
},
},
required: ["type", "value", "iterations", "failure"],
},
outputs: {
properties: {
items: {
customType: "item",
description: "The item currently being executed",
},
success: {
type: "boolean",
description: "Whether the message loop was successfully",
},
iterations: {
type: "number",
descriptions: "The amount of times the block ran",
},
},
required: ["success", "items", "iterations"],
},
},
type: "LOGIC",
}
@ -1,4 +1,3 @@
jest.mock("../../utilities/usageQuota")
jest.mock("../../threads/automation")
jest.mock("../../utilities/redis", () => ({
init: jest.fn(),
@ -1,10 +1,8 @@
jest.mock("../../utilities/usageQuota")

const usageQuota = require("../../utilities/usageQuota")
const setup = require("./utilities")
import * as setup from "./utilities"

describe("test the create row action", () => {
let table, row
let table: any
let row: any
let config = setup.getConfig()

beforeEach(async () => {
@ -36,20 +34,11 @@ describe("test the create row action", () => {
row: {
tableId: "invalid",
invalid: "invalid",
}
},
})
expect(res.success).toEqual(false)
})

it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row
})
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
})
})

it("should check invalid inputs return an error", async () => {
const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
expect(res.success).toEqual(false)
@ -1,10 +1,9 @@
jest.mock("../../utilities/usageQuota")

const usageQuota = require("../../utilities/usageQuota")
const setup = require("./utilities")

describe("test the delete row action", () => {
let table, row, inputs
let table: any
let row: any
let inputs: any
let config = setup.getConfig()

beforeEach(async () => {
@ -37,7 +36,6 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})
@ -18,7 +18,6 @@ exports.afterAll = () => {

exports.runInProd = async fn => {
env._set("NODE_ENV", "production")
env._set("USE_QUOTAS", 1)
let error
try {
await fn()
@ -26,7 +25,6 @@ exports.runInProd = async fn => {
error = err
}
env._set("NODE_ENV", "jest")
env._set("USE_QUOTAS", null)
if (error) {
throw error
}
@ -1,26 +1,32 @@
const { Thread, ThreadType } = require("../threads")
const { definitions } = require("./triggerInfo")
const webhooks = require("../api/controllers/webhook")
const CouchDB = require("../db")
const { queue } = require("./bullboard")
const newid = require("../db/newid")
const { updateEntityMetadata } = require("../utilities")
const { MetadataTypes, WebhookType } = require("../constants")
const { getProdAppID } = require("@budibase/backend-core/db")
const { cloneDeep } = require("lodash/fp")
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import * as webhooks from "../api/controllers/webhook"
import CouchDB from "../db"
import { queue } from "./bullboard"
import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes, WebhookType } from "../constants"
import { getProdAppID } from "@budibase/backend-core/db"
import { cloneDeep } from "lodash/fp"
import { getAppDB, getAppId } from "@budibase/backend-core/context"
import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"

const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)

exports.processEvent = async job => {
export async function processEvent(job: any) {
try {
// need to actually await these so that an error can be captured properly
console.log(
`${job.data.automation.appId} automation ${job.data.automation._id} running`
)
return await Runner.run(job)
// need to actually await these so that an error can be captured properly
const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
return await tenancy.doInTenant(tenantId, async () => {
const runFn = () => Runner.run(job)
return quotas.addAutomation(runFn)
})
} catch (err) {
const errJson = JSON.stringify(err)
console.error(
@ -31,11 +37,15 @@ exports.processEvent = async job => {
}
}

exports.updateTestHistory = async (appId, automation, history) => {
export async function updateTestHistory(
appId: any,
automation: any,
history: any
) {
return updateEntityMetadata(
MetadataTypes.AUTOMATION_TEST_HISTORY,
automation._id,
metadata => {
(metadata: any) => {
if (metadata && Array.isArray(metadata.history)) {
metadata.history.push(history)
} else {
@ -48,7 +58,7 @@ exports.updateTestHistory = async (appId, automation, history) => {
)
}

exports.removeDeprecated = definitions => {
export function removeDeprecated(definitions: any) {
const base = cloneDeep(definitions)
for (let key of Object.keys(base)) {
if (base[key].deprecated) {
@ -59,13 +69,15 @@ exports.removeDeprecated = definitions => {
}

// end the repetition and the job itself
exports.disableAllCrons = async appId => {
export async function disableAllCrons(appId: any) {
const promises = []
const jobs = await queue.getRepeatableJobs()
for (let job of jobs) {
if (job.key.includes(`${appId}_cron`)) {
promises.push(queue.removeRepeatableByKey(job.key))
promises.push(queue.removeJobs(job.id))
if (job.id) {
promises.push(queue.removeJobs(job.id))
}
}
}
return Promise.all(promises)
@ -76,9 +88,9 @@ exports.disableAllCrons = async appId => {
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
exports.enableCronTrigger = async (appId, automation) => {
export async function enableCronTrigger(appId: any, automation: any) {
const trigger = automation ? automation.definition.trigger : null
function isCronTrigger(auto) {
function isCronTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@ -89,7 +101,7 @@ exports.enableCronTrigger = async (appId, automation) => {
if (isCronTrigger(automation)) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job = await queue.add(
const job: any = await queue.add(
{
automation,
event: { appId, timestamp: Date.now() },
@ -117,13 +129,13 @@ exports.enableCronTrigger = async (appId, automation) => {
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
* written to DB (this does not write to DB as it would be wasteful to repeat).
*/
exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
export async function checkForWebhooks({ oldAuto, newAuto }: any) {
const appId = getAppId()
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
const newTrigger = newAuto ? newAuto.definition.trigger : null
const triggerChanged =
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
function isWebhookTrigger(auto) {
function isWebhookTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@ -159,7 +171,7 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
isWebhookTrigger(newAuto)
) {
const ctx = {
const ctx: any = {
appId,
request: {
body: new webhooks.Webhook(
@ -189,6 +201,6 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
* @param appId {string} the app that is being removed.
* @return {Promise<void>} clean is complete if this succeeds.
*/
exports.cleanupAutomations = async appId => {
await exports.disableAllCrons(appId)
export async function cleanupAutomations(appId: any) {
await disableAllCrons(appId)
}
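Editor's note: the key change in processEvent is the wrapper around Runner.run — a hedged sketch of that pattern, using only the calls visible in this hunk:

import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"

// resolve the tenant from the app ID, enter its context, and count the run
// against that tenant's automation quota before executing it
async function runWithQuota(appId: string, runFn: () => Promise<any>) {
  const tenantId = tenancy.getTenantIDFromAppID(appId)
  return tenancy.doInTenant(tenantId, () => quotas.addAutomation(runFn))
}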
@ -190,5 +190,11 @@ exports.WebhookType = {
AUTOMATION: "automation",
}

exports.AutomationErrors = {
INCORRECT_TYPE: "INCORRECT_TYPE",
MAX_ITERATIONS: "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION: "FAILURE_CONDITION_MET",
}

// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = ObjectStoreBuckets
@ -38,8 +38,6 @@ module.exports = {
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
USE_QUOTAS: process.env.USE_QUOTAS,
EXCLUDE_QUOTAS_TENANTS: process.env.EXCLUDE_QUOTAS_TENANTS,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
@ -59,6 +57,7 @@ module.exports = {
LOG_LEVEL: process.env.LOG_LEVEL,
AUTOMATION_DIRECTORY: process.env.AUTOMATION_DIRECTORY,
AUTOMATION_BUCKET: process.env.AUTOMATION_BUCKET,
AUTOMATION_MAX_ITERATIONS: process.env.AUTOMATION_MAX_ITERATIONS,
SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
@ -1,8 +1,8 @@
const {
import {
getUserRoleHierarchy,
getRequiredResourceRole,
BUILTIN_ROLE_IDS,
} = require("@budibase/backend-core/roles")
} from "@budibase/backend-core/roles"
const {
PermissionTypes,
PermissionLevels,
@ -13,7 +13,7 @@ const { isWebhookEndpoint } = require("./utils")
const { buildCsrfMiddleware } = require("@budibase/backend-core/auth")
const { getAppId } = require("@budibase/backend-core/context")

function hasResource(ctx) {
function hasResource(ctx: any) {
return ctx.resourceId != null
}

@ -25,7 +25,12 @@ const csrf = buildCsrfMiddleware()
* - Builders can access all resources.
* - Otherwise the user must have the required role.
*/
const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
const checkAuthorized = async (
ctx: any,
resourceRoles: any,
permType: any,
permLevel: any
) => {
// check if this is a builder api and the user is not a builder
const isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
const isBuilderApi = permType === PermissionTypes.BUILDER
@ -40,10 +45,10 @@ const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
}

const checkAuthorizedResource = async (
ctx,
resourceRoles,
permType,
permLevel
ctx: any,
resourceRoles: any,
permType: any,
permLevel: any
) => {
// get the user's roles
const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC
@ -54,7 +59,9 @@ const checkAuthorizedResource = async (
// check if the user has the required role
if (resourceRoles.length > 0) {
// deny access if the user doesn't have the required resource role
const found = userRoles.find(role => resourceRoles.indexOf(role._id) !== -1)
const found = userRoles.find(
(role: any) => resourceRoles.indexOf(role._id) !== -1
)
if (!found) {
ctx.throw(403, permError)
}
@ -64,9 +71,8 @@ const checkAuthorizedResource = async (
}
}

module.exports =
(permType, permLevel = null, opts = { schema: false }) =>
async (ctx, next) => {
export = (permType: any, permLevel: any = null, opts = { schema: false }) =>
async (ctx: any, next: any) => {
// webhooks don't need authentication, each webhook unique
// also internal requests (between services) don't need authorized
if (isWebhookEndpoint(ctx) || ctx.internal) {
@ -82,8 +88,8 @@ module.exports =
await builderMiddleware(ctx, permType)

// get the resource roles
let resourceRoles = [],
otherLevelRoles
let resourceRoles: any = []
let otherLevelRoles: any = []
const otherLevel =
permLevel === PermissionLevels.READ
? PermissionLevels.WRITE
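Editor's note: a short usage sketch of the converted authorized factory — it still returns a Koa middleware, so routes opt in exactly as the row routes earlier in this diff do (the handler below is a placeholder):

import Router from "@koa/router"
import authorized from "../../middleware/authorized"
const {
  PermissionTypes,
  PermissionLevels,
} = require("@budibase/backend-core/permissions")

const router = new Router()

router.post(
  "/api/:tableId/rows",
  authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
  async (ctx: any) => {
    ctx.body = { ok: true } // placeholder handler
  }
)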
@ -1,134 +0,0 @@
jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
jest.mock("@budibase/backend-core/tenancy", () => ({
getTenantId: () => "testing123"
}))

const usageQuotaMiddleware = require("../usageQuota")
const usageQuota = require("../../utilities/usageQuota")
const CouchDB = require("../../db")
const env = require("../../environment")

class TestConfiguration {
constructor() {
this.throw = jest.fn()
this.next = jest.fn()
this.middleware = usageQuotaMiddleware
this.ctx = {
throw: this.throw,
next: this.next,
appId: "test",
request: {
body: {}
},
req: {
method: "POST",
url: "/applications"
}
}
usageQuota.useQuotas = () => true
}

executeMiddleware() {
return this.middleware(this.ctx, this.next)
}

setProd(bool) {
if (bool) {
env.isDev = () => false
env.isProd = () => true
this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
}
}

setMethod(method) {
this.ctx.req.method = method
}

setUrl(url) {
this.ctx.req.url = url
}

setBody(body) {
this.ctx.request.body = body
}

setFiles(files) {
this.ctx.request.files = { file: files }
}
}

describe("usageQuota middleware", () => {
let config

beforeEach(() => {
config = new TestConfiguration()
})

it("skips the middleware if there is no usage property or method", async () => {
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})

it("passes through to next middleware if document already exists", async () => {
config.setProd(true)
config.setBody({
_id: "test",
_rev: "test",
})

CouchDB.mockImplementationOnce(() => ({
get: async () => true
}))

await config.executeMiddleware()

expect(config.next).toHaveBeenCalled()
})

it("throws if request has _id, but the document no longer exists", async () => {
config.setBody({
_id: "123",
_rev: "test",
})
config.setProd(true)

CouchDB.mockImplementationOnce(() => ({
get: async () => {
throw new Error()
}
}))

await config.executeMiddleware()
expect(config.throw).toHaveBeenCalledWith(404, `${config.ctx.request.body._id} does not exist`)
})

it("calculates and persists the correct usage quota for the relevant action", async () => {
config.setUrl("/rows")

await config.executeMiddleware()

expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})

// it("calculates the correct file size from a file upload call and adds it to quota", async () => {
//   config.setUrl("/upload")
//   config.setProd(true)
//   config.setFiles([
//     {
//       size: 100
//     },
//     {
//       size: 10000
//     },
//   ])
//   await config.executeMiddleware()

//   expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
//   expect(config.next).toHaveBeenCalled()
// })
})
@ -1,164 +0,0 @@
const usageQuota = require("../utilities/usageQuota")
const { getUniqueRows } = require("../utilities/usageQuota/rows")
const {
isExternalTable,
isRowId: isExternalRowId,
} = require("../integrations/utils")
const { getAppDB } = require("@budibase/backend-core/context")

// currently only counting new writes and deletes
const METHOD_MAP = {
POST: 1,
DELETE: -1,
}

const DOMAIN_MAP = {
rows: usageQuota.Properties.ROW,
// upload: usageQuota.Properties.UPLOAD, // doesn't work yet
// views: usageQuota.Properties.VIEW, // doesn't work yet
// users: usageQuota.Properties.USER, // doesn't work yet
applications: usageQuota.Properties.APPS,
// this will not be updated by endpoint calls
// instead it will be updated by triggerInfo
// automationRuns: usageQuota.Properties.AUTOMATION, // doesn't work yet
}

function getProperty(url) {
for (let domain of Object.keys(DOMAIN_MAP)) {
if (url.indexOf(domain) !== -1) {
return DOMAIN_MAP[domain]
}
}
}

module.exports = async (ctx, next) => {
if (!usageQuota.useQuotas()) {
return next()
}

let usage = METHOD_MAP[ctx.req.method]
const property = getProperty(ctx.req.url)
if (usage == null || property == null) {
return next()
}
// post request could be a save of a pre-existing entry
if (ctx.request.body && ctx.request.body._id && ctx.request.body._rev) {
const usageId = ctx.request.body._id
try {
if (ctx.appId) {
const db = getAppDB()
await db.get(usageId)
}
return next()
} catch (err) {
if (
isExternalTable(usageId) ||
(ctx.request.body.tableId &&
isExternalTable(ctx.request.body.tableId)) ||
isExternalRowId(usageId)
) {
return next()
} else {
ctx.throw(404, `${usageId} does not exist`)
}
}
}

// update usage for uploads to be the total size
if (property === usageQuota.Properties.UPLOAD) {
const files =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
usage = files.map(file => file.size).reduce((total, size) => total + size)
}
try {
await performRequest(ctx, next, property, usage)
} catch (err) {
ctx.throw(400, err)
}
}

const performRequest = async (ctx, next, property, usage) => {
const usageContext = {
skipNext: false,
skipUsage: false,
[usageQuota.Properties.APPS]: {},
}

if (usage === -1) {
if (PRE_DELETE[property]) {
await PRE_DELETE[property](ctx, usageContext)
}
} else {
if (PRE_CREATE[property]) {
await PRE_CREATE[property](ctx, usageContext)
}
}

// run the request
if (!usageContext.skipNext) {
await usageQuota.update(property, usage, { dryRun: true })
await next()
}

if (usage === -1) {
if (POST_DELETE[property]) {
await POST_DELETE[property](ctx, usageContext)
}
} else {
if (POST_CREATE[property]) {
await POST_CREATE[property](ctx, usageContext)
}
}

// update the usage
if (!usageContext.skipUsage) {
await usageQuota.update(property, usage)
}
}

const appPreDelete = async (ctx, usageContext) => {
if (ctx.query.unpublish) {
// don't run usage decrement for unpublish
usageContext.skipUsage = true
return
}

// store the row count to delete
const rows = await getUniqueRows([ctx.appId])
if (rows.length) {
usageContext[usageQuota.Properties.APPS] = { rowCount: rows.length }
}
}

const appPostDelete = async (ctx, usageContext) => {
// delete the app rows from usage
const rowCount = usageContext[usageQuota.Properties.APPS].rowCount
if (rowCount) {
await usageQuota.update(usageQuota.Properties.ROW, -rowCount)
}
}

const appPostCreate = async ctx => {
// app import & template creation
if (ctx.request.body.useTemplate === "true") {
const rows = await getUniqueRows([ctx.response.body.appId])
const rowCount = rows ? rows.length : 0
await usageQuota.update(usageQuota.Properties.ROW, rowCount)
}
}

const PRE_DELETE = {
[usageQuota.Properties.APPS]: appPreDelete,
}

const POST_DELETE = {
[usageQuota.Properties.APPS]: appPostDelete,
}

const PRE_CREATE = {}

const POST_CREATE = {
[usageQuota.Properties.APPS]: appPostCreate,
}
@ -0,0 +1,15 @@
const { createUserBuildersView } = require("@budibase/backend-core/db")
import * as syncDevelopers from "./usageQuotas/syncDevelopers"

/**
* Date:
* March 2022
*
* Description:
* Create the builder users view and sync the developer count
*/

export const run = async (db: any) => {
  await createUserBuildersView(db)
  await syncDevelopers.run()
}
Some files were not shown because too many files have changed in this diff.