Merge branch 'master' into grid-all-datasources

commit 41e72a28c9
Andrew Kingston, 2023-10-18 15:59:12 +01:00, committed by GitHub
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
95 changed files with 602 additions and 633 deletions

View File

@@ -14,7 +14,6 @@ env:
   # Posthog token used by ui at build time
   POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
   INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
-  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
   PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
 jobs:
@@ -110,7 +109,6 @@ jobs:
       git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
       git push
   trigger-deploy-to-qa-env:
     needs: [release-helm-chart]
     runs-on: ubuntu-latest

View File

@@ -134,8 +134,6 @@ spec:
 {{ end }}
   - name: SELF_HOSTED
     value: {{ .Values.globals.selfHosted | quote }}
-  - name: SENTRY_DSN
-    value: {{ .Values.globals.sentryDSN | quote }}
   - name: POSTHOG_TOKEN
     value: {{ .Values.globals.posthogToken | quote }}
   - name: WORKER_URL

View File

@@ -130,8 +130,6 @@ spec:
 {{ end }}
   - name: SELF_HOSTED
     value: {{ .Values.globals.selfHosted | quote }}
-  - name: SENTRY_DSN
-    value: {{ .Values.globals.sentryDSN }}
   - name: ENABLE_ANALYTICS
     value: {{ .Values.globals.enableAnalytics | quote }}
   - name: POSTHOG_TOKEN

View File

@@ -78,7 +78,6 @@ globals:
   budibaseEnv: PRODUCTION
   tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
   enableAnalytics: "1"
-  sentryDSN: ""
   posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
   selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
   multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs

View File

@@ -3,3 +3,6 @@
 [couchdb]
 database_dir = DATA_DIR/couch/dbs
 view_index_dir = DATA_DIR/couch/views
+
+[chttpd_auth]
+timeout = 7200 ; 2 hours in seconds
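
The new [chttpd_auth] timeout entry extends CouchDB's authentication session lifetime to two hours. A minimal TypeScript sketch for verifying the value on a running node follows; the URL, credentials, and the use of CouchDB's /_node/_local/_config endpoint are assumptions about a local setup, not part of this diff.

// Sketch: read back the chttpd_auth timeout from a local CouchDB node.
// Assumes Node 18+ (global fetch) and a CouchDB 3.x node at localhost:5984
// with admin:password credentials; adjust for your environment.
async function checkSessionTimeout(): Promise<void> {
  const auth = Buffer.from("admin:password").toString("base64")
  const res = await fetch(
    "http://localhost:5984/_node/_local/_config/chttpd_auth/timeout",
    { headers: { Authorization: `Basic ${auth}` } }
  )
  console.log(await res.text()) // expect "7200" once the ini change above is applied
}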

View File

@@ -19,7 +19,6 @@ services:
       API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
       JWT_SECRET: ${JWT_SECRET}
       LOG_LEVEL: info
-      SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
       ENABLE_ANALYTICS: "true"
       REDIS_URL: redis-service:6379
       REDIS_PASSWORD: ${REDIS_PASSWORD}
@@ -48,7 +47,6 @@ services:
      COUCH_DB_USERNAME: ${COUCH_DB_USER}
      COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
      COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
-     SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
      INTERNAL_API_KEY: ${INTERNAL_API_KEY}
      REDIS_URL: redis-service:6379
      REDIS_PASSWORD: ${REDIS_PASSWORD}

View File

@@ -20,7 +20,6 @@ services:
       API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
       JWT_SECRET: ${JWT_SECRET}
       LOG_LEVEL: info
-      SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
       ENABLE_ANALYTICS: "true"
       REDIS_URL: redis-service:6379
       REDIS_PASSWORD: ${REDIS_PASSWORD}
@@ -31,8 +30,8 @@ services:
     depends_on:
       - worker-service
       - redis-service
     # volumes:
     #  - /some/path/to/plugins:/plugins
   worker-service:
     restart: unless-stopped
@@ -51,7 +50,6 @@ services:
      COUCH_DB_USERNAME: ${COUCH_DB_USER}
      COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
      COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
-     SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
      INTERNAL_API_KEY: ${INTERNAL_API_KEY}
      REDIS_URL: redis-service:6379
      REDIS_PASSWORD: ${REDIS_PASSWORD}
@@ -113,7 +111,12 @@ services:
       PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
     depends_on:
       - couchdb-service
-    command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
+    command:
+      [
+        "sh",
+        "-c",
+        "sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;",
+      ]
   redis-service:
     restart: unless-stopped

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.36",
+  "version": "2.11.38",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View File

@ -33,8 +33,8 @@ function isInvalid(metadata?: { state: string }) {
* Get the requested app metadata by id. * Get the requested app metadata by id.
* Use redis cache to first read the app metadata. * Use redis cache to first read the app metadata.
* If not present fallback to loading the app metadata directly and re-caching. * If not present fallback to loading the app metadata directly and re-caching.
* @param {string} appId the id of the app to get metadata from. * @param appId the id of the app to get metadata from.
* @returns {object} the app metadata. * @returns the app metadata.
*/ */
export async function getAppMetadata(appId: string): Promise<App | DeletedApp> { export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
const client = await getAppClient() const client = await getAppClient()
@ -72,9 +72,9 @@ export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
/** /**
* Invalidate/reset the cached metadata when a change occurs in the db. * Invalidate/reset the cached metadata when a change occurs in the db.
* @param appId {string} the cache key to bust/update. * @param appId the cache key to bust/update.
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with. * @param newMetadata optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated. * @return will respond with success when cache is updated.
*/ */
export async function invalidateAppMetadata(appId: string, newMetadata?: any) { export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
if (!appId) { if (!appId) {

View File

@@ -61,9 +61,9 @@ async function populateUsersFromDB(
  * Get the requested user by id.
  * Use redis cache to first read the user.
  * If not present fallback to loading the user directly and re-caching.
- * @param {*} userId the id of the user to get
- * @param {*} tenantId the tenant of the user to get
- * @param {*} populateUser function to provide the user for re-caching. default to couch db
+ * @param userId the id of the user to get
+ * @param tenantId the tenant of the user to get
+ * @param populateUser function to provide the user for re-caching. default to couch db
  * @returns
  */
 export async function getUser(
@@ -111,8 +111,8 @@ export async function getUser(
  * Get the requested users by id.
  * Use redis cache to first read the users.
  * If not present fallback to loading the users directly and re-caching.
- * @param {*} userIds the ids of the user to get
- * @param {*} tenantId the tenant of the users to get
+ * @param userIds the ids of the user to get
+ * @param tenantId the tenant of the users to get
  * @returns
  */
 export async function getUsers(

View File

@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
 
-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }
 
   async get(id: string) {
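
The new optional writeRateMs argument on put lets a caller override the instance-level write rate for a single write. A minimal TypeScript sketch of the intent; the import path and constructor arguments are illustrative assumptions, not part of this diff.

import { Writethrough } from "@budibase/backend-core/cache" // import path is an assumption

async function example(db: any, doc: any) {
  // Instance default: buffer writes so the doc is flushed at most every 10s.
  const writethrough = new Writethrough(db, 10000)
  await writethrough.put(doc)    // uses the instance writeRateMs (10000)
  await writethrough.put(doc, 0) // per-call override from this diff: flush immediately
}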

View File

@@ -23,7 +23,7 @@ import environment from "../environment"
 /**
  * Generates a new configuration ID.
- * @returns {string} The new configuration ID which the config doc can be stored under.
+ * @returns The new configuration ID which the config doc can be stored under.
  */
 export function generateConfigID(type: ConfigType) {
   return `${DocumentType.CONFIG}${SEPARATOR}${type}`

View File

@@ -62,7 +62,7 @@ export function isTenancyEnabled() {
 /**
  * Given an app ID this will attempt to retrieve the tenant ID from it.
- * @return {null|string} The tenant ID found within the app ID.
+ * @return The tenant ID found within the app ID.
  */
 export function getTenantIDFromAppID(appId: string) {
   if (!appId) {

View File

@@ -8,8 +8,8 @@ class Replication {
   /**
    *
-   * @param {String} source - the DB you want to replicate or rollback to
-   * @param {String} target - the DB you want to replicate to, or rollback from
+   * @param source - the DB you want to replicate or rollback to
+   * @param target - the DB you want to replicate to, or rollback from
    */
   constructor({ source, target }: any) {
     this.source = getPouchDB(source)
@@ -38,7 +38,7 @@
   /**
    * Two way replication operation, intended to be promise based.
-   * @param {Object} opts - PouchDB replication options
+   * @param opts - PouchDB replication options
    */
   sync(opts = {}) {
     this.replication = this.promisify(this.source.sync, opts)
@@ -47,7 +47,7 @@
   /**
    * One way replication operation, intended to be promise based.
-   * @param {Object} opts - PouchDB replication options
+   * @param opts - PouchDB replication options
    */
   replicate(opts = {}) {
     this.replication = this.promisify(this.source.replicate.to, opts)

View File

@@ -599,10 +599,10 @@ async function runQuery<T>(
  * Gets round the fixed limit of 200 results from a query by fetching as many
  * pages as required and concatenating the results. This recursively operates
  * until enough results have been found.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  *   tableId {string} The table ID to search
  *   sort {string} The sort column
  *   sortOrder {string} The sort order ("ascending" or "descending")
@@ -655,10 +655,10 @@ async function recursiveSearch<T>(
  * Performs a paginated search. A bookmark will be returned to allow the next
  * page to be fetched. There is a max limit off 200 results per page in a
  * paginated search.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  *   tableId {string} The table ID to search
  *   sort {string} The sort column
  *   sortOrder {string} The sort order ("ascending" or "descending")
@@ -722,10 +722,10 @@ export async function paginatedSearch<T>(
  * desired amount of results. There is a limit of 1000 results to avoid
  * heavy performance hits, and to avoid client components breaking from
  * handling too much data.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  *   tableId {string} The table ID to search
  *   sort {string} The sort column
  *   sortOrder {string} The sort order ("ascending" or "descending")

View File

@@ -45,7 +45,7 @@ export async function getAllDbs(opts = { efficient: false }) {
  * Lots of different points in the system need to find the full list of apps, this will
  * enumerate the entire CouchDB cluster and get the list of databases (every app).
  *
- * @return {Promise<object[]>} returns the app information document stored in each app database.
+ * @return returns the app information document stored in each app database.
  */
 export async function getAllApps({
   dev,

View File

@@ -25,7 +25,7 @@ export function isDevApp(app: App) {
 /**
  * Generates a development app ID from a real app ID.
- * @returns {string} the dev app ID which can be used for dev database.
+ * @returns the dev app ID which can be used for dev database.
  */
 export function getDevelopmentAppID(appId: string) {
   if (!appId || appId.startsWith(APP_DEV_PREFIX)) {

View File

@@ -8,7 +8,7 @@ import { newid } from "./newid"
 /**
  * Generates a new app ID.
- * @returns {string} The new app ID which the app doc can be stored under.
+ * @returns The new app ID which the app doc can be stored under.
  */
 export const generateAppID = (tenantId?: string | null) => {
   let id = APP_PREFIX
@@ -20,9 +20,9 @@ export const generateAppID = (tenantId?: string | null) => {
 /**
  * Gets a new row ID for the specified table.
- * @param {string} tableId The table which the row is being created for.
- * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
- * @returns {string} The new ID which a row doc can be stored under.
+ * @param tableId The table which the row is being created for.
+ * @param id If an ID is to be used then the UUID can be substituted for this.
+ * @returns The new ID which a row doc can be stored under.
  */
 export function generateRowID(tableId: string, id?: string) {
   id = id || newid()
@@ -31,7 +31,7 @@ export function generateRowID(tableId: string, id?: string) {
 /**
  * Generates a new workspace ID.
- * @returns {string} The new workspace ID which the workspace doc can be stored under.
+ * @returns The new workspace ID which the workspace doc can be stored under.
  */
 export function generateWorkspaceID() {
   return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
@@ -39,7 +39,7 @@ export function generateWorkspaceID() {
 /**
  * Generates a new global user ID.
- * @returns {string} The new user ID which the user doc can be stored under.
+ * @returns The new user ID which the user doc can be stored under.
  */
 export function generateGlobalUserID(id?: any) {
   return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
@@ -52,8 +52,8 @@ export function isGlobalUserID(id: string) {
 /**
  * Generates a new user ID based on the passed in global ID.
- * @param {string} globalId The ID of the global user.
- * @returns {string} The new user ID which the user doc can be stored under.
+ * @param globalId The ID of the global user.
+ * @returns The new user ID which the user doc can be stored under.
  */
 export function generateUserMetadataID(globalId: string) {
   return generateRowID(InternalTable.USER_METADATA, globalId)
@@ -84,7 +84,7 @@ export function generateAppUserID(prodAppId: string, userId: string) {
 /**
  * Generates a new role ID.
- * @returns {string} The new role ID which the role doc can be stored under.
+ * @returns The new role ID which the role doc can be stored under.
  */
 export function generateRoleID(name: string) {
   const prefix = `${DocumentType.ROLE}${SEPARATOR}`
@@ -103,7 +103,7 @@ export function prefixRoleID(name: string) {
 /**
  * Generates a new dev info document ID - this is scoped to a user.
- * @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
+ * @returns The new dev info ID which info for dev (like api key) can be stored under.
  */
 export const generateDevInfoID = (userId: any) => {
   return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
@@ -111,7 +111,7 @@ export const generateDevInfoID = (userId: any) => {
 /**
  * Generates a new plugin ID - to be used in the global DB.
- * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
+ * @returns The new plugin ID which a plugin metadata document can be stored under.
  */
 export const generatePluginID = (name: string) => {
   return `${DocumentType.PLUGIN}${SEPARATOR}${name}`

View File

@@ -12,12 +12,12 @@ import { getProdAppID } from "./conversions"
  * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
  * More complex cases such as link docs and rows which have multiple levels of IDs that their
  * ID consists of need their own functions to build the allDocs parameters.
- * @param {string} docType The type of document which input params are being built for, e.g. user,
+ * @param docType The type of document which input params are being built for, e.g. user,
  * link, app, table and so on.
- * @param {string|null} docId The ID of the document minus its type - this is only needed if looking
+ * @param docId The ID of the document minus its type - this is only needed if looking
  * for a singular document.
- * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
- * @returns {object} Parameters which can then be used with an allDocs request.
+ * @param otherProps Add any other properties onto the request, e.g. include_docs.
+ * @returns Parameters which can then be used with an allDocs request.
  */
 export function getDocParams(
   docType: string,
@@ -36,11 +36,11 @@ export function getDocParams(
 /**
  * Gets the DB allDocs/query params for retrieving a row.
- * @param {string|null} tableId The table in which the rows have been stored.
- * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
+ * @param tableId The table in which the rows have been stored.
+ * @param rowId The ID of the row which is being specifically queried for. This can be
  * left null to get all the rows in the table.
- * @param {object} otherProps Any other properties to add to the request.
- * @returns {object} Parameters which can then be used with an allDocs request.
+ * @param otherProps Any other properties to add to the request.
+ * @returns Parameters which can then be used with an allDocs request.
  */
 export function getRowParams(
   tableId?: string | null,

View File

@@ -1,8 +1,8 @@
 /**
  * Makes sure that a URL has the correct number of slashes, while maintaining the
  * http(s):// double slashes.
- * @param {string} url The URL to test and remove any extra double slashes.
- * @return {string} The updated url.
+ * @param url The URL to test and remove any extra double slashes.
+ * @return The updated url.
  */
 export function checkSlashesInUrl(url: string) {
   return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")

View File

@@ -13,10 +13,10 @@ export const options = {
 /**
  * Passport Local Authentication Middleware.
- * @param {*} ctx the request structure
- * @param {*} email username to login with
- * @param {*} password plain text password to log in with
- * @param {*} done callback from passport to return user information and errors
+ * @param ctx the request structure
+ * @param email username to login with
+ * @param password plain text password to log in with
+ * @param done callback from passport to return user information and errors
  * @returns The authenticated user, or errors if they occur
  */
 export async function authenticate(

View File

@@ -17,15 +17,15 @@ const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
 export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
   /**
-   * @param {*} issuer The identity provider base URL
-   * @param {*} sub The user ID
-   * @param {*} profile The user profile information. Created by passport from the /userinfo response
-   * @param {*} jwtClaims The parsed id_token claims
-   * @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
-   * @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
-   * @param {*} idToken The id_token - always a JWT
-   * @param {*} params The response body from requesting an access_token
-   * @param {*} done The passport callback: err, user, info
+   * @param issuer The identity provider base URL
+   * @param sub The user ID
+   * @param profile The user profile information. Created by passport from the /userinfo response
+   * @param jwtClaims The parsed id_token claims
+   * @param accessToken The access_token for contacting the identity provider - may or may not be a JWT
+   * @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
+   * @param idToken The id_token - always a JWT
+   * @param params The response body from requesting an access_token
+   * @param done The passport callback: err, user, info
    */
   return async (
     issuer: string,
@@ -61,8 +61,8 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
 }
 
 /**
- * @param {*} profile The structured profile created by passport using the user info endpoint
- * @param {*} jwtClaims The claims returned in the id token
+ * @param profile The structured profile created by passport using the user info endpoint
+ * @param jwtClaims The claims returned in the id token
  */
 function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
   // profile not guaranteed to contain email e.g. github connected azure ad account

View File

@@ -5,9 +5,9 @@ import { ConfigType, GoogleInnerConfig } from "@budibase/types"
 /**
  * Utility to handle authentication errors.
  *
- * @param {*} done The passport callback.
- * @param {*} message Message that will be returned in the response body
- * @param {*} err (Optional) error that will be logged
+ * @param done The passport callback.
+ * @param message Message that will be returned in the response body
+ * @param err (Optional) error that will be logged
  */
 export function authError(done: Function, message: string, err?: any) {

View File

@@ -6,10 +6,10 @@ import * as cloudfront from "../cloudfront"
  * In production the client library is stored in the object store, however in development
  * we use the symlinked version produced by lerna, located in node modules. We link to this
  * via a specific endpoint (under /api/assets/client).
- * @param {string} appId In production we need the appId to look up the correct bucket, as the
+ * @param appId In production we need the appId to look up the correct bucket, as the
  * version of the client lib may differ between apps.
- * @param {string} version The version to retrieve.
- * @return {string} The URL to be inserted into appPackage response or server rendered
+ * @param version The version to retrieve.
+ * @return The URL to be inserted into appPackage response or server rendered
  * app index file.
  */
 export const clientLibraryUrl = (appId: string, version: string) => {

View File

@@ -61,9 +61,9 @@ export function sanitizeBucket(input: string) {
 /**
  * Gets a connection to the object store using the S3 SDK.
- * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
- * @param {object} opts configuration for the object store.
- * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
+ * @param bucket the name of the bucket which blobs will be uploaded/retrieved from.
+ * @param opts configuration for the object store.
+ * @return an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
 export const ObjectStore = (

View File

@@ -5,9 +5,9 @@ import { timeout } from "../utils"
  * Bull works with a Job wrapper around all messages that contains a lot more information about
  * the state of the message, this object constructor implements the same schema of Bull jobs
  * for the sake of maintaining API consistency.
- * @param {string} queue The name of the queue which the message will be carried on.
- * @param {object} message The JSON message which will be passed back to the consumer.
- * @returns {Object} A new job which can now be put onto the queue, this is mostly an
+ * @param queue The name of the queue which the message will be carried on.
+ * @param message The JSON message which will be passed back to the consumer.
+ * @returns A new job which can now be put onto the queue, this is mostly an
  * internal structure so that an in memory queue can be easily swapped for a Bull queue.
  */
 function newJob(queue: string, message: any) {
@@ -32,8 +32,8 @@ class InMemoryQueue {
   _addCount: number
   /**
    * The constructor the queue, exactly the same as that of Bulls.
-   * @param {string} name The name of the queue which is being configured.
-   * @param {object|null} opts This is not used by the in memory queue as there is no real use
+   * @param name The name of the queue which is being configured.
+   * @param opts This is not used by the in memory queue as there is no real use
    * case when in memory, but is the same API as Bull
    */
   constructor(name: string, opts = null) {
@@ -49,7 +49,7 @@ class InMemoryQueue {
    * Same callback API as Bull, each callback passed to this will consume messages as they are
    * available. Please note this is a queue service, not a notification service, so each
    * consumer will receive different messages.
-   * @param {function<object>} func The callback function which will return a "Job", the same
+   * @param func The callback function which will return a "Job", the same
    * as the Bull API, within this job the property "data" contains the JSON message. Please
    * note this is incredibly limited compared to Bull as in reality the Job would contain
    * a lot more information about the queue and current status of Bull cluster.
@@ -73,9 +73,9 @@ class InMemoryQueue {
    * Simple function to replicate the add message functionality of Bull, putting
    * a new message on the queue. This then emits an event which will be used to
    * return the message to a consumer (if one is attached).
-   * @param {object} msg A message to be transported over the queue, this should be
+   * @param msg A message to be transported over the queue, this should be
    * a JSON message as this is required by Bull.
-   * @param {boolean} repeat serves no purpose for the import queue.
+   * @param repeat serves no purpose for the import queue.
    */
   // eslint-disable-next-line no-unused-vars
   add(msg: any, repeat: boolean) {
@@ -96,7 +96,7 @@ class InMemoryQueue {
   /**
    * This removes a cron which has been implemented, this is part of Bull API.
-   * @param {string} cronJobId The cron which is to be removed.
+   * @param cronJobId The cron which is to be removed.
    */
   removeRepeatableByKey(cronJobId: string) {
     // TODO: implement for testing

View File

@@ -142,7 +142,7 @@ function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  * this can only be done with redis streams because they will have an end.
  * @param stream A redis stream, specifically as this type of stream will have an end.
  * @param client The client to use for further lookups.
- * @return {Promise<object>} The final output of the stream
+ * @return The final output of the stream
  */
 function promisifyStream(stream: any, client: RedisWrapper) {
   return new Promise((resolve, reject) => {

View File

@@ -36,8 +36,8 @@ export function levelToNumber(perm: PermissionLevel) {
 /**
  * Given the specified permission level for the user return the levels they are allowed to carry out.
- * @param {string} userPermLevel The permission level of the user.
- * @return {string[]} All the permission levels this user is allowed to carry out.
+ * @param userPermLevel The permission level of the user.
+ * @return All the permission levels this user is allowed to carry out.
  */
 export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
   switch (userPermLevel) {

View File

@@ -149,9 +149,9 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
 /**
  * Gets the role object, this is mainly useful for two purposes, to check if the level exists and
  * to check if the role inherits any others.
- * @param {string|null} roleId The level ID to lookup.
- * @param {object|null} opts options for the function, like whether to halt errors, instead return public.
- * @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
+ * @param roleId The level ID to lookup.
+ * @param opts options for the function, like whether to halt errors, instead return public.
+ * @returns The role object, which may contain an "inherits" property.
  */
 export async function getRole(
   roleId?: string,
@@ -225,8 +225,8 @@ export async function getUserRoleIdHierarchy(
 /**
  * Returns an ordered array of the user's inherited role IDs, this can be used
  * to determine if a user can access something that requires a specific role.
- * @param {string} userRoleId The user's role ID, this can be found in their access token.
- * @returns {Promise<object[]>} returns an ordered array of the roles, with the first being their
+ * @param userRoleId The user's role ID, this can be found in their access token.
+ * @returns returns an ordered array of the roles, with the first being their
  * highest level of access and the last being the lowest level.
  */
 export async function getUserRoleHierarchy(userRoleId?: string) {
@@ -258,7 +258,7 @@ export async function getAllRoleIds(appId?: string) {
 /**
  * Given an app ID this will retrieve all of the roles that are currently within that app.
- * @return {Promise<object[]>} An array of the role objects that were found.
+ * @return An array of the role objects that were found.
  */
 export async function getAllRoles(appId?: string): Promise<RoleDoc[]> {
   if (appId) {

View File

@@ -21,17 +21,21 @@ import {
   User,
   UserStatus,
   UserGroup,
+  ContextUser,
 } from "@budibase/types"
 import {
   getAccountHolderFromUserIds,
   isAdmin,
+  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
 
-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -135,7 +139,7 @@ export class UserDB {
     if (!fullUser.roles) {
       fullUser.roles = {}
     }
-    // add the active status to a user if its not provided
+    // add the active status to a user if it's not provided
     if (fullUser.status == null) {
       fullUser.status = UserStatus.ACTIVE
     }
@@ -246,7 +250,8 @@
     }
     const change = dbUser ? 0 : 1 // no change if there is existing user
-    return UserDB.quotas.addUsers(change, async () => {
+    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
       await validateUniqueUser(email, tenantId)
 
       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -308,6 +313,7 @@
     let usersToSave: any[] = []
     let newUsers: any[] = []
+    let newCreators: any[] = []
 
     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -328,59 +334,66 @@
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
+      if (isCreator(newUser)) {
+        newCreators.push(newUser)
+      }
     }
 
     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(newUsers.length, async () => {
+    return UserDB.quotas.addUsers(
+      newUsers.length,
+      newCreators.length,
+      async () => {
       // create the promises array that will be called by bulkDocs
       newUsers.forEach((user: any) => {
        usersToSave.push(
          UserDB.buildUser(
            user,
            {
              hashPassword: true,
              requirePassword: user.requirePassword,
            },
            tenantId,
            undefined, // no dbUser
            account
          )
        )
      })
 
      const usersToBulkSave = await Promise.all(usersToSave)
      await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
 
      // Post-processing of bulk added users, e.g. events and cache operations
      for (const user of usersToBulkSave) {
        // TODO: Refactor to bulk insert users into the info db
        // instead of relying on looping tenant creation
        await platform.users.addUser(tenantId, user._id, user.email)
        await eventHelpers.handleSaveEvents(user, undefined)
      }
 
      const saved = usersToBulkSave.map(user => {
        return {
          _id: user._id,
          email: user.email,
        }
      })
 
      // now update the groups
      if (Array.isArray(saved) && groups) {
        const groupPromises = []
        const createdUserIds = saved.map(user => user._id)
        for (let groupId of groups) {
          groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
        }
        await Promise.all(groupPromises)
      }
 
      return {
        successful: saved,
        unsuccessful,
      }
-    })
+      }
+    )
   }
 
   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -420,11 +433,12 @@
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+    const creatorsToDelete = usersToDelete.filter(isCreator)
-    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
+    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
 
     // Build Response
     // index users by id
@@ -473,7 +487,8 @@
     await db.remove(userId, dbUser._rev)
 
-    await UserDB.quotas.removeUsers(1)
+    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+    await UserDB.quotas.removeUsers(1, creatorsToDelete)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })
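
The thread running through this file is that every quota call now carries a separate creator delta alongside the plain user delta. A rough TypeScript sketch of a QuotaUpdateFn that satisfies the new signature follows; the helper below is hypothetical, since the real quota implementation lives outside this diff.

type QuotaUpdateFn = (
  change: number,
  creatorsChange: number,
  cb?: () => Promise<any>
) => Promise<any>

// Hypothetical stand-in for the real usage store.
async function incrementUsage(name: "users" | "creators", delta: number) {
  console.log(`quota ${name}: ${delta >= 0 ? "+" : ""}${delta}`)
}

const addUsers: QuotaUpdateFn = async (change, creatorsChange, cb) => {
  await incrementUsage("users", change)
  await incrementUsage("creators", creatorsChange)
  return cb ? cb() : undefined
}

// Mirrors the call sites above, e.g. saving one new user who is also a creator:
// await addUsers(1, 1, async () => buildAndPersistUser())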

View File

@@ -14,14 +14,15 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
 } from "@budibase/types"
-import * as context from "../context"
 import { getGlobalDB } from "../context"
+import * as context from "../context"
+import { isCreator } from "./utils"
 
 type GetOpts = { cleanup?: boolean }
@@ -283,6 +284,19 @@ export async function getUserCount() {
   return response.total_rows
 }
 
+export async function getCreatorCount() {
+  let creators = 0
+  async function iterate(startPage?: string) {
+    const page = await paginatedUsers({ bookmark: startPage })
+    creators += page.data.filter(isCreator).length
+    if (page.hasNextPage) {
+      await iterate(page.nextPage)
+    }
+  }
+  await iterate()
+  return creators
+}
+
 // used to remove the builder/admin permissions, for processing the
 // user as an app user (they may have some specific role/group
 export function removePortalUserPermissions(user: User | ContextUser) {
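
The new getCreatorCount pages through the global users DB and counts the documents that isCreator matches, which is what the creator quota relies on. A usage sketch follows; the import path and the assumption that both counters are re-exported from the users module are illustrative, and the call must run inside a tenant context so getGlobalDB resolves.

import { users, context } from "@budibase/backend-core" // import path assumed

async function logCreatorUsage(tenantId: string) {
  await context.doInTenant(tenantId, async () => {
    const total = await users.getUserCount()
    const creators = await users.getCreatorCount()
    console.log(`${creators} of ${total} users count as creators`)
  })
}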

View File

@@ -10,6 +10,7 @@ import { getAccountByTenantId } from "../accounts"
 // extract from shared-core to make easily accessible from backend-core
 export const isBuilder = sdk.users.isBuilder
 export const isAdmin = sdk.users.isAdmin
+export const isCreator = sdk.users.isCreator
 export const isGlobalBuilder = sdk.users.isGlobalBuilder
 export const isAdminOrBuilder = sdk.users.isAdminOrBuilder
 export const hasAdminPermissions = sdk.users.hasAdminPermissions

View File

@@ -79,8 +79,8 @@ export function isPublicApiRequest(ctx: Ctx): boolean {
 /**
  * Given a request tries to find the appId, which can be located in various places
- * @param {object} ctx The main request body to look through.
- * @returns {string|undefined} If an appId was found it will be returned.
+ * @param ctx The main request body to look through.
+ * @returns If an appId was found it will be returned.
  */
 export async function getAppIdFromCtx(ctx: Ctx) {
   // look in headers
@@ -135,7 +135,7 @@ function parseAppIdFromUrl(url?: string) {
 /**
  * opens the contents of the specified encrypted JWT.
- * @return {object} the contents of the token.
+ * @return the contents of the token.
  */
 export function openJwt(token: string) {
   if (!token) {
@@ -169,8 +169,8 @@ export function isValidInternalAPIKey(apiKey: string) {
 /**
  * Get a cookie from context, and decrypt if necessary.
- * @param {object} ctx The request which is to be manipulated.
- * @param {string} name The name of the cookie to get.
+ * @param ctx The request which is to be manipulated.
+ * @param name The name of the cookie to get.
  */
 export function getCookie(ctx: Ctx, name: string) {
   const cookie = ctx.cookies.get(name)
@@ -184,10 +184,10 @@ export function getCookie(ctx: Ctx, name: string) {
 /**
  * Store a cookie for the request - it will not expire.
- * @param {object} ctx The request which is to be manipulated.
- * @param {string} name The name of the cookie to set.
- * @param {string|object} value The value of cookie which will be set.
- * @param {object} opts options like whether to sign.
+ * @param ctx The request which is to be manipulated.
+ * @param name The name of the cookie to set.
+ * @param value The value of cookie which will be set.
+ * @param opts options like whether to sign.
  */
 export function setCookie(
   ctx: Ctx,
@@ -223,8 +223,8 @@ export function clearCookie(ctx: Ctx, name: string) {
 /**
  * Checks if the API call being made (based on the provided ctx object) is from the client. If
  * the call is not from a client app then it is from the builder.
- * @param {object} ctx The koa context object to be tested.
- * @return {boolean} returns true if the call is from the client lib (a built app rather than the builder).
+ * @param ctx The koa context object to be tested.
+ * @return returns true if the call is from the client lib (a built app rather than the builder).
  */
 export function isClient(ctx: Ctx) {
   return ctx.headers[Header.TYPE] === "client"

View File

@@ -0,0 +1,54 @@
+const _ = require('lodash/fp')
+const {structures} = require("../../../tests")
+
+jest.mock("../../../src/context")
+jest.mock("../../../src/db")
+
+const context = require("../../../src/context")
+const db = require("../../../src/db")
+
+const {getCreatorCount} = require('../../../src/users/users')
+
+describe("Users", () => {
+  let getGlobalDBMock
+  let getGlobalUserParamsMock
+  let paginationMock
+
+  beforeEach(() => {
+    jest.resetAllMocks()
+
+    getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
+    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
+    paginationMock = jest.spyOn(db, "pagination")
+  })
+
+  it("Retrieves the number of creators", async () => {
+    const getUsers = (offset, limit, creators = false) => {
+      const range = _.range(offset, limit)
+      const opts = creators ? {builder: {global: true}} : undefined
+      return range.map(() => structures.users.user(opts))
+    }
+    const page1Data = getUsers(0, 8)
+    const page2Data = getUsers(8, 12, true)
+    getGlobalDBMock.mockImplementation(() => ({
+      name : "fake-db",
+      allDocs: () => ({
+        rows: [...page1Data, ...page2Data]
+      })
+    }))
+
+    paginationMock.mockImplementationOnce(() => ({
+      data: page1Data,
+      hasNextPage: true,
+      nextPage: "1"
+    }))
+
+    paginationMock.mockImplementation(() => ({
+      data: page2Data,
+      hasNextPage: false,
+      nextPage: undefined
+    }))
+
+    const creatorsCount = await getCreatorCount()
+    expect(creatorsCount).toBe(4)
+    expect(paginationMock).toHaveBeenCalledTimes(2)
+  })
+})

View File

@@ -72,6 +72,11 @@ export function quotas(): Quotas {
       value: 1,
       triggers: [],
     },
+    creators: {
+      name: "Creators",
+      value: 1,
+      triggers: [],
+    },
     userGroups: {
       name: "User Groups",
       value: 1,
@@ -118,6 +123,10 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
+    amounts: {
+      user: 10000,
+      creator: 0,
+    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -126,6 +135,10 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
+    quantities: {
+      user: 0,
+      creator: 0,
+    },
     status: "active",
   }
 }

View File

@@ -1,6 +1,6 @@
 import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"
 
-export const usage = (): QuotaUsage => {
+export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
   return {
     _id: "usage_quota",
     quotaReset: new Date().toISOString(),
@@ -58,7 +58,8 @@
     usageQuota: {
       apps: 0,
       plugins: 0,
-      users: 0,
+      users,
+      creators,
       userGroups: 0,
       rows: 0,
       triggers: {},
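
With the extra parameters, licensing tests can seed both counters in one call. A short TypeScript sketch; the import path is illustrative.

import { usage } from "./quotas/usage" // path is illustrative

const quotaUsage = usage(5, 2) // 5 users in total, 2 of them creators
console.assert(quotaUsage.usageQuota.users === 5)
console.assert(quotaUsage.usageQuota.creators === 2)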

View File

@@ -64,7 +64,6 @@
     "@fortawesome/fontawesome-svg-core": "^6.2.1",
     "@fortawesome/free-brands-svg-icons": "^6.2.1",
     "@fortawesome/free-solid-svg-icons": "^6.2.1",
-    "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
     "codemirror": "^5.59.0",

View File

@@ -1,37 +0,0 @@
-import * as Sentry from "@sentry/browser"
-
-export default class SentryClient {
-  constructor(dsn) {
-    this.dsn = dsn
-  }
-
-  init() {
-    if (this.dsn) {
-      Sentry.init({ dsn: this.dsn })
-
-      this.initalised = true
-    }
-  }
-
-  /**
-   * Capture an exception and send it to sentry.
-   * @param {Error} err - JS error object
-   */
-  captureException(err) {
-    if (!this.initalised) return
-
-    Sentry.captureException(err)
-  }
-
-  /**
-   * Identify user in sentry.
-   * @param {String} id - Unique user id
-   */
-  identify(id) {
-    if (!this.initalised) return
-
-    Sentry.configureScope(scope => {
-      scope.setUser({ id })
-    })
-  }
-}

View File

@@ -1,16 +1,14 @@
 import { API } from "api"
 import PosthogClient from "./PosthogClient"
 import IntercomClient from "./IntercomClient"
-import SentryClient from "./SentryClient"
 import { Events, EventSource } from "./constants"
 
 const posthog = new PosthogClient(process.env.POSTHOG_TOKEN)
-const sentry = new SentryClient(process.env.SENTRY_DSN)
 const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
 
 class AnalyticsHub {
   constructor() {
-    this.clients = [posthog, sentry, intercom]
+    this.clients = [posthog, intercom]
   }
 
   async activate() {
@@ -23,12 +21,9 @@ class AnalyticsHub {
   identify(id) {
     posthog.identify(id)
-    sentry.identify(id)
   }
 
-  captureException(err) {
-    sentry.captureException(err)
-  }
+  captureException(_err) {}
 
   captureEvent(eventName, props = {}) {
     posthog.captureEvent(eventName, props)

View File

@@ -23,5 +23,7 @@
 </script>
 
 {#key $params.datasourceId}
-  <slot />
+  {#if $datasources.selected}
+    <slot />
+  {/if}
 {/key}

View File

@@ -16,8 +16,7 @@
   let selectedPanel = null
   let panelOptions = []
 
-  // datasources.selected can return null temporarily on datasource deletion
-  $: datasource = $datasources.selected || {}
+  $: datasource = $datasources.selected
   $: getOptions(datasource)

View File

@@ -43,7 +43,7 @@
   })
 </script>
 
-<TestimonialPage>
+<TestimonialPage enabled={$organisation.testimonialsEnabled}>
   <Layout gap="S" noPadding>
     <img alt="logo" src={$organisation.logoUrl || Logo} />
     <span class="heading-wrap">

View File

@@ -53,7 +53,7 @@
   })
 </script>
 
-<TestimonialPage>
+<TestimonialPage enabled={$organisation.testimonialsEnabled}>
   <Layout gap="S" noPadding>
     {#if loaded}
       <img alt="logo" src={$organisation.logoUrl || Logo} />

View File

@ -80,7 +80,6 @@ export default defineConfig(({ mode }) => {
"process.env.INTERCOM_TOKEN": JSON.stringify( "process.env.INTERCOM_TOKEN": JSON.stringify(
process.env.INTERCOM_TOKEN process.env.INTERCOM_TOKEN
), ),
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
}), }),
copyFonts("fonts"), copyFonts("fonts"),
...(isProduction ? [] : devOnlyPlugins), ...(isProduction ? [] : devOnlyPlugins),

View File

@ -32,7 +32,7 @@ export const API = createAPIClient({
}, },
// Show an error notification for all API failures. // Show an error notification for all API failures.
// We could also log these to sentry. // We could also log these to Posthog.
// Or we could check error.status and redirect to login on a 403 etc. // Or we could check error.status and redirect to login on a 403 etc.
onError: error => { onError: error => {
const { status, method, url, message, handled, suppressErrors } = const { status, method, url, message, handled, suppressErrors } =

View File

@ -260,29 +260,31 @@
class:wrap={editable || contentLines > 1} class:wrap={editable || contentLines > 1}
on:wheel={e => (focused ? e.stopPropagation() : null)} on:wheel={e => (focused ? e.stopPropagation() : null)}
> >
{#each value || [] as relationship} {#if Array.isArray(value) && value.length}
{#if relationship[primaryDisplay] || relationship.primaryDisplay} {#each value as relationship}
<div class="badge"> {#if relationship[primaryDisplay] || relationship.primaryDisplay}
<span <div class="badge">
on:click={editable <span
? () => showRelationship(relationship._id) on:click={editable
: null} ? () => showRelationship(relationship._id)
> : null}
{readable( >
relationship[primaryDisplay] || relationship.primaryDisplay {readable(
)} relationship[primaryDisplay] || relationship.primaryDisplay
</span> )}
{#if editable} </span>
<Icon {#if editable}
name="Close" <Icon
size="XS" name="Close"
hoverable size="XS"
on:click={() => toggleRow(relationship)} hoverable
/> on:click={() => toggleRow(relationship)}
{/if} />
</div> {/if}
{/if} </div>
{/each} {/if}
{/each}
{/if}
{#if editable} {#if editable}
<div class="add" on:click={open}> <div class="add" on:click={open}>
<Icon name="Add" size="S" /> <Icon name="Add" size="S" />
@ -318,7 +320,7 @@
<div class="searching"> <div class="searching">
<ProgressCircle size="S" /> <ProgressCircle size="S" />
</div> </div>
{:else if searchResults?.length} {:else if Array.isArray(searchResults) && searchResults.length}
<div class="results"> <div class="results">
{#each searchResults as row, idx} {#each searchResults as row, idx}
<div <div

@ -1 +1 @@
Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76

View File

@ -55,7 +55,6 @@
"@elastic/elasticsearch": "7.10.0", "@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "6.8.0", "@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8", "@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@socket.io/redis-adapter": "^8.2.1", "@socket.io/redis-adapter": "^8.2.1",
"airtable": "0.10.1", "airtable": "0.10.1",
"arangojs": "7.2.0", "arangojs": "7.2.0",

View File

@ -40,7 +40,7 @@ class Routing {
/** /**
* Gets the full routing structure by querying the routing view and processing the result into the tree. * Gets the full routing structure by querying the routing view and processing the result into the tree.
* @returns {Promise<object>} The routing structure, this is the full structure designed for use in the builder, * @returns The routing structure, this is the full structure designed for use in the builder,
* if the client routing is required then the updateRoutingStructureForUserRole should be used. * if the client routing is required then the updateRoutingStructureForUserRole should be used.
*/ */
async function getRoutingStructure() { async function getRoutingStructure() {
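This hunk is the first of many below that drop the {type} braces from JSDoc tags: now that these files are TypeScript, parameter and return types live on the signature and the doc comment only needs prose. A minimal sketch of the convention, reusing the getRoutingStructure name from the hunk above with an illustrative (assumed) return shape:

/**
 * Gets the full routing structure by querying the routing view.
 * @returns The routing structure designed for use in the builder.
 */
async function getRoutingStructure(): Promise<{ routes: Record<string, unknown> }> {
  // The Promise<...> type is declared on the signature, so the @returns tag
  // no longer repeats it as {Promise<object>}.
  return { routes: {} }
}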

View File

@ -280,17 +280,8 @@ function isEditableColumn(column: FieldSchema) {
return !(isExternalAutoColumn || isFormula) return !(isExternalAutoColumn || isFormula)
} }
export type ExternalRequestReturnType<T> = T extends Operation.READ export type ExternalRequestReturnType<T extends Operation> =
? T extends Operation.READ ? Row[] : { row: Row; table: Table }
| Row[]
| {
row: Row
table: Table
}
: {
row: Row
table: Table
}
export class ExternalRequest<T extends Operation> { export class ExternalRequest<T extends Operation> {
private readonly operation: T private readonly operation: T
@ -857,11 +848,12 @@ export class ExternalRequest<T extends Operation> {
} }
const output = this.outputProcessing(response, table, relationships) const output = this.outputProcessing(response, table, relationships)
// if reading it'll just be an array of rows, return whole thing // if reading it'll just be an array of rows, return whole thing
const result = ( if (operation === Operation.READ) {
operation === Operation.READ && Array.isArray(response) return (
? output Array.isArray(output) ? output : [output]
: { row: output[0], table } ) as ExternalRequestReturnType<T>
) as ExternalRequestReturnType<T> } else {
return result return { row: output[0], table } as ExternalRequestReturnType<T>
}
} }
} }
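The rewritten ExternalRequestReturnType collapses the old union into a single conditional type: a READ operation resolves to Row[], anything else to { row, table }, which is what lets run() branch once and cast per arm. A standalone sketch of the pattern with simplified stand-ins for Row, Table and Operation (the real definitions live in @budibase/types):

enum Operation { READ = "READ", DELETE = "DELETE" }
type Row = { _id: string }
type Table = { name: string }

// READ resolves to Row[]; every other operation resolves to { row, table }.
type ReturnTypeFor<T extends Operation> = T extends Operation.READ
  ? Row[]
  : { row: Row; table: Table }

function run<T extends Operation>(op: T): ReturnTypeFor<T> {
  // TypeScript cannot narrow T from a runtime check, hence the casts
  // (the real run() casts to ExternalRequestReturnType<T> for the same reason).
  if (op === Operation.READ) {
    return [{ _id: "row_1" }] as ReturnTypeFor<T>
  }
  return { row: { _id: "row_1" }, table: { name: "people" } } as ReturnTypeFor<T>
}

const rows = run(Operation.READ)      // inferred as Row[]
const deleted = run(Operation.DELETE) // inferred as { row: Row; table: Table }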

View File

@ -44,7 +44,7 @@ export async function handleRequest<T extends Operation>(
return [] as any return [] as any
} }
return new ExternalRequest(operation, tableId, opts?.datasource).run( return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
opts || {} opts || {}
) )
} }
@ -148,17 +148,17 @@ export async function find(ctx: UserCtx): Promise<Row> {
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
const _id = ctx.request.body._id const _id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, { const { row } = await handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE, includeSqlRelationships: IncludeRelationship.EXCLUDE,
})) as { row: Row } })
return { response: { ok: true, id: _id }, row } return { response: { ok: true, id: _id }, row }
} }
export async function bulkDestroy(ctx: UserCtx) { export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body const { rows } = ctx.request.body
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
let promises: Promise<Row[] | { row: Row; table: Table }>[] = [] let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) { for (let row of rows) {
promises.push( promises.push(
handleRequest(Operation.DELETE, tableId, { handleRequest(Operation.DELETE, tableId, {
@ -167,7 +167,7 @@ export async function bulkDestroy(ctx: UserCtx) {
}) })
) )
} }
const responses = (await Promise.all(promises)) as { row: Row }[] const responses = await Promise.all(promises)
return { response: { ok: true }, rows: responses.map(resp => resp.row) } return { response: { ok: true }, rows: responses.map(resp => resp.row) }
} }
@ -183,11 +183,11 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.") ctx.throw(400, "Datasource has not been configured for plus API.")
} }
const tables = datasource.entities const tables = datasource.entities
const response = (await handleRequest(Operation.READ, tableId, { const response = await handleRequest(Operation.READ, tableId, {
id, id,
datasource, datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[] })
const table: Table = tables[tableName] const table: Table = tables[tableName]
const row = response[0] const row = response[0]
// this seems like a lot of work, but basically we need to dig deeper for the enrich // this seems like a lot of work, but basically we need to dig deeper for the enrich
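Because handleRequest now threads the operation type through ExternalRequestReturnType, the call sites above can drop the "as Row[]" and "as { row: Row }" casts and rely on inference. A rough sketch of that call-site effect, reusing the simplified types from the previous sketch and a hypothetical wrapper in place of the real handleRequest:

// Hypothetical async wrapper mirroring handleRequest<T>; only the cast-free
// call sites below are the point.
async function handleRequestSketch<T extends Operation>(op: T): Promise<ReturnTypeFor<T>> {
  return run(op)
}

const readRows = await handleRequestSketch(Operation.READ)    // Row[], no cast needed
const { row } = await handleRequestSketch(Operation.DELETE)   // { row, table }, no cast needed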

View File

@ -88,8 +88,8 @@ const SCHEMA_MAP: Record<string, any> = {
/** /**
* Iterates through the array of filters to create a JS * Iterates through the array of filters to create a JS
* expression that gets used in a CouchDB view. * expression that gets used in a CouchDB view.
* @param {Array} filters - an array of filter objects * @param filters - an array of filter objects
* @returns {String} JS Expression * @returns JS Expression
*/ */
function parseFilterExpression(filters: ViewFilter[]) { function parseFilterExpression(filters: ViewFilter[]) {
const expression = [] const expression = []
@ -125,8 +125,8 @@ function parseFilterExpression(filters: ViewFilter[]) {
/** /**
* Returns a CouchDB compliant emit() expression that is used to emit the * Returns a CouchDB compliant emit() expression that is used to emit the
* correct key/value pairs for custom views. * correct key/value pairs for custom views.
* @param {String?} field - field to use for calculations, if any * @param field - field to use for calculations, if any
* @param {String?} groupBy - field to group calculation results on, if any * @param groupBy - field to group calculation results on, if any
*/ */
function parseEmitExpression(field: string, groupBy: string) { function parseEmitExpression(field: string, groupBy: string) {
return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);` return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);`
@ -136,7 +136,7 @@ function parseEmitExpression(field: string, groupBy: string) {
* Return a fully parsed CouchDB compliant view definition * Return a fully parsed CouchDB compliant view definition
* that will be stored in the design document in the database. * that will be stored in the design document in the database.
* *
* @param {Object} viewDefinition - the JSON definition for a custom view. * @param viewDefinition - the JSON definition for a custom view.
* field: field that calculations will be performed on * field: field that calculations will be performed on
* tableId: tableId of the table this view was created from * tableId: tableId of the table this view was created from
* groupBy: field that calculations will be grouped by. Field must be present for this to be useful * groupBy: field that calculations will be grouped by. Field must be present for this to be useful

View File

@ -1,11 +1,7 @@
import * as Sentry from "@sentry/node"
if (process.env.DD_APM_ENABLED) { if (process.env.DD_APM_ENABLED) {
require("./ddApm") require("./ddApm")
} }
// need to load environment first
import env from "./environment"
import * as db from "./db" import * as db from "./db"
db.init() db.init()
import { ServiceType } from "@budibase/types" import { ServiceType } from "@budibase/types"
@ -28,10 +24,6 @@ async function start() {
} }
// startup includes automation runner - if enabled // startup includes automation runner - if enabled
await startup(app, server) await startup(app, server)
if (env.isProd()) {
env._set("NODE_ENV", "production")
Sentry.init()
}
} }
start().catch(err => { start().catch(err => {

View File

@ -14,13 +14,13 @@ import { LoopStep, LoopStepType, LoopInput } from "../definitions/automations"
* make sure that the post template statement can be cast into the correct type, this function does this for numbers * make sure that the post template statement can be cast into the correct type, this function does this for numbers
* and booleans. * and booleans.
* *
* @param {object} inputs An object of inputs, please note this will not recurse down into any objects within, it simply * @param inputs An object of inputs, please note this will not recurse down into any objects within, it simply
* cleanses the top level inputs, however it can be used by recursively calling it deeper into the object structures if * cleanses the top level inputs, however it can be used by recursively calling it deeper into the object structures if
* the schema is known. * the schema is known.
* @param {object} schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an * @param schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an
* automation is the likely use case of this, however validate.js syntax can be converted closely enough to use this by * automation is the likely use case of this, however validate.js syntax can be converted closely enough to use this by
* wrapping the schema properties in a top level "properties" object. * wrapping the schema properties in a top level "properties" object.
* @returns {object} The inputs object which has had all the various types supported by this function converted to their * @returns The inputs object which has had all the various types supported by this function converted to their
* primitive types. * primitive types.
*/ */
export function cleanInputValues(inputs: Record<string, any>, schema?: any) { export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
@ -74,9 +74,9 @@ export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
* the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead * the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead
* perform the cleanInputValues function on the input row. * perform the cleanInputValues function on the input row.
* *
* @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for. * @param tableId The ID of the Table/Table which the schema is to be retrieved for.
* @param {object} row The input row structure which requires clean-up after having been through template statements. * @param row The input row structure which requires clean-up after having been through template statements.
* @returns {Promise<Object>} The cleaned up rows object, which should now have all the required primitive types. * @returns The cleaned up rows object, which should now have all the required primitive types.
*/ */
export async function cleanUpRow(tableId: string, row: Row) { export async function cleanUpRow(tableId: string, row: Row) {
let table = await sdk.tables.getTable(tableId) let table = await sdk.tables.getTable(tableId)

View File

@ -148,8 +148,8 @@ export function isRebootTrigger(auto: Automation) {
/** /**
* This function handles checking of any cron jobs that need to be enabled/updated. * This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks * @param appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated. * @param automation The automation object to be updated.
*/ */
export async function enableCronTrigger(appId: any, automation: Automation) { export async function enableCronTrigger(appId: any, automation: Automation) {
const trigger = automation ? automation.definition.trigger : null const trigger = automation ? automation.definition.trigger : null
@ -187,10 +187,10 @@ export async function enableCronTrigger(appId: any, automation: Automation) {
/** /**
* This function handles checking if any webhooks need to be created or deleted for automations. * This function handles checking if any webhooks need to be created or deleted for automations.
* @param {string} appId The ID of the app in which we are checking for webhooks * @param appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} oldAuto The old automation object if updating/deleting * @param oldAuto The old automation object if updating/deleting
* @param {object|undefined} newAuto The new automation object if creating/updating * @param newAuto The new automation object if creating/updating
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be * @returns After this is complete the new automation object may have been updated and should be
* written to DB (this does not write to DB as it would be wasteful to repeat). * written to DB (this does not write to DB as it would be wasteful to repeat).
*/ */
export async function checkForWebhooks({ oldAuto, newAuto }: any) { export async function checkForWebhooks({ oldAuto, newAuto }: any) {
@ -257,8 +257,8 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) {
/** /**
* When removing an app/unpublishing it we need to make sure automations are cleaned up (cron). * When removing an app/unpublishing it we need to make sure automations are cleaned up (cron).
* @param appId {string} the app that is being removed. * @param appId the app that is being removed.
* @return {Promise<void>} clean is complete if this succeeds. * @return clean is complete if this succeeds.
*/ */
export async function cleanupAutomations(appId: any) { export async function cleanupAutomations(appId: any) {
await disableAllCrons(appId) await disableAllCrons(appId)
@ -267,7 +267,7 @@ export async function cleanupAutomations(appId: any) {
/** /**
* Checks if the supplied automation is of a recurring type. * Checks if the supplied automation is of a recurring type.
* @param automation The automation to check. * @param automation The automation to check.
* @return {boolean} if it is recurring (cron). * @return if it is recurring (cron).
*/ */
export function isRecurring(automation: Automation) { export function isRecurring(automation: Automation) {
return automation.definition.trigger.stepId === definitions.CRON.stepId return automation.definition.trigger.stepId === definitions.CRON.stepId

View File

@ -1,8 +1,7 @@
import { IncludeDocs, getLinkDocuments } from "./linkUtils" import { IncludeDocs, getLinkDocuments } from "./linkUtils"
import { InternalTables, getUserMetadataParams } from "../utils" import { InternalTables, getUserMetadataParams } from "../utils"
import Sentry from "@sentry/node"
import { FieldTypes } from "../../constants" import { FieldTypes } from "../../constants"
import { context } from "@budibase/backend-core" import { context, logging } from "@budibase/backend-core"
import LinkDocument from "./LinkDocument" import LinkDocument from "./LinkDocument"
import { import {
Database, Database,
@ -39,7 +38,7 @@ class LinkController {
/** /**
* Retrieves the table, if it was not already found in the eventData. * Retrieves the table, if it was not already found in the eventData.
* @returns {Promise<object>} This will return a table based on the event data, either * @returns This will return a table based on the event data, either
* if it was in the event already, or it uses the specified tableId to get it. * if it was in the event already, or it uses the specified tableId to get it.
*/ */
async table() { async table() {
@ -53,8 +52,8 @@ class LinkController {
/** /**
* Checks if the table this was constructed with has any linking columns currently. * Checks if the table this was constructed with has any linking columns currently.
* If the table has not been retrieved this will retrieve it based on the eventData. * If the table has not been retrieved this will retrieve it based on the eventData.
* @param {object|null} table If a table that is not known to the link controller is to be tested. * @param table If a table that is not known to the link controller is to be tested.
* @returns {Promise<boolean>} True if there are any linked fields, otherwise it will return * @returns True if there are any linked fields, otherwise it will return
* false. * false.
*/ */
async doesTableHaveLinkedFields(table?: Table) { async doesTableHaveLinkedFields(table?: Table) {
@ -160,7 +159,7 @@ class LinkController {
/** /**
* When a row is saved this will carry out the necessary operations to make sure * When a row is saved this will carry out the necessary operations to make sure
* the link has been created/updated. * the link has been created/updated.
* @returns {Promise<object>} returns the row that has been cleaned and prepared to be written to the DB - links * @returns returns the row that has been cleaned and prepared to be written to the DB - links
* have also been created. * have also been created.
*/ */
async rowSaved() { async rowSaved() {
@ -272,7 +271,7 @@ class LinkController {
/** /**
* When a row is deleted this will carry out the necessary operations to make sure * When a row is deleted this will carry out the necessary operations to make sure
* any links that existed have been removed. * any links that existed have been removed.
* @returns {Promise<object>} The operation has been completed and the link documents should now * @returns The operation has been completed and the link documents should now
* be accurate. This also returns the row that was deleted. * be accurate. This also returns the row that was deleted.
*/ */
async rowDeleted() { async rowDeleted() {
@ -294,8 +293,8 @@ class LinkController {
/** /**
* Remove a field from a table as well as any linked rows that pertained to it. * Remove a field from a table as well as any linked rows that pertained to it.
* @param {string} fieldName The field to be removed from the table. * @param fieldName The field to be removed from the table.
* @returns {Promise<void>} The table has now been updated. * @returns The table has now been updated.
*/ */
async removeFieldFromTable(fieldName: string) { async removeFieldFromTable(fieldName: string) {
let oldTable = this._oldTable let oldTable = this._oldTable
@ -334,7 +333,7 @@ class LinkController {
/** /**
* When a table is saved this will carry out the necessary operations to make sure * When a table is saved this will carry out the necessary operations to make sure
* any linked tables are notified and updated correctly. * any linked tables are notified and updated correctly.
* @returns {Promise<object>} The operation has been completed and the link documents should now * @returns The operation has been completed and the link documents should now
* be accurate. Also returns the table that was operated on. * be accurate. Also returns the table that was operated on.
*/ */
async tableSaved() { async tableSaved() {
@ -395,7 +394,7 @@ class LinkController {
/** /**
* Update a table, this means if a field is removed need to handle removing from other table and removing * Update a table, this means if a field is removed need to handle removing from other table and removing
* any link docs that pertained to it. * any link docs that pertained to it.
* @returns {Promise<Object>} The table which has been saved, same response as with the tableSaved function. * @returns The table which has been saved, same response as with the tableSaved function.
*/ */
async tableUpdated() { async tableUpdated() {
const oldTable = this._oldTable const oldTable = this._oldTable
@ -419,7 +418,7 @@ class LinkController {
* When a table is deleted this will carry out the necessary operations to make sure * When a table is deleted this will carry out the necessary operations to make sure
* any linked tables have the joining column correctly removed as well as removing any * any linked tables have the joining column correctly removed as well as removing any
* now stale linking documents. * now stale linking documents.
* @returns {Promise<object>} The operation has been completed and the link documents should now * @returns The operation has been completed and the link documents should now
* be accurate. Also returns the table that was operated on. * be accurate. Also returns the table that was operated on.
*/ */
async tableDeleted() { async tableDeleted() {
@ -433,9 +432,8 @@ class LinkController {
delete linkedTable.schema[field.fieldName] delete linkedTable.schema[field.fieldName]
await this._db.put(linkedTable) await this._db.put(linkedTable)
} }
} catch (err) { } catch (err: any) {
/* istanbul ignore next */ logging.logWarn(err?.message, err)
Sentry.captureException(err)
} }
} }
// need to get the full link docs to delete them // need to get the full link docs to delete them

View File

@ -6,12 +6,12 @@ import { LinkDocument } from "@budibase/types"
* Creates a new link document structure which can be put to the database. It is important to * Creates a new link document structure which can be put to the database. It is important to
* note that while this talks about linker/linked the link is bi-directional and for all intents * note that while this talks about linker/linked the link is bi-directional and for all intents
* and purposes it does not matter from which direction the link was initiated. * and purposes it does not matter from which direction the link was initiated.
* @param {string} tableId1 The ID of the first table (the linker). * @param tableId1 The ID of the first table (the linker).
* @param {string} tableId2 The ID of the second table (the linked). * @param tableId2 The ID of the second table (the linked).
* @param {string} fieldName1 The name of the field in the linker table. * @param fieldName1 The name of the field in the linker table.
* @param {string} fieldName2 The name of the field in the linked table. * @param fieldName2 The name of the field in the linked table.
* @param {string} rowId1 The ID of the row which is acting as the linker. * @param rowId1 The ID of the row which is acting as the linker.
* @param {string} rowId2 The ID of the row which is acting as the linked. * @param rowId2 The ID of the row which is acting as the linked.
* @constructor * @constructor
*/ */
class LinkDocumentImpl implements LinkDocument { class LinkDocumentImpl implements LinkDocument {

View File

@ -90,13 +90,13 @@ async function getFullLinkedDocs(links: LinkDocumentValue[]) {
/** /**
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring. * Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
* @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the * @param args.eventType states what type of change which is occurring, means this can be expanded upon in the
* future quite easily (all updates go through one function). * future quite easily (all updates go through one function).
* @param {string} args.tableId The ID of the table which is being changed. * @param args.tableId The ID of the table which is being changed.
* @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted. * @param args.row The row which is changing, e.g. created, updated or deleted.
* @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets. * @param args.table If the table has already been retrieved this can be used to reduce database gets.
* @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing. * @param args.oldTable If the table is being updated then the old table can be provided for differencing.
* @returns {Promise<object>} When the update is complete this will respond successfully. Returns the row for * @returns When the update is complete this will respond successfully. Returns the row for
* row operations and the table for table operations. * row operations and the table for table operations.
*/ */
export async function updateLinks(args: { export async function updateLinks(args: {
@ -144,9 +144,9 @@ export async function updateLinks(args: {
/** /**
* Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row. * Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
* This is required for formula fields, this may only be utilised internally (for now). * This is required for formula fields, this may only be utilised internally (for now).
* @param {object} table The table from which the rows originated. * @param table The table from which the rows originated.
* @param {array<object>} rows The rows which are to be enriched. * @param rows The rows which are to be enriched.
* @return {Promise<*>} returns the rows with all of the enriched relationships on it. * @return returns the rows with all of the enriched relationships on it.
*/ */
export async function attachFullLinkedDocs(table: Table, rows: Row[]) { export async function attachFullLinkedDocs(table: Table, rows: Row[]) {
const linkedTableIds = getLinkedTableIDs(table) const linkedTableIds = getLinkedTableIDs(table)
@ -183,9 +183,9 @@ export async function attachFullLinkedDocs(table: Table, rows: Row[]) {
/** /**
* This function will take the given enriched rows and squash the links to only contain the primary display field. * This function will take the given enriched rows and squash the links to only contain the primary display field.
* @param {object} table The table from which the rows originated. * @param table The table from which the rows originated.
* @param {array<object>} enriched The pre-enriched rows (full docs) which are to be squashed. * @param enriched The pre-enriched rows (full docs) which are to be squashed.
* @returns {Promise<Array>} The rows after having their links squashed to only contain the ID and primary display. * @returns The rows after having their links squashed to only contain the ID and primary display.
*/ */
export async function squashLinksToPrimaryDisplay( export async function squashLinksToPrimaryDisplay(
table: Table, table: Table,

View File

@ -17,15 +17,15 @@ export const IncludeDocs = {
/** /**
* Gets the linking documents, not the linked documents themselves. * Gets the linking documents, not the linked documents themselves.
* @param {string} args.tableId The table which we are searching for linked rows against. * @param args.tableId The table which we are searching for linked rows against.
* @param {string|null} args.fieldName The name of the column/field which is being altered, only looking for * @param args.fieldName The name of the column/field which is being altered, only looking for
* linking documents that are related to it. If this is not specified then the table level will be assumed. * linking documents that are related to it. If this is not specified then the table level will be assumed.
* @param {string|null} args.rowId The ID of the row which we want to find linking documents for - * @param args.rowId The ID of the row which we want to find linking documents for -
* if this is not specified then it will assume table or field level depending on whether the * if this is not specified then it will assume table or field level depending on whether the
* field name has been specified. * field name has been specified.
* @param {boolean|null} args.includeDocs whether to include docs in the response call, this is considerably slower so only * @param args.includeDocs whether to include docs in the response call, this is considerably slower so only
* use this if actually interested in the docs themselves. * use this if actually interested in the docs themselves.
* @returns {Promise<object[]>} This will return an array of the linking documents that were found * @returns This will return an array of the linking documents that were found
* (if any). * (if any).
*/ */
export async function getLinkDocuments(args: { export async function getLinkDocuments(args: {

View File

@ -60,7 +60,7 @@ export function getTableParams(tableId?: Optional, otherProps = {}) {
/** /**
* Generates a new table ID. * Generates a new table ID.
* @returns {string} The new table ID which the table doc can be stored under. * @returns The new table ID which the table doc can be stored under.
*/ */
export function generateTableID() { export function generateTableID() {
return `${DocumentType.TABLE}${SEPARATOR}${newid()}` return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
@ -68,8 +68,8 @@ export function generateTableID() {
/** /**
* Given a row ID this will find the table ID within it (only works for internal tables). * Given a row ID this will find the table ID within it (only works for internal tables).
* @param {string} rowId The ID of the row. * @param rowId The ID of the row.
* @returns {string} The table ID. * @returns The table ID.
*/ */
export function getTableIDFromRowID(rowId: string) { export function getTableIDFromRowID(rowId: string) {
const components = rowId const components = rowId
@ -90,7 +90,7 @@ export function getAutomationParams(
/** /**
* Generates a new automation ID. * Generates a new automation ID.
* @returns {string} The new automation ID which the automation doc can be stored under. * @returns The new automation ID which the automation doc can be stored under.
*/ */
export function generateAutomationID() { export function generateAutomationID() {
return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}` return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}`
@ -99,13 +99,13 @@ export function generateAutomationID() {
/** /**
* Generates a new link doc ID. This is currently not usable with the alldocs call, * Generates a new link doc ID. This is currently not usable with the alldocs call,
* instead a view is built to make walking the tree easier. * instead a view is built to make walking the tree easier.
* @param {string} tableId1 The ID of the linker table. * @param tableId1 The ID of the linker table.
* @param {string} tableId2 The ID of the linked table. * @param tableId2 The ID of the linked table.
* @param {string} rowId1 The ID of the linker row. * @param rowId1 The ID of the linker row.
* @param {string} rowId2 The ID of the linked row. * @param rowId2 The ID of the linked row.
* @param {string} fieldName1 The name of the field in the linker row. * @param fieldName1 The name of the field in the linker row.
* @param {string} fieldName2 the name of the field in the linked row. * @param fieldName2 the name of the field in the linked row.
* @returns {string} The new link doc ID which the automation doc can be stored under. * @returns The new link doc ID which the automation doc can be stored under.
*/ */
export function generateLinkID( export function generateLinkID(
tableId1: string, tableId1: string,
@ -130,7 +130,7 @@ export function getLinkParams(otherProps: any = {}) {
/** /**
* Generates a new layout ID. * Generates a new layout ID.
* @returns {string} The new layout ID which the layout doc can be stored under. * @returns The new layout ID which the layout doc can be stored under.
*/ */
export function generateLayoutID(id?: string) { export function generateLayoutID(id?: string) {
return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}` return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}`
@ -145,7 +145,7 @@ export function getLayoutParams(layoutId?: Optional, otherProps: any = {}) {
/** /**
* Generates a new screen ID. * Generates a new screen ID.
* @returns {string} The new screen ID which the screen doc can be stored under. * @returns The new screen ID which the screen doc can be stored under.
*/ */
export function generateScreenID() { export function generateScreenID() {
return `${DocumentType.SCREEN}${SEPARATOR}${newid()}` return `${DocumentType.SCREEN}${SEPARATOR}${newid()}`
@ -160,7 +160,7 @@ export function getScreenParams(screenId?: Optional, otherProps: any = {}) {
/** /**
* Generates a new webhook ID. * Generates a new webhook ID.
* @returns {string} The new webhook ID which the webhook doc can be stored under. * @returns The new webhook ID which the webhook doc can be stored under.
*/ */
export function generateWebhookID() { export function generateWebhookID() {
return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}` return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}`
@ -175,7 +175,7 @@ export function getWebhookParams(webhookId?: Optional, otherProps: any = {}) {
/** /**
* Generates a new datasource ID. * Generates a new datasource ID.
* @returns {string} The new datasource ID which the webhook doc can be stored under. * @returns The new datasource ID which the webhook doc can be stored under.
*/ */
export function generateDatasourceID({ plus = false } = {}) { export function generateDatasourceID({ plus = false } = {}) {
return `${ return `${
@ -202,7 +202,7 @@ export function getDatasourcePlusParams(
/** /**
* Generates a new query ID. * Generates a new query ID.
* @returns {string} The new query ID which the query doc can be stored under. * @returns The new query ID which the query doc can be stored under.
*/ */
export function generateQueryID(datasourceId: string) { export function generateQueryID(datasourceId: string) {
return `${ return `${
@ -242,7 +242,7 @@ export function getQueryParams(datasourceId?: Optional, otherProps: any = {}) {
/** /**
* Generates a new flag document ID. * Generates a new flag document ID.
* @returns {string} The ID of the flag document that was generated. * @returns The ID of the flag document that was generated.
*/ */
export function generateUserFlagID(userId: string) { export function generateUserFlagID(userId: string) {
return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}` return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}`
@ -288,7 +288,7 @@ export function getMultiIDParams(ids: string[]) {
/** /**
* Generates a new view ID. * Generates a new view ID.
* @returns {string} The new view ID which the view doc can be stored under. * @returns The new view ID which the view doc can be stored under.
*/ */
export function generateViewID(tableId: string) { export function generateViewID(tableId: string) {
return `${ return `${

View File

@ -17,7 +17,7 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
/** /**
* Creates the link view for the instance, this will overwrite the existing one, but this should only * Creates the link view for the instance, this will overwrite the existing one, but this should only
* be called if it is found that the view does not exist. * be called if it is found that the view does not exist.
* @returns {Promise<void>} The view now exists, please note that the next view of this query will actually build it, * @returns The view now exists, please note that the next view of this query will actually build it,
* so it may be slow. * so it may be slow.
*/ */
export async function createLinkView() { export async function createLinkView() {

View File

@ -536,7 +536,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
* @param json The JSON query DSL which is to be converted to SQL. * @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes. * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver. * @return the query ready to be passed to the driver.
*/ */
_query(json: QueryJson, opts: QueryOptions = {}) { _query(json: QueryJson, opts: QueryOptions = {}) {
const sqlClient = this.getSqlClient() const sqlClient = this.getSqlClient()

View File

@ -189,7 +189,7 @@ class SqlTableQueryBuilder {
/** /**
* @param json the input JSON structure from which an SQL query will be built. * @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON. * @return the operation that was found in the JSON.
*/ */
_operation(json: QueryJson): Operation { _operation(json: QueryJson): Operation {
return json.endpoint.operation return json.endpoint.operation

View File

@ -375,7 +375,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
/** /**
* Fetches the tables from the sql server database and assigns them to the datasource. * Fetches the tables from the sql server database and assigns them to the datasource.
* @param {*} datasourceId - datasourceId to fetch * @param datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built * @param entities - the tables that are to be built
*/ */
async buildSchema( async buildSchema(

View File

@ -258,7 +258,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
/** /**
* Fetches the tables from the oracle table and assigns them to the datasource. * Fetches the tables from the oracle table and assigns them to the datasource.
* @param {*} datasourceId - datasourceId to fetch * @param datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built * @param entities - the tables that are to be built
*/ */
async buildSchema( async buildSchema(

View File

@ -268,7 +268,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
/** /**
* Fetches the tables from the postgres table and assigns them to the datasource. * Fetches the tables from the postgres table and assigns them to the datasource.
* @param {*} datasourceId - datasourceId to fetch * @param datasourceId - datasourceId to fetch
* @param entities - the tables that are to be built * @param entities - the tables that are to be built
*/ */
async buildSchema( async buildSchema(

View File

@ -8,9 +8,8 @@ import * as automations from "./automations"
import { Thread } from "./threads" import { Thread } from "./threads"
import * as redis from "./utilities/redis" import * as redis from "./utilities/redis"
import { events, logging, middleware, timers } from "@budibase/backend-core" import { events, logging, middleware, timers } from "@budibase/backend-core"
const Sentry = require("@sentry/node") import destroyable from "server-destroy"
const destroyable = require("server-destroy") import { userAgent } from "koa-useragent"
const { userAgent } = require("koa-useragent")
export default function createKoaApp() { export default function createKoaApp() {
const app = new Koa() const app = new Koa()
@ -36,17 +35,6 @@ export default function createKoaApp() {
app.use(middleware.pino) app.use(middleware.pino)
app.use(userAgent) app.use(userAgent)
if (env.isProd()) {
app.on("error", (err: any, ctx: ExtendableContext) => {
Sentry.withScope(function (scope: any) {
scope.addEventProcessor(function (event: any) {
return Sentry.Handlers.parseRequest(event, ctx.request)
})
Sentry.captureException(err)
})
})
}
const server = http.createServer(app.callback()) const server = http.createServer(app.callback())
destroyable(server) destroyable(server)
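With the Sentry-specific app.on("error", ...) block removed, middleware errors are left to Koa's default error reporting and the pino middleware registered above. If a dedicated listener were still wanted, a minimal sketch using the same logging helper this commit switches to in LinkController (the two-argument logging.logWarn(message, err) call is taken from that hunk; the rest is illustrative):

import Koa from "koa"
import { logging } from "@budibase/backend-core"

const app = new Koa()

// Koa emits "error" for exceptions thrown in middleware; log them locally
// instead of forwarding them to Sentry.
app.on("error", (err: any, ctx: Koa.Context) => {
  logging.logWarn(`Request to ${ctx?.url || "unknown"} failed: ${err?.message}`, err)
})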

View File

@ -3,6 +3,7 @@ import * as syncApps from "./usageQuotas/syncApps"
import * as syncRows from "./usageQuotas/syncRows" import * as syncRows from "./usageQuotas/syncRows"
import * as syncPlugins from "./usageQuotas/syncPlugins" import * as syncPlugins from "./usageQuotas/syncPlugins"
import * as syncUsers from "./usageQuotas/syncUsers" import * as syncUsers from "./usageQuotas/syncUsers"
import * as syncCreators from "./usageQuotas/syncCreators"
/** /**
* Synchronise quotas to the state of the db. * Synchronise quotas to the state of the db.
@ -13,5 +14,6 @@ export const run = async () => {
await syncRows.run() await syncRows.run()
await syncPlugins.run() await syncPlugins.run()
await syncUsers.run() await syncUsers.run()
await syncCreators.run()
}) })
} }

View File

@ -0,0 +1,13 @@
import { users } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
export const run = async () => {
const creatorCount = await users.getCreatorCount()
console.log(`Syncing creator count: ${creatorCount}`)
await quotas.setUsage(
creatorCount,
StaticQuotaName.CREATORS,
QuotaUsageType.STATIC
)
}

View File

@ -0,0 +1,26 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import * as syncCreators from "../syncCreators"
import { quotas } from "@budibase/pro"
describe("syncCreators", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
})
afterAll(config.end)
it("syncs creators", async () => {
return config.doInContext(null, async () => {
await config.createUser({ admin: true })
await syncCreators.run()
const usageDoc = await quotas.getQuotaUsage()
// default + additional creator
const creatorsCount = 2
expect(usageDoc.usageQuota.creators).toBe(creatorsCount)
})
})
})

View File

@ -48,10 +48,10 @@ function tarFilesToTmp(tmpDir: string, files: string[]) {
/** /**
* Exports a DB to either file or a variable (memory). * Exports a DB to either file or a variable (memory).
* @param {string} dbName the DB which is to be exported. * @param dbName the DB which is to be exported.
* @param {object} opts various options for the export, e.g. whether to stream, * @param opts various options for the export, e.g. whether to stream,
* a filter function or the name of the export. * a filter function or the name of the export.
* @return {*} either a readable stream or a string * @return either a readable stream or a string
*/ */
export async function exportDB( export async function exportDB(
dbName: string, dbName: string,
@ -98,9 +98,9 @@ function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
/** /**
* Local utility to back up the database state for an app, excluding global user * Local utility to back up the database state for an app, excluding global user
* data or user relationships. * data or user relationships.
* @param {string} appId The app to back up * @param appId The app to back up
* @param {object} config Config to send to export DB/attachment export * @param config Config to send to export DB/attachment export
* @returns {*} either a string or a stream of the backup * @returns either a string or a stream of the backup
*/ */
export async function exportApp(appId: string, config?: ExportOpts) { export async function exportApp(appId: string, config?: ExportOpts) {
const prodAppId = dbCore.getProdAppID(appId) const prodAppId = dbCore.getProdAppID(appId)
@ -175,10 +175,10 @@ export async function exportApp(appId: string, config?: ExportOpts) {
/** /**
* Streams a backup of the database state for an app * Streams a backup of the database state for an app
* @param {string} appId The ID of the app which is to be backed up. * @param appId The ID of the app which is to be backed up.
* @param {boolean} excludeRows Flag to state whether the export should include data. * @param excludeRows Flag to state whether the export should include data.
* @param {string} encryptPassword password for encrypting the export. * @param encryptPassword password for encrypting the export.
* @returns {*} a readable stream of the backup which is written in real time * @returns a readable stream of the backup which is written in real time
*/ */
export async function streamExportApp({ export async function streamExportApp({
appId, appId,

View File

@ -96,8 +96,8 @@ async function updateAutomations(prodAppId: string, db: Database) {
/** /**
* This function manages temporary template files which are stored by Koa. * This function manages temporary template files which are stored by Koa.
* @param {Object} template The template object retrieved from the Koa context object. * @param template The template object retrieved from the Koa context object.
* @returns {Object} Returns a fs read stream which can be loaded into the database. * @returns Returns a fs read stream which can be loaded into the database.
*/ */
async function getTemplateStream(template: TemplateType) { async function getTemplateStream(template: TemplateType) {
if (template.file && template.file.type !== "text/plain") { if (template.file && template.file.type !== "text/plain") {

View File

@ -7,11 +7,11 @@ export async function getRow(
rowId: string, rowId: string,
opts?: { relationships?: boolean } opts?: { relationships?: boolean }
) { ) {
const response = (await handleRequest(Operation.READ, tableId, { const response = await handleRequest(Operation.READ, tableId, {
id: breakRowIdField(rowId), id: breakRowIdField(rowId),
includeSqlRelationships: opts?.relationships includeSqlRelationships: opts?.relationships
? IncludeRelationship.INCLUDE ? IncludeRelationship.INCLUDE
: IncludeRelationship.EXCLUDE, : IncludeRelationship.EXCLUDE,
})) as Row[] })
return response ? response[0] : response return response ? response[0] : response
} }

View File

@ -1,4 +1,4 @@
import { SearchFilters, SearchParams } from "@budibase/types" import { SearchFilters, SearchParams, Row } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTable } from "../../../integrations/utils"
import * as internal from "./search/internal" import * as internal from "./search/internal"
import * as external from "./search/external" import * as external from "./search/external"
@ -45,7 +45,7 @@ export async function exportRows(
return pickApi(options.tableId).exportRows(options) return pickApi(options.tableId).exportRows(options)
} }
export async function fetch(tableId: string) { export async function fetch(tableId: string): Promise<Row[]> {
return pickApi(tableId).fetch(tableId) return pickApi(tableId).fetch(tableId)
} }
@ -53,6 +53,6 @@ export async function fetchView(
tableId: string, tableId: string,
viewName: string, viewName: string,
params: ViewParams params: ViewParams
) { ): Promise<Row[]> {
return pickApi(tableId).fetchView(viewName, params) return pickApi(tableId).fetchView(viewName, params)
} }

View File

@ -55,15 +55,15 @@ export async function search(options: SearchParams) {
try { try {
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
options = searchInputMapping(table, options) options = searchInputMapping(table, options)
let rows = (await handleRequest(Operation.READ, tableId, { let rows = await handleRequest(Operation.READ, tableId, {
filters: query, filters: query,
sort, sort,
paginate: paginateObj as PaginationJson, paginate: paginateObj as PaginationJson,
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[] })
let hasNextPage = false let hasNextPage = false
if (paginate && rows.length === limit) { if (paginate && rows.length === limit) {
const nextRows = (await handleRequest(Operation.READ, tableId, { const nextRows = await handleRequest(Operation.READ, tableId, {
filters: query, filters: query,
sort, sort,
paginate: { paginate: {
@ -71,7 +71,7 @@ export async function search(options: SearchParams) {
page: bookmark! * limit + 1, page: bookmark! * limit + 1,
}, },
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[] })
hasNextPage = nextRows.length > 0 hasNextPage = nextRows.length > 0
} }
@ -172,12 +172,18 @@ export async function exportRows(
} }
} }
export async function fetch(tableId: string) { export async function fetch(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, { const response = await handleRequest<Operation.READ>(
includeSqlRelationships: IncludeRelationship.INCLUDE, Operation.READ,
}) tableId,
{
includeSqlRelationships: IncludeRelationship.INCLUDE,
}
)
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
return await outputProcessing(table, response, { preserveLinks: true }) return await outputProcessing<Row[]>(table, response, {
preserveLinks: true,
})
} }
export async function fetchView(viewName: string) { export async function fetchView(viewName: string) {

View File

@ -6,26 +6,26 @@ import {
import env from "../../../../environment" import env from "../../../../environment"
import { fullSearch, paginatedSearch } from "./internalSearch" import { fullSearch, paginatedSearch } from "./internalSearch"
import { import {
InternalTables,
getRowParams,
DocumentType, DocumentType,
getRowParams,
InternalTables,
} from "../../../../db/utils" } from "../../../../db/utils"
import { getGlobalUsersFromMetadata } from "../../../../utilities/global" import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
import { outputProcessing } from "../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../utilities/rowProcessor"
import { Database, Row, Table, SearchParams } from "@budibase/types" import { Database, Row, SearchParams, Table } from "@budibase/types"
import { cleanExportRows } from "../utils" import { cleanExportRows } from "../utils"
import { import {
Format,
csv, csv,
Format,
json, json,
jsonWithSchema, jsonWithSchema,
} from "../../../../api/controllers/view/exporters" } from "../../../../api/controllers/view/exporters"
import * as inMemoryViews from "../../../../db/inMemoryView" import * as inMemoryViews from "../../../../db/inMemoryView"
import { import {
migrateToInMemoryView,
migrateToDesignView,
getFromDesignDoc, getFromDesignDoc,
getFromMemoryDoc, getFromMemoryDoc,
migrateToDesignView,
migrateToInMemoryView,
} from "../../../../api/controllers/view/utils" } from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk" import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "../search" import { ExportRowsParams, ExportRowsResult } from "../search"
@ -139,13 +139,12 @@ export async function exportRows(
} }
} }
export async function fetch(tableId: string) { export async function fetch(tableId: string): Promise<Row[]> {
const db = context.getAppDB() const db = context.getAppDB()
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
const rows = await getRawTableData(db, tableId) const rows = await getRawTableData(db, tableId)
const result = await outputProcessing(table, rows) return await outputProcessing(table, rows)
return result
} }
async function getRawTableData(db: Database, tableId: string) { async function getRawTableData(db: Database, tableId: string) {

View File

@ -6,7 +6,7 @@ import path from "path"
/** /**
* Exactly the same as path.join * Exactly the same as path.join
* @param args Any number of string arguments to add to a path * @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use * @returns The final path ready to use
*/ */
export function join(...args: string[]) { export function join(...args: string[]) {
return path.join(...args) return path.join(...args)
@ -15,7 +15,7 @@ export function join(...args: string[]) {
/** /**
* Exactly the same as path.resolve * Exactly the same as path.resolve
* @param args Any number of string arguments to add to a path * @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use * @returns The final path ready to use
*/ */
export function resolve(...args: string[]) { export function resolve(...args: string[]) {
return path.resolve(...args) return path.resolve(...args)

View File

@ -11,8 +11,8 @@ export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
/** /**
* Uploads the latest client library to the object store. * Uploads the latest client library to the object store.
* @param {string} appId The ID of the app which is being created. * @param appId The ID of the app which is being created.
* @return {Promise<void>} once promise completes app resources should be ready in object store. * @return once promise completes app resources should be ready in object store.
*/ */
export const createApp = async (appId: string) => { export const createApp = async (appId: string) => {
await updateClientLibrary(appId) await updateClientLibrary(appId)
@ -20,8 +20,8 @@ export const createApp = async (appId: string) => {
/** /**
* Removes all of the assets created for an app in the object store. * Removes all of the assets created for an app in the object store.
* @param {string} appId The ID of the app which is being deleted. * @param appId The ID of the app which is being deleted.
* @return {Promise<void>} once promise completes the app resources will be removed from object store. * @return once promise completes the app resources will be removed from object store.
*/ */
export const deleteApp = async (appId: string) => { export const deleteApp = async (appId: string) => {
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`) await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)

View File

@ -53,8 +53,8 @@ export const checkDevelopmentEnvironment = () => {
* Used to retrieve a handlebars file from the system which will be used as a template. * Used to retrieve a handlebars file from the system which will be used as a template.
* This is allowable as the template handlebars files should be static and identical across * This is allowable as the template handlebars files should be static and identical across
* the cluster. * the cluster.
* @param {string} path The path to the handlebars file which is to be loaded. * @param path The path to the handlebars file which is to be loaded.
* @returns {string} The loaded handlebars file as a string - loaded as utf8. * @returns The loaded handlebars file as a string - loaded as utf8.
*/ */
export const loadHandlebarsFile = (path: PathLike) => { export const loadHandlebarsFile = (path: PathLike) => {
return fs.readFileSync(path, "utf8") return fs.readFileSync(path, "utf8")
@ -63,8 +63,8 @@ export const loadHandlebarsFile = (path: PathLike) => {
/** /**
* When returning a file from the API we need to write the file to the system temporarily so we * When returning a file from the API we need to write the file to the system temporarily so we
* can create a read stream to send. * can create a read stream to send.
* @param {string} contents the contents of the file which is to be returned from the API. * @param contents the contents of the file which is to be returned from the API.
* @return {Object} the read stream which can be put into the koa context body. * @return the read stream which can be put into the koa context body.
*/ */
export const apiFileReturn = (contents: any) => { export const apiFileReturn = (contents: any) => {
const path = join(budibaseTempDir(), uuid()) const path = join(budibaseTempDir(), uuid())
@ -78,8 +78,8 @@ export const streamFile = (path: string) => {
/** /**
* Writes the provided contents to a temporary file, which can be used briefly. * Writes the provided contents to a temporary file, which can be used briefly.
* @param {string} fileContents contents which will be written to a temp file. * @param fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file. * @return the path to the temp file.
*/ */
export const storeTempFile = ( export const storeTempFile = (
fileContents: string | NodeJS.ArrayBufferView fileContents: string | NodeJS.ArrayBufferView
@ -100,7 +100,7 @@ export const stringToFileStream = (contents: any) => {
/** /**
* Creates a temp file and returns it from the API. * Creates a temp file and returns it from the API.
* @param {string} fileContents the contents to be returned in file. * @param fileContents the contents to be returned in file.
*/ */
export const sendTempFile = (fileContents: any) => { export const sendTempFile = (fileContents: any) => {
const path = storeTempFile(fileContents) const path = storeTempFile(fileContents)
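A minimal hedged sketch of apiFileReturn in a koa route handler (the ctx usage and the csvString value are assumptions for illustration, not taken from this diff):
// stream a generated CSV back to the client via a temp file
const csvString = "name,age\nAlice,30\n"
ctx.attachment("export.csv")          // standard koa helper for the Content-Disposition header
ctx.body = apiFileReturn(csvString)   // a read stream over a temp file containing csvString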


@ -5,8 +5,8 @@ import { objectStore } from "@budibase/backend-core"
/** /**
* This function manages temporary template files which are stored by Koa. * This function manages temporary template files which are stored by Koa.
* @param {Object} template The template object retrieved from the Koa context object. * @param template The template object retrieved from the Koa context object.
* @returns {Object} Returns an fs read stream which can be loaded into the database. * @returns Returns an fs read stream which can be loaded into the database.
*/ */
export const getTemplateStream = async (template: any) => { export const getTemplateStream = async (template: any) => {
if (template.file) { if (template.file) {
@ -20,7 +20,7 @@ export const getTemplateStream = async (template: any) => {
/** /**
* Retrieves a template and pipes it to minio as well as making it available temporarily. * Retrieves a template and pipes it to minio as well as making it available temporarily.
* @param {string} type The type of template which is to be retrieved. * @param type The type of template which is to be retrieved.
* @param name * @param name
* @return {Promise<*>} * @return {Promise<*>}
*/ */


@ -24,8 +24,8 @@ export function removeFromArray(array: any[], element: any) {
/** /**
* Makes sure that a URL has the correct number of slashes, while maintaining the * Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes. * http(s):// double slashes.
* @param {string} url The URL to test and remove any extra double slashes. * @param url The URL to test and remove any extra double slashes.
* @return {string} The updated url. * @return The updated url.
*/ */
export function checkSlashesInUrl(url: string) { export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
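A worked example of the regex above (the input URL is invented):
// collapses repeated path slashes while keeping the protocol's double slash intact
checkSlashesInUrl("http://example.com//api///rows") // -> "http://example.com/api/rows"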


@ -51,11 +51,11 @@ function getRemovedAttachmentKeys(
/** /**
* This will update any auto columns that are found on the row/table with the correct information based on * This will update any auto columns that are found on the row/table with the correct information based on
* the current time and the logged-in user making the request. * the current time and the logged-in user making the request.
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields. * @param user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing. * @param table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param {Object} row The row which is to be updated with information for the auto columns. * @param row The row which is to be updated with information for the auto columns.
* @param {Object} opts specific options for function to carry out optional features. * @param opts specific options for function to carry out optional features.
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated * @returns The updated row and table, the table may need to be updated
* for automatic ID purposes. * for automatic ID purposes.
*/ */
export function processAutoColumn( export function processAutoColumn(
@ -111,9 +111,9 @@ export function processAutoColumn(
/** /**
* This will coerce a value to the correct types based on the type transform map * This will coerce a value to the correct types based on the type transform map
* @param {object} row The value to coerce * @param row The value to coerce
* @param {object} type The type to coerce to * @param type The type to coerce to
* @returns {object} The coerced value * @returns The coerced value
*/ */
export function coerce(row: any, type: string) { export function coerce(row: any, type: string) {
// no coercion specified for type, skip it // no coercion specified for type, skip it
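As a rough, hedged illustration of the coercion described above (the type strings and results are assumptions based on the comment; the actual transform map is not shown in this hunk):
// coerce string cell values into the column's declared type, where a transform is registered
const age = coerce("42", "number")       // assumed to yield the number 42
const active = coerce("true", "boolean") // assumed to yield the boolean true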
@ -135,11 +135,11 @@ export function coerce(row: any, type: string) {
/** /**
* Given an input row this function will apply all the necessary pre-processing to it, such as coercion * Given an input row this function will apply all the necessary pre-processing to it, such as coercion
* of column values or adding auto-column values. * of column values or adding auto-column values.
* @param {object} user the user which is performing the input. * @param user the user which is performing the input.
* @param {object} row the row which is being created/updated. * @param row the row which is being created/updated.
* @param {object} table the table which the row is being saved to. * @param table the table which the row is being saved to.
* @param {object} opts some input processing options (like disabling auto-column relationships). * @param opts some input processing options (like disabling auto-column relationships).
* @returns {object} the row which has been prepared to be written to the DB. * @returns the row which has been prepared to be written to the DB.
*/ */
export async function inputProcessing( export async function inputProcessing(
userId: string | null | undefined, userId: string | null | undefined,
@ -198,11 +198,11 @@ export async function inputProcessing(
/** /**
* This function enriches the input rows with anything they are supposed to contain, for example * This function enriches the input rows with anything they are supposed to contain, for example
* link records or attachment links. * link records or attachment links.
* @param {object} table the table from which these rows came originally; this is used to determine * @param table the table from which these rows came originally; this is used to determine
* the schema of the rows and then enrich. * the schema of the rows and then enrich.
* @param {object[]|object} rows the rows which are to be enriched. * @param rows the rows which are to be enriched.
* @param {object} opts used to set some options for the output, such as disabling relationship squashing. * @param opts used to set some options for the output, such as disabling relationship squashing.
* @returns {object[]|object} the enriched rows will be returned. * @returns the enriched rows will be returned.
*/ */
export async function outputProcessing<T extends Row[] | Row>( export async function outputProcessing<T extends Row[] | Row>(
table: Table, table: Table,
@ -278,13 +278,13 @@ export async function outputProcessing<T extends Row[] | Row>(
/** /**
* Clean up any attachments that were attached to a row. * Clean up any attachments that were attached to a row.
* @param {object} table The table from which a row is being removed. * @param table The table from which a row is being removed.
* @param {any} row optional - the row being removed. * @param row optional - the row being removed.
* @param {any} rows optional - if multiple rows being deleted can do this in bulk. * @param rows optional - if multiple rows being deleted can do this in bulk.
* @param {any} oldRow optional - if updating a row this will determine the difference. * @param oldRow optional - if updating a row this will determine the difference.
* @param {any} oldTable optional - if updating a table, can supply the old table to look for * @param oldTable optional - if updating a table, can supply the old table to look for
* deleted attachment columns. * deleted attachment columns.
* @return {Promise<void>} When all attachments have been removed this will return. * @return When all attachments have been removed this will return.
*/ */
export async function cleanupAttachments( export async function cleanupAttachments(
table: Table, table: Table,


@ -8,7 +8,7 @@ import { User } from "@budibase/types"
* will return "foo" over "bar". * will return "foo" over "bar".
* @param obj the object * @param obj the object
* @param key the key * @param key the key
* @return {*|null} the value or null if a value was not found for this key * @return the value or null if a value was not found for this key
*/ */
export const deepGet = (obj: { [x: string]: any }, key: string) => { export const deepGet = (obj: { [x: string]: any }, key: string) => {
if (!obj || !key) { if (!obj || !key) {
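A short example of the dotted-key lookup described above (the object is invented):
const user = { profile: { city: "Dublin" } }
deepGet(user, "profile.city") // -> "Dublin"
deepGet(user, "profile.zip")  // -> a falsy result (null/undefined) - no value found for this key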


@ -6,6 +6,7 @@ import {
InternalTable, InternalTable,
} from "@budibase/types" } from "@budibase/types"
import { getProdAppID } from "./applications" import { getProdAppID } from "./applications"
import * as _ from "lodash/fp"
// checks if a user is specifically a builder, given an app ID // checks if a user is specifically a builder, given an app ID
export function isBuilder(user: User | ContextUser, appId?: string): boolean { export function isBuilder(user: User | ContextUser, appId?: string): boolean {
@ -58,6 +59,18 @@ export function hasAppBuilderPermissions(user?: User | ContextUser): boolean {
return !isGlobalBuilder && appLength != null && appLength > 0 return !isGlobalBuilder && appLength != null && appLength > 0
} }
export function hasAppCreatorPermissions(user?: User | ContextUser): boolean {
if (!user) {
return false
}
return _.flow(
_.get("roles"),
_.values,
_.find(x => ["CREATOR", "ADMIN"].includes(x)),
x => !!x
)(user)
}
// checks if a user is capable of building any app // checks if a user is capable of building any app
export function hasBuilderPermissions(user?: User | ContextUser): boolean { export function hasBuilderPermissions(user?: User | ContextUser): boolean {
if (!user) { if (!user) {
@ -74,6 +87,18 @@ export function hasAdminPermissions(user?: User | ContextUser): boolean {
return !!user.admin?.global return !!user.admin?.global
} }
export function isCreator(user?: User | ContextUser): boolean {
if (!user) {
return false
}
return (
isGlobalBuilder(user) ||
hasAdminPermissions(user) ||
hasAppBuilderPermissions(user) ||
hasAppCreatorPermissions(user)
)
}
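To make the new creator checks concrete, a hedged example (the user objects are invented, and fields beyond roles are assumed absent):
// a user holding a per-app CREATOR role counts as an app creator
const appCreator = { roles: { app_a: "CREATOR" } } as any
hasAppCreatorPermissions(appCreator) // -> true
// a plain end user with only a BASIC role is not a creator
const endUser = { roles: { app_a: "BASIC" } } as any
isCreator(endUser) // -> false, assuming no builder or admin flags are set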
export function getGlobalUserID(userId?: string): string | undefined { export function getGlobalUserID(userId?: string): string | undefined {
if (typeof userId !== "string") { if (typeof userId !== "string") {
return userId return userId


@ -32,6 +32,7 @@ export interface StaticUsage {
[StaticQuotaName.APPS]: number [StaticQuotaName.APPS]: number
[StaticQuotaName.PLUGINS]: number [StaticQuotaName.PLUGINS]: number
[StaticQuotaName.USERS]: number [StaticQuotaName.USERS]: number
[StaticQuotaName.CREATORS]: number
[StaticQuotaName.USER_GROUPS]: number [StaticQuotaName.USER_GROUPS]: number
[StaticQuotaName.ROWS]: number [StaticQuotaName.ROWS]: number
triggers: { triggers: {


@ -1,5 +1,8 @@
export enum FeatureFlag { export enum FeatureFlag {
LICENSING = "LICENSING", LICENSING = "LICENSING",
// Feature IDs in Posthog
PER_CREATOR_PER_USER_PRICE = "18873",
PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
} }
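A hedged sketch of how one of the new Posthog-backed flags might be gated (the isEnabled helper is an invented name, not an API from this diff):
// hypothetical guard around per-creator pricing UI
if (await isEnabled(FeatureFlag.PER_CREATOR_PER_USER_PRICE)) {
  // render the per-creator/per-user price breakdown
}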
export interface TenantFeatureFlags { export interface TenantFeatureFlags {


@ -5,10 +5,17 @@ export interface Customer {
currency: string | null | undefined currency: string | null | undefined
} }
export interface SubscriptionItems {
user: number | undefined
creator: number | undefined
}
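An invented example value, purely to illustrate the shape of the new per-item quantities:
// e.g. a workspace billed for 20 users, 3 of whom are creators
const quantities: SubscriptionItems = { user: 20, creator: 3 }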
export interface Subscription { export interface Subscription {
amount: number amount: number
amounts: SubscriptionItems | undefined
currency: string currency: string
quantity: number quantity: number
quantities: SubscriptionItems | undefined
duration: PriceDuration duration: PriceDuration
cancelAt: number | null | undefined cancelAt: number | null | undefined
currentPeriodStart: number currentPeriodStart: number


@ -4,7 +4,9 @@ export enum PlanType {
PRO = "pro", PRO = "pro",
/** @deprecated */ /** @deprecated */
TEAM = "team", TEAM = "team",
/** @deprecated */
PREMIUM = "premium", PREMIUM = "premium",
PREMIUM_PLUS = "premium_plus",
BUSINESS = "business", BUSINESS = "business",
ENTERPRISE = "enterprise", ENTERPRISE = "enterprise",
} }
@ -26,10 +28,12 @@ export interface AvailablePrice {
currency: string currency: string
duration: PriceDuration duration: PriceDuration
priceId: string priceId: string
type?: string
} }
export enum PlanModel { export enum PlanModel {
PER_USER = "perUser", PER_USER = "perUser",
PER_CREATOR_PER_USER = "per_creator_per_user",
DAY_PASS = "dayPass", DAY_PASS = "dayPass",
} }


@ -14,6 +14,7 @@ export enum StaticQuotaName {
ROWS = "rows", ROWS = "rows",
APPS = "apps", APPS = "apps",
USERS = "users", USERS = "users",
CREATORS = "creators",
USER_GROUPS = "userGroups", USER_GROUPS = "userGroups",
PLUGINS = "plugins", PLUGINS = "plugins",
} }
@ -67,6 +68,7 @@ export type StaticQuotas = {
[StaticQuotaName.ROWS]: Quota [StaticQuotaName.ROWS]: Quota
[StaticQuotaName.APPS]: Quota [StaticQuotaName.APPS]: Quota
[StaticQuotaName.USERS]: Quota [StaticQuotaName.USERS]: Quota
[StaticQuotaName.CREATORS]: Quota
[StaticQuotaName.USER_GROUPS]: Quota [StaticQuotaName.USER_GROUPS]: Quota
[StaticQuotaName.PLUGINS]: Quota [StaticQuotaName.PLUGINS]: Quota
} }
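A hedged sketch of how the new CREATORS entry might surface in the StaticUsage interface extended earlier in this change (values invented; Partial is used because the full shape, e.g. triggers, is not shown in these hunks):
const usage: Partial<StaticUsage> = {
  [StaticQuotaName.USERS]: 25,
  [StaticQuotaName.CREATORS]: 4, // creators are now tracked alongside users
  [StaticQuotaName.APPS]: 3,
}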


@ -43,7 +43,6 @@
"@budibase/string-templates": "0.0.0", "@budibase/string-templates": "0.0.0",
"@budibase/types": "0.0.0", "@budibase/types": "0.0.0",
"@koa/router": "8.0.8", "@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@techpass/passport-openidconnect": "0.3.2", "@techpass/passport-openidconnect": "0.3.2",
"@types/global-agent": "2.1.1", "@types/global-agent": "2.1.1",
"aws-sdk": "2.1030.0", "aws-sdk": "2.1030.0",


@ -4,8 +4,6 @@ if (process.env.DD_APM_ENABLED) {
// need to load environment first // need to load environment first
import env from "./environment" import env from "./environment"
import { Scope } from "@sentry/node"
import { Event } from "@sentry/types/dist/event"
import Application from "koa" import Application from "koa"
import { bootstrap } from "global-agent" import { bootstrap } from "global-agent"
import * as db from "./db" import * as db from "./db"
@ -25,9 +23,9 @@ import koaBody from "koa-body"
import http from "http" import http from "http"
import api from "./api" import api from "./api"
import * as redis from "./utilities/redis" import * as redis from "./utilities/redis"
const Sentry = require("@sentry/node")
const koaSession = require("koa-session") const koaSession = require("koa-session")
const { userAgent } = require("koa-useragent") import { userAgent } from "koa-useragent"
import destroyable from "server-destroy" import destroyable from "server-destroy"
import { initPro } from "./initPro" import { initPro } from "./initPro"
@ -66,20 +64,6 @@ app.use(auth.passport.session())
// api routes // api routes
app.use(api.routes()) app.use(api.routes())
// sentry
if (env.isProd()) {
Sentry.init()
app.on("error", (err, ctx) => {
Sentry.withScope(function (scope: Scope) {
scope.addEventProcessor(function (event: Event) {
return Sentry.Handlers.parseRequest(event, ctx.request)
})
Sentry.captureException(err)
})
})
}
const server = http.createServer(app.callback()) const server = http.createServer(app.callback())
destroyable(server) destroyable(server)


@ -71,12 +71,12 @@ async function getLinkCode(
/** /**
* Builds an email using handlebars and the templates found in the system (default or otherwise). * Builds an email using handlebars and the templates found in the system (default or otherwise).
* @param {string} purpose the purpose of the email being built, e.g. invitation, password reset. * @param purpose the purpose of the email being built, e.g. invitation, password reset.
* @param {string} email the address which it is being sent to for contextual purposes. * @param email the address which it is being sent to for contextual purposes.
* @param {object} context the context which is being used for building the email (hbs context). * @param context the context which is being used for building the email (hbs context).
* @param {object|null} user if being sent to an existing user then the object can be provided for context. * @param user if being sent to an existing user then the object can be provided for context.
* @param {string|null} contents if using a custom template can supply contents for context. * @param contents if using a custom template can supply contents for context.
* @return {Promise<string>} returns the built email HTML if all provided parameters were valid. * @return returns the built email HTML if all provided parameters were valid.
*/ */
async function buildEmail( async function buildEmail(
purpose: EmailTemplatePurpose, purpose: EmailTemplatePurpose,
@ -128,7 +128,7 @@ async function buildEmail(
/** /**
* Checks if an SMTP config exists based on passed-in parameters. * Checks if an SMTP config exists based on passed-in parameters.
* @return {Promise<boolean>} returns true if there is a configuration that can be used. * @return returns true if there is a configuration that can be used.
*/ */
export async function isEmailConfigured() { export async function isEmailConfigured() {
// when "testing" or smtp fallback is enabled simply return true // when "testing" or smtp fallback is enabled simply return true
@ -142,10 +142,10 @@ export async function isEmailConfigured() {
/** /**
* Given an email address and an email purpose this will retrieve the SMTP configuration and * Given an email address and an email purpose this will retrieve the SMTP configuration and
* send an email using it. * send an email using it.
* @param {string} email The email address to send to. * @param email The email address to send to.
* @param {string} purpose The purpose of the email being sent (e.g. reset password). * @param purpose The purpose of the email being sent (e.g. reset password).
* @param {object} opts The options for sending the email. * @param opts The options for sending the email.
* @return {Promise<object>} returns details about the attempt to send email, e.g. if it is successful; based on * @return returns details about the attempt to send email, e.g. if it is successful; based on
* nodemailer response. * nodemailer response.
*/ */
export async function sendEmail( export async function sendEmail(
@ -212,8 +212,8 @@ export async function sendEmail(
/** /**
* Given an SMTP configuration this runs it through nodemailer to see if it is in fact functional. * Given an SMTP configuration this runs it through nodemailer to see if it is in fact functional.
* @param {object} config an SMTP configuration - this is based on the nodemailer API. * @param config an SMTP configuration - this is based on the nodemailer API.
* @return {Promise<boolean>} returns true if the configuration is valid. * @return returns true if the configuration is valid.
*/ */
export async function verifyConfig(config: SMTPInnerConfig) { export async function verifyConfig(config: SMTPInnerConfig) {
const transport = createSMTPTransport(config) const transport = createSMTPTransport(config)
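Rounding off this file's changes, a hedged usage sketch of sendEmail based on its JSDoc above (the purpose enum member and the opts fields are assumptions; the full signatures are only partially visible in this diff):
// hypothetical: send a password-reset email to a user
const response = await sendEmail(
  "someone@example.com",
  EmailTemplatePurpose.PASSWORD_RECOVERY, // assumed member name
  { user }                                // assumed option carrying the recipient's user record
)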


@ -1,8 +1,8 @@
/** /**
* Makes sure that a URL has the correct number of slashes, while maintaining the * Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes. * http(s):// double slashes.
* @param {string} url The URL to test and remove any extra double slashes. * @param url The URL to test and remove any extra double slashes.
* @return {string} The updated url. * @return The updated url.
*/ */
export function checkSlashesInUrl(url: string) { export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")


@ -37,8 +37,8 @@ async function updateACode(db: string, code: string, value: any) {
/** /**
* Given an invite code and invite body, allow the update an existing/valid invite in redis * Given an invite code and invite body, allow the update an existing/valid invite in redis
* @param {string} inviteCode The invite code for an invite in redis * @param inviteCode The invite code for an invite in redis
* @param {object} value The body of the updated user invitation * @param value The body of the updated user invitation
*/ */
export async function updateInviteCode(inviteCode: string, value: string) { export async function updateInviteCode(inviteCode: string, value: string) {
await updateACode(redis.utils.Databases.INVITATIONS, inviteCode, value) await updateACode(redis.utils.Databases.INVITATIONS, inviteCode, value)
@ -77,9 +77,9 @@ export async function shutdown() {
/** /**
* Given a user ID this will store a code (that is returned) for an hour in redis. * Given a user ID this will store a code (that is returned) for an hour in redis.
* The user can then return this code for resetting their password (through their reset link). * The user can then return this code for resetting their password (through their reset link).
* @param {string} userId the ID of the user which is to be reset. * @param userId the ID of the user which is to be reset.
* @param {object} info Info about the user/the reset process. * @param info Info about the user/the reset process.
* @return {Promise<string>} returns the code that was stored to redis. * @return returns the code that was stored to redis.
*/ */
export async function getResetPasswordCode(userId: string, info: any) { export async function getResetPasswordCode(userId: string, info: any) {
return writeACode(redis.utils.Databases.PW_RESETS, { userId, info }) return writeACode(redis.utils.Databases.PW_RESETS, { userId, info })
@ -87,9 +87,9 @@ export async function getResetPasswordCode(userId: string, info: any) {
/** /**
* Given a reset code this will look it up in redis, check if the code is valid and delete it if required. * Given a reset code this will look it up in redis, check if the code is valid and delete it if required.
* @param {string} resetCode The code provided via the email link. * @param resetCode The code provided via the email link.
* @param {boolean} deleteCode whether to delete the code once it has been used - defaults to true. * @param deleteCode whether to delete the code once it has been used - defaults to true.
* @return {Promise<string>} returns the user ID if it is found * @return returns the user ID if it is found
*/ */
export async function checkResetPasswordCode( export async function checkResetPasswordCode(
resetCode: string, resetCode: string,
@ -104,9 +104,9 @@ export async function checkResetPasswordCode(
/** /**
* Generates an invitation code and writes it to redis - which can later be checked for user creation. * Generates an invitation code and writes it to redis - which can later be checked for user creation.
* @param {string} email the email address which the code is being sent to (for use later). * @param email the email address which the code is being sent to (for use later).
* @param {object|null} info Information to be carried along with the invitation. * @param info Information to be carried along with the invitation.
* @return {Promise<string>} returns the code that was stored to redis. * @return returns the code that was stored to redis.
*/ */
export async function getInviteCode(email: string, info: any) { export async function getInviteCode(email: string, info: any) {
return writeACode(redis.utils.Databases.INVITATIONS, { email, info }) return writeACode(redis.utils.Databases.INVITATIONS, { email, info })
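A hedged round-trip sketch tying getInviteCode above to checkInviteCode below (the info payload and the returned shape are assumptions):
// create an invite code for a new user, then redeem it later
const code = await getInviteCode("new.user@example.com", { workspaceRole: "BASIC" })
// ... the code is embedded in the invitation link sent by email ...
const invite = await checkInviteCode(code) // deletes the code by default and returns the stored email/info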
@ -114,9 +114,9 @@ export async function getInviteCode(email: string, info: any) {
/** /**
* Checks that the provided invite code is valid - will return the email address of the user that was invited. * Checks that the provided invite code is valid - will return the email address of the user that was invited.
* @param {string} inviteCode the invite code that was provided as part of the link. * @param inviteCode the invite code that was provided as part of the link.
* @param {boolean} deleteCode whether or not the code should be deleted after retrieval - defaults to true. * @param deleteCode whether or not the code should be deleted after retrieval - defaults to true.
* @return {Promise<object>} If the code is valid then an email address will be returned. * @return If the code is valid then an email address will be returned.
*/ */
export async function checkInviteCode( export async function checkInviteCode(
inviteCode: string, inviteCode: string,

yarn.lock (129 changed lines)

@ -4255,126 +4255,6 @@
node-fetch "^2.6.0" node-fetch "^2.6.0"
utf-8-validate "^5.0.2" utf-8-validate "^5.0.2"
"@sentry/browser@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-5.19.1.tgz#b22f36fc71f36719ad352a54e6b31722622128c0"
integrity sha512-Aon5Nc2n8sIXKg6Xbr4RM3/Xs7vFpXksL56z3yIuGrmpCM8ToQ25/tQv8h+anYi72x5bn1npzaXB/NwU1Qwfhg==
dependencies:
"@sentry/core" "5.19.1"
"@sentry/types" "5.19.1"
"@sentry/utils" "5.19.1"
tslib "^1.9.3"
"@sentry/core@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-5.19.1.tgz#f5ff7feb1118035f75f1d0bc2a76e2b040d2aa8e"
integrity sha512-BGGxjeT95Og/hloBhQXAVcndVXPmIU6drtF3oKRT12cBpiG965xEDEUwiJVvyb5MAvojdVEZBK2LURUFY/d7Zw==
dependencies:
"@sentry/hub" "5.19.1"
"@sentry/minimal" "5.19.1"
"@sentry/types" "5.19.1"
"@sentry/utils" "5.19.1"
tslib "^1.9.3"
"@sentry/core@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-6.17.7.tgz#f591235c06b1a4e75d748b15c539e071bd3f5cf5"
integrity sha512-SRhLkD05lQb4eCt1ed9Dz72DKbRDlM8PJix8eC2oJLtwyFTS0IlJNkIYRrbsSKkJUm0VsKcDkzIHvUAgBBQICw==
dependencies:
"@sentry/hub" "6.17.7"
"@sentry/minimal" "6.17.7"
"@sentry/types" "6.17.7"
"@sentry/utils" "6.17.7"
tslib "^1.9.3"
"@sentry/hub@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-5.19.1.tgz#f3bc8500680974ce43c1eedcd8e90696cc18b306"
integrity sha512-XjfbNGWVeDsP38alm5Cm08YPIw5Hu6HbPkw7a3y1piViTrg4HdtsE+ZJqq0YcURo2RTpg6Ks6coCS/zJxIPygQ==
dependencies:
"@sentry/types" "5.19.1"
"@sentry/utils" "5.19.1"
tslib "^1.9.3"
"@sentry/hub@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-6.17.7.tgz#5c90d661e263dad7da0e0106f1cb90cf797d93a7"
integrity sha512-siGzcg+quGOdjRaBGAz6T3ycwHUsGgvalptSJdf5Q783FVFhU+haPul++zGOYURXOgx0RjYGWqagwO8+jljl3Q==
dependencies:
"@sentry/types" "6.17.7"
"@sentry/utils" "6.17.7"
tslib "^1.9.3"
"@sentry/minimal@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-5.19.1.tgz#04043d93a7dc90cbed1a31d80f6bf59688ea3100"
integrity sha512-pgNfsaCroEsC8gv+NqmPTIkj4wyK6ZgYLV12IT4k2oJLkGyg45TSAKabyB7oEP5jsj8sRzm8tDomu8M4HpaCHg==
dependencies:
"@sentry/hub" "5.19.1"
"@sentry/types" "5.19.1"
tslib "^1.9.3"
"@sentry/minimal@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-6.17.7.tgz#f19182047f19b563f40a30d45d2ce9ad7df1ec4e"
integrity sha512-+/FGem1uXsXikX9wHPw44nevO7YTVjkkiPjyLsvnWMjv64r4Au5s+NQSFHDaytRm9IlU//+OasCAS5VAwHcYRg==
dependencies:
"@sentry/hub" "6.17.7"
"@sentry/types" "6.17.7"
tslib "^1.9.3"
"@sentry/node@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/node/-/node-6.17.7.tgz#c142d93328b29312098276e0cb66ec3b9e805a93"
integrity sha512-YlBhEv8YYUimHLpwzUP5lXkagbGxGAfjvsbahhvqf7rRl8Fu5XbatAcSDcx7YE4R0Iox94IfZy95kF2NL4Idow==
dependencies:
"@sentry/core" "6.17.7"
"@sentry/hub" "6.17.7"
"@sentry/tracing" "6.17.7"
"@sentry/types" "6.17.7"
"@sentry/utils" "6.17.7"
cookie "^0.4.1"
https-proxy-agent "^5.0.0"
lru_map "^0.3.3"
tslib "^1.9.3"
"@sentry/tracing@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-6.17.7.tgz#f4536683b29bb3ac7ddda5ca49494731cec6b619"
integrity sha512-QzIDHOjjdi/0LTdrK2LTC27YEOODI473KD8KmMJ+r9PmjDeIjNzz4hJlPwQSnXR3Mu/8foxGJGXsAt3LNmKzlQ==
dependencies:
"@sentry/hub" "6.17.7"
"@sentry/minimal" "6.17.7"
"@sentry/types" "6.17.7"
"@sentry/utils" "6.17.7"
tslib "^1.9.3"
"@sentry/types@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-5.19.1.tgz#8762f668d3fc2416fbde31d15d13009544caeb54"
integrity sha512-M5MhTLnjqYFwxMwcFPBpBgYQqI9hCvtVuj/A+NvcBHpe7VWOXdn/Sys+zD6C76DWGFYQdw3OWCsZimP24dL8mA==
"@sentry/types@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-6.17.7.tgz#61946a3f6532b8f21251b264f173b02f9ea2458e"
integrity sha512-iBlJDhrSowZKeqvutY0tCkUjrWqkLFsHrbaQ553r1Nx+/4mxHjzVYtEVGMjZAxQUEbkm0TbnQIkkT7ltglNJ9A==
"@sentry/utils@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-5.19.1.tgz#e1134db40e4bb9732251e515721cec7ee94d4d9c"
integrity sha512-neUiNBnZSHjWTZWy2QV02EHTx1C2L3DBPzRXlh0ca5xrI7LMBLmhkHlhebn1E5ky3PW1teqZTgmh0jZoL99TEA==
dependencies:
"@sentry/types" "5.19.1"
tslib "^1.9.3"
"@sentry/utils@6.17.7":
version "6.17.7"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-6.17.7.tgz#0574bf914cc129b5e47041b75bb34dfbe0decbba"
integrity sha512-HEEEeKlZtwfQvH0waSKv5FKRFjHkVgkkEiAigXoYGQAlaUIuwRTvZGFnsmBoKMIrA4pARkA00FwwdtMU7ziC8A==
dependencies:
"@sentry/types" "6.17.7"
tslib "^1.9.3"
"@shopify/jest-koa-mocks@5.1.1": "@shopify/jest-koa-mocks@5.1.1":
version "5.1.1" version "5.1.1"
resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.1.1.tgz#d615815b0cf0ec9823d054da68c419508b3c3193" resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.1.1.tgz#d615815b0cf0ec9823d054da68c419508b3c3193"
@ -15025,11 +14905,6 @@ lru-cache@^9.1.1:
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.1.2.tgz#255fdbc14b75589d6d0e73644ca167a8db506835" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.1.2.tgz#255fdbc14b75589d6d0e73644ca167a8db506835"
integrity sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ== integrity sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ==
lru_map@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd"
integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ==
ltgt@2.2.1, ltgt@^2.1.2, ltgt@~2.2.0: ltgt@2.2.1, ltgt@^2.1.2, ltgt@~2.2.0:
version "2.2.1" version "2.2.1"
resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5" resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5"
@ -21218,7 +21093,7 @@ tsconfig-paths@^4.1.2, tsconfig-paths@^4.2.0:
minimist "^1.2.6" minimist "^1.2.6"
strip-bom "^3.0.0" strip-bom "^3.0.0"
tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0:
version "1.14.1" version "1.14.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
@ -21862,7 +21737,7 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
vm2@^3.9.19, vm2@^3.9.8: vm2@^3.9.19:
version "3.9.19" version "3.9.19"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a" resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg== integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==