Merge branch 'master' of github.com:Budibase/budibase into labday/sqs
This commit is contained in:
commit
fdfd58391a
|
@ -14,7 +14,6 @@ env:
|
||||||
# Posthog token used by ui at build time
|
# Posthog token used by ui at build time
|
||||||
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
|
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
|
||||||
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
|
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
|
||||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
|
||||||
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
@ -110,7 +109,6 @@ jobs:
|
||||||
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
|
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
|
||||||
git push
|
git push
|
||||||
|
|
||||||
|
|
||||||
trigger-deploy-to-qa-env:
|
trigger-deploy-to-qa-env:
|
||||||
needs: [release-helm-chart]
|
needs: [release-helm-chart]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
|
@ -134,8 +134,6 @@ spec:
|
||||||
{{ end }}
|
{{ end }}
|
||||||
- name: SELF_HOSTED
|
- name: SELF_HOSTED
|
||||||
value: {{ .Values.globals.selfHosted | quote }}
|
value: {{ .Values.globals.selfHosted | quote }}
|
||||||
- name: SENTRY_DSN
|
|
||||||
value: {{ .Values.globals.sentryDSN | quote }}
|
|
||||||
- name: POSTHOG_TOKEN
|
- name: POSTHOG_TOKEN
|
||||||
value: {{ .Values.globals.posthogToken | quote }}
|
value: {{ .Values.globals.posthogToken | quote }}
|
||||||
- name: WORKER_URL
|
- name: WORKER_URL
|
||||||
|
|
|
@ -130,8 +130,6 @@ spec:
|
||||||
{{ end }}
|
{{ end }}
|
||||||
- name: SELF_HOSTED
|
- name: SELF_HOSTED
|
||||||
value: {{ .Values.globals.selfHosted | quote }}
|
value: {{ .Values.globals.selfHosted | quote }}
|
||||||
- name: SENTRY_DSN
|
|
||||||
value: {{ .Values.globals.sentryDSN }}
|
|
||||||
- name: ENABLE_ANALYTICS
|
- name: ENABLE_ANALYTICS
|
||||||
value: {{ .Values.globals.enableAnalytics | quote }}
|
value: {{ .Values.globals.enableAnalytics | quote }}
|
||||||
- name: POSTHOG_TOKEN
|
- name: POSTHOG_TOKEN
|
||||||
|
|
|
@ -78,7 +78,6 @@ globals:
|
||||||
budibaseEnv: PRODUCTION
|
budibaseEnv: PRODUCTION
|
||||||
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
|
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
|
||||||
enableAnalytics: "1"
|
enableAnalytics: "1"
|
||||||
sentryDSN: ""
|
|
||||||
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
||||||
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
||||||
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
||||||
|
|
|
@ -3,3 +3,6 @@
|
||||||
[couchdb]
|
[couchdb]
|
||||||
database_dir = DATA_DIR/couch/dbs
|
database_dir = DATA_DIR/couch/dbs
|
||||||
view_index_dir = DATA_DIR/couch/views
|
view_index_dir = DATA_DIR/couch/views
|
||||||
|
|
||||||
|
[chttpd_auth]
|
||||||
|
timeout = 7200 ; 2 hours in seconds
|
||||||
|
|
|
@ -19,7 +19,6 @@ services:
|
||||||
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
|
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
|
||||||
JWT_SECRET: ${JWT_SECRET}
|
JWT_SECRET: ${JWT_SECRET}
|
||||||
LOG_LEVEL: info
|
LOG_LEVEL: info
|
||||||
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
|
|
||||||
ENABLE_ANALYTICS: "true"
|
ENABLE_ANALYTICS: "true"
|
||||||
REDIS_URL: redis-service:6379
|
REDIS_URL: redis-service:6379
|
||||||
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
||||||
|
@ -48,7 +47,6 @@ services:
|
||||||
COUCH_DB_USERNAME: ${COUCH_DB_USER}
|
COUCH_DB_USERNAME: ${COUCH_DB_USER}
|
||||||
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
|
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
|
||||||
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
|
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
|
||||||
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
|
|
||||||
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
|
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
|
||||||
REDIS_URL: redis-service:6379
|
REDIS_URL: redis-service:6379
|
||||||
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
||||||
|
|
|
@ -20,7 +20,6 @@ services:
|
||||||
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
|
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
|
||||||
JWT_SECRET: ${JWT_SECRET}
|
JWT_SECRET: ${JWT_SECRET}
|
||||||
LOG_LEVEL: info
|
LOG_LEVEL: info
|
||||||
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
|
|
||||||
ENABLE_ANALYTICS: "true"
|
ENABLE_ANALYTICS: "true"
|
||||||
REDIS_URL: redis-service:6379
|
REDIS_URL: redis-service:6379
|
||||||
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
||||||
|
@ -31,8 +30,8 @@ services:
|
||||||
depends_on:
|
depends_on:
|
||||||
- worker-service
|
- worker-service
|
||||||
- redis-service
|
- redis-service
|
||||||
# volumes:
|
# volumes:
|
||||||
# - /some/path/to/plugins:/plugins
|
# - /some/path/to/plugins:/plugins
|
||||||
|
|
||||||
worker-service:
|
worker-service:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
@ -51,7 +50,6 @@ services:
|
||||||
COUCH_DB_USERNAME: ${COUCH_DB_USER}
|
COUCH_DB_USERNAME: ${COUCH_DB_USER}
|
||||||
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
|
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
|
||||||
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
|
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
|
||||||
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
|
|
||||||
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
|
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
|
||||||
REDIS_URL: redis-service:6379
|
REDIS_URL: redis-service:6379
|
||||||
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
REDIS_PASSWORD: ${REDIS_PASSWORD}
|
||||||
|
@ -113,7 +111,12 @@ services:
|
||||||
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
|
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
|
||||||
depends_on:
|
depends_on:
|
||||||
- couchdb-service
|
- couchdb-service
|
||||||
command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
|
command:
|
||||||
|
[
|
||||||
|
"sh",
|
||||||
|
"-c",
|
||||||
|
"sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;",
|
||||||
|
]
|
||||||
|
|
||||||
redis-service:
|
redis-service:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"version": "2.11.36",
|
"version": "2.11.39",
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"packages": [
|
"packages": [
|
||||||
"packages/*"
|
"packages/*"
|
||||||
|
|
|
@ -33,8 +33,8 @@ function isInvalid(metadata?: { state: string }) {
|
||||||
* Get the requested app metadata by id.
|
* Get the requested app metadata by id.
|
||||||
* Use redis cache to first read the app metadata.
|
* Use redis cache to first read the app metadata.
|
||||||
* If not present fallback to loading the app metadata directly and re-caching.
|
* If not present fallback to loading the app metadata directly and re-caching.
|
||||||
* @param {string} appId the id of the app to get metadata from.
|
* @param appId the id of the app to get metadata from.
|
||||||
* @returns {object} the app metadata.
|
* @returns the app metadata.
|
||||||
*/
|
*/
|
||||||
export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
|
export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
|
||||||
const client = await getAppClient()
|
const client = await getAppClient()
|
||||||
|
@ -72,9 +72,9 @@ export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Invalidate/reset the cached metadata when a change occurs in the db.
|
* Invalidate/reset the cached metadata when a change occurs in the db.
|
||||||
* @param appId {string} the cache key to bust/update.
|
* @param appId the cache key to bust/update.
|
||||||
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
|
* @param newMetadata optional - can simply provide the new metadata to update with.
|
||||||
* @return {Promise<void>} will respond with success when cache is updated.
|
* @return will respond with success when cache is updated.
|
||||||
*/
|
*/
|
||||||
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
|
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
|
||||||
if (!appId) {
|
if (!appId) {
|
||||||
|
|
|
@ -61,9 +61,9 @@ async function populateUsersFromDB(
|
||||||
* Get the requested user by id.
|
* Get the requested user by id.
|
||||||
* Use redis cache to first read the user.
|
* Use redis cache to first read the user.
|
||||||
* If not present fallback to loading the user directly and re-caching.
|
* If not present fallback to loading the user directly and re-caching.
|
||||||
* @param {*} userId the id of the user to get
|
* @param userId the id of the user to get
|
||||||
* @param {*} tenantId the tenant of the user to get
|
* @param tenantId the tenant of the user to get
|
||||||
* @param {*} populateUser function to provide the user for re-caching. default to couch db
|
* @param populateUser function to provide the user for re-caching. default to couch db
|
||||||
* @returns
|
* @returns
|
||||||
*/
|
*/
|
||||||
export async function getUser(
|
export async function getUser(
|
||||||
|
@ -111,8 +111,8 @@ export async function getUser(
|
||||||
* Get the requested users by id.
|
* Get the requested users by id.
|
||||||
* Use redis cache to first read the users.
|
* Use redis cache to first read the users.
|
||||||
* If not present fallback to loading the users directly and re-caching.
|
* If not present fallback to loading the users directly and re-caching.
|
||||||
* @param {*} userIds the ids of the user to get
|
* @param userIds the ids of the user to get
|
||||||
* @param {*} tenantId the tenant of the users to get
|
* @param tenantId the tenant of the users to get
|
||||||
* @returns
|
* @returns
|
||||||
*/
|
*/
|
||||||
export async function getUsers(
|
export async function getUsers(
|
||||||
|
|
|
@ -119,8 +119,8 @@ export class Writethrough {
|
||||||
this.writeRateMs = writeRateMs
|
this.writeRateMs = writeRateMs
|
||||||
}
|
}
|
||||||
|
|
||||||
async put(doc: any) {
|
async put(doc: any, writeRateMs: number = this.writeRateMs) {
|
||||||
return put(this.db, doc, this.writeRateMs)
|
return put(this.db, doc, writeRateMs)
|
||||||
}
|
}
|
||||||
|
|
||||||
async get(id: string) {
|
async get(id: string) {
|
||||||
|
|
|
@ -23,7 +23,7 @@ import environment from "../environment"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new configuration ID.
|
* Generates a new configuration ID.
|
||||||
* @returns {string} The new configuration ID which the config doc can be stored under.
|
* @returns The new configuration ID which the config doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateConfigID(type: ConfigType) {
|
export function generateConfigID(type: ConfigType) {
|
||||||
return `${DocumentType.CONFIG}${SEPARATOR}${type}`
|
return `${DocumentType.CONFIG}${SEPARATOR}${type}`
|
||||||
|
|
|
@ -62,7 +62,7 @@ export function isTenancyEnabled() {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
||||||
* @return {null|string} The tenant ID found within the app ID.
|
* @return The tenant ID found within the app ID.
|
||||||
*/
|
*/
|
||||||
export function getTenantIDFromAppID(appId: string) {
|
export function getTenantIDFromAppID(appId: string) {
|
||||||
if (!appId) {
|
if (!appId) {
|
||||||
|
|
|
@ -8,8 +8,8 @@ class Replication {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
* @param {String} source - the DB you want to replicate or rollback to
|
* @param source - the DB you want to replicate or rollback to
|
||||||
* @param {String} target - the DB you want to replicate to, or rollback from
|
* @param target - the DB you want to replicate to, or rollback from
|
||||||
*/
|
*/
|
||||||
constructor({ source, target }: any) {
|
constructor({ source, target }: any) {
|
||||||
this.source = getPouchDB(source)
|
this.source = getPouchDB(source)
|
||||||
|
@ -38,7 +38,7 @@ class Replication {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Two way replication operation, intended to be promise based.
|
* Two way replication operation, intended to be promise based.
|
||||||
* @param {Object} opts - PouchDB replication options
|
* @param opts - PouchDB replication options
|
||||||
*/
|
*/
|
||||||
sync(opts = {}) {
|
sync(opts = {}) {
|
||||||
this.replication = this.promisify(this.source.sync, opts)
|
this.replication = this.promisify(this.source.sync, opts)
|
||||||
|
@ -47,7 +47,7 @@ class Replication {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* One way replication operation, intended to be promise based.
|
* One way replication operation, intended to be promise based.
|
||||||
* @param {Object} opts - PouchDB replication options
|
* @param opts - PouchDB replication options
|
||||||
*/
|
*/
|
||||||
replicate(opts = {}) {
|
replicate(opts = {}) {
|
||||||
this.replication = this.promisify(this.source.replicate.to, opts)
|
this.replication = this.promisify(this.source.replicate.to, opts)
|
||||||
|
|
|
@ -599,10 +599,10 @@ async function runQuery<T>(
|
||||||
* Gets round the fixed limit of 200 results from a query by fetching as many
|
* Gets round the fixed limit of 200 results from a query by fetching as many
|
||||||
* pages as required and concatenating the results. This recursively operates
|
* pages as required and concatenating the results. This recursively operates
|
||||||
* until enough results have been found.
|
* until enough results have been found.
|
||||||
* @param dbName {string} Which database to run a lucene query on
|
* @param dbName Which database to run a lucene query on
|
||||||
* @param index {string} Which search index to utilise
|
* @param index Which search index to utilise
|
||||||
* @param query {object} The JSON query structure
|
* @param query The JSON query structure
|
||||||
* @param params {object} The search params including:
|
* @param params The search params including:
|
||||||
* tableId {string} The table ID to search
|
* tableId {string} The table ID to search
|
||||||
* sort {string} The sort column
|
* sort {string} The sort column
|
||||||
* sortOrder {string} The sort order ("ascending" or "descending")
|
* sortOrder {string} The sort order ("ascending" or "descending")
|
||||||
|
@ -655,10 +655,10 @@ async function recursiveSearch<T>(
|
||||||
* Performs a paginated search. A bookmark will be returned to allow the next
|
* Performs a paginated search. A bookmark will be returned to allow the next
|
||||||
* page to be fetched. There is a max limit off 200 results per page in a
|
* page to be fetched. There is a max limit off 200 results per page in a
|
||||||
* paginated search.
|
* paginated search.
|
||||||
* @param dbName {string} Which database to run a lucene query on
|
* @param dbName Which database to run a lucene query on
|
||||||
* @param index {string} Which search index to utilise
|
* @param index Which search index to utilise
|
||||||
* @param query {object} The JSON query structure
|
* @param query The JSON query structure
|
||||||
* @param params {object} The search params including:
|
* @param params The search params including:
|
||||||
* tableId {string} The table ID to search
|
* tableId {string} The table ID to search
|
||||||
* sort {string} The sort column
|
* sort {string} The sort column
|
||||||
* sortOrder {string} The sort order ("ascending" or "descending")
|
* sortOrder {string} The sort order ("ascending" or "descending")
|
||||||
|
@ -722,10 +722,10 @@ export async function paginatedSearch<T>(
|
||||||
* desired amount of results. There is a limit of 1000 results to avoid
|
* desired amount of results. There is a limit of 1000 results to avoid
|
||||||
* heavy performance hits, and to avoid client components breaking from
|
* heavy performance hits, and to avoid client components breaking from
|
||||||
* handling too much data.
|
* handling too much data.
|
||||||
* @param dbName {string} Which database to run a lucene query on
|
* @param dbName Which database to run a lucene query on
|
||||||
* @param index {string} Which search index to utilise
|
* @param index Which search index to utilise
|
||||||
* @param query {object} The JSON query structure
|
* @param query The JSON query structure
|
||||||
* @param params {object} The search params including:
|
* @param params The search params including:
|
||||||
* tableId {string} The table ID to search
|
* tableId {string} The table ID to search
|
||||||
* sort {string} The sort column
|
* sort {string} The sort column
|
||||||
* sortOrder {string} The sort order ("ascending" or "descending")
|
* sortOrder {string} The sort order ("ascending" or "descending")
|
||||||
|
|
|
@ -45,7 +45,7 @@ export async function getAllDbs(opts = { efficient: false }) {
|
||||||
* Lots of different points in the system need to find the full list of apps, this will
|
* Lots of different points in the system need to find the full list of apps, this will
|
||||||
* enumerate the entire CouchDB cluster and get the list of databases (every app).
|
* enumerate the entire CouchDB cluster and get the list of databases (every app).
|
||||||
*
|
*
|
||||||
* @return {Promise<object[]>} returns the app information document stored in each app database.
|
* @return returns the app information document stored in each app database.
|
||||||
*/
|
*/
|
||||||
export async function getAllApps({
|
export async function getAllApps({
|
||||||
dev,
|
dev,
|
||||||
|
|
|
@ -25,7 +25,7 @@ export function isDevApp(app: App) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a development app ID from a real app ID.
|
* Generates a development app ID from a real app ID.
|
||||||
* @returns {string} the dev app ID which can be used for dev database.
|
* @returns the dev app ID which can be used for dev database.
|
||||||
*/
|
*/
|
||||||
export function getDevelopmentAppID(appId: string) {
|
export function getDevelopmentAppID(appId: string) {
|
||||||
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
|
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
|
||||||
|
|
|
@ -8,7 +8,7 @@ import { newid } from "./newid"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new app ID.
|
* Generates a new app ID.
|
||||||
* @returns {string} The new app ID which the app doc can be stored under.
|
* @returns The new app ID which the app doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export const generateAppID = (tenantId?: string | null) => {
|
export const generateAppID = (tenantId?: string | null) => {
|
||||||
let id = APP_PREFIX
|
let id = APP_PREFIX
|
||||||
|
@ -20,9 +20,9 @@ export const generateAppID = (tenantId?: string | null) => {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets a new row ID for the specified table.
|
* Gets a new row ID for the specified table.
|
||||||
* @param {string} tableId The table which the row is being created for.
|
* @param tableId The table which the row is being created for.
|
||||||
* @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
|
* @param id If an ID is to be used then the UUID can be substituted for this.
|
||||||
* @returns {string} The new ID which a row doc can be stored under.
|
* @returns The new ID which a row doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateRowID(tableId: string, id?: string) {
|
export function generateRowID(tableId: string, id?: string) {
|
||||||
id = id || newid()
|
id = id || newid()
|
||||||
|
@ -31,7 +31,7 @@ export function generateRowID(tableId: string, id?: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new workspace ID.
|
* Generates a new workspace ID.
|
||||||
* @returns {string} The new workspace ID which the workspace doc can be stored under.
|
* @returns The new workspace ID which the workspace doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateWorkspaceID() {
|
export function generateWorkspaceID() {
|
||||||
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
|
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
|
||||||
|
@ -39,7 +39,7 @@ export function generateWorkspaceID() {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new global user ID.
|
* Generates a new global user ID.
|
||||||
* @returns {string} The new user ID which the user doc can be stored under.
|
* @returns The new user ID which the user doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateGlobalUserID(id?: any) {
|
export function generateGlobalUserID(id?: any) {
|
||||||
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
|
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
|
||||||
|
@ -52,8 +52,8 @@ export function isGlobalUserID(id: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new user ID based on the passed in global ID.
|
* Generates a new user ID based on the passed in global ID.
|
||||||
* @param {string} globalId The ID of the global user.
|
* @param globalId The ID of the global user.
|
||||||
* @returns {string} The new user ID which the user doc can be stored under.
|
* @returns The new user ID which the user doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateUserMetadataID(globalId: string) {
|
export function generateUserMetadataID(globalId: string) {
|
||||||
return generateRowID(InternalTable.USER_METADATA, globalId)
|
return generateRowID(InternalTable.USER_METADATA, globalId)
|
||||||
|
@ -84,7 +84,7 @@ export function generateAppUserID(prodAppId: string, userId: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new role ID.
|
* Generates a new role ID.
|
||||||
* @returns {string} The new role ID which the role doc can be stored under.
|
* @returns The new role ID which the role doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateRoleID(name: string) {
|
export function generateRoleID(name: string) {
|
||||||
const prefix = `${DocumentType.ROLE}${SEPARATOR}`
|
const prefix = `${DocumentType.ROLE}${SEPARATOR}`
|
||||||
|
@ -103,7 +103,7 @@ export function prefixRoleID(name: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new dev info document ID - this is scoped to a user.
|
* Generates a new dev info document ID - this is scoped to a user.
|
||||||
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
|
* @returns The new dev info ID which info for dev (like api key) can be stored under.
|
||||||
*/
|
*/
|
||||||
export const generateDevInfoID = (userId: any) => {
|
export const generateDevInfoID = (userId: any) => {
|
||||||
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
|
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
|
||||||
|
@ -111,7 +111,7 @@ export const generateDevInfoID = (userId: any) => {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new plugin ID - to be used in the global DB.
|
* Generates a new plugin ID - to be used in the global DB.
|
||||||
* @returns {string} The new plugin ID which a plugin metadata document can be stored under.
|
* @returns The new plugin ID which a plugin metadata document can be stored under.
|
||||||
*/
|
*/
|
||||||
export const generatePluginID = (name: string) => {
|
export const generatePluginID = (name: string) => {
|
||||||
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
|
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
|
||||||
|
|
|
@ -12,12 +12,12 @@ import { getProdAppID } from "./conversions"
|
||||||
* is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
|
* is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
|
||||||
* More complex cases such as link docs and rows which have multiple levels of IDs that their
|
* More complex cases such as link docs and rows which have multiple levels of IDs that their
|
||||||
* ID consists of need their own functions to build the allDocs parameters.
|
* ID consists of need their own functions to build the allDocs parameters.
|
||||||
* @param {string} docType The type of document which input params are being built for, e.g. user,
|
* @param docType The type of document which input params are being built for, e.g. user,
|
||||||
* link, app, table and so on.
|
* link, app, table and so on.
|
||||||
* @param {string|null} docId The ID of the document minus its type - this is only needed if looking
|
* @param docId The ID of the document minus its type - this is only needed if looking
|
||||||
* for a singular document.
|
* for a singular document.
|
||||||
* @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
|
* @param otherProps Add any other properties onto the request, e.g. include_docs.
|
||||||
* @returns {object} Parameters which can then be used with an allDocs request.
|
* @returns Parameters which can then be used with an allDocs request.
|
||||||
*/
|
*/
|
||||||
export function getDocParams(
|
export function getDocParams(
|
||||||
docType: string,
|
docType: string,
|
||||||
|
@ -36,11 +36,11 @@ export function getDocParams(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the DB allDocs/query params for retrieving a row.
|
* Gets the DB allDocs/query params for retrieving a row.
|
||||||
* @param {string|null} tableId The table in which the rows have been stored.
|
* @param tableId The table in which the rows have been stored.
|
||||||
* @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
|
* @param rowId The ID of the row which is being specifically queried for. This can be
|
||||||
* left null to get all the rows in the table.
|
* left null to get all the rows in the table.
|
||||||
* @param {object} otherProps Any other properties to add to the request.
|
* @param otherProps Any other properties to add to the request.
|
||||||
* @returns {object} Parameters which can then be used with an allDocs request.
|
* @returns Parameters which can then be used with an allDocs request.
|
||||||
*/
|
*/
|
||||||
export function getRowParams(
|
export function getRowParams(
|
||||||
tableId?: string | null,
|
tableId?: string | null,
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
/**
|
/**
|
||||||
* Makes sure that a URL has the correct number of slashes, while maintaining the
|
* Makes sure that a URL has the correct number of slashes, while maintaining the
|
||||||
* http(s):// double slashes.
|
* http(s):// double slashes.
|
||||||
* @param {string} url The URL to test and remove any extra double slashes.
|
* @param url The URL to test and remove any extra double slashes.
|
||||||
* @return {string} The updated url.
|
* @return The updated url.
|
||||||
*/
|
*/
|
||||||
export function checkSlashesInUrl(url: string) {
|
export function checkSlashesInUrl(url: string) {
|
||||||
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
||||||
|
|
|
@ -13,10 +13,10 @@ export const options = {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Passport Local Authentication Middleware.
|
* Passport Local Authentication Middleware.
|
||||||
* @param {*} ctx the request structure
|
* @param ctx the request structure
|
||||||
* @param {*} email username to login with
|
* @param email username to login with
|
||||||
* @param {*} password plain text password to log in with
|
* @param password plain text password to log in with
|
||||||
* @param {*} done callback from passport to return user information and errors
|
* @param done callback from passport to return user information and errors
|
||||||
* @returns The authenticated user, or errors if they occur
|
* @returns The authenticated user, or errors if they occur
|
||||||
*/
|
*/
|
||||||
export async function authenticate(
|
export async function authenticate(
|
||||||
|
|
|
@ -17,15 +17,15 @@ const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||||
|
|
||||||
export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
|
export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
|
||||||
/**
|
/**
|
||||||
* @param {*} issuer The identity provider base URL
|
* @param issuer The identity provider base URL
|
||||||
* @param {*} sub The user ID
|
* @param sub The user ID
|
||||||
* @param {*} profile The user profile information. Created by passport from the /userinfo response
|
* @param profile The user profile information. Created by passport from the /userinfo response
|
||||||
* @param {*} jwtClaims The parsed id_token claims
|
* @param jwtClaims The parsed id_token claims
|
||||||
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
|
* @param accessToken The access_token for contacting the identity provider - may or may not be a JWT
|
||||||
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
|
* @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
|
||||||
* @param {*} idToken The id_token - always a JWT
|
* @param idToken The id_token - always a JWT
|
||||||
* @param {*} params The response body from requesting an access_token
|
* @param params The response body from requesting an access_token
|
||||||
* @param {*} done The passport callback: err, user, info
|
* @param done The passport callback: err, user, info
|
||||||
*/
|
*/
|
||||||
return async (
|
return async (
|
||||||
issuer: string,
|
issuer: string,
|
||||||
|
@ -61,8 +61,8 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {*} profile The structured profile created by passport using the user info endpoint
|
* @param profile The structured profile created by passport using the user info endpoint
|
||||||
* @param {*} jwtClaims The claims returned in the id token
|
* @param jwtClaims The claims returned in the id token
|
||||||
*/
|
*/
|
||||||
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
|
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
|
||||||
// profile not guaranteed to contain email e.g. github connected azure ad account
|
// profile not guaranteed to contain email e.g. github connected azure ad account
|
||||||
|
|
|
@ -5,9 +5,9 @@ import { ConfigType, GoogleInnerConfig } from "@budibase/types"
|
||||||
/**
|
/**
|
||||||
* Utility to handle authentication errors.
|
* Utility to handle authentication errors.
|
||||||
*
|
*
|
||||||
* @param {*} done The passport callback.
|
* @param done The passport callback.
|
||||||
* @param {*} message Message that will be returned in the response body
|
* @param message Message that will be returned in the response body
|
||||||
* @param {*} err (Optional) error that will be logged
|
* @param err (Optional) error that will be logged
|
||||||
*/
|
*/
|
||||||
|
|
||||||
export function authError(done: Function, message: string, err?: any) {
|
export function authError(done: Function, message: string, err?: any) {
|
||||||
|
|
|
@ -6,10 +6,10 @@ import * as cloudfront from "../cloudfront"
|
||||||
* In production the client library is stored in the object store, however in development
|
* In production the client library is stored in the object store, however in development
|
||||||
* we use the symlinked version produced by lerna, located in node modules. We link to this
|
* we use the symlinked version produced by lerna, located in node modules. We link to this
|
||||||
* via a specific endpoint (under /api/assets/client).
|
* via a specific endpoint (under /api/assets/client).
|
||||||
* @param {string} appId In production we need the appId to look up the correct bucket, as the
|
* @param appId In production we need the appId to look up the correct bucket, as the
|
||||||
* version of the client lib may differ between apps.
|
* version of the client lib may differ between apps.
|
||||||
* @param {string} version The version to retrieve.
|
* @param version The version to retrieve.
|
||||||
* @return {string} The URL to be inserted into appPackage response or server rendered
|
* @return The URL to be inserted into appPackage response or server rendered
|
||||||
* app index file.
|
* app index file.
|
||||||
*/
|
*/
|
||||||
export const clientLibraryUrl = (appId: string, version: string) => {
|
export const clientLibraryUrl = (appId: string, version: string) => {
|
||||||
|
|
|
@ -61,9 +61,9 @@ export function sanitizeBucket(input: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets a connection to the object store using the S3 SDK.
|
* Gets a connection to the object store using the S3 SDK.
|
||||||
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
|
* @param bucket the name of the bucket which blobs will be uploaded/retrieved from.
|
||||||
* @param {object} opts configuration for the object store.
|
* @param opts configuration for the object store.
|
||||||
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
|
* @return an S3 object store object, check S3 Nodejs SDK for usage.
|
||||||
* @constructor
|
* @constructor
|
||||||
*/
|
*/
|
||||||
export const ObjectStore = (
|
export const ObjectStore = (
|
||||||
|
|
|
@ -5,9 +5,9 @@ import { timeout } from "../utils"
|
||||||
* Bull works with a Job wrapper around all messages that contains a lot more information about
|
* Bull works with a Job wrapper around all messages that contains a lot more information about
|
||||||
* the state of the message, this object constructor implements the same schema of Bull jobs
|
* the state of the message, this object constructor implements the same schema of Bull jobs
|
||||||
* for the sake of maintaining API consistency.
|
* for the sake of maintaining API consistency.
|
||||||
* @param {string} queue The name of the queue which the message will be carried on.
|
* @param queue The name of the queue which the message will be carried on.
|
||||||
* @param {object} message The JSON message which will be passed back to the consumer.
|
* @param message The JSON message which will be passed back to the consumer.
|
||||||
* @returns {Object} A new job which can now be put onto the queue, this is mostly an
|
* @returns A new job which can now be put onto the queue, this is mostly an
|
||||||
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
|
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
|
||||||
*/
|
*/
|
||||||
function newJob(queue: string, message: any) {
|
function newJob(queue: string, message: any) {
|
||||||
|
@ -32,8 +32,8 @@ class InMemoryQueue {
|
||||||
_addCount: number
|
_addCount: number
|
||||||
/**
|
/**
|
||||||
* The constructor the queue, exactly the same as that of Bulls.
|
* The constructor the queue, exactly the same as that of Bulls.
|
||||||
* @param {string} name The name of the queue which is being configured.
|
* @param name The name of the queue which is being configured.
|
||||||
* @param {object|null} opts This is not used by the in memory queue as there is no real use
|
* @param opts This is not used by the in memory queue as there is no real use
|
||||||
* case when in memory, but is the same API as Bull
|
* case when in memory, but is the same API as Bull
|
||||||
*/
|
*/
|
||||||
constructor(name: string, opts = null) {
|
constructor(name: string, opts = null) {
|
||||||
|
@ -49,7 +49,7 @@ class InMemoryQueue {
|
||||||
* Same callback API as Bull, each callback passed to this will consume messages as they are
|
* Same callback API as Bull, each callback passed to this will consume messages as they are
|
||||||
* available. Please note this is a queue service, not a notification service, so each
|
* available. Please note this is a queue service, not a notification service, so each
|
||||||
* consumer will receive different messages.
|
* consumer will receive different messages.
|
||||||
* @param {function<object>} func The callback function which will return a "Job", the same
|
* @param func The callback function which will return a "Job", the same
|
||||||
* as the Bull API, within this job the property "data" contains the JSON message. Please
|
* as the Bull API, within this job the property "data" contains the JSON message. Please
|
||||||
* note this is incredibly limited compared to Bull as in reality the Job would contain
|
* note this is incredibly limited compared to Bull as in reality the Job would contain
|
||||||
* a lot more information about the queue and current status of Bull cluster.
|
* a lot more information about the queue and current status of Bull cluster.
|
||||||
|
@ -73,9 +73,9 @@ class InMemoryQueue {
|
||||||
* Simple function to replicate the add message functionality of Bull, putting
|
* Simple function to replicate the add message functionality of Bull, putting
|
||||||
* a new message on the queue. This then emits an event which will be used to
|
* a new message on the queue. This then emits an event which will be used to
|
||||||
* return the message to a consumer (if one is attached).
|
* return the message to a consumer (if one is attached).
|
||||||
* @param {object} msg A message to be transported over the queue, this should be
|
* @param msg A message to be transported over the queue, this should be
|
||||||
* a JSON message as this is required by Bull.
|
* a JSON message as this is required by Bull.
|
||||||
* @param {boolean} repeat serves no purpose for the import queue.
|
* @param repeat serves no purpose for the import queue.
|
||||||
*/
|
*/
|
||||||
// eslint-disable-next-line no-unused-vars
|
// eslint-disable-next-line no-unused-vars
|
||||||
add(msg: any, repeat: boolean) {
|
add(msg: any, repeat: boolean) {
|
||||||
|
@ -96,7 +96,7 @@ class InMemoryQueue {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This removes a cron which has been implemented, this is part of Bull API.
|
* This removes a cron which has been implemented, this is part of Bull API.
|
||||||
* @param {string} cronJobId The cron which is to be removed.
|
* @param cronJobId The cron which is to be removed.
|
||||||
*/
|
*/
|
||||||
removeRepeatableByKey(cronJobId: string) {
|
removeRepeatableByKey(cronJobId: string) {
|
||||||
// TODO: implement for testing
|
// TODO: implement for testing
|
||||||
|
|
|
@ -142,7 +142,7 @@ function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
|
||||||
* this can only be done with redis streams because they will have an end.
|
* this can only be done with redis streams because they will have an end.
|
||||||
* @param stream A redis stream, specifically as this type of stream will have an end.
|
* @param stream A redis stream, specifically as this type of stream will have an end.
|
||||||
* @param client The client to use for further lookups.
|
* @param client The client to use for further lookups.
|
||||||
* @return {Promise<object>} The final output of the stream
|
* @return The final output of the stream
|
||||||
*/
|
*/
|
||||||
function promisifyStream(stream: any, client: RedisWrapper) {
|
function promisifyStream(stream: any, client: RedisWrapper) {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
|
|
|
@ -36,8 +36,8 @@ export function levelToNumber(perm: PermissionLevel) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Given the specified permission level for the user return the levels they are allowed to carry out.
|
* Given the specified permission level for the user return the levels they are allowed to carry out.
|
||||||
* @param {string} userPermLevel The permission level of the user.
|
* @param userPermLevel The permission level of the user.
|
||||||
* @return {string[]} All the permission levels this user is allowed to carry out.
|
* @return All the permission levels this user is allowed to carry out.
|
||||||
*/
|
*/
|
||||||
export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
|
export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
|
||||||
switch (userPermLevel) {
|
switch (userPermLevel) {
|
||||||
|
|
|
@ -149,9 +149,9 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
|
||||||
/**
|
/**
|
||||||
* Gets the role object, this is mainly useful for two purposes, to check if the level exists and
|
* Gets the role object, this is mainly useful for two purposes, to check if the level exists and
|
||||||
* to check if the role inherits any others.
|
* to check if the role inherits any others.
|
||||||
* @param {string|null} roleId The level ID to lookup.
|
* @param roleId The level ID to lookup.
|
||||||
* @param {object|null} opts options for the function, like whether to halt errors, instead return public.
|
* @param opts options for the function, like whether to halt errors, instead return public.
|
||||||
* @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
|
* @returns The role object, which may contain an "inherits" property.
|
||||||
*/
|
*/
|
||||||
export async function getRole(
|
export async function getRole(
|
||||||
roleId?: string,
|
roleId?: string,
|
||||||
|
@ -225,8 +225,8 @@ export async function getUserRoleIdHierarchy(
|
||||||
/**
|
/**
|
||||||
* Returns an ordered array of the user's inherited role IDs, this can be used
|
* Returns an ordered array of the user's inherited role IDs, this can be used
|
||||||
* to determine if a user can access something that requires a specific role.
|
* to determine if a user can access something that requires a specific role.
|
||||||
* @param {string} userRoleId The user's role ID, this can be found in their access token.
|
* @param userRoleId The user's role ID, this can be found in their access token.
|
||||||
* @returns {Promise<object[]>} returns an ordered array of the roles, with the first being their
|
* @returns returns an ordered array of the roles, with the first being their
|
||||||
* highest level of access and the last being the lowest level.
|
* highest level of access and the last being the lowest level.
|
||||||
*/
|
*/
|
||||||
export async function getUserRoleHierarchy(userRoleId?: string) {
|
export async function getUserRoleHierarchy(userRoleId?: string) {
|
||||||
|
@ -258,7 +258,7 @@ export async function getAllRoleIds(appId?: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Given an app ID this will retrieve all of the roles that are currently within that app.
|
* Given an app ID this will retrieve all of the roles that are currently within that app.
|
||||||
* @return {Promise<object[]>} An array of the role objects that were found.
|
* @return An array of the role objects that were found.
|
||||||
*/
|
*/
|
||||||
export async function getAllRoles(appId?: string): Promise<RoleDoc[]> {
|
export async function getAllRoles(appId?: string): Promise<RoleDoc[]> {
|
||||||
if (appId) {
|
if (appId) {
|
||||||
|
|
|
@ -21,17 +21,21 @@ import {
|
||||||
User,
|
User,
|
||||||
UserStatus,
|
UserStatus,
|
||||||
UserGroup,
|
UserGroup,
|
||||||
ContextUser,
|
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import {
|
import {
|
||||||
getAccountHolderFromUserIds,
|
getAccountHolderFromUserIds,
|
||||||
isAdmin,
|
isAdmin,
|
||||||
|
isCreator,
|
||||||
validateUniqueUser,
|
validateUniqueUser,
|
||||||
} from "./utils"
|
} from "./utils"
|
||||||
import { searchExistingEmails } from "./lookup"
|
import { searchExistingEmails } from "./lookup"
|
||||||
import { hash } from "../utils"
|
import { hash } from "../utils"
|
||||||
|
|
||||||
type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
|
type QuotaUpdateFn = (
|
||||||
|
change: number,
|
||||||
|
creatorsChange: number,
|
||||||
|
cb?: () => Promise<any>
|
||||||
|
) => Promise<any>
|
||||||
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
|
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
|
||||||
type FeatureFn = () => Promise<Boolean>
|
type FeatureFn = () => Promise<Boolean>
|
||||||
type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
|
type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
|
||||||
|
@ -135,7 +139,7 @@ export class UserDB {
|
||||||
if (!fullUser.roles) {
|
if (!fullUser.roles) {
|
||||||
fullUser.roles = {}
|
fullUser.roles = {}
|
||||||
}
|
}
|
||||||
// add the active status to a user if its not provided
|
// add the active status to a user if it's not provided
|
||||||
if (fullUser.status == null) {
|
if (fullUser.status == null) {
|
||||||
fullUser.status = UserStatus.ACTIVE
|
fullUser.status = UserStatus.ACTIVE
|
||||||
}
|
}
|
||||||
|
@ -246,7 +250,8 @@ export class UserDB {
|
||||||
}
|
}
|
||||||
|
|
||||||
const change = dbUser ? 0 : 1 // no change if there is existing user
|
const change = dbUser ? 0 : 1 // no change if there is existing user
|
||||||
return UserDB.quotas.addUsers(change, async () => {
|
const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
|
||||||
|
return UserDB.quotas.addUsers(change, creatorsChange, async () => {
|
||||||
await validateUniqueUser(email, tenantId)
|
await validateUniqueUser(email, tenantId)
|
||||||
|
|
||||||
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
|
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
|
||||||
|
@ -308,6 +313,7 @@ export class UserDB {
|
||||||
|
|
||||||
let usersToSave: any[] = []
|
let usersToSave: any[] = []
|
||||||
let newUsers: any[] = []
|
let newUsers: any[] = []
|
||||||
|
let newCreators: any[] = []
|
||||||
|
|
||||||
const emails = newUsersRequested.map((user: User) => user.email)
|
const emails = newUsersRequested.map((user: User) => user.email)
|
||||||
const existingEmails = await searchExistingEmails(emails)
|
const existingEmails = await searchExistingEmails(emails)
|
||||||
|
@ -328,59 +334,66 @@ export class UserDB {
|
||||||
}
|
}
|
||||||
newUser.userGroups = groups
|
newUser.userGroups = groups
|
||||||
newUsers.push(newUser)
|
newUsers.push(newUser)
|
||||||
|
if (isCreator(newUser)) {
|
||||||
|
newCreators.push(newUser)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const account = await accountSdk.getAccountByTenantId(tenantId)
|
const account = await accountSdk.getAccountByTenantId(tenantId)
|
||||||
return UserDB.quotas.addUsers(newUsers.length, async () => {
|
return UserDB.quotas.addUsers(
|
||||||
// create the promises array that will be called by bulkDocs
|
newUsers.length,
|
||||||
newUsers.forEach((user: any) => {
|
newCreators.length,
|
||||||
usersToSave.push(
|
async () => {
|
||||||
UserDB.buildUser(
|
// create the promises array that will be called by bulkDocs
|
||||||
user,
|
newUsers.forEach((user: any) => {
|
||||||
{
|
usersToSave.push(
|
||||||
hashPassword: true,
|
UserDB.buildUser(
|
||||||
requirePassword: user.requirePassword,
|
user,
|
||||||
},
|
{
|
||||||
tenantId,
|
hashPassword: true,
|
||||||
undefined, // no dbUser
|
requirePassword: user.requirePassword,
|
||||||
account
|
},
|
||||||
|
tenantId,
|
||||||
|
undefined, // no dbUser
|
||||||
|
account
|
||||||
|
)
|
||||||
)
|
)
|
||||||
)
|
})
|
||||||
})
|
|
||||||
|
|
||||||
const usersToBulkSave = await Promise.all(usersToSave)
|
const usersToBulkSave = await Promise.all(usersToSave)
|
||||||
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
||||||
|
|
||||||
// Post-processing of bulk added users, e.g. events and cache operations
|
// Post-processing of bulk added users, e.g. events and cache operations
|
||||||
for (const user of usersToBulkSave) {
|
for (const user of usersToBulkSave) {
|
||||||
// TODO: Refactor to bulk insert users into the info db
|
// TODO: Refactor to bulk insert users into the info db
|
||||||
// instead of relying on looping tenant creation
|
// instead of relying on looping tenant creation
|
||||||
await platform.users.addUser(tenantId, user._id, user.email)
|
await platform.users.addUser(tenantId, user._id, user.email)
|
||||||
await eventHelpers.handleSaveEvents(user, undefined)
|
await eventHelpers.handleSaveEvents(user, undefined)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const saved = usersToBulkSave.map(user => {
|
||||||
|
return {
|
||||||
|
_id: user._id,
|
||||||
|
email: user.email,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// now update the groups
|
||||||
|
if (Array.isArray(saved) && groups) {
|
||||||
|
const groupPromises = []
|
||||||
|
const createdUserIds = saved.map(user => user._id)
|
||||||
|
for (let groupId of groups) {
|
||||||
|
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
||||||
|
}
|
||||||
|
await Promise.all(groupPromises)
|
||||||
|
}
|
||||||
|
|
||||||
const saved = usersToBulkSave.map(user => {
|
|
||||||
return {
|
return {
|
||||||
_id: user._id,
|
successful: saved,
|
||||||
email: user.email,
|
unsuccessful,
|
||||||
}
|
}
|
||||||
})
|
|
||||||
|
|
||||||
// now update the groups
|
|
||||||
if (Array.isArray(saved) && groups) {
|
|
||||||
const groupPromises = []
|
|
||||||
const createdUserIds = saved.map(user => user._id)
|
|
||||||
for (let groupId of groups) {
|
|
||||||
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
|
||||||
}
|
|
||||||
await Promise.all(groupPromises)
|
|
||||||
}
|
}
|
||||||
|
)
|
||||||
return {
|
|
||||||
successful: saved,
|
|
||||||
unsuccessful,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
|
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
|
||||||
|
@ -420,11 +433,12 @@ export class UserDB {
|
||||||
_deleted: true,
|
_deleted: true,
|
||||||
}))
|
}))
|
||||||
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
|
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
|
||||||
|
const creatorsToDelete = usersToDelete.filter(isCreator)
|
||||||
|
|
||||||
await UserDB.quotas.removeUsers(toDelete.length)
|
|
||||||
for (let user of usersToDelete) {
|
for (let user of usersToDelete) {
|
||||||
await bulkDeleteProcessing(user)
|
await bulkDeleteProcessing(user)
|
||||||
}
|
}
|
||||||
|
await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
|
||||||
|
|
||||||
// Build Response
|
// Build Response
|
||||||
// index users by id
|
// index users by id
|
||||||
|
@ -473,7 +487,8 @@ export class UserDB {
|
||||||
|
|
||||||
await db.remove(userId, dbUser._rev)
|
await db.remove(userId, dbUser._rev)
|
||||||
|
|
||||||
await UserDB.quotas.removeUsers(1)
|
const creatorsToDelete = isCreator(dbUser) ? 1 : 0
|
||||||
|
await UserDB.quotas.removeUsers(1, creatorsToDelete)
|
||||||
await eventHelpers.handleDeleteEvents(dbUser)
|
await eventHelpers.handleDeleteEvents(dbUser)
|
||||||
await cache.user.invalidateUser(userId)
|
await cache.user.invalidateUser(userId)
|
||||||
await sessions.invalidateSessions(userId, { reason: "deletion" })
|
await sessions.invalidateSessions(userId, { reason: "deletion" })
|
||||||
|
|
|
@ -14,14 +14,15 @@ import {
|
||||||
} from "../db"
|
} from "../db"
|
||||||
import {
|
import {
|
||||||
BulkDocsResponse,
|
BulkDocsResponse,
|
||||||
ContextUser,
|
|
||||||
SearchQuery,
|
SearchQuery,
|
||||||
SearchQueryOperators,
|
SearchQueryOperators,
|
||||||
SearchUsersRequest,
|
SearchUsersRequest,
|
||||||
User,
|
User,
|
||||||
|
ContextUser,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import * as context from "../context"
|
|
||||||
import { getGlobalDB } from "../context"
|
import { getGlobalDB } from "../context"
|
||||||
|
import * as context from "../context"
|
||||||
|
import { isCreator } from "./utils"
|
||||||
|
|
||||||
type GetOpts = { cleanup?: boolean }
|
type GetOpts = { cleanup?: boolean }
|
||||||
|
|
||||||
|
@ -283,6 +284,19 @@ export async function getUserCount() {
|
||||||
return response.total_rows
|
return response.total_rows
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function getCreatorCount() {
|
||||||
|
let creators = 0
|
||||||
|
async function iterate(startPage?: string) {
|
||||||
|
const page = await paginatedUsers({ bookmark: startPage })
|
||||||
|
creators += page.data.filter(isCreator).length
|
||||||
|
if (page.hasNextPage) {
|
||||||
|
await iterate(page.nextPage)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await iterate()
|
||||||
|
return creators
|
||||||
|
}
|
||||||
|
|
||||||
// used to remove the builder/admin permissions, for processing the
|
// used to remove the builder/admin permissions, for processing the
|
||||||
// user as an app user (they may have some specific role/group
|
// user as an app user (they may have some specific role/group
|
||||||
export function removePortalUserPermissions(user: User | ContextUser) {
|
export function removePortalUserPermissions(user: User | ContextUser) {
|
||||||
|
|
|
@ -10,6 +10,7 @@ import { getAccountByTenantId } from "../accounts"
|
||||||
// extract from shared-core to make easily accessible from backend-core
|
// extract from shared-core to make easily accessible from backend-core
|
||||||
export const isBuilder = sdk.users.isBuilder
|
export const isBuilder = sdk.users.isBuilder
|
||||||
export const isAdmin = sdk.users.isAdmin
|
export const isAdmin = sdk.users.isAdmin
|
||||||
|
export const isCreator = sdk.users.isCreator
|
||||||
export const isGlobalBuilder = sdk.users.isGlobalBuilder
|
export const isGlobalBuilder = sdk.users.isGlobalBuilder
|
||||||
export const isAdminOrBuilder = sdk.users.isAdminOrBuilder
|
export const isAdminOrBuilder = sdk.users.isAdminOrBuilder
|
||||||
export const hasAdminPermissions = sdk.users.hasAdminPermissions
|
export const hasAdminPermissions = sdk.users.hasAdminPermissions
|
||||||
|
|
|
@ -79,8 +79,8 @@ export function isPublicApiRequest(ctx: Ctx): boolean {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Given a request tries to find the appId, which can be located in various places
|
* Given a request tries to find the appId, which can be located in various places
|
||||||
* @param {object} ctx The main request body to look through.
|
* @param ctx The main request body to look through.
|
||||||
* @returns {string|undefined} If an appId was found it will be returned.
|
* @returns If an appId was found it will be returned.
|
||||||
*/
|
*/
|
||||||
export async function getAppIdFromCtx(ctx: Ctx) {
|
export async function getAppIdFromCtx(ctx: Ctx) {
|
||||||
// look in headers
|
// look in headers
|
||||||
|
@ -135,7 +135,7 @@ function parseAppIdFromUrl(url?: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* opens the contents of the specified encrypted JWT.
|
* opens the contents of the specified encrypted JWT.
|
||||||
* @return {object} the contents of the token.
|
* @return the contents of the token.
|
||||||
*/
|
*/
|
||||||
export function openJwt(token: string) {
|
export function openJwt(token: string) {
|
||||||
if (!token) {
|
if (!token) {
|
||||||
|
@ -169,8 +169,8 @@ export function isValidInternalAPIKey(apiKey: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a cookie from context, and decrypt if necessary.
|
* Get a cookie from context, and decrypt if necessary.
|
||||||
* @param {object} ctx The request which is to be manipulated.
|
* @param ctx The request which is to be manipulated.
|
||||||
* @param {string} name The name of the cookie to get.
|
* @param name The name of the cookie to get.
|
||||||
*/
|
*/
|
||||||
export function getCookie(ctx: Ctx, name: string) {
|
export function getCookie(ctx: Ctx, name: string) {
|
||||||
const cookie = ctx.cookies.get(name)
|
const cookie = ctx.cookies.get(name)
|
||||||
|
@ -184,10 +184,10 @@ export function getCookie(ctx: Ctx, name: string) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Store a cookie for the request - it will not expire.
|
* Store a cookie for the request - it will not expire.
|
||||||
* @param {object} ctx The request which is to be manipulated.
|
* @param ctx The request which is to be manipulated.
|
||||||
* @param {string} name The name of the cookie to set.
|
* @param name The name of the cookie to set.
|
||||||
* @param {string|object} value The value of cookie which will be set.
|
* @param value The value of cookie which will be set.
|
||||||
* @param {object} opts options like whether to sign.
|
* @param opts options like whether to sign.
|
||||||
*/
|
*/
|
||||||
export function setCookie(
|
export function setCookie(
|
||||||
ctx: Ctx,
|
ctx: Ctx,
|
||||||
|
@ -223,8 +223,8 @@ export function clearCookie(ctx: Ctx, name: string) {
|
||||||
/**
|
/**
|
||||||
* Checks if the API call being made (based on the provided ctx object) is from the client. If
|
* Checks if the API call being made (based on the provided ctx object) is from the client. If
|
||||||
* the call is not from a client app then it is from the builder.
|
* the call is not from a client app then it is from the builder.
|
||||||
* @param {object} ctx The koa context object to be tested.
|
* @param ctx The koa context object to be tested.
|
||||||
* @return {boolean} returns true if the call is from the client lib (a built app rather than the builder).
|
* @return returns true if the call is from the client lib (a built app rather than the builder).
|
||||||
*/
|
*/
|
||||||
export function isClient(ctx: Ctx) {
|
export function isClient(ctx: Ctx) {
|
||||||
return ctx.headers[Header.TYPE] === "client"
|
return ctx.headers[Header.TYPE] === "client"
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
const _ = require('lodash/fp')
|
||||||
|
const {structures} = require("../../../tests")
|
||||||
|
|
||||||
|
jest.mock("../../../src/context")
|
||||||
|
jest.mock("../../../src/db")
|
||||||
|
|
||||||
|
const context = require("../../../src/context")
|
||||||
|
const db = require("../../../src/db")
|
||||||
|
|
||||||
|
const {getCreatorCount} = require('../../../src/users/users')
|
||||||
|
|
||||||
|
describe("Users", () => {
|
||||||
|
|
||||||
|
let getGlobalDBMock
|
||||||
|
let getGlobalUserParamsMock
|
||||||
|
let paginationMock
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.resetAllMocks()
|
||||||
|
|
||||||
|
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
|
||||||
|
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
|
||||||
|
paginationMock = jest.spyOn(db, "pagination")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("Retrieves the number of creators", async () => {
|
||||||
|
const getUsers = (offset, limit, creators = false) => {
|
||||||
|
const range = _.range(offset, limit)
|
||||||
|
const opts = creators ? {builder: {global: true}} : undefined
|
||||||
|
return range.map(() => structures.users.user(opts))
|
||||||
|
}
|
||||||
|
const page1Data = getUsers(0, 8)
|
||||||
|
const page2Data = getUsers(8, 12, true)
|
||||||
|
getGlobalDBMock.mockImplementation(() => ({
|
||||||
|
name : "fake-db",
|
||||||
|
allDocs: () => ({
|
||||||
|
rows: [...page1Data, ...page2Data]
|
||||||
|
})
|
||||||
|
}))
|
||||||
|
paginationMock.mockImplementationOnce(() => ({
|
||||||
|
data: page1Data,
|
||||||
|
hasNextPage: true,
|
||||||
|
nextPage: "1"
|
||||||
|
}))
|
||||||
|
paginationMock.mockImplementation(() => ({
|
||||||
|
data: page2Data,
|
||||||
|
hasNextPage: false,
|
||||||
|
nextPage: undefined
|
||||||
|
}))
|
||||||
|
const creatorsCount = await getCreatorCount()
|
||||||
|
expect(creatorsCount).toBe(4)
|
||||||
|
expect(paginationMock).toHaveBeenCalledTimes(2)
|
||||||
|
})
|
||||||
|
})
|
|
@ -72,6 +72,11 @@ export function quotas(): Quotas {
|
||||||
value: 1,
|
value: 1,
|
||||||
triggers: [],
|
triggers: [],
|
||||||
},
|
},
|
||||||
|
creators: {
|
||||||
|
name: "Creators",
|
||||||
|
value: 1,
|
||||||
|
triggers: [],
|
||||||
|
},
|
||||||
userGroups: {
|
userGroups: {
|
||||||
name: "User Groups",
|
name: "User Groups",
|
||||||
value: 1,
|
value: 1,
|
||||||
|
@ -118,6 +123,10 @@ export function customer(): Customer {
|
||||||
export function subscription(): Subscription {
|
export function subscription(): Subscription {
|
||||||
return {
|
return {
|
||||||
amount: 10000,
|
amount: 10000,
|
||||||
|
amounts: {
|
||||||
|
user: 10000,
|
||||||
|
creator: 0,
|
||||||
|
},
|
||||||
cancelAt: undefined,
|
cancelAt: undefined,
|
||||||
currency: "usd",
|
currency: "usd",
|
||||||
currentPeriodEnd: 0,
|
currentPeriodEnd: 0,
|
||||||
|
@ -126,6 +135,10 @@ export function subscription(): Subscription {
|
||||||
duration: PriceDuration.MONTHLY,
|
duration: PriceDuration.MONTHLY,
|
||||||
pastDueAt: undefined,
|
pastDueAt: undefined,
|
||||||
quantity: 0,
|
quantity: 0,
|
||||||
|
quantities: {
|
||||||
|
user: 0,
|
||||||
|
creator: 0,
|
||||||
|
},
|
||||||
status: "active",
|
status: "active",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"
|
import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"
|
||||||
|
|
||||||
export const usage = (): QuotaUsage => {
|
export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
|
||||||
return {
|
return {
|
||||||
_id: "usage_quota",
|
_id: "usage_quota",
|
||||||
quotaReset: new Date().toISOString(),
|
quotaReset: new Date().toISOString(),
|
||||||
|
@ -58,7 +58,8 @@ export const usage = (): QuotaUsage => {
|
||||||
usageQuota: {
|
usageQuota: {
|
||||||
apps: 0,
|
apps: 0,
|
||||||
plugins: 0,
|
plugins: 0,
|
||||||
users: 0,
|
users,
|
||||||
|
creators,
|
||||||
userGroups: 0,
|
userGroups: 0,
|
||||||
rows: 0,
|
rows: 0,
|
||||||
triggers: {},
|
triggers: {},
|
||||||
|
|
|
@ -64,7 +64,6 @@
|
||||||
"@fortawesome/fontawesome-svg-core": "^6.2.1",
|
"@fortawesome/fontawesome-svg-core": "^6.2.1",
|
||||||
"@fortawesome/free-brands-svg-icons": "^6.2.1",
|
"@fortawesome/free-brands-svg-icons": "^6.2.1",
|
||||||
"@fortawesome/free-solid-svg-icons": "^6.2.1",
|
"@fortawesome/free-solid-svg-icons": "^6.2.1",
|
||||||
"@sentry/browser": "5.19.1",
|
|
||||||
"@spectrum-css/page": "^3.0.1",
|
"@spectrum-css/page": "^3.0.1",
|
||||||
"@spectrum-css/vars": "^3.0.1",
|
"@spectrum-css/vars": "^3.0.1",
|
||||||
"codemirror": "^5.59.0",
|
"codemirror": "^5.59.0",
|
||||||
|
|
|
@ -1,37 +0,0 @@
|
||||||
import * as Sentry from "@sentry/browser"
|
|
||||||
|
|
||||||
export default class SentryClient {
|
|
||||||
constructor(dsn) {
|
|
||||||
this.dsn = dsn
|
|
||||||
}
|
|
||||||
|
|
||||||
init() {
|
|
||||||
if (this.dsn) {
|
|
||||||
Sentry.init({ dsn: this.dsn })
|
|
||||||
|
|
||||||
this.initalised = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Capture an exception and send it to sentry.
|
|
||||||
* @param {Error} err - JS error object
|
|
||||||
*/
|
|
||||||
captureException(err) {
|
|
||||||
if (!this.initalised) return
|
|
||||||
|
|
||||||
Sentry.captureException(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Identify user in sentry.
|
|
||||||
* @param {String} id - Unique user id
|
|
||||||
*/
|
|
||||||
identify(id) {
|
|
||||||
if (!this.initalised) return
|
|
||||||
|
|
||||||
Sentry.configureScope(scope => {
|
|
||||||
scope.setUser({ id })
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,16 +1,14 @@
|
||||||
import { API } from "api"
|
import { API } from "api"
|
||||||
import PosthogClient from "./PosthogClient"
|
import PosthogClient from "./PosthogClient"
|
||||||
import IntercomClient from "./IntercomClient"
|
import IntercomClient from "./IntercomClient"
|
||||||
import SentryClient from "./SentryClient"
|
|
||||||
import { Events, EventSource } from "./constants"
|
import { Events, EventSource } from "./constants"
|
||||||
|
|
||||||
const posthog = new PosthogClient(process.env.POSTHOG_TOKEN)
|
const posthog = new PosthogClient(process.env.POSTHOG_TOKEN)
|
||||||
const sentry = new SentryClient(process.env.SENTRY_DSN)
|
|
||||||
const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
|
const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
|
||||||
|
|
||||||
class AnalyticsHub {
|
class AnalyticsHub {
|
||||||
constructor() {
|
constructor() {
|
||||||
this.clients = [posthog, sentry, intercom]
|
this.clients = [posthog, intercom]
|
||||||
}
|
}
|
||||||
|
|
||||||
async activate() {
|
async activate() {
|
||||||
|
@ -23,12 +21,9 @@ class AnalyticsHub {
|
||||||
|
|
||||||
identify(id) {
|
identify(id) {
|
||||||
posthog.identify(id)
|
posthog.identify(id)
|
||||||
sentry.identify(id)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
captureException(err) {
|
captureException(_err) {}
|
||||||
sentry.captureException(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
captureEvent(eventName, props = {}) {
|
captureEvent(eventName, props = {}) {
|
||||||
posthog.captureEvent(eventName, props)
|
posthog.captureEvent(eventName, props)
|
||||||
|
|
|
@ -23,5 +23,7 @@
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
{#key $params.datasourceId}
|
{#key $params.datasourceId}
|
||||||
<slot />
|
{#if $datasources.selected}
|
||||||
|
<slot />
|
||||||
|
{/if}
|
||||||
{/key}
|
{/key}
|
||||||
|
|
|
@ -16,8 +16,7 @@
|
||||||
let selectedPanel = null
|
let selectedPanel = null
|
||||||
let panelOptions = []
|
let panelOptions = []
|
||||||
|
|
||||||
// datasources.selected can return null temporarily on datasource deletion
|
$: datasource = $datasources.selected
|
||||||
$: datasource = $datasources.selected || {}
|
|
||||||
|
|
||||||
$: getOptions(datasource)
|
$: getOptions(datasource)
|
||||||
|
|
||||||
|
|
|
@ -43,7 +43,7 @@
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<TestimonialPage>
|
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
|
||||||
<Layout gap="S" noPadding>
|
<Layout gap="S" noPadding>
|
||||||
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
||||||
<span class="heading-wrap">
|
<span class="heading-wrap">
|
||||||
|
|
|
@ -53,7 +53,7 @@
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<TestimonialPage>
|
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
|
||||||
<Layout gap="S" noPadding>
|
<Layout gap="S" noPadding>
|
||||||
{#if loaded}
|
{#if loaded}
|
||||||
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
||||||
|
|
|
@ -80,7 +80,6 @@ export default defineConfig(({ mode }) => {
|
||||||
"process.env.INTERCOM_TOKEN": JSON.stringify(
|
"process.env.INTERCOM_TOKEN": JSON.stringify(
|
||||||
process.env.INTERCOM_TOKEN
|
process.env.INTERCOM_TOKEN
|
||||||
),
|
),
|
||||||
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
|
|
||||||
}),
|
}),
|
||||||
copyFonts("fonts"),
|
copyFonts("fonts"),
|
||||||
...(isProduction ? [] : devOnlyPlugins),
|
...(isProduction ? [] : devOnlyPlugins),
|
||||||
|
|
|
@ -32,7 +32,7 @@ export const API = createAPIClient({
|
||||||
},
|
},
|
||||||
|
|
||||||
// Show an error notification for all API failures.
|
// Show an error notification for all API failures.
|
||||||
// We could also log these to sentry.
|
// We could also log these to Posthog.
|
||||||
// Or we could check error.status and redirect to login on a 403 etc.
|
// Or we could check error.status and redirect to login on a 403 etc.
|
||||||
onError: error => {
|
onError: error => {
|
||||||
const { status, method, url, message, handled, suppressErrors } =
|
const { status, method, url, message, handled, suppressErrors } =
|
||||||
|
|
|
@ -9,7 +9,9 @@ export const buildRelationshipEndpoints = API => ({
|
||||||
if (!tableId || !rowId) {
|
if (!tableId || !rowId) {
|
||||||
return []
|
return []
|
||||||
}
|
}
|
||||||
const response = await API.get({ url: `/api/${tableId}/${rowId}/enrich` })
|
const response = await API.get({
|
||||||
|
url: `/api/${tableId}/${rowId}/enrich?field=${fieldName}`,
|
||||||
|
})
|
||||||
if (!fieldName) {
|
if (!fieldName) {
|
||||||
return response || []
|
return response || []
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -260,29 +260,31 @@
|
||||||
class:wrap={editable || contentLines > 1}
|
class:wrap={editable || contentLines > 1}
|
||||||
on:wheel={e => (focused ? e.stopPropagation() : null)}
|
on:wheel={e => (focused ? e.stopPropagation() : null)}
|
||||||
>
|
>
|
||||||
{#each value || [] as relationship}
|
{#if Array.isArray(value) && value.length}
|
||||||
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
|
{#each value as relationship}
|
||||||
<div class="badge">
|
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
|
||||||
<span
|
<div class="badge">
|
||||||
on:click={editable
|
<span
|
||||||
? () => showRelationship(relationship._id)
|
on:click={editable
|
||||||
: null}
|
? () => showRelationship(relationship._id)
|
||||||
>
|
: null}
|
||||||
{readable(
|
>
|
||||||
relationship[primaryDisplay] || relationship.primaryDisplay
|
{readable(
|
||||||
)}
|
relationship[primaryDisplay] || relationship.primaryDisplay
|
||||||
</span>
|
)}
|
||||||
{#if editable}
|
</span>
|
||||||
<Icon
|
{#if editable}
|
||||||
name="Close"
|
<Icon
|
||||||
size="XS"
|
name="Close"
|
||||||
hoverable
|
size="XS"
|
||||||
on:click={() => toggleRow(relationship)}
|
hoverable
|
||||||
/>
|
on:click={() => toggleRow(relationship)}
|
||||||
{/if}
|
/>
|
||||||
</div>
|
{/if}
|
||||||
{/if}
|
</div>
|
||||||
{/each}
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
{#if editable}
|
{#if editable}
|
||||||
<div class="add" on:click={open}>
|
<div class="add" on:click={open}>
|
||||||
<Icon name="Add" size="S" />
|
<Icon name="Add" size="S" />
|
||||||
|
@ -318,7 +320,7 @@
|
||||||
<div class="searching">
|
<div class="searching">
|
||||||
<ProgressCircle size="S" />
|
<ProgressCircle size="S" />
|
||||||
</div>
|
</div>
|
||||||
{:else if searchResults?.length}
|
{:else if Array.isArray(searchResults) && searchResults.length}
|
||||||
<div class="results">
|
<div class="results">
|
||||||
{#each searchResults as row, idx}
|
{#each searchResults as row, idx}
|
||||||
<div
|
<div
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e
|
Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76
|
|
@ -55,7 +55,6 @@
|
||||||
"@elastic/elasticsearch": "7.10.0",
|
"@elastic/elasticsearch": "7.10.0",
|
||||||
"@google-cloud/firestore": "6.8.0",
|
"@google-cloud/firestore": "6.8.0",
|
||||||
"@koa/router": "8.0.8",
|
"@koa/router": "8.0.8",
|
||||||
"@sentry/node": "6.17.7",
|
|
||||||
"@socket.io/redis-adapter": "^8.2.1",
|
"@socket.io/redis-adapter": "^8.2.1",
|
||||||
"airtable": "0.10.1",
|
"airtable": "0.10.1",
|
||||||
"arangojs": "7.2.0",
|
"arangojs": "7.2.0",
|
||||||
|
|
|
@ -4,7 +4,6 @@ import {
|
||||||
getQueryParams,
|
getQueryParams,
|
||||||
getTableParams,
|
getTableParams,
|
||||||
} from "../../db/utils"
|
} from "../../db/utils"
|
||||||
import { destroy as tableDestroy } from "./table/internal"
|
|
||||||
import { getIntegration } from "../../integrations"
|
import { getIntegration } from "../../integrations"
|
||||||
import { invalidateDynamicVariables } from "../../threads/utils"
|
import { invalidateDynamicVariables } from "../../threads/utils"
|
||||||
import { context, db as dbCore, events } from "@budibase/backend-core"
|
import { context, db as dbCore, events } from "@budibase/backend-core"
|
||||||
|
@ -325,11 +324,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
|
||||||
|
|
||||||
// Destroy the tables.
|
// Destroy the tables.
|
||||||
for (const table of datasourceTableDocs) {
|
for (const table of datasourceTableDocs) {
|
||||||
await tableDestroy({
|
await sdk.tables.internal.destroy(table)
|
||||||
params: {
|
|
||||||
tableId: table._id,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -40,7 +40,7 @@ class Routing {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the full routing structure by querying the routing view and processing the result into the tree.
|
* Gets the full routing structure by querying the routing view and processing the result into the tree.
|
||||||
* @returns {Promise<object>} The routing structure, this is the full structure designed for use in the builder,
|
* @returns The routing structure, this is the full structure designed for use in the builder,
|
||||||
* if the client routing is required then the updateRoutingStructureForUserRole should be used.
|
* if the client routing is required then the updateRoutingStructureForUserRole should be used.
|
||||||
*/
|
*/
|
||||||
async function getRoutingStructure() {
|
async function getRoutingStructure() {
|
||||||
|
|
|
@ -237,17 +237,8 @@ function isEditableColumn(column: FieldSchema) {
|
||||||
return !(isExternalAutoColumn || isFormula)
|
return !(isExternalAutoColumn || isFormula)
|
||||||
}
|
}
|
||||||
|
|
||||||
export type ExternalRequestReturnType<T> = T extends Operation.READ
|
export type ExternalRequestReturnType<T extends Operation> =
|
||||||
?
|
T extends Operation.READ ? Row[] : { row: Row; table: Table }
|
||||||
| Row[]
|
|
||||||
| {
|
|
||||||
row: Row
|
|
||||||
table: Table
|
|
||||||
}
|
|
||||||
: {
|
|
||||||
row: Row
|
|
||||||
table: Table
|
|
||||||
}
|
|
||||||
|
|
||||||
export class ExternalRequest<T extends Operation> {
|
export class ExternalRequest<T extends Operation> {
|
||||||
private readonly operation: T
|
private readonly operation: T
|
||||||
|
@ -657,10 +648,12 @@ export class ExternalRequest<T extends Operation> {
|
||||||
relationships
|
relationships
|
||||||
)
|
)
|
||||||
// if reading it'll just be an array of rows, return whole thing
|
// if reading it'll just be an array of rows, return whole thing
|
||||||
return (
|
if (operation === Operation.READ) {
|
||||||
operation === Operation.READ && Array.isArray(response)
|
return (
|
||||||
? output
|
Array.isArray(output) ? output : [output]
|
||||||
: { row: output[0], table }
|
) as ExternalRequestReturnType<T>
|
||||||
) as ExternalRequestReturnType<T>
|
} else {
|
||||||
|
return { row: output[0], table } as ExternalRequestReturnType<T>
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -44,7 +44,7 @@ export async function handleRequest<T extends Operation>(
|
||||||
return [] as any
|
return [] as any
|
||||||
}
|
}
|
||||||
|
|
||||||
return new ExternalRequest(operation, tableId, opts?.datasource).run(
|
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
|
||||||
opts || {}
|
opts || {}
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -148,17 +148,17 @@ export async function find(ctx: UserCtx): Promise<Row> {
|
||||||
export async function destroy(ctx: UserCtx) {
|
export async function destroy(ctx: UserCtx) {
|
||||||
const tableId = utils.getTableId(ctx)
|
const tableId = utils.getTableId(ctx)
|
||||||
const _id = ctx.request.body._id
|
const _id = ctx.request.body._id
|
||||||
const { row } = (await handleRequest(Operation.DELETE, tableId, {
|
const { row } = await handleRequest(Operation.DELETE, tableId, {
|
||||||
id: breakRowIdField(_id),
|
id: breakRowIdField(_id),
|
||||||
includeSqlRelationships: IncludeRelationship.EXCLUDE,
|
includeSqlRelationships: IncludeRelationship.EXCLUDE,
|
||||||
})) as { row: Row }
|
})
|
||||||
return { response: { ok: true, id: _id }, row }
|
return { response: { ok: true, id: _id }, row }
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function bulkDestroy(ctx: UserCtx) {
|
export async function bulkDestroy(ctx: UserCtx) {
|
||||||
const { rows } = ctx.request.body
|
const { rows } = ctx.request.body
|
||||||
const tableId = utils.getTableId(ctx)
|
const tableId = utils.getTableId(ctx)
|
||||||
let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
|
let promises: Promise<{ row: Row; table: Table }>[] = []
|
||||||
for (let row of rows) {
|
for (let row of rows) {
|
||||||
promises.push(
|
promises.push(
|
||||||
handleRequest(Operation.DELETE, tableId, {
|
handleRequest(Operation.DELETE, tableId, {
|
||||||
|
@ -167,7 +167,7 @@ export async function bulkDestroy(ctx: UserCtx) {
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
const responses = (await Promise.all(promises)) as { row: Row }[]
|
const responses = await Promise.all(promises)
|
||||||
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
|
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -183,11 +183,11 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
|
||||||
ctx.throw(400, "Datasource has not been configured for plus API.")
|
ctx.throw(400, "Datasource has not been configured for plus API.")
|
||||||
}
|
}
|
||||||
const tables = datasource.entities
|
const tables = datasource.entities
|
||||||
const response = (await handleRequest(Operation.READ, tableId, {
|
const response = await handleRequest(Operation.READ, tableId, {
|
||||||
id,
|
id,
|
||||||
datasource,
|
datasource,
|
||||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||||
})) as Row[]
|
})
|
||||||
const table: Table = tables[tableName]
|
const table: Table = tables[tableName]
|
||||||
const row = response[0]
|
const row = response[0]
|
||||||
// this seems like a lot of work, but basically we need to dig deeper for the enrich
|
// this seems like a lot of work, but basically we need to dig deeper for the enrich
|
||||||
|
|
|
@ -26,6 +26,7 @@ import { fixRow } from "../public/rows"
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
import * as exporters from "../view/exporters"
|
import * as exporters from "../view/exporters"
|
||||||
import { apiFileReturn } from "../../../utilities/fileSystem"
|
import { apiFileReturn } from "../../../utilities/fileSystem"
|
||||||
|
import { Format } from "../view/exporters"
|
||||||
export * as views from "./views"
|
export * as views from "./views"
|
||||||
|
|
||||||
function pickApi(tableId: any) {
|
function pickApi(tableId: any) {
|
||||||
|
@ -267,7 +268,7 @@ export const exportRows = async (
|
||||||
async () => {
|
async () => {
|
||||||
const { fileName, content } = await sdk.rows.exportRows({
|
const { fileName, content } = await sdk.rows.exportRows({
|
||||||
tableId,
|
tableId,
|
||||||
format,
|
format: format as Format,
|
||||||
rowIds: rows,
|
rowIds: rows,
|
||||||
columns,
|
columns,
|
||||||
query,
|
query,
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import * as linkRows from "../../../db/linkedRows"
|
import * as linkRows from "../../../db/linkedRows"
|
||||||
import {
|
import {
|
||||||
generateRowID,
|
generateRowID,
|
||||||
|
getMultiIDParams,
|
||||||
getTableIDFromRowID,
|
getTableIDFromRowID,
|
||||||
InternalTables,
|
InternalTables,
|
||||||
} from "../../../db/utils"
|
} from "../../../db/utils"
|
||||||
|
@ -29,6 +30,8 @@ import {
|
||||||
UserCtx,
|
UserCtx,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
|
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
|
||||||
|
import { flatten } from "lodash"
|
||||||
|
|
||||||
// const CALCULATION_TYPES = {
|
// const CALCULATION_TYPES = {
|
||||||
// SUM: "sum",
|
// SUM: "sum",
|
||||||
|
@ -165,7 +168,7 @@ export async function destroy(ctx: UserCtx) {
|
||||||
if (row.tableId !== tableId) {
|
if (row.tableId !== tableId) {
|
||||||
throw "Supplied tableId doesn't match the row's tableId"
|
throw "Supplied tableId doesn't match the row's tableId"
|
||||||
}
|
}
|
||||||
const table = await sdk.tables.getTable(row.tableId)
|
const table = await sdk.tables.getTable(tableId)
|
||||||
// update the row to include full relationships before deleting them
|
// update the row to include full relationships before deleting them
|
||||||
row = await outputProcessing(table, row, {
|
row = await outputProcessing(table, row, {
|
||||||
squash: false,
|
squash: false,
|
||||||
|
@ -175,7 +178,7 @@ export async function destroy(ctx: UserCtx) {
|
||||||
await linkRows.updateLinks({
|
await linkRows.updateLinks({
|
||||||
eventType: linkRows.EventType.ROW_DELETE,
|
eventType: linkRows.EventType.ROW_DELETE,
|
||||||
row,
|
row,
|
||||||
tableId: row.tableId,
|
tableId,
|
||||||
})
|
})
|
||||||
// remove any attachments that were on the row from object storage
|
// remove any attachments that were on the row from object storage
|
||||||
await cleanupAttachments(table, { row })
|
await cleanupAttachments(table, { row })
|
||||||
|
@ -332,60 +335,52 @@ export async function exportRows(ctx: UserCtx) {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetchEnrichedRow(ctx: UserCtx) {
|
export async function fetchEnrichedRow(ctx: UserCtx) {
|
||||||
|
const fieldName = ctx.request.query.field as string | undefined
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
const tableId = utils.getTableId(ctx)
|
const tableId = utils.getTableId(ctx)
|
||||||
const rowId = ctx.params.rowId
|
const rowId = ctx.params.rowId as string
|
||||||
// need table to work out where links go in row
|
// need table to work out where links go in row, as well as the link docs
|
||||||
let [table, row] = await Promise.all([
|
let response = await Promise.all([
|
||||||
sdk.tables.getTable(tableId),
|
sdk.tables.getTable(tableId),
|
||||||
utils.findRow(ctx, tableId, rowId),
|
utils.findRow(ctx, tableId, rowId),
|
||||||
|
linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
|
||||||
])
|
])
|
||||||
// get the link docs
|
const table = response[0] as Table
|
||||||
const linkVals = (await linkRows.getLinkDocuments({
|
const row = response[1] as Row
|
||||||
tableId,
|
const linkVals = response[2] as LinkDocumentValue[]
|
||||||
rowId,
|
|
||||||
})) as LinkDocumentValue[]
|
|
||||||
// look up the actual rows based on the ids
|
// look up the actual rows based on the ids
|
||||||
let response = (
|
const params = getMultiIDParams(linkVals.map(linkVal => linkVal.id))
|
||||||
await db.allDocs({
|
let linkedRows = (await db.allDocs(params)).rows.map(row => row.doc)
|
||||||
include_docs: true,
|
|
||||||
keys: linkVals.map(linkVal => linkVal.id),
|
// get the linked tables
|
||||||
})
|
const linkTableIds = getLinkedTableIDs(table as Table)
|
||||||
).rows.map(row => row.doc)
|
const linkTables = await sdk.tables.getTables(linkTableIds)
|
||||||
// group responses by table
|
|
||||||
let groups: any = {},
|
// perform output processing
|
||||||
tables: Record<string, Table> = {}
|
let final: Promise<Row[]>[] = []
|
||||||
for (let row of response) {
|
for (let linkTable of linkTables) {
|
||||||
if (!row.tableId) {
|
const relatedRows = linkedRows.filter(row => row.tableId === linkTable._id)
|
||||||
row.tableId = getTableIDFromRowID(row._id)
|
// include the row being enriched for performance reasons, don't need to fetch it to include
|
||||||
}
|
final = final.concat(
|
||||||
const linkedTableId = row.tableId
|
outputProcessing(linkTable, relatedRows, {
|
||||||
if (groups[linkedTableId] == null) {
|
// have to clone to avoid JSON cycle
|
||||||
groups[linkedTableId] = [row]
|
fromRow: cloneDeep(row),
|
||||||
tables[linkedTableId] = await db.get(linkedTableId)
|
squash: true,
|
||||||
} else {
|
})
|
||||||
groups[linkedTableId].push(row)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let linkedRows: Row[] = []
|
|
||||||
for (let [tableId, rows] of Object.entries(groups)) {
|
|
||||||
// need to include the IDs in these rows for any links they may have
|
|
||||||
linkedRows = linkedRows.concat(
|
|
||||||
await outputProcessing(tables[tableId], rows as Row[])
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
// finalise the promises
|
||||||
|
linkedRows = flatten(await Promise.all(final))
|
||||||
|
|
||||||
// insert the link rows in the correct place throughout the main row
|
// insert the link rows in the correct place throughout the main row
|
||||||
for (let fieldName of Object.keys(table.schema)) {
|
for (let fieldName of Object.keys(table.schema)) {
|
||||||
let field = table.schema[fieldName]
|
let field = table.schema[fieldName]
|
||||||
if (field.type === FieldTypes.LINK) {
|
if (field.type === FieldTypes.LINK) {
|
||||||
// find the links that pertain to this field, get their indexes
|
// find the links that pertain to this field
|
||||||
const linkIndexes = linkVals
|
const links = linkVals.filter(link => link.fieldName === fieldName)
|
||||||
.filter(link => link.fieldName === fieldName)
|
|
||||||
.map(link => linkVals.indexOf(link))
|
|
||||||
// find the rows that the links state are linked to this field
|
// find the rows that the links state are linked to this field
|
||||||
row[fieldName] = linkedRows.filter((linkRow, index) =>
|
row[fieldName] = linkedRows.filter(linkRow =>
|
||||||
linkIndexes.includes(index)
|
links.find(link => link.id === linkRow._id)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,6 +2,7 @@ import {
|
||||||
FieldType,
|
FieldType,
|
||||||
Operation,
|
Operation,
|
||||||
QueryJson,
|
QueryJson,
|
||||||
|
RelationshipFieldMetadata,
|
||||||
Row,
|
Row,
|
||||||
SearchFilters,
|
SearchFilters,
|
||||||
SortType,
|
SortType,
|
||||||
|
@ -24,18 +25,19 @@ function buildInternalFieldList(
|
||||||
fieldList = fieldList.concat(
|
fieldList = fieldList.concat(
|
||||||
CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
|
CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
|
||||||
)
|
)
|
||||||
for (let col of Object.values(table.schema)) {
|
if (opts.relationships) {
|
||||||
const isLink = col.type === FieldType.LINK
|
for (let col of Object.values(table.schema)) {
|
||||||
if (isLink && !opts.relationships) {
|
if (col.type === FieldType.LINK) {
|
||||||
continue
|
const linkCol = col as RelationshipFieldMetadata
|
||||||
}
|
const relatedTable = tables.find(
|
||||||
if (isLink) {
|
table => table._id === linkCol.tableId
|
||||||
const relatedTable = tables.find(table => table._id === col.tableId)!
|
)!
|
||||||
fieldList = fieldList.concat(
|
fieldList = fieldList.concat(
|
||||||
buildInternalFieldList(relatedTable, tables, { relationships: false })
|
buildInternalFieldList(relatedTable, tables, { relationships: false })
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
fieldList.push(`${table._id}.${col.name}`)
|
fieldList.push(`${table._id}.${col.name}`)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return fieldList
|
return fieldList
|
||||||
|
@ -56,13 +58,14 @@ function cleanupFilters(filters: SearchFilters, tables: Table[]) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// relationship, switch to table ID
|
// relationship, switch to table ID
|
||||||
const tableRelated = tables.find(table =>
|
const tableRelated = tables.find(
|
||||||
key.includes(tableInFilter(table.originalName!))
|
table =>
|
||||||
|
table.originalName && key.includes(tableInFilter(table.originalName))
|
||||||
)
|
)
|
||||||
if (tableRelated) {
|
if (tableRelated && tableRelated.originalName) {
|
||||||
filter[
|
filter[
|
||||||
key.replace(
|
key.replace(
|
||||||
tableInFilter(tableRelated.originalName!),
|
tableInFilter(tableRelated.originalName),
|
||||||
tableInFilter(tableRelated._id!)
|
tableInFilter(tableRelated._id!)
|
||||||
)
|
)
|
||||||
] = filter[key]
|
] = filter[key]
|
||||||
|
|
|
@ -149,7 +149,7 @@ export async function finaliseRow(
|
||||||
await db.put(table)
|
await db.put(table)
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
if (err.status === 409) {
|
if (err.status === 409) {
|
||||||
const updatedTable = await sdk.tables.getTable(table._id)
|
const updatedTable = await sdk.tables.getTable(table._id!)
|
||||||
let response = processAutoColumn(null, updatedTable, row, {
|
let response = processAutoColumn(null, updatedTable, row, {
|
||||||
reprocessing: true,
|
reprocessing: true,
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,15 +1,7 @@
|
||||||
import { InternalTables } from "../../../../db/utils"
|
import { InternalTables } from "../../../../db/utils"
|
||||||
import * as userController from "../../user"
|
import * as userController from "../../user"
|
||||||
import { context } from "@budibase/backend-core"
|
import { context } from "@budibase/backend-core"
|
||||||
import {
|
import { Ctx, RelationshipsJson, Row, Table, UserCtx } from "@budibase/types"
|
||||||
Ctx,
|
|
||||||
FieldType,
|
|
||||||
RelationshipsJson,
|
|
||||||
Row,
|
|
||||||
SearchFilters,
|
|
||||||
Table,
|
|
||||||
UserCtx,
|
|
||||||
} from "@budibase/types"
|
|
||||||
import {
|
import {
|
||||||
processDates,
|
processDates,
|
||||||
processFormulas,
|
processFormulas,
|
||||||
|
@ -19,7 +11,6 @@ import {
|
||||||
updateRelationshipColumns,
|
updateRelationshipColumns,
|
||||||
} from "./sqlUtils"
|
} from "./sqlUtils"
|
||||||
import { basicProcessing, generateIdForRow, fixArrayTypes } from "./basic"
|
import { basicProcessing, generateIdForRow, fixArrayTypes } from "./basic"
|
||||||
import { NoEmptyFilterStrings } from "../../../../constants"
|
|
||||||
import sdk from "../../../../sdk"
|
import sdk from "../../../../sdk"
|
||||||
|
|
||||||
import validateJs from "validate.js"
|
import validateJs from "validate.js"
|
||||||
|
@ -53,7 +44,7 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
|
||||||
return row
|
return row
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getTableId(ctx: Ctx) {
|
export function getTableId(ctx: Ctx): string {
|
||||||
// top priority, use the URL first
|
// top priority, use the URL first
|
||||||
if (ctx.params?.sourceId) {
|
if (ctx.params?.sourceId) {
|
||||||
return ctx.params.sourceId
|
return ctx.params.sourceId
|
||||||
|
@ -70,23 +61,21 @@ export function getTableId(ctx: Ctx) {
|
||||||
if (ctx.params?.viewName) {
|
if (ctx.params?.viewName) {
|
||||||
return ctx.params.viewName
|
return ctx.params.viewName
|
||||||
}
|
}
|
||||||
|
throw new Error("Unable to find table ID in request")
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function validate(opts: {
|
export async function validate(
|
||||||
tableId?: string
|
opts: { row: Row } & ({ tableId: string } | { table: Table })
|
||||||
row: Row
|
) {
|
||||||
table?: Table
|
|
||||||
}) {
|
|
||||||
let fetchedTable: Table
|
let fetchedTable: Table
|
||||||
if (!opts.table) {
|
if ("tableId" in opts) {
|
||||||
fetchedTable = await sdk.tables.getTable(opts.tableId)
|
fetchedTable = await sdk.tables.getTable(opts.tableId)
|
||||||
} else {
|
} else {
|
||||||
fetchedTable = opts.table
|
fetchedTable = opts.table
|
||||||
}
|
}
|
||||||
const errors: any = {}
|
|
||||||
return sdk.rows.utils.validate({
|
return sdk.rows.utils.validate({
|
||||||
...opts,
|
...opts,
|
||||||
table: fetchedTable || opts.table,
|
table: fetchedTable,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -152,35 +141,6 @@ export function sqlOutputProcessing(
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// don't do a pure falsy check, as 0 is included
|
|
||||||
// https://github.com/Budibase/budibase/issues/10118
|
|
||||||
export function removeEmptyFilters(filters: SearchFilters) {
|
|
||||||
for (let filterField of NoEmptyFilterStrings) {
|
|
||||||
if (!filters[filterField]) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let filterType of Object.keys(filters)) {
|
|
||||||
if (filterType !== filterField) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// don't know which one we're checking, type could be anything
|
|
||||||
const value = filters[filterType] as unknown
|
|
||||||
if (typeof value === "object") {
|
|
||||||
for (let [key, value] of Object.entries(
|
|
||||||
filters[filterType] as object
|
|
||||||
)) {
|
|
||||||
if (value == null || value === "") {
|
|
||||||
// @ts-ignore
|
|
||||||
delete filters[filterField][key]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return filters
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isUserMetadataTable(tableId: string) {
|
export function isUserMetadataTable(tableId: string) {
|
||||||
return tableId === InternalTables.USER_METADATA
|
return tableId === InternalTables.USER_METADATA
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,36 @@
|
||||||
|
import {
|
||||||
|
Datasource,
|
||||||
|
Operation,
|
||||||
|
QueryJson,
|
||||||
|
RenameColumn,
|
||||||
|
Table,
|
||||||
|
} from "@budibase/types"
|
||||||
|
import { makeExternalQuery } from "../../../integrations/base/query"
|
||||||
|
|
||||||
|
export async function makeTableRequest(
|
||||||
|
datasource: Datasource,
|
||||||
|
operation: Operation,
|
||||||
|
table: Table,
|
||||||
|
tables: Record<string, Table>,
|
||||||
|
oldTable?: Table,
|
||||||
|
renamed?: RenameColumn
|
||||||
|
) {
|
||||||
|
const json: QueryJson = {
|
||||||
|
endpoint: {
|
||||||
|
datasourceId: datasource._id!,
|
||||||
|
entityId: table._id!,
|
||||||
|
operation,
|
||||||
|
},
|
||||||
|
meta: {
|
||||||
|
tables,
|
||||||
|
},
|
||||||
|
table,
|
||||||
|
}
|
||||||
|
if (oldTable) {
|
||||||
|
json.meta!.table = oldTable
|
||||||
|
}
|
||||||
|
if (renamed) {
|
||||||
|
json.meta!.renamed = renamed
|
||||||
|
}
|
||||||
|
return makeExternalQuery(datasource, json)
|
||||||
|
}
|
|
@ -1,108 +1,20 @@
|
||||||
import {
|
import { breakExternalTableId } from "../../../integrations/utils"
|
||||||
breakExternalTableId,
|
|
||||||
buildExternalTableId,
|
|
||||||
} from "../../../integrations/utils"
|
|
||||||
import {
|
|
||||||
foreignKeyStructure,
|
|
||||||
generateForeignKey,
|
|
||||||
generateJunctionTableName,
|
|
||||||
hasTypeChanged,
|
|
||||||
setStaticSchemas,
|
|
||||||
} from "./utils"
|
|
||||||
import { FieldTypes } from "../../../constants"
|
|
||||||
import { makeExternalQuery } from "../../../integrations/base/query"
|
|
||||||
import { handleRequest } from "../row/external"
|
import { handleRequest } from "../row/external"
|
||||||
import { context, events } from "@budibase/backend-core"
|
import { events } from "@budibase/backend-core"
|
||||||
import { isRows, isSchema, parse } from "../../../utilities/schema"
|
import { isRows, isSchema, parse } from "../../../utilities/schema"
|
||||||
import {
|
import {
|
||||||
BulkImportRequest,
|
BulkImportRequest,
|
||||||
BulkImportResponse,
|
BulkImportResponse,
|
||||||
Datasource,
|
|
||||||
FieldSchema,
|
|
||||||
ManyToManyRelationshipFieldMetadata,
|
|
||||||
ManyToOneRelationshipFieldMetadata,
|
|
||||||
OneToManyRelationshipFieldMetadata,
|
|
||||||
Operation,
|
Operation,
|
||||||
QueryJson,
|
|
||||||
RelationshipFieldMetadata,
|
|
||||||
RelationshipType,
|
|
||||||
RenameColumn,
|
|
||||||
SaveTableRequest,
|
SaveTableRequest,
|
||||||
SaveTableResponse,
|
SaveTableResponse,
|
||||||
Table,
|
Table,
|
||||||
TableRequest,
|
TableRequest,
|
||||||
UserCtx,
|
UserCtx,
|
||||||
ViewV2,
|
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
import { builderSocket } from "../../../websockets"
|
import { builderSocket } from "../../../websockets"
|
||||||
|
|
||||||
const { cloneDeep } = require("lodash/fp")
|
|
||||||
|
|
||||||
async function makeTableRequest(
|
|
||||||
datasource: Datasource,
|
|
||||||
operation: Operation,
|
|
||||||
table: Table,
|
|
||||||
tables: Record<string, Table>,
|
|
||||||
oldTable?: Table,
|
|
||||||
renamed?: RenameColumn
|
|
||||||
) {
|
|
||||||
const json: QueryJson = {
|
|
||||||
endpoint: {
|
|
||||||
datasourceId: datasource._id!,
|
|
||||||
entityId: table._id!,
|
|
||||||
operation,
|
|
||||||
},
|
|
||||||
meta: {
|
|
||||||
tables,
|
|
||||||
},
|
|
||||||
table,
|
|
||||||
}
|
|
||||||
if (oldTable) {
|
|
||||||
json.meta!.table = oldTable
|
|
||||||
}
|
|
||||||
if (renamed) {
|
|
||||||
json.meta!.renamed = renamed
|
|
||||||
}
|
|
||||||
return makeExternalQuery(datasource, json)
|
|
||||||
}
|
|
||||||
|
|
||||||
function cleanupRelationships(
|
|
||||||
table: Table,
|
|
||||||
tables: Record<string, Table>,
|
|
||||||
oldTable?: Table
|
|
||||||
) {
|
|
||||||
const tableToIterate = oldTable ? oldTable : table
|
|
||||||
// clean up relationships in couch table schemas
|
|
||||||
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
|
|
||||||
if (
|
|
||||||
schema.type === FieldTypes.LINK &&
|
|
||||||
(!oldTable || table.schema[key] == null)
|
|
||||||
) {
|
|
||||||
const schemaTableId = schema.tableId
|
|
||||||
const relatedTable = Object.values(tables).find(
|
|
||||||
table => table._id === schemaTableId
|
|
||||||
)
|
|
||||||
const foreignKey =
|
|
||||||
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
|
|
||||||
schema.foreignKey
|
|
||||||
if (!relatedTable || !foreignKey) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
for (let [relatedKey, relatedSchema] of Object.entries(
|
|
||||||
relatedTable.schema
|
|
||||||
)) {
|
|
||||||
if (
|
|
||||||
relatedSchema.type === FieldTypes.LINK &&
|
|
||||||
relatedSchema.fieldName === foreignKey
|
|
||||||
) {
|
|
||||||
delete relatedTable.schema[relatedKey]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDatasourceId(table: Table) {
|
function getDatasourceId(table: Table) {
|
||||||
if (!table) {
|
if (!table) {
|
||||||
throw "No table supplied"
|
throw "No table supplied"
|
||||||
|
@ -113,247 +25,32 @@ function getDatasourceId(table: Table) {
|
||||||
return breakExternalTableId(table._id).datasourceId
|
return breakExternalTableId(table._id).datasourceId
|
||||||
}
|
}
|
||||||
|
|
||||||
function otherRelationshipType(type?: string) {
|
|
||||||
if (type === RelationshipType.MANY_TO_MANY) {
|
|
||||||
return RelationshipType.MANY_TO_MANY
|
|
||||||
}
|
|
||||||
return type === RelationshipType.ONE_TO_MANY
|
|
||||||
? RelationshipType.MANY_TO_ONE
|
|
||||||
: RelationshipType.ONE_TO_MANY
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateManyLinkSchema(
|
|
||||||
datasource: Datasource,
|
|
||||||
column: ManyToManyRelationshipFieldMetadata,
|
|
||||||
table: Table,
|
|
||||||
relatedTable: Table
|
|
||||||
): Table {
|
|
||||||
if (!table.primary || !relatedTable.primary) {
|
|
||||||
throw new Error("Unable to generate many link schema, no primary keys")
|
|
||||||
}
|
|
||||||
const primary = table.name + table.primary[0]
|
|
||||||
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
|
|
||||||
const jcTblName = generateJunctionTableName(column, table, relatedTable)
|
|
||||||
// first create the new table
|
|
||||||
const junctionTable = {
|
|
||||||
_id: buildExternalTableId(datasource._id!, jcTblName),
|
|
||||||
name: jcTblName,
|
|
||||||
primary: [primary, relatedPrimary],
|
|
||||||
constrained: [primary, relatedPrimary],
|
|
||||||
schema: {
|
|
||||||
[primary]: foreignKeyStructure(primary, {
|
|
||||||
toTable: table.name,
|
|
||||||
toKey: table.primary[0],
|
|
||||||
}),
|
|
||||||
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
|
|
||||||
toTable: relatedTable.name,
|
|
||||||
toKey: relatedTable.primary[0],
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
column.through = junctionTable._id
|
|
||||||
column.throughFrom = relatedPrimary
|
|
||||||
column.throughTo = primary
|
|
||||||
column.fieldName = relatedPrimary
|
|
||||||
return junctionTable
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateLinkSchema(
|
|
||||||
column:
|
|
||||||
| OneToManyRelationshipFieldMetadata
|
|
||||||
| ManyToOneRelationshipFieldMetadata,
|
|
||||||
table: Table,
|
|
||||||
relatedTable: Table,
|
|
||||||
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
|
|
||||||
) {
|
|
||||||
if (!table.primary || !relatedTable.primary) {
|
|
||||||
throw new Error("Unable to generate link schema, no primary keys")
|
|
||||||
}
|
|
||||||
const isOneSide = type === RelationshipType.ONE_TO_MANY
|
|
||||||
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
|
|
||||||
// generate a foreign key
|
|
||||||
const foreignKey = generateForeignKey(column, relatedTable)
|
|
||||||
column.relationshipType = type
|
|
||||||
column.foreignKey = isOneSide ? foreignKey : primary
|
|
||||||
column.fieldName = isOneSide ? primary : foreignKey
|
|
||||||
return foreignKey
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateRelatedSchema(
|
|
||||||
linkColumn: RelationshipFieldMetadata,
|
|
||||||
table: Table,
|
|
||||||
relatedTable: Table,
|
|
||||||
columnName: string
|
|
||||||
) {
|
|
||||||
// generate column for other table
|
|
||||||
const relatedSchema = cloneDeep(linkColumn)
|
|
||||||
const isMany2Many =
|
|
||||||
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
|
|
||||||
// swap them from the main link
|
|
||||||
if (!isMany2Many && linkColumn.foreignKey) {
|
|
||||||
relatedSchema.fieldName = linkColumn.foreignKey
|
|
||||||
relatedSchema.foreignKey = linkColumn.fieldName
|
|
||||||
}
|
|
||||||
// is many to many
|
|
||||||
else if (isMany2Many) {
|
|
||||||
// don't need to copy through, already got it
|
|
||||||
relatedSchema.fieldName = linkColumn.throughTo
|
|
||||||
relatedSchema.throughTo = linkColumn.throughFrom
|
|
||||||
relatedSchema.throughFrom = linkColumn.throughTo
|
|
||||||
}
|
|
||||||
relatedSchema.relationshipType = otherRelationshipType(
|
|
||||||
linkColumn.relationshipType
|
|
||||||
)
|
|
||||||
relatedSchema.tableId = relatedTable._id
|
|
||||||
relatedSchema.name = columnName
|
|
||||||
table.schema[columnName] = relatedSchema
|
|
||||||
}
|
|
||||||
|
|
||||||
function isRelationshipSetup(column: RelationshipFieldMetadata) {
|
|
||||||
return (column as any).foreignKey || (column as any).through
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
||||||
const inputs = ctx.request.body
|
const inputs = ctx.request.body
|
||||||
const renamed = inputs?._rename
|
const renaming = inputs?._rename
|
||||||
// can't do this right now
|
// can't do this right now
|
||||||
delete inputs.rows
|
delete inputs.rows
|
||||||
const datasourceId = getDatasourceId(ctx.request.body)!
|
const tableId = ctx.request.body._id
|
||||||
|
const datasourceId = getDatasourceId(ctx.request.body)
|
||||||
// table doesn't exist already, note that it is created
|
// table doesn't exist already, note that it is created
|
||||||
if (!inputs._id) {
|
if (!inputs._id) {
|
||||||
inputs.created = true
|
inputs.created = true
|
||||||
}
|
}
|
||||||
let tableToSave: TableRequest = {
|
try {
|
||||||
type: "table",
|
const { datasource, table } = await sdk.tables.external.save(
|
||||||
_id: buildExternalTableId(datasourceId, inputs.name),
|
datasourceId!,
|
||||||
sourceId: datasourceId,
|
inputs,
|
||||||
...inputs,
|
{ tableId, renaming }
|
||||||
}
|
|
||||||
|
|
||||||
let oldTable: Table | undefined
|
|
||||||
if (ctx.request.body && ctx.request.body._id) {
|
|
||||||
oldTable = await sdk.tables.getTable(ctx.request.body._id)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasTypeChanged(tableToSave, oldTable)) {
|
|
||||||
ctx.throw(400, "A column type has changed.")
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let view in tableToSave.views) {
|
|
||||||
const tableView = tableToSave.views[view]
|
|
||||||
if (!tableView || !sdk.views.isV2(tableView)) continue
|
|
||||||
|
|
||||||
tableToSave.views[view] = sdk.views.syncSchema(
|
|
||||||
oldTable!.views![view] as ViewV2,
|
|
||||||
tableToSave.schema,
|
|
||||||
renamed
|
|
||||||
)
|
)
|
||||||
}
|
builderSocket?.emitDatasourceUpdate(ctx, datasource)
|
||||||
|
return table
|
||||||
const db = context.getAppDB()
|
} catch (err: any) {
|
||||||
const datasource = await sdk.datasources.get(datasourceId)
|
if (err instanceof Error) {
|
||||||
if (!datasource.entities) {
|
ctx.throw(400, err.message)
|
||||||
datasource.entities = {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// GSheets is a specific case - only ever has a static primary key
|
|
||||||
tableToSave = setStaticSchemas(datasource, tableToSave)
|
|
||||||
|
|
||||||
const oldTables = cloneDeep(datasource.entities)
|
|
||||||
const tables: Record<string, Table> = datasource.entities
|
|
||||||
|
|
||||||
const extraTablesToUpdate = []
|
|
||||||
|
|
||||||
// check if relations need setup
|
|
||||||
for (let schema of Object.values(tableToSave.schema)) {
|
|
||||||
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const schemaTableId = schema.tableId
|
|
||||||
const relatedTable = Object.values(tables).find(
|
|
||||||
table => table._id === schemaTableId
|
|
||||||
)
|
|
||||||
if (!relatedTable) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const relatedColumnName = schema.fieldName!
|
|
||||||
const relationType = schema.relationshipType
|
|
||||||
if (relationType === RelationshipType.MANY_TO_MANY) {
|
|
||||||
const junctionTable = generateManyLinkSchema(
|
|
||||||
datasource,
|
|
||||||
schema,
|
|
||||||
tableToSave,
|
|
||||||
relatedTable
|
|
||||||
)
|
|
||||||
if (tables[junctionTable.name]) {
|
|
||||||
throw "Junction table already exists, cannot create another relationship."
|
|
||||||
}
|
|
||||||
tables[junctionTable.name] = junctionTable
|
|
||||||
extraTablesToUpdate.push(junctionTable)
|
|
||||||
} else {
|
} else {
|
||||||
const fkTable =
|
ctx.throw(err.status || 500, err?.message || err)
|
||||||
relationType === RelationshipType.ONE_TO_MANY
|
|
||||||
? tableToSave
|
|
||||||
: relatedTable
|
|
||||||
const foreignKey = generateLinkSchema(
|
|
||||||
schema,
|
|
||||||
tableToSave,
|
|
||||||
relatedTable,
|
|
||||||
relationType
|
|
||||||
)
|
|
||||||
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
|
|
||||||
if (fkTable.constrained == null) {
|
|
||||||
fkTable.constrained = []
|
|
||||||
}
|
|
||||||
if (fkTable.constrained.indexOf(foreignKey) === -1) {
|
|
||||||
fkTable.constrained.push(foreignKey)
|
|
||||||
}
|
|
||||||
// foreign key is in other table, need to save it to external
|
|
||||||
if (fkTable._id !== tableToSave._id) {
|
|
||||||
extraTablesToUpdate.push(fkTable)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
|
|
||||||
schema.main = true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
cleanupRelationships(tableToSave, tables, oldTable)
|
|
||||||
|
|
||||||
const operation = oldTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
|
|
||||||
await makeTableRequest(
|
|
||||||
datasource,
|
|
||||||
operation,
|
|
||||||
tableToSave,
|
|
||||||
tables,
|
|
||||||
oldTable,
|
|
||||||
renamed
|
|
||||||
)
|
|
||||||
// update any extra tables (like foreign keys in other tables)
|
|
||||||
for (let extraTable of extraTablesToUpdate) {
|
|
||||||
const oldExtraTable = oldTables[extraTable.name]
|
|
||||||
let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
|
|
||||||
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
|
|
||||||
}
|
|
||||||
|
|
||||||
// make sure the constrained list, all still exist
|
|
||||||
if (Array.isArray(tableToSave.constrained)) {
|
|
||||||
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
|
|
||||||
Object.keys(tableToSave.schema).includes(constraint)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// remove the rename prop
|
|
||||||
delete tableToSave._rename
|
|
||||||
// store it into couch now for budibase reference
|
|
||||||
datasource.entities[tableToSave.name] = tableToSave
|
|
||||||
await db.put(sdk.tables.populateExternalTableSchemas(datasource))
|
|
||||||
|
|
||||||
// Since tables are stored inside datasources, we need to notify clients
|
|
||||||
// that the datasource definition changed
|
|
||||||
const updatedDatasource = await sdk.datasources.get(datasource._id!)
|
|
||||||
builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
|
|
||||||
|
|
||||||
return tableToSave
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function destroy(ctx: UserCtx) {
|
export async function destroy(ctx: UserCtx) {
|
||||||
|
@ -364,27 +61,20 @@ export async function destroy(ctx: UserCtx) {
|
||||||
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
|
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
|
||||||
}
|
}
|
||||||
const datasourceId = getDatasourceId(tableToDelete)
|
const datasourceId = getDatasourceId(tableToDelete)
|
||||||
|
try {
|
||||||
const db = context.getAppDB()
|
const { datasource, table } = await sdk.tables.external.destroy(
|
||||||
const datasource = await sdk.datasources.get(datasourceId!)
|
datasourceId!,
|
||||||
const tables = datasource.entities
|
tableToDelete
|
||||||
|
)
|
||||||
const operation = Operation.DELETE_TABLE
|
builderSocket?.emitDatasourceUpdate(ctx, datasource)
|
||||||
if (tables) {
|
return table
|
||||||
await makeTableRequest(datasource, operation, tableToDelete, tables)
|
} catch (err: any) {
|
||||||
cleanupRelationships(tableToDelete, tables)
|
if (err instanceof Error) {
|
||||||
delete tables[tableToDelete.name]
|
ctx.throw(400, err.message)
|
||||||
datasource.entities = tables
|
} else {
|
||||||
|
ctx.throw(err.status || 500, err.message || err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await db.put(sdk.tables.populateExternalTableSchemas(datasource))
|
|
||||||
|
|
||||||
// Since tables are stored inside datasources, we need to notify clients
|
|
||||||
// that the datasource definition changed
|
|
||||||
const updatedDatasource = await sdk.datasources.get(datasource._id!)
|
|
||||||
builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)
|
|
||||||
|
|
||||||
return tableToDelete
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function bulkImport(
|
export async function bulkImport(
|
||||||
|
|
|
@ -16,6 +16,7 @@ import {
|
||||||
Table,
|
Table,
|
||||||
TableResponse,
|
TableResponse,
|
||||||
UserCtx,
|
UserCtx,
|
||||||
|
Row,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
import { jsonFromCsvString } from "../../../utilities/csv"
|
import { jsonFromCsvString } from "../../../utilities/csv"
|
||||||
|
@ -139,8 +140,7 @@ export async function validateNewTableImport(ctx: UserCtx) {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function validateExistingTableImport(ctx: UserCtx) {
|
export async function validateExistingTableImport(ctx: UserCtx) {
|
||||||
const { rows, tableId }: { rows: unknown; tableId: unknown } =
|
const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body
|
||||||
ctx.request.body
|
|
||||||
|
|
||||||
let schema = null
|
let schema = null
|
||||||
if (tableId) {
|
if (tableId) {
|
||||||
|
|
|
@ -1,14 +1,5 @@
|
||||||
import { updateLinks, EventType } from "../../../db/linkedRows"
|
import { generateTableID } from "../../../db/utils"
|
||||||
import { getRowParams, generateTableID } from "../../../db/utils"
|
import { handleDataImport } from "./utils"
|
||||||
import { FieldTypes } from "../../../constants"
|
|
||||||
import { TableSaveFunctions, hasTypeChanged, handleDataImport } from "./utils"
|
|
||||||
import { context } from "@budibase/backend-core"
|
|
||||||
import env from "../../../environment"
|
|
||||||
import {
|
|
||||||
cleanupAttachments,
|
|
||||||
fixAutoColumnSubType,
|
|
||||||
} from "../../../utilities/rowProcessor"
|
|
||||||
import { runStaticFormulaChecks } from "./bulkFormula"
|
|
||||||
import {
|
import {
|
||||||
BulkImportRequest,
|
BulkImportRequest,
|
||||||
BulkImportResponse,
|
BulkImportResponse,
|
||||||
|
@ -17,195 +8,52 @@ import {
|
||||||
SaveTableResponse,
|
SaveTableResponse,
|
||||||
Table,
|
Table,
|
||||||
UserCtx,
|
UserCtx,
|
||||||
ViewStatisticsSchema,
|
|
||||||
ViewV2,
|
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import { quotas } from "@budibase/pro"
|
|
||||||
import isEqual from "lodash/isEqual"
|
|
||||||
import { cloneDeep } from "lodash/fp"
|
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
|
|
||||||
function checkAutoColumns(table: Table, oldTable?: Table) {
|
|
||||||
if (!table.schema) {
|
|
||||||
return table
|
|
||||||
}
|
|
||||||
for (let [key, schema] of Object.entries(table.schema)) {
|
|
||||||
if (!schema.autocolumn || schema.subtype) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const oldSchema = oldTable && oldTable.schema[key]
|
|
||||||
if (oldSchema && oldSchema.subtype) {
|
|
||||||
table.schema[key].subtype = oldSchema.subtype
|
|
||||||
} else {
|
|
||||||
table.schema[key] = fixAutoColumnSubType(schema)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return table
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
||||||
const db = context.getAppDB()
|
|
||||||
const { rows, ...rest } = ctx.request.body
|
const { rows, ...rest } = ctx.request.body
|
||||||
let tableToSave: Table & {
|
let tableToSave: Table & {
|
||||||
_rename?: { old: string; updated: string } | undefined
|
_rename?: RenameColumn
|
||||||
} = {
|
} = {
|
||||||
type: "table",
|
type: "table",
|
||||||
_id: generateTableID(),
|
_id: generateTableID(),
|
||||||
views: {},
|
views: {},
|
||||||
...rest,
|
...rest,
|
||||||
}
|
}
|
||||||
|
const renaming = tableToSave._rename
|
||||||
|
delete tableToSave._rename
|
||||||
|
|
||||||
// if the table obj had an _id then it will have been retrieved
|
|
||||||
let oldTable: Table | undefined
|
|
||||||
if (ctx.request.body && ctx.request.body._id) {
|
|
||||||
oldTable = await sdk.tables.getTable(ctx.request.body._id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// check all types are correct
|
|
||||||
if (hasTypeChanged(tableToSave, oldTable)) {
|
|
||||||
ctx.throw(400, "A column type has changed.")
|
|
||||||
}
|
|
||||||
// check that subtypes have been maintained
|
|
||||||
tableToSave = checkAutoColumns(tableToSave, oldTable)
|
|
||||||
|
|
||||||
// saving a table is a complex operation, involving many different steps, this
|
|
||||||
// has been broken out into a utility to make it more obvious/easier to manipulate
|
|
||||||
const tableSaveFunctions = new TableSaveFunctions({
|
|
||||||
user: ctx.user,
|
|
||||||
oldTable,
|
|
||||||
importRows: rows,
|
|
||||||
})
|
|
||||||
tableToSave = await tableSaveFunctions.before(tableToSave)
|
|
||||||
|
|
||||||
// make sure that types don't change of a column, have to remove
|
|
||||||
// the column if you want to change the type
|
|
||||||
if (oldTable && oldTable.schema) {
|
|
||||||
for (const propKey of Object.keys(tableToSave.schema)) {
|
|
||||||
let oldColumn = oldTable.schema[propKey]
|
|
||||||
if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
|
|
||||||
oldTable.schema[propKey].type = FieldTypes.AUTO
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Don't rename if the name is the same
|
|
||||||
let _rename: RenameColumn | undefined = tableToSave._rename
|
|
||||||
/* istanbul ignore next */
|
|
||||||
if (_rename && _rename.old === _rename.updated) {
|
|
||||||
_rename = undefined
|
|
||||||
delete tableToSave._rename
|
|
||||||
}
|
|
||||||
|
|
||||||
// rename row fields when table column is renamed
|
|
||||||
/* istanbul ignore next */
|
|
||||||
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
|
|
||||||
ctx.throw(400, "Cannot rename a linked column.")
|
|
||||||
}
|
|
||||||
|
|
||||||
tableToSave = await tableSaveFunctions.mid(tableToSave)
|
|
||||||
|
|
||||||
// update schema of non-statistics views when new columns are added
|
|
||||||
for (let view in tableToSave.views) {
|
|
||||||
const tableView = tableToSave.views[view]
|
|
||||||
if (!tableView) continue
|
|
||||||
|
|
||||||
if (sdk.views.isV2(tableView)) {
|
|
||||||
tableToSave.views[view] = sdk.views.syncSchema(
|
|
||||||
oldTable!.views![view] as ViewV2,
|
|
||||||
tableToSave.schema,
|
|
||||||
_rename
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
(tableView.schema as ViewStatisticsSchema).group ||
|
|
||||||
tableView.schema.field
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
tableView.schema = tableToSave.schema
|
|
||||||
}
|
|
||||||
|
|
||||||
// update linked rows
|
|
||||||
try {
|
try {
|
||||||
const linkResp: any = await updateLinks({
|
const { table } = await sdk.tables.internal.save(tableToSave, {
|
||||||
eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
|
user: ctx.user,
|
||||||
table: tableToSave,
|
rowsToImport: rows,
|
||||||
oldTable: oldTable,
|
tableId: ctx.request.body._id,
|
||||||
|
renaming: renaming,
|
||||||
})
|
})
|
||||||
if (linkResp != null && linkResp._rev) {
|
|
||||||
tableToSave._rev = linkResp._rev
|
return table
|
||||||
|
} catch (err: any) {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
ctx.throw(400, err.message)
|
||||||
|
} else {
|
||||||
|
ctx.throw(err.status || 500, err.message || err)
|
||||||
}
|
}
|
||||||
} catch (err) {
|
|
||||||
ctx.throw(400, err as string)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// don't perform any updates until relationships have been
|
|
||||||
// checked by the updateLinks function
|
|
||||||
const updatedRows = tableSaveFunctions.getUpdatedRows()
|
|
||||||
if (updatedRows && updatedRows.length !== 0) {
|
|
||||||
await db.bulkDocs(updatedRows)
|
|
||||||
}
|
|
||||||
let result = await db.put(tableToSave)
|
|
||||||
tableToSave._rev = result.rev
|
|
||||||
const savedTable = cloneDeep(tableToSave)
|
|
||||||
|
|
||||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
|
||||||
// the table may be updated as part of the table save after functionality - need to write it
|
|
||||||
if (!isEqual(savedTable, tableToSave)) {
|
|
||||||
result = await db.put(tableToSave)
|
|
||||||
tableToSave._rev = result.rev
|
|
||||||
}
|
|
||||||
// has to run after, make sure it has _id
|
|
||||||
await runStaticFormulaChecks(tableToSave, { oldTable, deletion: false })
|
|
||||||
return tableToSave
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function destroy(ctx: any) {
|
export async function destroy(ctx: UserCtx) {
|
||||||
const db = context.getAppDB()
|
|
||||||
const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
|
const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
|
||||||
|
try {
|
||||||
// Delete all rows for that table
|
const { table } = await sdk.tables.internal.destroy(tableToDelete)
|
||||||
const rowsData = await db.allDocs(
|
return table
|
||||||
getRowParams(ctx.params.tableId, null, {
|
} catch (err: any) {
|
||||||
include_docs: true,
|
if (err instanceof Error) {
|
||||||
})
|
ctx.throw(400, err.message)
|
||||||
)
|
} else {
|
||||||
await db.bulkDocs(
|
ctx.throw(err.status || 500, err.message || err)
|
||||||
rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
|
|
||||||
)
|
|
||||||
await quotas.removeRows(rowsData.rows.length, {
|
|
||||||
tableId: ctx.params.tableId,
|
|
||||||
})
|
|
||||||
|
|
||||||
// update linked rows
|
|
||||||
await updateLinks({
|
|
||||||
eventType: EventType.TABLE_DELETE,
|
|
||||||
table: tableToDelete,
|
|
||||||
})
|
|
||||||
|
|
||||||
// don't remove the table itself until very end
|
|
||||||
await db.remove(tableToDelete._id!, tableToDelete._rev)
|
|
||||||
|
|
||||||
// remove table search index
|
|
||||||
if (!env.isTest() || env.COUCH_DB_URL) {
|
|
||||||
const currentIndexes = await db.getIndexes()
|
|
||||||
const existingIndex = currentIndexes.indexes.find(
|
|
||||||
(existing: any) => existing.name === `search:${ctx.params.tableId}`
|
|
||||||
)
|
|
||||||
if (existingIndex) {
|
|
||||||
await db.deleteIndex(existingIndex)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// has to run after, make sure it has _id
|
|
||||||
await runStaticFormulaChecks(tableToDelete, {
|
|
||||||
deletion: true,
|
|
||||||
})
|
|
||||||
await cleanupAttachments(tableToDelete, {
|
|
||||||
rows: rowsData.rows.map((row: any) => row.doc),
|
|
||||||
})
|
|
||||||
return tableToDelete
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function bulkImport(
|
export async function bulkImport(
|
||||||
|
@ -213,6 +61,10 @@ export async function bulkImport(
|
||||||
) {
|
) {
|
||||||
const table = await sdk.tables.getTable(ctx.params.tableId)
|
const table = await sdk.tables.getTable(ctx.params.tableId)
|
||||||
const { rows, identifierFields } = ctx.request.body
|
const { rows, identifierFields } = ctx.request.body
|
||||||
await handleDataImport(ctx.user, table, rows, identifierFields)
|
await handleDataImport(table, {
|
||||||
|
importRows: rows,
|
||||||
|
identifierFields,
|
||||||
|
user: ctx.user,
|
||||||
|
})
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
|
@ -30,6 +30,9 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
|
||||||
[FieldType.ATTACHMENT]: SQLiteType.BLOB,
|
[FieldType.ATTACHMENT]: SQLiteType.BLOB,
|
||||||
[FieldType.ARRAY]: SQLiteType.BLOB,
|
[FieldType.ARRAY]: SQLiteType.BLOB,
|
||||||
[FieldType.LINK]: SQLiteType.BLOB,
|
[FieldType.LINK]: SQLiteType.BLOB,
|
||||||
|
[FieldType.BIGINT]: SQLiteType.REAL,
|
||||||
|
// TODO: consider the difference between multi-user and single user types (subtyping)
|
||||||
|
[FieldType.BB_REFERENCE]: SQLiteType.TEXT,
|
||||||
}
|
}
|
||||||
|
|
||||||
function mapTable(table: Table): { [key: string]: SQLiteType } {
|
function mapTable(table: Table): { [key: string]: SQLiteType } {
|
||||||
|
|
|
@ -27,9 +27,16 @@ import {
|
||||||
Row,
|
Row,
|
||||||
SourceName,
|
SourceName,
|
||||||
Table,
|
Table,
|
||||||
|
Database,
|
||||||
|
RenameColumn,
|
||||||
|
NumberFieldMetadata,
|
||||||
|
FieldSchema,
|
||||||
|
View,
|
||||||
|
RelationshipFieldMetadata,
|
||||||
|
FieldType,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
|
|
||||||
export async function clearColumns(table: any, columnNames: any) {
|
export async function clearColumns(table: Table, columnNames: string[]) {
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
const rows = await db.allDocs(
|
const rows = await db.allDocs(
|
||||||
getRowParams(table._id, null, {
|
getRowParams(table._id, null, {
|
||||||
|
@ -44,10 +51,13 @@ export async function clearColumns(table: any, columnNames: any) {
|
||||||
)) as { id: string; _rev?: string }[]
|
)) as { id: string; _rev?: string }[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
|
export async function checkForColumnUpdates(
|
||||||
|
updatedTable: Table,
|
||||||
|
oldTable?: Table,
|
||||||
|
columnRename?: RenameColumn
|
||||||
|
) {
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
let updatedRows = []
|
let updatedRows = []
|
||||||
const rename = updatedTable._rename
|
|
||||||
let deletedColumns: any = []
|
let deletedColumns: any = []
|
||||||
if (oldTable && oldTable.schema && updatedTable.schema) {
|
if (oldTable && oldTable.schema && updatedTable.schema) {
|
||||||
deletedColumns = Object.keys(oldTable.schema).filter(
|
deletedColumns = Object.keys(oldTable.schema).filter(
|
||||||
|
@ -55,7 +65,7 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
// check for renaming of columns or deleted columns
|
// check for renaming of columns or deleted columns
|
||||||
if (rename || deletedColumns.length !== 0) {
|
if (columnRename || deletedColumns.length !== 0) {
|
||||||
// Update all rows
|
// Update all rows
|
||||||
const rows = await db.allDocs(
|
const rows = await db.allDocs(
|
||||||
getRowParams(updatedTable._id, null, {
|
getRowParams(updatedTable._id, null, {
|
||||||
|
@ -65,9 +75,9 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
|
||||||
const rawRows = rows.rows.map(({ doc }: any) => doc)
|
const rawRows = rows.rows.map(({ doc }: any) => doc)
|
||||||
updatedRows = rawRows.map((row: any) => {
|
updatedRows = rawRows.map((row: any) => {
|
||||||
row = cloneDeep(row)
|
row = cloneDeep(row)
|
||||||
if (rename) {
|
if (columnRename) {
|
||||||
row[rename.updated] = row[rename.old]
|
row[columnRename.updated] = row[columnRename.old]
|
||||||
delete row[rename.old]
|
delete row[columnRename.old]
|
||||||
} else if (deletedColumns.length !== 0) {
|
} else if (deletedColumns.length !== 0) {
|
||||||
deletedColumns.forEach((colName: any) => delete row[colName])
|
deletedColumns.forEach((colName: any) => delete row[colName])
|
||||||
}
|
}
|
||||||
|
@ -77,14 +87,13 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
|
||||||
// cleanup any attachments from object storage for deleted attachment columns
|
// cleanup any attachments from object storage for deleted attachment columns
|
||||||
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
|
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
|
||||||
// Update views
|
// Update views
|
||||||
await checkForViewUpdates(updatedTable, rename, deletedColumns)
|
await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
|
||||||
delete updatedTable._rename
|
|
||||||
}
|
}
|
||||||
return { rows: updatedRows, table: updatedTable }
|
return { rows: updatedRows, table: updatedTable }
|
||||||
}
|
}
|
||||||
|
|
||||||
// makes sure the passed in table isn't going to reset the auto ID
|
// makes sure the passed in table isn't going to reset the auto ID
|
||||||
export function makeSureTableUpToDate(table: any, tableToSave: any) {
|
export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
|
||||||
if (!table) {
|
if (!table) {
|
||||||
return tableToSave
|
return tableToSave
|
||||||
}
|
}
|
||||||
|
@ -100,16 +109,17 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
|
||||||
column.subtype === AutoFieldSubTypes.AUTO_ID &&
|
column.subtype === AutoFieldSubTypes.AUTO_ID &&
|
||||||
tableToSave.schema[field]
|
tableToSave.schema[field]
|
||||||
) {
|
) {
|
||||||
tableToSave.schema[field].lastID = column.lastID
|
const tableCol = tableToSave.schema[field] as NumberFieldMetadata
|
||||||
|
tableCol.lastID = column.lastID
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return tableToSave
|
return tableToSave
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function importToRows(
|
export async function importToRows(
|
||||||
data: any[],
|
data: Row[],
|
||||||
table: Table,
|
table: Table,
|
||||||
user: ContextUser | null = null
|
user?: ContextUser
|
||||||
) {
|
) {
|
||||||
let originalTable = table
|
let originalTable = table
|
||||||
let finalData: any = []
|
let finalData: any = []
|
||||||
|
@ -151,19 +161,20 @@ export async function importToRows(
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function handleDataImport(
|
export async function handleDataImport(
|
||||||
user: ContextUser,
|
|
||||||
table: Table,
|
table: Table,
|
||||||
rows: Row[],
|
opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
|
||||||
identifierFields: Array<string> = []
|
|
||||||
) {
|
) {
|
||||||
const schema = table.schema
|
const schema = table.schema
|
||||||
|
const identifierFields = opts?.identifierFields || []
|
||||||
|
const user = opts?.user
|
||||||
|
const importRows = opts?.importRows
|
||||||
|
|
||||||
if (!rows || !isRows(rows) || !isSchema(schema)) {
|
if (!importRows || !isRows(importRows) || !isSchema(schema)) {
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
const data = parse(rows, schema)
|
const data = parse(importRows, schema)
|
||||||
|
|
||||||
let finalData: any = await importToRows(data, table, user)
|
let finalData: any = await importToRows(data, table, user)
|
||||||
|
|
||||||
|
@ -201,7 +212,7 @@ export async function handleDataImport(
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function handleSearchIndexes(table: any) {
|
export async function handleSearchIndexes(table: Table) {
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
// create relevant search indexes
|
// create relevant search indexes
|
||||||
if (table.indexes && table.indexes.length > 0) {
|
if (table.indexes && table.indexes.length > 0) {
|
||||||
|
@ -245,13 +256,13 @@ export async function handleSearchIndexes(table: any) {
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
||||||
export function checkStaticTables(table: any) {
|
export function checkStaticTables(table: Table) {
|
||||||
// check user schema has all required elements
|
// check user schema has all required elements
|
||||||
if (table._id === InternalTables.USER_METADATA) {
|
if (table._id === InternalTables.USER_METADATA) {
|
||||||
for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
|
for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
|
||||||
// check if the schema exists on the table to be created/updated
|
// check if the schema exists on the table to be created/updated
|
||||||
if (table.schema[key] == null) {
|
if (table.schema[key] == null) {
|
||||||
table.schema[key] = schema
|
table.schema[key] = schema as FieldSchema
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -259,13 +270,21 @@ export function checkStaticTables(table: any) {
|
||||||
}
|
}
|
||||||
|
|
||||||
class TableSaveFunctions {
|
class TableSaveFunctions {
|
||||||
db: any
|
db: Database
|
||||||
user: any
|
user?: ContextUser
|
||||||
oldTable: any
|
oldTable?: Table
|
||||||
importRows: any
|
importRows?: Row[]
|
||||||
rows: any
|
rows: Row[]
|
||||||
|
|
||||||
constructor({ user, oldTable, importRows }: any) {
|
constructor({
|
||||||
|
user,
|
||||||
|
oldTable,
|
||||||
|
importRows,
|
||||||
|
}: {
|
||||||
|
user?: ContextUser
|
||||||
|
oldTable?: Table
|
||||||
|
importRows?: Row[]
|
||||||
|
}) {
|
||||||
this.db = context.getAppDB()
|
this.db = context.getAppDB()
|
||||||
this.user = user
|
this.user = user
|
||||||
this.oldTable = oldTable
|
this.oldTable = oldTable
|
||||||
|
@ -275,7 +294,7 @@ class TableSaveFunctions {
|
||||||
}
|
}
|
||||||
|
|
||||||
// before anything is done
|
// before anything is done
|
||||||
async before(table: any) {
|
async before(table: Table) {
|
||||||
if (this.oldTable) {
|
if (this.oldTable) {
|
||||||
table = makeSureTableUpToDate(this.oldTable, table)
|
table = makeSureTableUpToDate(this.oldTable, table)
|
||||||
}
|
}
|
||||||
|
@ -284,16 +303,23 @@ class TableSaveFunctions {
|
||||||
}
|
}
|
||||||
|
|
||||||
// when confirmed valid
|
// when confirmed valid
|
||||||
async mid(table: any) {
|
async mid(table: Table, columnRename?: RenameColumn) {
|
||||||
let response = await checkForColumnUpdates(this.oldTable, table)
|
let response = await checkForColumnUpdates(
|
||||||
|
table,
|
||||||
|
this.oldTable,
|
||||||
|
columnRename
|
||||||
|
)
|
||||||
this.rows = this.rows.concat(response.rows)
|
this.rows = this.rows.concat(response.rows)
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
||||||
// after saving
|
// after saving
|
||||||
async after(table: any) {
|
async after(table: Table) {
|
||||||
table = await handleSearchIndexes(table)
|
table = await handleSearchIndexes(table)
|
||||||
table = await handleDataImport(this.user, table, this.importRows)
|
table = await handleDataImport(table, {
|
||||||
|
importRows: this.importRows,
|
||||||
|
user: this.user,
|
||||||
|
})
|
||||||
await addTableToSqlite(table)
|
await addTableToSqlite(table)
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
@ -304,9 +330,9 @@ class TableSaveFunctions {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function checkForViewUpdates(
|
export async function checkForViewUpdates(
|
||||||
table: any,
|
table: Table,
|
||||||
rename: any,
|
deletedColumns: string[],
|
||||||
deletedColumns: any
|
columnRename?: RenameColumn
|
||||||
) {
|
) {
|
||||||
const views = await getViews()
|
const views = await getViews()
|
||||||
const tableViews = views.filter(view => view.meta.tableId === table._id)
|
const tableViews = views.filter(view => view.meta.tableId === table._id)
|
||||||
|
@ -316,30 +342,30 @@ export async function checkForViewUpdates(
|
||||||
let needsUpdated = false
|
let needsUpdated = false
|
||||||
|
|
||||||
// First check for renames, otherwise check for deletions
|
// First check for renames, otherwise check for deletions
|
||||||
if (rename) {
|
if (columnRename) {
|
||||||
// Update calculation field if required
|
// Update calculation field if required
|
||||||
if (view.meta.field === rename.old) {
|
if (view.meta.field === columnRename.old) {
|
||||||
view.meta.field = rename.updated
|
view.meta.field = columnRename.updated
|
||||||
needsUpdated = true
|
needsUpdated = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update group by field if required
|
// Update group by field if required
|
||||||
if (view.meta.groupBy === rename.old) {
|
if (view.meta.groupBy === columnRename.old) {
|
||||||
view.meta.groupBy = rename.updated
|
view.meta.groupBy = columnRename.updated
|
||||||
needsUpdated = true
|
needsUpdated = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update filters if required
|
// Update filters if required
|
||||||
if (view.meta.filters) {
|
if (view.meta.filters) {
|
||||||
view.meta.filters.forEach((filter: any) => {
|
view.meta.filters.forEach((filter: any) => {
|
||||||
if (filter.key === rename.old) {
|
if (filter.key === columnRename.old) {
|
||||||
filter.key = rename.updated
|
filter.key = columnRename.updated
|
||||||
needsUpdated = true
|
needsUpdated = true
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
} else if (deletedColumns) {
|
} else if (deletedColumns) {
|
||||||
deletedColumns.forEach((column: any) => {
|
deletedColumns.forEach((column: string) => {
|
||||||
// Remove calculation statement if required
|
// Remove calculation statement if required
|
||||||
if (view.meta.field === column) {
|
if (view.meta.field === column) {
|
||||||
delete view.meta.field
|
delete view.meta.field
|
||||||
|
@ -380,24 +406,29 @@ export async function checkForViewUpdates(
|
||||||
if (!newViewTemplate.meta.schema) {
|
if (!newViewTemplate.meta.schema) {
|
||||||
newViewTemplate.meta.schema = table.schema
|
newViewTemplate.meta.schema = table.schema
|
||||||
}
|
}
|
||||||
table.views[view.name] = newViewTemplate.meta
|
if (table.views?.[view.name]) {
|
||||||
|
table.views[view.name] = newViewTemplate.meta as View
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function generateForeignKey(column: any, relatedTable: any) {
|
export function generateForeignKey(
|
||||||
|
column: RelationshipFieldMetadata,
|
||||||
|
relatedTable: Table
|
||||||
|
) {
|
||||||
return `fk_${relatedTable.name}_${column.fieldName}`
|
return `fk_${relatedTable.name}_${column.fieldName}`
|
||||||
}
|
}
|
||||||
|
|
||||||
export function generateJunctionTableName(
|
export function generateJunctionTableName(
|
||||||
column: any,
|
column: RelationshipFieldMetadata,
|
||||||
table: any,
|
table: Table,
|
||||||
relatedTable: any
|
relatedTable: Table
|
||||||
) {
|
) {
|
||||||
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
|
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
|
||||||
}
|
}
|
||||||
|
|
||||||
export function foreignKeyStructure(keyName: any, meta?: any) {
|
export function foreignKeyStructure(keyName: string, meta?: any) {
|
||||||
const structure: any = {
|
const structure: any = {
|
||||||
type: FieldTypes.NUMBER,
|
type: FieldTypes.NUMBER,
|
||||||
constraints: {},
|
constraints: {},
|
||||||
|
@ -409,7 +440,7 @@ export function foreignKeyStructure(keyName: any, meta?: any) {
|
||||||
return structure
|
return structure
|
||||||
}
|
}
|
||||||
|
|
||||||
export function areSwitchableTypes(type1: any, type2: any) {
|
export function areSwitchableTypes(type1: FieldType, type2: FieldType) {
|
||||||
if (
|
if (
|
||||||
SwitchableTypes.indexOf(type1) === -1 &&
|
SwitchableTypes.indexOf(type1) === -1 &&
|
||||||
SwitchableTypes.indexOf(type2) === -1
|
SwitchableTypes.indexOf(type2) === -1
|
||||||
|
|
|
@ -1,13 +1,11 @@
|
||||||
import { generateUserFlagID } from "../../db/utils"
|
import { generateUserFlagID, InternalTables } from "../../db/utils"
|
||||||
import { InternalTables } from "../../db/utils"
|
|
||||||
import { getFullUser } from "../../utilities/users"
|
import { getFullUser } from "../../utilities/users"
|
||||||
import { context } from "@budibase/backend-core"
|
import { context } from "@budibase/backend-core"
|
||||||
import { Ctx, UserCtx } from "@budibase/types"
|
import { Ctx, UserCtx } from "@budibase/types"
|
||||||
import sdk from "../../sdk"
|
import sdk from "../../sdk"
|
||||||
|
|
||||||
export async function fetchMetadata(ctx: Ctx) {
|
export async function fetchMetadata(ctx: Ctx) {
|
||||||
const users = await sdk.users.fetchMetadata()
|
ctx.body = await sdk.users.fetchMetadata()
|
||||||
ctx.body = users
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function updateSelfMetadata(ctx: UserCtx) {
|
export async function updateSelfMetadata(ctx: UserCtx) {
|
||||||
|
|
|
@ -88,8 +88,8 @@ const SCHEMA_MAP: Record<string, any> = {
|
||||||
/**
|
/**
|
||||||
* Iterates through the array of filters to create a JS
|
* Iterates through the array of filters to create a JS
|
||||||
* expression that gets used in a CouchDB view.
|
* expression that gets used in a CouchDB view.
|
||||||
* @param {Array} filters - an array of filter objects
|
* @param filters - an array of filter objects
|
||||||
* @returns {String} JS Expression
|
* @returns JS Expression
|
||||||
*/
|
*/
|
||||||
function parseFilterExpression(filters: ViewFilter[]) {
|
function parseFilterExpression(filters: ViewFilter[]) {
|
||||||
const expression = []
|
const expression = []
|
||||||
|
@ -125,8 +125,8 @@ function parseFilterExpression(filters: ViewFilter[]) {
|
||||||
/**
|
/**
|
||||||
* Returns a CouchDB compliant emit() expression that is used to emit the
|
* Returns a CouchDB compliant emit() expression that is used to emit the
|
||||||
* correct key/value pairs for custom views.
|
* correct key/value pairs for custom views.
|
||||||
* @param {String?} field - field to use for calculations, if any
|
* @param field - field to use for calculations, if any
|
||||||
* @param {String?} groupBy - field to group calculation results on, if any
|
* @param groupBy - field to group calculation results on, if any
|
||||||
*/
|
*/
|
||||||
function parseEmitExpression(field: string, groupBy: string) {
|
function parseEmitExpression(field: string, groupBy: string) {
|
||||||
return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);`
|
return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);`
|
||||||
|
@ -136,7 +136,7 @@ function parseEmitExpression(field: string, groupBy: string) {
|
||||||
* Return a fully parsed CouchDB compliant view definition
|
* Return a fully parsed CouchDB compliant view definition
|
||||||
* that will be stored in the design document in the database.
|
* that will be stored in the design document in the database.
|
||||||
*
|
*
|
||||||
* @param {Object} viewDefinition - the JSON definition for a custom view.
|
* @param viewDefinition - the JSON definition for a custom view.
|
||||||
* field: field that calculations will be performed on
|
* field: field that calculations will be performed on
|
||||||
* tableId: tableId of the table this view was created from
|
* tableId: tableId of the table this view was created from
|
||||||
* groupBy: field that calculations will be grouped by. Field must be present for this to be useful
|
* groupBy: field that calculations will be grouped by. Field must be present for this to be useful
|
||||||
|
|
|
@ -12,14 +12,14 @@ describe("run misc tests", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("/bbtel", () => {
|
describe("/bbtel", () => {
|
||||||
it("check if analytics enabled", async () => {
|
it("check if analytics enabled", async () => {
|
||||||
const res = await request
|
const res = await request
|
||||||
.get(`/api/bbtel`)
|
.get(`/api/bbtel`)
|
||||||
.set(config.defaultHeaders())
|
.set(config.defaultHeaders())
|
||||||
.expect("Content-Type", /json/)
|
.expect("Content-Type", /json/)
|
||||||
.expect(200)
|
.expect(200)
|
||||||
expect(typeof res.body.enabled).toEqual("boolean")
|
expect(typeof res.body.enabled).toEqual("boolean")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("/health", () => {
|
describe("/health", () => {
|
||||||
|
@ -37,7 +37,6 @@ describe("run misc tests", () => {
|
||||||
} else {
|
} else {
|
||||||
expect(text.split(".").length).toEqual(3)
|
expect(text.split(".").length).toEqual(3)
|
||||||
}
|
}
|
||||||
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -93,77 +92,79 @@ describe("run misc tests", () => {
|
||||||
constraints: {
|
constraints: {
|
||||||
type: "array",
|
type: "array",
|
||||||
presence: {
|
presence: {
|
||||||
"allowEmpty": true
|
allowEmpty: true,
|
||||||
},
|
},
|
||||||
inclusion: [
|
inclusion: ["One", "Two", "Three"],
|
||||||
"One",
|
|
||||||
"Two",
|
|
||||||
"Three",
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
name: "Sample Tags",
|
name: "Sample Tags",
|
||||||
sortable: false
|
sortable: false,
|
||||||
},
|
},
|
||||||
g: {
|
g: {
|
||||||
type: "options",
|
type: "options",
|
||||||
constraints: {
|
constraints: {
|
||||||
type: "string",
|
type: "string",
|
||||||
presence: false,
|
presence: false,
|
||||||
inclusion: [
|
inclusion: ["Alpha", "Beta", "Gamma"],
|
||||||
"Alpha",
|
|
||||||
"Beta",
|
|
||||||
"Gamma"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
name: "Sample Opts"
|
name: "Sample Opts",
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const importRows = [
|
||||||
|
{ a: "1", b: "2", c: "3", d: "4", f: "['One']", g: "Alpha" },
|
||||||
|
{ a: "5", b: "6", c: "7", d: "8", f: "[]", g: undefined },
|
||||||
|
{ a: "9", b: "10", c: "11", d: "12", f: "['Two','Four']", g: "" },
|
||||||
|
{ a: "13", b: "14", c: "15", d: "16", g: "Omega" },
|
||||||
|
]
|
||||||
// Shift specific row tests to the row spec
|
// Shift specific row tests to the row spec
|
||||||
await tableUtils.handleDataImport(
|
await tableUtils.handleDataImport(table, {
|
||||||
{ userId: "test" },
|
importRows,
|
||||||
table,
|
user: { userId: "test" },
|
||||||
[
|
})
|
||||||
{ a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" },
|
|
||||||
{ a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined},
|
|
||||||
{ a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""},
|
|
||||||
{ a: '13', b: '14', c: '15', d: '16', g: "Omega"}
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
// 4 rows imported, the auto ID starts at 1
|
// 4 rows imported, the auto ID starts at 1
|
||||||
// We expect the handleDataImport function to update the lastID
|
// We expect the handleDataImport function to update the lastID
|
||||||
expect(table.schema.e.lastID).toEqual(4);
|
expect(table.schema.e.lastID).toEqual(4)
|
||||||
|
|
||||||
// Array/Multi - should have added a new value to the inclusion.
|
// Array/Multi - should have added a new value to the inclusion.
|
||||||
expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']);
|
expect(table.schema.f.constraints.inclusion).toEqual([
|
||||||
|
"Four",
|
||||||
|
"One",
|
||||||
|
"Three",
|
||||||
|
"Two",
|
||||||
|
])
|
||||||
|
|
||||||
// Options - should have a new value in the inclusion
|
// Options - should have a new value in the inclusion
|
||||||
expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']);
|
expect(table.schema.g.constraints.inclusion).toEqual([
|
||||||
|
"Alpha",
|
||||||
|
"Beta",
|
||||||
|
"Gamma",
|
||||||
|
"Omega",
|
||||||
|
])
|
||||||
|
|
||||||
const rows = await config.getRows()
|
const rows = await config.getRows()
|
||||||
expect(rows.length).toEqual(4);
|
expect(rows.length).toEqual(4)
|
||||||
|
|
||||||
const rowOne = rows.find(row => row.e === 1)
|
const rowOne = rows.find(row => row.e === 1)
|
||||||
expect(rowOne.a).toEqual("1")
|
expect(rowOne.a).toEqual("1")
|
||||||
expect(rowOne.f).toEqual(['One'])
|
expect(rowOne.f).toEqual(["One"])
|
||||||
expect(rowOne.g).toEqual('Alpha')
|
expect(rowOne.g).toEqual("Alpha")
|
||||||
|
|
||||||
const rowTwo = rows.find(row => row.e === 2)
|
const rowTwo = rows.find(row => row.e === 2)
|
||||||
expect(rowTwo.a).toEqual("5")
|
expect(rowTwo.a).toEqual("5")
|
||||||
expect(rowTwo.f).toEqual([])
|
expect(rowTwo.f).toEqual([])
|
||||||
expect(rowTwo.g).toEqual(undefined)
|
expect(rowTwo.g).toEqual(undefined)
|
||||||
|
|
||||||
const rowThree = rows.find(row => row.e === 3)
|
const rowThree = rows.find(row => row.e === 3)
|
||||||
expect(rowThree.a).toEqual("9")
|
expect(rowThree.a).toEqual("9")
|
||||||
expect(rowThree.f).toEqual(['Two','Four'])
|
expect(rowThree.f).toEqual(["Two", "Four"])
|
||||||
expect(rowThree.g).toEqual(null)
|
expect(rowThree.g).toEqual(null)
|
||||||
|
|
||||||
const rowFour = rows.find(row => row.e === 4)
|
const rowFour = rows.find(row => row.e === 4)
|
||||||
expect(rowFour.a).toEqual("13")
|
expect(rowFour.a).toEqual("13")
|
||||||
expect(rowFour.f).toEqual(undefined)
|
expect(rowFour.f).toEqual(undefined)
|
||||||
expect(rowFour.g).toEqual('Omega')
|
expect(rowFour.g).toEqual("Omega")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,11 +1,7 @@
|
||||||
import * as Sentry from "@sentry/node"
|
|
||||||
|
|
||||||
if (process.env.DD_APM_ENABLED) {
|
if (process.env.DD_APM_ENABLED) {
|
||||||
require("./ddApm")
|
require("./ddApm")
|
||||||
}
|
}
|
||||||
|
|
||||||
// need to load environment first
|
|
||||||
import env from "./environment"
|
|
||||||
import * as db from "./db"
|
import * as db from "./db"
|
||||||
db.init()
|
db.init()
|
||||||
import { ServiceType } from "@budibase/types"
|
import { ServiceType } from "@budibase/types"
|
||||||
|
@ -28,10 +24,6 @@ async function start() {
|
||||||
}
|
}
|
||||||
// startup includes automation runner - if enabled
|
// startup includes automation runner - if enabled
|
||||||
await startup(app, server)
|
await startup(app, server)
|
||||||
if (env.isProd()) {
|
|
||||||
env._set("NODE_ENV", "production")
|
|
||||||
Sentry.init()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
start().catch(err => {
|
start().catch(err => {
|
||||||
|
|
|
@ -14,13 +14,13 @@ import { LoopStep, LoopStepType, LoopInput } from "../definitions/automations"
|
||||||
* make sure that the post template statement can be cast into the correct type, this function does this for numbers
|
* make sure that the post template statement can be cast into the correct type, this function does this for numbers
|
||||||
* and booleans.
|
* and booleans.
|
||||||
*
|
*
|
||||||
* @param {object} inputs An object of inputs, please note this will not recurse down into any objects within, it simply
|
* @param inputs An object of inputs, please note this will not recurse down into any objects within, it simply
|
||||||
* cleanses the top level inputs, however it can be used by recursively calling it deeper into the object structures if
|
* cleanses the top level inputs, however it can be used by recursively calling it deeper into the object structures if
|
||||||
* the schema is known.
|
* the schema is known.
|
||||||
* @param {object} schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an
|
* @param schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an
|
||||||
* automation is the likely use case of this, however validate.js syntax can be converted closely enough to use this by
|
* automation is the likely use case of this, however validate.js syntax can be converted closely enough to use this by
|
||||||
* wrapping the schema properties in a top level "properties" object.
|
* wrapping the schema properties in a top level "properties" object.
|
||||||
* @returns {object} The inputs object which has had all the various types supported by this function converted to their
|
* @returns The inputs object which has had all the various types supported by this function converted to their
|
||||||
* primitive types.
|
* primitive types.
|
||||||
*/
|
*/
|
||||||
export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
|
export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
|
||||||
|
@ -74,9 +74,9 @@ export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
|
||||||
* the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead
|
* the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead
|
||||||
* perform the cleanInputValues function on the input row.
|
* perform the cleanInputValues function on the input row.
|
||||||
*
|
*
|
||||||
* @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for.
|
* @param tableId The ID of the Table/Table which the schema is to be retrieved for.
|
||||||
* @param {object} row The input row structure which requires clean-up after having been through template statements.
|
* @param row The input row structure which requires clean-up after having been through template statements.
|
||||||
* @returns {Promise<Object>} The cleaned up rows object, will should now have all the required primitive types.
|
* @returns The cleaned up rows object, will should now have all the required primitive types.
|
||||||
*/
|
*/
|
||||||
export async function cleanUpRow(tableId: string, row: Row) {
|
export async function cleanUpRow(tableId: string, row: Row) {
|
||||||
let table = await sdk.tables.getTable(tableId)
|
let table = await sdk.tables.getTable(tableId)
|
||||||
|
|
|
@ -148,8 +148,8 @@ export function isRebootTrigger(auto: Automation) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function handles checking of any cron jobs that need to be enabled/updated.
|
* This function handles checking of any cron jobs that need to be enabled/updated.
|
||||||
* @param {string} appId The ID of the app in which we are checking for webhooks
|
* @param appId The ID of the app in which we are checking for webhooks
|
||||||
* @param {object|undefined} automation The automation object to be updated.
|
* @param automation The automation object to be updated.
|
||||||
*/
|
*/
|
||||||
export async function enableCronTrigger(appId: any, automation: Automation) {
|
export async function enableCronTrigger(appId: any, automation: Automation) {
|
||||||
const trigger = automation ? automation.definition.trigger : null
|
const trigger = automation ? automation.definition.trigger : null
|
||||||
|
@ -187,10 +187,10 @@ export async function enableCronTrigger(appId: any, automation: Automation) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function handles checking if any webhooks need to be created or deleted for automations.
|
* This function handles checking if any webhooks need to be created or deleted for automations.
|
||||||
* @param {string} appId The ID of the app in which we are checking for webhooks
|
* @param appId The ID of the app in which we are checking for webhooks
|
||||||
* @param {object|undefined} oldAuto The old automation object if updating/deleting
|
* @param oldAuto The old automation object if updating/deleting
|
||||||
* @param {object|undefined} newAuto The new automation object if creating/updating
|
* @param newAuto The new automation object if creating/updating
|
||||||
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
|
* @returns After this is complete the new automation object may have been updated and should be
|
||||||
* written to DB (this does not write to DB as it would be wasteful to repeat).
|
* written to DB (this does not write to DB as it would be wasteful to repeat).
|
||||||
*/
|
*/
|
||||||
export async function checkForWebhooks({ oldAuto, newAuto }: any) {
|
export async function checkForWebhooks({ oldAuto, newAuto }: any) {
|
||||||
|
@ -257,8 +257,8 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* When removing an app/unpublishing it need to make sure automations are cleaned up (cron).
|
* When removing an app/unpublishing it need to make sure automations are cleaned up (cron).
|
||||||
* @param appId {string} the app that is being removed.
|
* @param appId the app that is being removed.
|
||||||
* @return {Promise<void>} clean is complete if this succeeds.
|
* @return clean is complete if this succeeds.
|
||||||
*/
|
*/
|
||||||
export async function cleanupAutomations(appId: any) {
|
export async function cleanupAutomations(appId: any) {
|
||||||
await disableAllCrons(appId)
|
await disableAllCrons(appId)
|
||||||
|
@ -267,7 +267,7 @@ export async function cleanupAutomations(appId: any) {
|
||||||
/**
|
/**
|
||||||
* Checks if the supplied automation is of a recurring type.
|
* Checks if the supplied automation is of a recurring type.
|
||||||
* @param automation The automation to check.
|
* @param automation The automation to check.
|
||||||
* @return {boolean} if it is recurring (cron).
|
* @return if it is recurring (cron).
|
||||||
*/
|
*/
|
||||||
export function isRecurring(automation: Automation) {
|
export function isRecurring(automation: Automation) {
|
||||||
return automation.definition.trigger.stepId === definitions.CRON.stepId
|
return automation.definition.trigger.stepId === definitions.CRON.stepId
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
import { IncludeDocs, getLinkDocuments } from "./linkUtils"
|
import { IncludeDocs, getLinkDocuments } from "./linkUtils"
|
||||||
import { InternalTables, getUserMetadataParams } from "../utils"
|
import { InternalTables, getUserMetadataParams } from "../utils"
|
||||||
import Sentry from "@sentry/node"
|
|
||||||
import { FieldTypes } from "../../constants"
|
import { FieldTypes } from "../../constants"
|
||||||
import { context } from "@budibase/backend-core"
|
import { context, logging } from "@budibase/backend-core"
|
||||||
import LinkDocument from "./LinkDocument"
|
import LinkDocument from "./LinkDocument"
|
||||||
import {
|
import {
|
||||||
Database,
|
Database,
|
||||||
|
@ -39,7 +38,7 @@ class LinkController {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Retrieves the table, if it was not already found in the eventData.
|
* Retrieves the table, if it was not already found in the eventData.
|
||||||
* @returns {Promise<object>} This will return a table based on the event data, either
|
* @returns This will return a table based on the event data, either
|
||||||
* if it was in the event already, or it uses the specified tableId to get it.
|
* if it was in the event already, or it uses the specified tableId to get it.
|
||||||
*/
|
*/
|
||||||
async table() {
|
async table() {
|
||||||
|
@ -53,8 +52,8 @@ class LinkController {
|
||||||
/**
|
/**
|
||||||
* Checks if the table this was constructed with has any linking columns currently.
|
* Checks if the table this was constructed with has any linking columns currently.
|
||||||
* If the table has not been retrieved this will retrieve it based on the eventData.
|
* If the table has not been retrieved this will retrieve it based on the eventData.
|
||||||
* @params {object|null} table If a table that is not known to the link controller is to be tested.
|
* @params table If a table that is not known to the link controller is to be tested.
|
||||||
* @returns {Promise<boolean>} True if there are any linked fields, otherwise it will return
|
* @returns True if there are any linked fields, otherwise it will return
|
||||||
* false.
|
* false.
|
||||||
*/
|
*/
|
||||||
async doesTableHaveLinkedFields(table?: Table) {
|
async doesTableHaveLinkedFields(table?: Table) {
|
||||||
|
@ -160,7 +159,7 @@ class LinkController {
|
||||||
/**
|
/**
|
||||||
* When a row is saved this will carry out the necessary operations to make sure
|
* When a row is saved this will carry out the necessary operations to make sure
|
||||||
* the link has been created/updated.
|
* the link has been created/updated.
|
||||||
* @returns {Promise<object>} returns the row that has been cleaned and prepared to be written to the DB - links
|
* @returns returns the row that has been cleaned and prepared to be written to the DB - links
|
||||||
* have also been created.
|
* have also been created.
|
||||||
*/
|
*/
|
||||||
async rowSaved() {
|
async rowSaved() {
|
||||||
|
@ -272,7 +271,7 @@ class LinkController {
|
||||||
/**
|
/**
|
||||||
* When a row is deleted this will carry out the necessary operations to make sure
|
* When a row is deleted this will carry out the necessary operations to make sure
|
||||||
* any links that existed have been removed.
|
* any links that existed have been removed.
|
||||||
* @returns {Promise<object>} The operation has been completed and the link documents should now
|
* @returns The operation has been completed and the link documents should now
|
||||||
* be accurate. This also returns the row that was deleted.
|
* be accurate. This also returns the row that was deleted.
|
||||||
*/
|
*/
|
||||||
async rowDeleted() {
|
async rowDeleted() {
|
||||||
|
@ -294,8 +293,8 @@ class LinkController {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Remove a field from a table as well as any linked rows that pertained to it.
|
* Remove a field from a table as well as any linked rows that pertained to it.
|
||||||
* @param {string} fieldName The field to be removed from the table.
|
* @param fieldName The field to be removed from the table.
|
||||||
* @returns {Promise<void>} The table has now been updated.
|
* @returns The table has now been updated.
|
||||||
*/
|
*/
|
||||||
async removeFieldFromTable(fieldName: string) {
|
async removeFieldFromTable(fieldName: string) {
|
||||||
let oldTable = this._oldTable
|
let oldTable = this._oldTable
|
||||||
|
@ -334,7 +333,7 @@ class LinkController {
|
||||||
/**
|
/**
|
||||||
* When a table is saved this will carry out the necessary operations to make sure
|
* When a table is saved this will carry out the necessary operations to make sure
|
||||||
* any linked tables are notified and updated correctly.
|
* any linked tables are notified and updated correctly.
|
||||||
* @returns {Promise<object>} The operation has been completed and the link documents should now
|
* @returns The operation has been completed and the link documents should now
|
||||||
* be accurate. Also returns the table that was operated on.
|
* be accurate. Also returns the table that was operated on.
|
||||||
*/
|
*/
|
||||||
async tableSaved() {
|
async tableSaved() {
|
||||||
|
@ -395,7 +394,7 @@ class LinkController {
|
||||||
/**
|
/**
|
||||||
* Update a table, this means if a field is removed need to handle removing from other table and removing
|
* Update a table, this means if a field is removed need to handle removing from other table and removing
|
||||||
* any link docs that pertained to it.
|
* any link docs that pertained to it.
|
||||||
* @returns {Promise<Object>} The table which has been saved, same response as with the tableSaved function.
|
* @returns The table which has been saved, same response as with the tableSaved function.
|
||||||
*/
|
*/
|
||||||
async tableUpdated() {
|
async tableUpdated() {
|
||||||
const oldTable = this._oldTable
|
const oldTable = this._oldTable
|
||||||
|
@ -419,7 +418,7 @@ class LinkController {
|
||||||
* When a table is deleted this will carry out the necessary operations to make sure
|
* When a table is deleted this will carry out the necessary operations to make sure
|
||||||
* any linked tables have the joining column correctly removed as well as removing any
|
* any linked tables have the joining column correctly removed as well as removing any
|
||||||
* now stale linking documents.
|
* now stale linking documents.
|
||||||
* @returns {Promise<object>} The operation has been completed and the link documents should now
|
* @returns The operation has been completed and the link documents should now
|
||||||
* be accurate. Also returns the table that was operated on.
|
* be accurate. Also returns the table that was operated on.
|
||||||
*/
|
*/
|
||||||
async tableDeleted() {
|
async tableDeleted() {
|
||||||
|
@ -433,9 +432,8 @@ class LinkController {
|
||||||
delete linkedTable.schema[field.fieldName]
|
delete linkedTable.schema[field.fieldName]
|
||||||
await this._db.put(linkedTable)
|
await this._db.put(linkedTable)
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
/* istanbul ignore next */
|
logging.logWarn(err?.message, err)
|
||||||
Sentry.captureException(err)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// need to get the full link docs to delete them
|
// need to get the full link docs to delete them
|
||||||
|
|
|
@ -6,12 +6,12 @@ import { LinkDocument } from "@budibase/types"
|
||||||
* Creates a new link document structure which can be put to the database. It is important to
|
* Creates a new link document structure which can be put to the database. It is important to
|
||||||
* note that while this talks about linker/linked the link is bi-directional and for all intent
|
* note that while this talks about linker/linked the link is bi-directional and for all intent
|
||||||
* and purposes it does not matter from which direction the link was initiated.
|
* and purposes it does not matter from which direction the link was initiated.
|
||||||
* @param {string} tableId1 The ID of the first table (the linker).
|
* @param tableId1 The ID of the first table (the linker).
|
||||||
* @param {string} tableId2 The ID of the second table (the linked).
|
* @param tableId2 The ID of the second table (the linked).
|
||||||
* @param {string} fieldName1 The name of the field in the linker table.
|
* @param fieldName1 The name of the field in the linker table.
|
||||||
* @param {string} fieldName2 The name of the field in the linked table.
|
* @param fieldName2 The name of the field in the linked table.
|
||||||
* @param {string} rowId1 The ID of the row which is acting as the linker.
|
* @param rowId1 The ID of the row which is acting as the linker.
|
||||||
* @param {string} rowId2 The ID of the row which is acting as the linked.
|
* @param rowId2 The ID of the row which is acting as the linked.
|
||||||
* @constructor
|
* @constructor
|
||||||
*/
|
*/
|
||||||
class LinkDocumentImpl implements LinkDocument {
|
class LinkDocumentImpl implements LinkDocument {
|
||||||
|
|
|
@ -9,13 +9,13 @@ import {
|
||||||
getLinkedTable,
|
getLinkedTable,
|
||||||
} from "./linkUtils"
|
} from "./linkUtils"
|
||||||
import flatten from "lodash/flatten"
|
import flatten from "lodash/flatten"
|
||||||
import { FieldTypes } from "../../constants"
|
|
||||||
import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils"
|
import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils"
|
||||||
import partition from "lodash/partition"
|
import partition from "lodash/partition"
|
||||||
import { getGlobalUsersFromMetadata } from "../../utilities/global"
|
import { getGlobalUsersFromMetadata } from "../../utilities/global"
|
||||||
import { processFormulas } from "../../utilities/rowProcessor"
|
import { processFormulas } from "../../utilities/rowProcessor"
|
||||||
import { context } from "@budibase/backend-core"
|
import { context } from "@budibase/backend-core"
|
||||||
import { Table, Row, LinkDocumentValue } from "@budibase/types"
|
import { Table, Row, LinkDocumentValue, FieldType } from "@budibase/types"
|
||||||
|
import sdk from "../../sdk"
|
||||||
|
|
||||||
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
|
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
|
||||||
|
|
||||||
|
@ -35,7 +35,7 @@ export const EventType = {
|
||||||
|
|
||||||
function clearRelationshipFields(table: Table, rows: Row[]) {
|
function clearRelationshipFields(table: Table, rows: Row[]) {
|
||||||
for (let [key, field] of Object.entries(table.schema)) {
|
for (let [key, field] of Object.entries(table.schema)) {
|
||||||
if (field.type === FieldTypes.LINK) {
|
if (field.type === FieldType.LINK) {
|
||||||
rows = rows.map(row => {
|
rows = rows.map(row => {
|
||||||
delete row[key]
|
delete row[key]
|
||||||
return row
|
return row
|
||||||
|
@ -45,7 +45,7 @@ function clearRelationshipFields(table: Table, rows: Row[]) {
|
||||||
return rows
|
return rows
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getLinksForRows(rows: Row[]) {
|
async function getLinksForRows(rows: Row[]): Promise<LinkDocumentValue[]> {
|
||||||
const tableIds = [...new Set(rows.map(el => el.tableId))]
|
const tableIds = [...new Set(rows.map(el => el.tableId))]
|
||||||
// start by getting all the link values for performance reasons
|
// start by getting all the link values for performance reasons
|
||||||
const promises = tableIds.map(tableId =>
|
const promises = tableIds.map(tableId =>
|
||||||
|
@ -90,13 +90,13 @@ async function getFullLinkedDocs(links: LinkDocumentValue[]) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
|
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
|
||||||
* @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the
|
* @param args.eventType states what type of change which is occurring, means this can be expanded upon in the
|
||||||
* future quite easily (all updates go through one function).
|
* future quite easily (all updates go through one function).
|
||||||
* @param {string} args.tableId The ID of the of the table which is being changed.
|
* @param args.tableId The ID of the of the table which is being changed.
|
||||||
* @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted.
|
* @param args.row The row which is changing, e.g. created, updated or deleted.
|
||||||
* @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets.
|
* @param args.table If the table has already been retrieved this can be used to reduce database gets.
|
||||||
* @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing.
|
* @param args.oldTable If the table is being updated then the old table can be provided for differencing.
|
||||||
* @returns {Promise<object>} When the update is complete this will respond successfully. Returns the row for
|
* @returns When the update is complete this will respond successfully. Returns the row for
|
||||||
* row operations and the table for table operations.
|
* row operations and the table for table operations.
|
||||||
*/
|
*/
|
||||||
export async function updateLinks(args: {
|
export async function updateLinks(args: {
|
||||||
|
@ -144,34 +144,59 @@ export async function updateLinks(args: {
|
||||||
/**
|
/**
|
||||||
* Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
|
* Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
|
||||||
* This is required for formula fields, this may only be utilised internally (for now).
|
* This is required for formula fields, this may only be utilised internally (for now).
|
||||||
* @param {object} table The table from which the rows originated.
|
* @param table The table from which the rows originated.
|
||||||
* @param {array<object>} rows The rows which are to be enriched.
|
* @param rows The rows which are to be enriched.
|
||||||
* @return {Promise<*>} returns the rows with all of the enriched relationships on it.
|
* @param opts optional - options like passing in a base row to use for enrichment.
|
||||||
|
* @return returns the rows with all of the enriched relationships on it.
|
||||||
*/
|
*/
|
||||||
export async function attachFullLinkedDocs(table: Table, rows: Row[]) {
|
export async function attachFullLinkedDocs(
|
||||||
|
table: Table,
|
||||||
|
rows: Row[],
|
||||||
|
opts?: { fromRow?: Row }
|
||||||
|
) {
|
||||||
const linkedTableIds = getLinkedTableIDs(table)
|
const linkedTableIds = getLinkedTableIDs(table)
|
||||||
if (linkedTableIds.length === 0) {
|
if (linkedTableIds.length === 0) {
|
||||||
return rows
|
return rows
|
||||||
}
|
}
|
||||||
// get all the links
|
// get tables and links
|
||||||
const links = (await getLinksForRows(rows)).filter(link =>
|
let response = await Promise.all([
|
||||||
|
getLinksForRows(rows),
|
||||||
|
sdk.tables.getTables(linkedTableIds),
|
||||||
|
])
|
||||||
|
// find the links that pertain to one of the rows that is being enriched
|
||||||
|
const links = (response[0] as LinkDocumentValue[]).filter(link =>
|
||||||
rows.some(row => row._id === link.thisId)
|
rows.some(row => row._id === link.thisId)
|
||||||
)
|
)
|
||||||
|
// if fromRow has been passed in, then we don't need to fetch it (optimisation)
|
||||||
|
let linksWithoutFromRow = links
|
||||||
|
if (opts?.fromRow) {
|
||||||
|
linksWithoutFromRow = links.filter(link => link.id !== opts?.fromRow?._id)
|
||||||
|
}
|
||||||
|
const linkedTables = response[1] as Table[]
|
||||||
// clear any existing links that could be dupe'd
|
// clear any existing links that could be dupe'd
|
||||||
rows = clearRelationshipFields(table, rows)
|
rows = clearRelationshipFields(table, rows)
|
||||||
// now get the docs and combine into the rows
|
// now get the docs and combine into the rows
|
||||||
let linked = await getFullLinkedDocs(links)
|
let linked = []
|
||||||
const linkedTables: Table[] = []
|
if (linksWithoutFromRow.length > 0) {
|
||||||
|
linked = await getFullLinkedDocs(linksWithoutFromRow)
|
||||||
|
}
|
||||||
for (let row of rows) {
|
for (let row of rows) {
|
||||||
for (let link of links.filter(link => link.thisId === row._id)) {
|
for (let link of links.filter(link => link.thisId === row._id)) {
|
||||||
if (row[link.fieldName] == null) {
|
if (row[link.fieldName] == null) {
|
||||||
row[link.fieldName] = []
|
row[link.fieldName] = []
|
||||||
}
|
}
|
||||||
const linkedRow = linked.find(row => row._id === link.id)
|
let linkedRow: Row
|
||||||
|
if (opts?.fromRow && opts?.fromRow?._id === link.id) {
|
||||||
|
linkedRow = opts.fromRow!
|
||||||
|
} else {
|
||||||
|
linkedRow = linked.find(row => row._id === link.id)
|
||||||
|
}
|
||||||
if (linkedRow) {
|
if (linkedRow) {
|
||||||
const linkedTableId =
|
const linkedTableId =
|
||||||
linkedRow.tableId || getRelatedTableForField(table, link.fieldName)
|
linkedRow.tableId || getRelatedTableForField(table, link.fieldName)
|
||||||
const linkedTable = await getLinkedTable(linkedTableId, linkedTables)
|
const linkedTable = linkedTables.find(
|
||||||
|
table => table._id === linkedTableId
|
||||||
|
)
|
||||||
if (linkedTable) {
|
if (linkedTable) {
|
||||||
row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
|
row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
|
||||||
}
|
}
|
||||||
|
@ -183,9 +208,9 @@ export async function attachFullLinkedDocs(table: Table, rows: Row[]) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function will take the given enriched rows and squash the links to only contain the primary display field.
|
* This function will take the given enriched rows and squash the links to only contain the primary display field.
|
||||||
* @param {object} table The table from which the rows originated.
|
* @param table The table from which the rows originated.
|
||||||
* @param {array<object>} enriched The pre-enriched rows (full docs) which are to be squashed.
|
* @param enriched The pre-enriched rows (full docs) which are to be squashed.
|
||||||
* @returns {Promise<Array>} The rows after having their links squashed to only contain the ID and primary display.
|
* @returns The rows after having their links squashed to only contain the ID and primary display.
|
||||||
*/
|
*/
|
||||||
export async function squashLinksToPrimaryDisplay(
|
export async function squashLinksToPrimaryDisplay(
|
||||||
table: Table,
|
table: Table,
|
||||||
|
@ -199,13 +224,13 @@ export async function squashLinksToPrimaryDisplay(
|
||||||
// this only fetches the table if its not already in array
|
// this only fetches the table if its not already in array
|
||||||
const rowTable = await getLinkedTable(row.tableId!, linkedTables)
|
const rowTable = await getLinkedTable(row.tableId!, linkedTables)
|
||||||
for (let [column, schema] of Object.entries(rowTable?.schema || {})) {
|
for (let [column, schema] of Object.entries(rowTable?.schema || {})) {
|
||||||
if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) {
|
if (schema.type !== FieldType.LINK || !Array.isArray(row[column])) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
const newLinks = []
|
const newLinks = []
|
||||||
for (let link of row[column]) {
|
for (let link of row[column]) {
|
||||||
const linkTblId = link.tableId || getRelatedTableForField(table, column)
|
const linkTblId = link.tableId || getRelatedTableForField(table, column)
|
||||||
const linkedTable = await getLinkedTable(linkTblId, linkedTables)
|
const linkedTable = await getLinkedTable(linkTblId!, linkedTables)
|
||||||
const obj: any = { _id: link._id }
|
const obj: any = { _id: link._id }
|
||||||
if (linkedTable?.primaryDisplay && link[linkedTable.primaryDisplay]) {
|
if (linkedTable?.primaryDisplay && link[linkedTable.primaryDisplay]) {
|
||||||
obj.primaryDisplay = link[linkedTable.primaryDisplay]
|
obj.primaryDisplay = link[linkedTable.primaryDisplay]
|
||||||
|
|
|
@ -17,33 +17,36 @@ export const IncludeDocs = {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the linking documents, not the linked documents themselves.
|
* Gets the linking documents, not the linked documents themselves.
|
||||||
* @param {string} args.tableId The table which we are searching for linked rows against.
|
* @param args.tableId The table which we are searching for linked rows against.
|
||||||
* @param {string|null} args.fieldName The name of column/field which is being altered, only looking for
|
* @param args.fieldName The name of column/field which is being altered, only looking for
|
||||||
* linking documents that are related to it. If this is not specified then the table level will be assumed.
|
* linking documents that are related to it. If this is not specified then the table level will be assumed.
|
||||||
* @param {string|null} args.rowId The ID of the row which we want to find linking documents for -
|
* @param args.rowId The ID of the row which we want to find linking documents for -
|
||||||
* if this is not specified then it will assume table or field level depending on whether the
|
* if this is not specified then it will assume table or field level depending on whether the
|
||||||
* field name has been specified.
|
* field name has been specified.
|
||||||
* @param {boolean|null} args.includeDocs whether to include docs in the response call, this is considerably slower so only
|
* @param args.includeDocs whether to include docs in the response call, this is considerably slower so only
|
||||||
* use this if actually interested in the docs themselves.
|
* use this if actually interested in the docs themselves.
|
||||||
* @returns {Promise<object[]>} This will return an array of the linking documents that were found
|
* @returns This will return an array of the linking documents that were found
|
||||||
* (if any).
|
* (if any).
|
||||||
*/
|
*/
|
||||||
export async function getLinkDocuments(args: {
|
export async function getLinkDocuments(args: {
|
||||||
tableId?: string
|
tableId?: string
|
||||||
rowId?: string
|
rowId?: string
|
||||||
includeDocs?: any
|
fieldName?: string
|
||||||
|
includeDocs?: boolean
|
||||||
}): Promise<LinkDocumentValue[] | LinkDocument[]> {
|
}): Promise<LinkDocumentValue[] | LinkDocument[]> {
|
||||||
const { tableId, rowId, includeDocs } = args
|
const { tableId, rowId, fieldName, includeDocs } = args
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
let params: any
|
let params: any
|
||||||
if (rowId != null) {
|
if (rowId) {
|
||||||
params = { key: [tableId, rowId] }
|
params = { key: [tableId, rowId] }
|
||||||
}
|
}
|
||||||
// only table is known
|
// only table is known
|
||||||
else {
|
else {
|
||||||
params = { startKey: [tableId], endKey: [tableId, {}] }
|
params = { startKey: [tableId], endKey: [tableId, {}] }
|
||||||
}
|
}
|
||||||
params.include_docs = !!includeDocs
|
if (includeDocs) {
|
||||||
|
params.include_docs = true
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
|
let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
|
||||||
// filter to get unique entries
|
// filter to get unique entries
|
||||||
|
@ -63,6 +66,14 @@ export async function getLinkDocuments(args: {
|
||||||
return unique
|
return unique
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// filter down to just the required field name
|
||||||
|
if (fieldName) {
|
||||||
|
linkRows = linkRows.filter(link => {
|
||||||
|
const value = link.value as LinkDocumentValue
|
||||||
|
return value.fieldName === fieldName
|
||||||
|
})
|
||||||
|
}
|
||||||
|
// return docs if docs requested, otherwise just the value information
|
||||||
if (includeDocs) {
|
if (includeDocs) {
|
||||||
return linkRows.map(row => row.doc) as LinkDocument[]
|
return linkRows.map(row => row.doc) as LinkDocument[]
|
||||||
} else {
|
} else {
|
||||||
|
@ -87,7 +98,7 @@ export function getUniqueByProp(array: any[], prop: string) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getLinkedTableIDs(table: Table) {
|
export function getLinkedTableIDs(table: Table): string[] {
|
||||||
return Object.values(table.schema)
|
return Object.values(table.schema)
|
||||||
.filter(isRelationshipColumn)
|
.filter(isRelationshipColumn)
|
||||||
.map(column => column.tableId)
|
.map(column => column.tableId)
|
||||||
|
|
|
@ -68,7 +68,7 @@ export function getTableParams(tableId?: Optional, otherProps = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new table ID.
|
* Generates a new table ID.
|
||||||
* @returns {string} The new table ID which the table doc can be stored under.
|
* @returns The new table ID which the table doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateTableID() {
|
export function generateTableID() {
|
||||||
return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
|
return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
|
||||||
|
@ -76,8 +76,8 @@ export function generateTableID() {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Given a row ID this will find the table ID within it (only works for internal tables).
|
* Given a row ID this will find the table ID within it (only works for internal tables).
|
||||||
* @param {string} rowId The ID of the row.
|
* @param rowId The ID of the row.
|
||||||
* @returns {string} The table ID.
|
* @returns The table ID.
|
||||||
*/
|
*/
|
||||||
export function getTableIDFromRowID(rowId: string) {
|
export function getTableIDFromRowID(rowId: string) {
|
||||||
const components = rowId
|
const components = rowId
|
||||||
|
@ -98,7 +98,7 @@ export function getAutomationParams(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new automation ID.
|
* Generates a new automation ID.
|
||||||
* @returns {string} The new automation ID which the automation doc can be stored under.
|
* @returns The new automation ID which the automation doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateAutomationID() {
|
export function generateAutomationID() {
|
||||||
return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}`
|
return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}`
|
||||||
|
@ -107,13 +107,13 @@ export function generateAutomationID() {
|
||||||
/**
|
/**
|
||||||
* Generates a new link doc ID. This is currently not usable with the alldocs call,
|
* Generates a new link doc ID. This is currently not usable with the alldocs call,
|
||||||
* instead a view is built to make walking to tree easier.
|
* instead a view is built to make walking to tree easier.
|
||||||
* @param {string} tableId1 The ID of the linker table.
|
* @param tableId1 The ID of the linker table.
|
||||||
* @param {string} tableId2 The ID of the linked table.
|
* @param tableId2 The ID of the linked table.
|
||||||
* @param {string} rowId1 The ID of the linker row.
|
* @param rowId1 The ID of the linker row.
|
||||||
* @param {string} rowId2 The ID of the linked row.
|
* @param rowId2 The ID of the linked row.
|
||||||
* @param {string} fieldName1 The name of the field in the linker row.
|
* @param fieldName1 The name of the field in the linker row.
|
||||||
* @param {string} fieldName2 the name of the field in the linked row.
|
* @param fieldName2 the name of the field in the linked row.
|
||||||
* @returns {string} The new link doc ID which the automation doc can be stored under.
|
* @returns The new link doc ID which the automation doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateLinkID(
|
export function generateLinkID(
|
||||||
tableId1: string,
|
tableId1: string,
|
||||||
|
@ -138,7 +138,7 @@ export function getLinkParams(otherProps: any = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new layout ID.
|
* Generates a new layout ID.
|
||||||
* @returns {string} The new layout ID which the layout doc can be stored under.
|
* @returns The new layout ID which the layout doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateLayoutID(id?: string) {
|
export function generateLayoutID(id?: string) {
|
||||||
return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}`
|
return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}`
|
||||||
|
@ -153,7 +153,7 @@ export function getLayoutParams(layoutId?: Optional, otherProps: any = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new screen ID.
|
* Generates a new screen ID.
|
||||||
* @returns {string} The new screen ID which the screen doc can be stored under.
|
* @returns The new screen ID which the screen doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateScreenID() {
|
export function generateScreenID() {
|
||||||
return `${DocumentType.SCREEN}${SEPARATOR}${newid()}`
|
return `${DocumentType.SCREEN}${SEPARATOR}${newid()}`
|
||||||
|
@ -168,7 +168,7 @@ export function getScreenParams(screenId?: Optional, otherProps: any = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new webhook ID.
|
* Generates a new webhook ID.
|
||||||
* @returns {string} The new webhook ID which the webhook doc can be stored under.
|
* @returns The new webhook ID which the webhook doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateWebhookID() {
|
export function generateWebhookID() {
|
||||||
return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}`
|
return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}`
|
||||||
|
@ -183,7 +183,7 @@ export function getWebhookParams(webhookId?: Optional, otherProps: any = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new datasource ID.
|
* Generates a new datasource ID.
|
||||||
* @returns {string} The new datasource ID which the webhook doc can be stored under.
|
* @returns The new datasource ID which the webhook doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateDatasourceID({ plus = false } = {}) {
|
export function generateDatasourceID({ plus = false } = {}) {
|
||||||
return `${
|
return `${
|
||||||
|
@ -210,7 +210,7 @@ export function getDatasourcePlusParams(
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new query ID.
|
* Generates a new query ID.
|
||||||
* @returns {string} The new query ID which the query doc can be stored under.
|
* @returns The new query ID which the query doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateQueryID(datasourceId: string) {
|
export function generateQueryID(datasourceId: string) {
|
||||||
return `${
|
return `${
|
||||||
|
@ -250,7 +250,7 @@ export function getQueryParams(datasourceId?: Optional, otherProps: any = {}) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new flag document ID.
|
* Generates a new flag document ID.
|
||||||
* @returns {string} The ID of the flag document that was generated.
|
* @returns The ID of the flag document that was generated.
|
||||||
*/
|
*/
|
||||||
export function generateUserFlagID(userId: string) {
|
export function generateUserFlagID(userId: string) {
|
||||||
return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}`
|
return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}`
|
||||||
|
@ -302,7 +302,7 @@ export function getMultiIDParams(ids: string[]) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new view ID.
|
* Generates a new view ID.
|
||||||
* @returns {string} The new view ID which the view doc can be stored under.
|
* @returns The new view ID which the view doc can be stored under.
|
||||||
*/
|
*/
|
||||||
export function generateViewID(tableId: string) {
|
export function generateViewID(tableId: string) {
|
||||||
return `${
|
return `${
|
||||||
|
|
|
@ -17,7 +17,7 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
|
||||||
/**
|
/**
|
||||||
* Creates the link view for the instance, this will overwrite the existing one, but this should only
|
* Creates the link view for the instance, this will overwrite the existing one, but this should only
|
||||||
* be called if it is found that the view does not exist.
|
* be called if it is found that the view does not exist.
|
||||||
* @returns {Promise<void>} The view now exists, please note that the next view of this query will actually build it,
|
* @returns The view now exists, please note that the next view of this query will actually build it,
|
||||||
* so it may be slow.
|
* so it may be slow.
|
||||||
*/
|
*/
|
||||||
export async function createLinkView() {
|
export async function createLinkView() {
|
||||||
|
|
|
@ -539,7 +539,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
||||||
* @param json The JSON query DSL which is to be converted to SQL.
|
* @param json The JSON query DSL which is to be converted to SQL.
|
||||||
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
|
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
|
||||||
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
|
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
|
||||||
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
|
* @return the query ready to be passed to the driver.
|
||||||
*/
|
*/
|
||||||
_query(json: QueryJson, opts: QueryOptions = {}) {
|
_query(json: QueryJson, opts: QueryOptions = {}) {
|
||||||
const sqlClient = this.getSqlClient()
|
const sqlClient = this.getSqlClient()
|
||||||
|
|
|
@ -189,7 +189,7 @@ class SqlTableQueryBuilder {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param json the input JSON structure from which an SQL query will be built.
|
* @param json the input JSON structure from which an SQL query will be built.
|
||||||
* @return {string} the operation that was found in the JSON.
|
* @return the operation that was found in the JSON.
|
||||||
*/
|
*/
|
||||||
_operation(json: QueryJson): Operation {
|
_operation(json: QueryJson): Operation {
|
||||||
return json.endpoint.operation
|
return json.endpoint.operation
|
||||||
|
|
|
@ -375,7 +375,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fetches the tables from the sql server database and assigns them to the datasource.
|
* Fetches the tables from the sql server database and assigns them to the datasource.
|
||||||
* @param {*} datasourceId - datasourceId to fetch
|
* @param datasourceId - datasourceId to fetch
|
||||||
* @param entities - the tables that are to be built
|
* @param entities - the tables that are to be built
|
||||||
*/
|
*/
|
||||||
async buildSchema(
|
async buildSchema(
|
||||||
|
|
|
@ -258,7 +258,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fetches the tables from the oracle table and assigns them to the datasource.
|
* Fetches the tables from the oracle table and assigns them to the datasource.
|
||||||
* @param {*} datasourceId - datasourceId to fetch
|
* @param datasourceId - datasourceId to fetch
|
||||||
* @param entities - the tables that are to be built
|
* @param entities - the tables that are to be built
|
||||||
*/
|
*/
|
||||||
async buildSchema(
|
async buildSchema(
|
||||||
|
|
|
@ -268,7 +268,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fetches the tables from the postgres table and assigns them to the datasource.
|
* Fetches the tables from the postgres table and assigns them to the datasource.
|
||||||
* @param {*} datasourceId - datasourceId to fetch
|
* @param datasourceId - datasourceId to fetch
|
||||||
* @param entities - the tables that are to be built
|
* @param entities - the tables that are to be built
|
||||||
*/
|
*/
|
||||||
async buildSchema(
|
async buildSchema(
|
||||||
|
|
|
@ -8,9 +8,8 @@ import * as automations from "./automations"
|
||||||
import { Thread } from "./threads"
|
import { Thread } from "./threads"
|
||||||
import * as redis from "./utilities/redis"
|
import * as redis from "./utilities/redis"
|
||||||
import { events, logging, middleware, timers } from "@budibase/backend-core"
|
import { events, logging, middleware, timers } from "@budibase/backend-core"
|
||||||
const Sentry = require("@sentry/node")
|
import destroyable from "server-destroy"
|
||||||
const destroyable = require("server-destroy")
|
import { userAgent } from "koa-useragent"
|
||||||
const { userAgent } = require("koa-useragent")
|
|
||||||
|
|
||||||
export default function createKoaApp() {
|
export default function createKoaApp() {
|
||||||
const app = new Koa()
|
const app = new Koa()
|
||||||
|
@ -36,17 +35,6 @@ export default function createKoaApp() {
|
||||||
app.use(middleware.pino)
|
app.use(middleware.pino)
|
||||||
app.use(userAgent)
|
app.use(userAgent)
|
||||||
|
|
||||||
if (env.isProd()) {
|
|
||||||
app.on("error", (err: any, ctx: ExtendableContext) => {
|
|
||||||
Sentry.withScope(function (scope: any) {
|
|
||||||
scope.addEventProcessor(function (event: any) {
|
|
||||||
return Sentry.Handlers.parseRequest(event, ctx.request)
|
|
||||||
})
|
|
||||||
Sentry.captureException(err)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const server = http.createServer(app.callback())
|
const server = http.createServer(app.callback())
|
||||||
destroyable(server)
|
destroyable(server)
|
||||||
|
|
||||||
|
|
|
@ -16,6 +16,7 @@ jest.mock("../../sdk", () => ({
|
||||||
import sdk from "../../sdk"
|
import sdk from "../../sdk"
|
||||||
import { Next } from "koa"
|
import { Next } from "koa"
|
||||||
|
|
||||||
|
const tableId = utils.generateTableID()
|
||||||
const mockGetView = sdk.views.get as jest.MockedFunction<typeof sdk.views.get>
|
const mockGetView = sdk.views.get as jest.MockedFunction<typeof sdk.views.get>
|
||||||
const mockGetTable = sdk.tables.getTable as jest.MockedFunction<
|
const mockGetTable = sdk.tables.getTable as jest.MockedFunction<
|
||||||
typeof sdk.tables.getTable
|
typeof sdk.tables.getTable
|
||||||
|
@ -41,6 +42,7 @@ class TestConfiguration {
|
||||||
body: ctxRequestBody,
|
body: ctxRequestBody,
|
||||||
}
|
}
|
||||||
this.params.viewId = viewId
|
this.params.viewId = viewId
|
||||||
|
this.params.sourceId = tableId
|
||||||
return this.middleware(
|
return this.middleware(
|
||||||
{
|
{
|
||||||
request: this.request as any,
|
request: this.request as any,
|
||||||
|
@ -69,7 +71,7 @@ describe("trimViewRowInfo middleware", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
const table: Table = {
|
const table: Table = {
|
||||||
_id: utils.generateTableID(),
|
_id: tableId,
|
||||||
name: generator.word(),
|
name: generator.word(),
|
||||||
type: "table",
|
type: "table",
|
||||||
schema: {
|
schema: {
|
||||||
|
|
|
@ -3,6 +3,7 @@ import * as syncApps from "./usageQuotas/syncApps"
|
||||||
import * as syncRows from "./usageQuotas/syncRows"
|
import * as syncRows from "./usageQuotas/syncRows"
|
||||||
import * as syncPlugins from "./usageQuotas/syncPlugins"
|
import * as syncPlugins from "./usageQuotas/syncPlugins"
|
||||||
import * as syncUsers from "./usageQuotas/syncUsers"
|
import * as syncUsers from "./usageQuotas/syncUsers"
|
||||||
|
import * as syncCreators from "./usageQuotas/syncCreators"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Synchronise quotas to the state of the db.
|
* Synchronise quotas to the state of the db.
|
||||||
|
@ -13,5 +14,6 @@ export const run = async () => {
|
||||||
await syncRows.run()
|
await syncRows.run()
|
||||||
await syncPlugins.run()
|
await syncPlugins.run()
|
||||||
await syncUsers.run()
|
await syncUsers.run()
|
||||||
|
await syncCreators.run()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
import { users } from "@budibase/backend-core"
|
||||||
|
import { quotas } from "@budibase/pro"
|
||||||
|
import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
|
||||||
|
|
||||||
|
export const run = async () => {
|
||||||
|
const creatorCount = await users.getCreatorCount()
|
||||||
|
console.log(`Syncing creator count: ${creatorCount}`)
|
||||||
|
await quotas.setUsage(
|
||||||
|
creatorCount,
|
||||||
|
StaticQuotaName.CREATORS,
|
||||||
|
QuotaUsageType.STATIC
|
||||||
|
)
|
||||||
|
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
import TestConfig from "../../../../tests/utilities/TestConfiguration"
|
||||||
|
import * as syncCreators from "../syncCreators"
|
||||||
|
import { quotas } from "@budibase/pro"
|
||||||
|
|
||||||
|
describe("syncCreators", () => {
|
||||||
|
let config = new TestConfig(false)
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
await config.init()
|
||||||
|
})
|
||||||
|
|
||||||
|
afterAll(config.end)
|
||||||
|
|
||||||
|
it("syncs creators", async () => {
|
||||||
|
return config.doInContext(null, async () => {
|
||||||
|
await config.createUser({ admin: true })
|
||||||
|
|
||||||
|
await syncCreators.run()
|
||||||
|
|
||||||
|
const usageDoc = await quotas.getQuotaUsage()
|
||||||
|
// default + additional creator
|
||||||
|
const creatorsCount = 2
|
||||||
|
expect(usageDoc.usageQuota.creators).toBe(creatorsCount)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
|
@ -48,10 +48,10 @@ function tarFilesToTmp(tmpDir: string, files: string[]) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Exports a DB to either file or a variable (memory).
|
* Exports a DB to either file or a variable (memory).
|
||||||
* @param {string} dbName the DB which is to be exported.
|
* @param dbName the DB which is to be exported.
|
||||||
* @param {object} opts various options for the export, e.g. whether to stream,
|
* @param opts various options for the export, e.g. whether to stream,
|
||||||
* a filter function or the name of the export.
|
* a filter function or the name of the export.
|
||||||
* @return {*} either a readable stream or a string
|
* @return either a readable stream or a string
|
||||||
*/
|
*/
|
||||||
export async function exportDB(
|
export async function exportDB(
|
||||||
dbName: string,
|
dbName: string,
|
||||||
|
@ -98,9 +98,9 @@ function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
|
||||||
/**
|
/**
|
||||||
* Local utility to back up the database state for an app, excluding global user
|
* Local utility to back up the database state for an app, excluding global user
|
||||||
* data or user relationships.
|
* data or user relationships.
|
||||||
* @param {string} appId The app to back up
|
* @param appId The app to back up
|
||||||
* @param {object} config Config to send to export DB/attachment export
|
* @param config Config to send to export DB/attachment export
|
||||||
* @returns {*} either a string or a stream of the backup
|
* @returns either a string or a stream of the backup
|
||||||
*/
|
*/
|
||||||
export async function exportApp(appId: string, config?: ExportOpts) {
|
export async function exportApp(appId: string, config?: ExportOpts) {
|
||||||
const prodAppId = dbCore.getProdAppID(appId)
|
const prodAppId = dbCore.getProdAppID(appId)
|
||||||
|
@ -175,10 +175,10 @@ export async function exportApp(appId: string, config?: ExportOpts) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Streams a backup of the database state for an app
|
* Streams a backup of the database state for an app
|
||||||
* @param {string} appId The ID of the app which is to be backed up.
|
* @param appId The ID of the app which is to be backed up.
|
||||||
* @param {boolean} excludeRows Flag to state whether the export should include data.
|
* @param excludeRows Flag to state whether the export should include data.
|
||||||
* @param {string} encryptPassword password for encrypting the export.
|
* @param encryptPassword password for encrypting the export.
|
||||||
* @returns {*} a readable stream of the backup which is written in real time
|
* @returns a readable stream of the backup which is written in real time
|
||||||
*/
|
*/
|
||||||
export async function streamExportApp({
|
export async function streamExportApp({
|
||||||
appId,
|
appId,
|
||||||
|
|
|
@ -96,8 +96,8 @@ async function updateAutomations(prodAppId: string, db: Database) {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function manages temporary template files which are stored by Koa.
|
* This function manages temporary template files which are stored by Koa.
|
||||||
* @param {Object} template The template object retrieved from the Koa context object.
|
* @param template The template object retrieved from the Koa context object.
|
||||||
* @returns {Object} Returns a fs read stream which can be loaded into the database.
|
* @returns Returns a fs read stream which can be loaded into the database.
|
||||||
*/
|
*/
|
||||||
async function getTemplateStream(template: TemplateType) {
|
async function getTemplateStream(template: TemplateType) {
|
||||||
if (template.file && template.file.type !== "text/plain") {
|
if (template.file && template.file.type !== "text/plain") {
|
||||||
|
|
|
@ -23,10 +23,13 @@ import {
|
||||||
getTableParams,
|
getTableParams,
|
||||||
} from "../../../db/utils"
|
} from "../../../db/utils"
|
||||||
import sdk from "../../index"
|
import sdk from "../../index"
|
||||||
|
import datasource from "../../../api/routes/datasource"
|
||||||
|
|
||||||
const ENV_VAR_PREFIX = "env."
|
const ENV_VAR_PREFIX = "env."
|
||||||
|
|
||||||
export async function fetch() {
|
export async function fetch(opts?: {
|
||||||
|
enriched: boolean
|
||||||
|
}): Promise<Datasource[]> {
|
||||||
// Get internal tables
|
// Get internal tables
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
const internalTables = await db.allDocs(
|
const internalTables = await db.allDocs(
|
||||||
|
@ -44,7 +47,7 @@ export async function fetch() {
|
||||||
|
|
||||||
const bbInternalDb = {
|
const bbInternalDb = {
|
||||||
...BudibaseInternalDB,
|
...BudibaseInternalDB,
|
||||||
}
|
} as Datasource
|
||||||
|
|
||||||
// Get external datasources
|
// Get external datasources
|
||||||
const datasources = (
|
const datasources = (
|
||||||
|
@ -66,7 +69,18 @@ export async function fetch() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return [bbInternalDb, ...datasources]
|
if (opts?.enriched) {
|
||||||
|
const envVars = await getEnvironmentVariables()
|
||||||
|
const promises = datasources.map(datasource =>
|
||||||
|
enrichDatasourceWithValues(datasource, envVars)
|
||||||
|
)
|
||||||
|
const enriched = (await Promise.all(promises)).map(
|
||||||
|
result => result.datasource
|
||||||
|
)
|
||||||
|
return [bbInternalDb, ...enriched]
|
||||||
|
} else {
|
||||||
|
return [bbInternalDb, ...datasources]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function areRESTVariablesValid(datasource: Datasource) {
|
export function areRESTVariablesValid(datasource: Datasource) {
|
||||||
|
@ -107,9 +121,12 @@ export function checkDatasourceTypes(schema: Integration, config: any) {
|
||||||
return config
|
return config
|
||||||
}
|
}
|
||||||
|
|
||||||
async function enrichDatasourceWithValues(datasource: Datasource) {
|
async function enrichDatasourceWithValues(
|
||||||
|
datasource: Datasource,
|
||||||
|
variables?: Record<string, string>
|
||||||
|
) {
|
||||||
const cloned = cloneDeep(datasource)
|
const cloned = cloneDeep(datasource)
|
||||||
const env = await getEnvironmentVariables()
|
const env = variables ? variables : await getEnvironmentVariables()
|
||||||
//Do not process entities, as we do not want to process formulas
|
//Do not process entities, as we do not want to process formulas
|
||||||
const { entities, ...clonedWithoutEntities } = cloned
|
const { entities, ...clonedWithoutEntities } = cloned
|
||||||
const processed = processObjectSync(
|
const processed = processObjectSync(
|
||||||
|
@ -235,9 +252,9 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
|
||||||
if (value !== PASSWORD_REPLACEMENT) {
|
if (value !== PASSWORD_REPLACEMENT) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if (old.config?.[key]) {
|
if (update.config && old.config && old.config?.[key]) {
|
||||||
update.config[key] = old.config?.[key]
|
update.config[key] = old.config?.[key]
|
||||||
} else {
|
} else if (update.config) {
|
||||||
delete update.config[key]
|
delete update.config[key]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,11 +7,11 @@ export async function getRow(
|
||||||
rowId: string,
|
rowId: string,
|
||||||
opts?: { relationships?: boolean }
|
opts?: { relationships?: boolean }
|
||||||
) {
|
) {
|
||||||
const response = (await handleRequest(Operation.READ, tableId, {
|
const response = await handleRequest(Operation.READ, tableId, {
|
||||||
id: breakRowIdField(rowId),
|
id: breakRowIdField(rowId),
|
||||||
includeSqlRelationships: opts?.relationships
|
includeSqlRelationships: opts?.relationships
|
||||||
? IncludeRelationship.INCLUDE
|
? IncludeRelationship.INCLUDE
|
||||||
: IncludeRelationship.EXCLUDE,
|
: IncludeRelationship.EXCLUDE,
|
||||||
})) as Row[]
|
})
|
||||||
return response ? response[0] : response
|
return response ? response[0] : response
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
import { SearchFilters, SearchParams } from "@budibase/types"
|
import { SearchFilters, SearchParams, Row } from "@budibase/types"
|
||||||
import { isExternalTable } from "../../../integrations/utils"
|
import { isExternalTable } from "../../../integrations/utils"
|
||||||
import * as internal from "./search/internal"
|
import * as internal from "./search/internal"
|
||||||
import * as external from "./search/external"
|
import * as external from "./search/external"
|
||||||
import { Format } from "../../../api/controllers/view/exporters"
|
import { Format } from "../../../api/controllers/view/exporters"
|
||||||
export { isValidFilter, removeEmptyFilters } from "../../../integrations/utils"
|
export { isValidFilter } from "../../../integrations/utils"
|
||||||
|
import { NoEmptyFilterStrings } from "../../../constants"
|
||||||
|
|
||||||
export interface ViewParams {
|
export interface ViewParams {
|
||||||
calculation: string
|
calculation: string
|
||||||
|
@ -18,6 +19,35 @@ function pickApi(tableId: any) {
|
||||||
return internal
|
return internal
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// don't do a pure falsy check, as 0 is included
|
||||||
|
// https://github.com/Budibase/budibase/issues/10118
|
||||||
|
export function removeEmptyFilters(filters: SearchFilters) {
|
||||||
|
for (let filterField of NoEmptyFilterStrings) {
|
||||||
|
if (!filters[filterField]) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let filterType of Object.keys(filters)) {
|
||||||
|
if (filterType !== filterField) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// don't know which one we're checking, type could be anything
|
||||||
|
const value = filters[filterType] as unknown
|
||||||
|
if (typeof value === "object") {
|
||||||
|
for (let [key, value] of Object.entries(
|
||||||
|
filters[filterType] as object
|
||||||
|
)) {
|
||||||
|
if (value == null || value === "") {
|
||||||
|
// @ts-ignore
|
||||||
|
delete filters[filterField][key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return filters
|
||||||
|
}
|
||||||
|
|
||||||
export async function search(options: SearchParams): Promise<{
|
export async function search(options: SearchParams): Promise<{
|
||||||
rows: any[]
|
rows: any[]
|
||||||
hasNextPage?: boolean
|
hasNextPage?: boolean
|
||||||
|
@ -45,7 +75,7 @@ export async function exportRows(
|
||||||
return pickApi(options.tableId).exportRows(options)
|
return pickApi(options.tableId).exportRows(options)
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetch(tableId: string) {
|
export async function fetch(tableId: string): Promise<Row[]> {
|
||||||
return pickApi(tableId).fetch(tableId)
|
return pickApi(tableId).fetch(tableId)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -53,6 +83,6 @@ export async function fetchView(
|
||||||
tableId: string,
|
tableId: string,
|
||||||
viewName: string,
|
viewName: string,
|
||||||
params: ViewParams
|
params: ViewParams
|
||||||
) {
|
): Promise<Row[]> {
|
||||||
return pickApi(tableId).fetchView(viewName, params)
|
return pickApi(tableId).fetchView(viewName, params)
|
||||||
}
|
}
|
||||||
|
|
|
@ -55,15 +55,15 @@ export async function search(options: SearchParams) {
|
||||||
try {
|
try {
|
||||||
const table = await sdk.tables.getTable(tableId)
|
const table = await sdk.tables.getTable(tableId)
|
||||||
options = searchInputMapping(table, options)
|
options = searchInputMapping(table, options)
|
||||||
let rows = (await handleRequest(Operation.READ, tableId, {
|
let rows = await handleRequest(Operation.READ, tableId, {
|
||||||
filters: query,
|
filters: query,
|
||||||
sort,
|
sort,
|
||||||
paginate: paginateObj as PaginationJson,
|
paginate: paginateObj as PaginationJson,
|
||||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||||
})) as Row[]
|
})
|
||||||
let hasNextPage = false
|
let hasNextPage = false
|
||||||
if (paginate && rows.length === limit) {
|
if (paginate && rows.length === limit) {
|
||||||
const nextRows = (await handleRequest(Operation.READ, tableId, {
|
const nextRows = await handleRequest(Operation.READ, tableId, {
|
||||||
filters: query,
|
filters: query,
|
||||||
sort,
|
sort,
|
||||||
paginate: {
|
paginate: {
|
||||||
|
@ -71,7 +71,7 @@ export async function search(options: SearchParams) {
|
||||||
page: bookmark! * limit + 1,
|
page: bookmark! * limit + 1,
|
||||||
},
|
},
|
||||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||||
})) as Row[]
|
})
|
||||||
hasNextPage = nextRows.length > 0
|
hasNextPage = nextRows.length > 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -172,12 +172,18 @@ export async function exportRows(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetch(tableId: string) {
|
export async function fetch(tableId: string): Promise<Row[]> {
|
||||||
const response = await handleRequest(Operation.READ, tableId, {
|
const response = await handleRequest<Operation.READ>(
|
||||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
Operation.READ,
|
||||||
})
|
tableId,
|
||||||
|
{
|
||||||
|
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||||
|
}
|
||||||
|
)
|
||||||
const table = await sdk.tables.getTable(tableId)
|
const table = await sdk.tables.getTable(tableId)
|
||||||
return await outputProcessing(table, response, { preserveLinks: true })
|
return await outputProcessing<Row[]>(table, response, {
|
||||||
|
preserveLinks: true,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetchView(viewName: string) {
|
export async function fetchView(viewName: string) {
|
||||||
|
|
|
@ -5,9 +5,7 @@ import {
|
||||||
} from "@budibase/backend-core"
|
} from "@budibase/backend-core"
|
||||||
import env from "../../../../environment"
|
import env from "../../../../environment"
|
||||||
import { fullSearch, paginatedSearch } from "./internalSearch"
|
import { fullSearch, paginatedSearch } from "./internalSearch"
|
||||||
import { InternalTables, getRowParams } from "../../../../db/utils"
|
import { getRowParams, InternalTables } from "../../../../db/utils"
|
||||||
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
|
||||||
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
|
||||||
import {
|
import {
|
||||||
Database,
|
Database,
|
||||||
Row,
|
Row,
|
||||||
|
@ -15,18 +13,20 @@ import {
|
||||||
SearchParams,
|
SearchParams,
|
||||||
DocumentType,
|
DocumentType,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
|
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
||||||
|
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
||||||
import {
|
import {
|
||||||
Format,
|
|
||||||
csv,
|
csv,
|
||||||
|
Format,
|
||||||
json,
|
json,
|
||||||
jsonWithSchema,
|
jsonWithSchema,
|
||||||
} from "../../../../api/controllers/view/exporters"
|
} from "../../../../api/controllers/view/exporters"
|
||||||
import * as inMemoryViews from "../../../../db/inMemoryView"
|
import * as inMemoryViews from "../../../../db/inMemoryView"
|
||||||
import {
|
import {
|
||||||
migrateToInMemoryView,
|
|
||||||
migrateToDesignView,
|
|
||||||
getFromDesignDoc,
|
getFromDesignDoc,
|
||||||
getFromMemoryDoc,
|
getFromMemoryDoc,
|
||||||
|
migrateToDesignView,
|
||||||
|
migrateToInMemoryView,
|
||||||
} from "../../../../api/controllers/view/utils"
|
} from "../../../../api/controllers/view/utils"
|
||||||
import sdk from "../../../../sdk"
|
import sdk from "../../../../sdk"
|
||||||
import { ExportRowsParams, ExportRowsResult } from "../search"
|
import { ExportRowsParams, ExportRowsResult } from "../search"
|
||||||
|
@ -140,13 +140,12 @@ export async function exportRows(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetch(tableId: string) {
|
export async function fetch(tableId: string): Promise<Row[]> {
|
||||||
const db = context.getAppDB()
|
const db = context.getAppDB()
|
||||||
|
|
||||||
const table = await sdk.tables.getTable(tableId)
|
const table = await sdk.tables.getTable(tableId)
|
||||||
const rows = await getRawTableData(db, tableId)
|
const rows = await getRawTableData(db, tableId)
|
||||||
const result = await outputProcessing(table, rows)
|
return await outputProcessing(table, rows)
|
||||||
return result
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getRawTableData(db: Database, tableId: string) {
|
async function getRawTableData(db: Database, tableId: string) {
|
||||||
|
|
|
@ -69,12 +69,15 @@ export async function validate({
|
||||||
valid: boolean
|
valid: boolean
|
||||||
errors: Record<string, any>
|
errors: Record<string, any>
|
||||||
}> {
|
}> {
|
||||||
let fetchedTable: Table
|
let fetchedTable: Table | undefined
|
||||||
if (!table) {
|
if (!table && tableId) {
|
||||||
fetchedTable = await sdk.tables.getTable(tableId)
|
fetchedTable = await sdk.tables.getTable(tableId)
|
||||||
} else {
|
} else if (table) {
|
||||||
fetchedTable = table
|
fetchedTable = table
|
||||||
}
|
}
|
||||||
|
if (fetchedTable === undefined) {
|
||||||
|
throw new Error("Unable to fetch table for validation")
|
||||||
|
}
|
||||||
const errors: Record<string, any> = {}
|
const errors: Record<string, any> = {}
|
||||||
for (let fieldName of Object.keys(fetchedTable.schema)) {
|
for (let fieldName of Object.keys(fetchedTable.schema)) {
|
||||||
const column = fetchedTable.schema[fieldName]
|
const column = fetchedTable.schema[fieldName]
|
||||||
|
|
|
@ -0,0 +1,196 @@
|
||||||
|
import {
|
||||||
|
Operation,
|
||||||
|
RelationshipType,
|
||||||
|
RenameColumn,
|
||||||
|
Table,
|
||||||
|
TableRequest,
|
||||||
|
ViewV2,
|
||||||
|
} from "@budibase/types"
|
||||||
|
import { context } from "@budibase/backend-core"
|
||||||
|
import { buildExternalTableId } from "../../../../integrations/utils"
|
||||||
|
import {
|
||||||
|
foreignKeyStructure,
|
||||||
|
hasTypeChanged,
|
||||||
|
setStaticSchemas,
|
||||||
|
} from "../../../../api/controllers/table/utils"
|
||||||
|
import { cloneDeep } from "lodash/fp"
|
||||||
|
import { FieldTypes } from "../../../../constants"
|
||||||
|
import { makeTableRequest } from "../../../../api/controllers/table/ExternalRequest"
|
||||||
|
import {
|
||||||
|
isRelationshipSetup,
|
||||||
|
cleanupRelationships,
|
||||||
|
generateLinkSchema,
|
||||||
|
generateManyLinkSchema,
|
||||||
|
generateRelatedSchema,
|
||||||
|
} from "./utils"
|
||||||
|
|
||||||
|
import { getTable } from "../getters"
|
||||||
|
import { populateExternalTableSchemas } from "../validation"
|
||||||
|
import datasourceSdk from "../../datasources"
|
||||||
|
import * as viewSdk from "../../views"
|
||||||
|
|
||||||
|
export async function save(
|
||||||
|
datasourceId: string,
|
||||||
|
update: Table,
|
||||||
|
opts?: { tableId?: string; renaming?: RenameColumn }
|
||||||
|
) {
|
||||||
|
let tableToSave: TableRequest = {
|
||||||
|
type: "table",
|
||||||
|
_id: buildExternalTableId(datasourceId, update.name),
|
||||||
|
sourceId: datasourceId,
|
||||||
|
...update,
|
||||||
|
}
|
||||||
|
|
||||||
|
const tableId = opts?.tableId || update._id
|
||||||
|
let oldTable: Table | undefined
|
||||||
|
if (tableId) {
|
||||||
|
oldTable = await getTable(tableId)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasTypeChanged(tableToSave, oldTable)) {
|
||||||
|
throw new Error("A column type has changed.")
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let view in tableToSave.views) {
|
||||||
|
const tableView = tableToSave.views[view]
|
||||||
|
if (!tableView || !viewSdk.isV2(tableView)) continue
|
||||||
|
|
||||||
|
tableToSave.views[view] = viewSdk.syncSchema(
|
||||||
|
oldTable!.views![view] as ViewV2,
|
||||||
|
tableToSave.schema,
|
||||||
|
opts?.renaming
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const db = context.getAppDB()
|
||||||
|
const datasource = await datasourceSdk.get(datasourceId)
|
||||||
|
if (!datasource.entities) {
|
||||||
|
datasource.entities = {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GSheets is a specific case - only ever has a static primary key
|
||||||
|
tableToSave = setStaticSchemas(datasource, tableToSave)
|
||||||
|
|
||||||
|
const oldTables = cloneDeep(datasource.entities)
|
||||||
|
const tables: Record<string, Table> = datasource.entities
|
||||||
|
|
||||||
|
const extraTablesToUpdate = []
|
||||||
|
|
||||||
|
// check if relations need setup
|
||||||
|
for (let schema of Object.values(tableToSave.schema)) {
|
||||||
|
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const schemaTableId = schema.tableId
|
||||||
|
const relatedTable = Object.values(tables).find(
|
||||||
|
table => table._id === schemaTableId
|
||||||
|
)
|
||||||
|
if (!relatedTable) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const relatedColumnName = schema.fieldName!
|
||||||
|
const relationType = schema.relationshipType
|
||||||
|
if (relationType === RelationshipType.MANY_TO_MANY) {
|
||||||
|
const junctionTable = generateManyLinkSchema(
|
||||||
|
datasource,
|
||||||
|
schema,
|
||||||
|
tableToSave,
|
||||||
|
relatedTable
|
||||||
|
)
|
||||||
|
if (tables[junctionTable.name]) {
|
||||||
|
throw new Error(
|
||||||
|
"Junction table already exists, cannot create another relationship."
|
||||||
|
)
|
||||||
|
}
|
||||||
|
tables[junctionTable.name] = junctionTable
|
||||||
|
extraTablesToUpdate.push(junctionTable)
|
||||||
|
} else {
|
||||||
|
const fkTable =
|
||||||
|
relationType === RelationshipType.ONE_TO_MANY
|
||||||
|
? tableToSave
|
||||||
|
: relatedTable
|
||||||
|
const foreignKey = generateLinkSchema(
|
||||||
|
schema,
|
||||||
|
tableToSave,
|
||||||
|
relatedTable,
|
||||||
|
relationType
|
||||||
|
)
|
||||||
|
if (fkTable.schema[foreignKey] != null) {
|
||||||
|
throw new Error(
|
||||||
|
`Unable to generate foreign key - column ${foreignKey} already in use.`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
|
||||||
|
if (fkTable.constrained == null) {
|
||||||
|
fkTable.constrained = []
|
||||||
|
}
|
||||||
|
if (fkTable.constrained.indexOf(foreignKey) === -1) {
|
||||||
|
fkTable.constrained.push(foreignKey)
|
||||||
|
}
|
||||||
|
// foreign key is in other table, need to save it to external
|
||||||
|
if (fkTable._id !== tableToSave._id) {
|
||||||
|
extraTablesToUpdate.push(fkTable)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
|
||||||
|
schema.main = true
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanupRelationships(tableToSave, tables, oldTable)
|
||||||
|
|
||||||
|
const operation = tableId ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
|
||||||
|
await makeTableRequest(
|
||||||
|
datasource,
|
||||||
|
operation,
|
||||||
|
tableToSave,
|
||||||
|
tables,
|
||||||
|
oldTable,
|
||||||
|
opts?.renaming
|
||||||
|
)
|
||||||
|
// update any extra tables (like foreign keys in other tables)
|
||||||
|
for (let extraTable of extraTablesToUpdate) {
|
||||||
|
const oldExtraTable = oldTables[extraTable.name]
|
||||||
|
let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
|
||||||
|
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
|
||||||
|
}
|
||||||
|
|
||||||
|
// make sure the constrained list, all still exist
|
||||||
|
if (Array.isArray(tableToSave.constrained)) {
|
||||||
|
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
|
||||||
|
Object.keys(tableToSave.schema).includes(constraint)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove the rename prop
|
||||||
|
delete tableToSave._rename
|
||||||
|
// store it into couch now for budibase reference
|
||||||
|
datasource.entities[tableToSave.name] = tableToSave
|
||||||
|
await db.put(populateExternalTableSchemas(datasource))
|
||||||
|
|
||||||
|
// Since tables are stored inside datasources, we need to notify clients
|
||||||
|
// that the datasource definition changed
|
||||||
|
const updatedDatasource = await datasourceSdk.get(datasource._id!)
|
||||||
|
|
||||||
|
return { datasource: updatedDatasource, table: tableToSave }
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function destroy(datasourceId: string, table: Table) {
|
||||||
|
const db = context.getAppDB()
|
||||||
|
const datasource = await datasourceSdk.get(datasourceId)
|
||||||
|
const tables = datasource.entities
|
||||||
|
|
||||||
|
const operation = Operation.DELETE_TABLE
|
||||||
|
if (tables) {
|
||||||
|
await makeTableRequest(datasource, operation, table, tables)
|
||||||
|
cleanupRelationships(table, tables)
|
||||||
|
delete tables[table.name]
|
||||||
|
datasource.entities = tables
|
||||||
|
}
|
||||||
|
|
||||||
|
await db.put(populateExternalTableSchemas(datasource))
|
||||||
|
|
||||||
|
// Since tables are stored inside datasources, we need to notify clients
|
||||||
|
// that the datasource definition changed
|
||||||
|
const updatedDatasource = await datasourceSdk.get(datasource._id!)
|
||||||
|
return { datasource: updatedDatasource, table }
|
||||||
|
}
|
|
@ -0,0 +1,161 @@
|
||||||
|
import {
|
||||||
|
Datasource,
|
||||||
|
ManyToManyRelationshipFieldMetadata,
|
||||||
|
ManyToOneRelationshipFieldMetadata,
|
||||||
|
OneToManyRelationshipFieldMetadata,
|
||||||
|
RelationshipFieldMetadata,
|
||||||
|
RelationshipType,
|
||||||
|
Table,
|
||||||
|
} from "@budibase/types"
|
||||||
|
import { FieldTypes } from "../../../../constants"
|
||||||
|
import {
|
||||||
|
foreignKeyStructure,
|
||||||
|
generateForeignKey,
|
||||||
|
generateJunctionTableName,
|
||||||
|
} from "../../../../api/controllers/table/utils"
|
||||||
|
import { buildExternalTableId } from "../../../../integrations/utils"
|
||||||
|
import { cloneDeep } from "lodash/fp"
|
||||||
|
|
||||||
|
export function cleanupRelationships(
|
||||||
|
table: Table,
|
||||||
|
tables: Record<string, Table>,
|
||||||
|
oldTable?: Table
|
||||||
|
) {
|
||||||
|
const tableToIterate = oldTable ? oldTable : table
|
||||||
|
// clean up relationships in couch table schemas
|
||||||
|
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
|
||||||
|
if (
|
||||||
|
schema.type === FieldTypes.LINK &&
|
||||||
|
(!oldTable || table.schema[key] == null)
|
||||||
|
) {
|
||||||
|
const schemaTableId = schema.tableId
|
||||||
|
const relatedTable = Object.values(tables).find(
|
||||||
|
table => table._id === schemaTableId
|
||||||
|
)
|
||||||
|
const foreignKey =
|
||||||
|
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
|
||||||
|
schema.foreignKey
|
||||||
|
if (!relatedTable || !foreignKey) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for (let [relatedKey, relatedSchema] of Object.entries(
|
||||||
|
relatedTable.schema
|
||||||
|
)) {
|
||||||
|
if (
|
||||||
|
relatedSchema.type === FieldTypes.LINK &&
|
||||||
|
relatedSchema.fieldName === foreignKey
|
||||||
|
) {
|
||||||
|
delete relatedTable.schema[relatedKey]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function otherRelationshipType(type: RelationshipType) {
|
||||||
|
if (type === RelationshipType.MANY_TO_MANY) {
|
||||||
|
return RelationshipType.MANY_TO_MANY
|
||||||
|
}
|
||||||
|
return type === RelationshipType.ONE_TO_MANY
|
||||||
|
? RelationshipType.MANY_TO_ONE
|
||||||
|
: RelationshipType.ONE_TO_MANY
|
||||||
|
}
|
||||||
|
|
||||||
|
export function generateManyLinkSchema(
|
||||||
|
datasource: Datasource,
|
||||||
|
column: ManyToManyRelationshipFieldMetadata,
|
||||||
|
table: Table,
|
||||||
|
relatedTable: Table
|
||||||
|
): Table {
|
||||||
|
if (!table.primary || !relatedTable.primary) {
|
||||||
|
const noPrimaryName = !table.primary ? table.name : relatedTable.name
|
||||||
|
throw new Error(
|
||||||
|
`Unable to generate many link schema, "${noPrimaryName}" does not have a primary key`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const primary = table.name + table.primary[0]
|
||||||
|
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
|
||||||
|
const jcTblName = generateJunctionTableName(column, table, relatedTable)
|
||||||
|
// first create the new table
|
||||||
|
const junctionTable = {
|
||||||
|
_id: buildExternalTableId(datasource._id!, jcTblName),
|
||||||
|
name: jcTblName,
|
||||||
|
primary: [primary, relatedPrimary],
|
||||||
|
constrained: [primary, relatedPrimary],
|
||||||
|
schema: {
|
||||||
|
[primary]: foreignKeyStructure(primary, {
|
||||||
|
toTable: table.name,
|
||||||
|
toKey: table.primary[0],
|
||||||
|
}),
|
||||||
|
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
|
||||||
|
toTable: relatedTable.name,
|
||||||
|
toKey: relatedTable.primary[0],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
column.through = junctionTable._id
|
||||||
|
column.throughFrom = relatedPrimary
|
||||||
|
column.throughTo = primary
|
||||||
|
column.fieldName = relatedPrimary
|
||||||
|
return junctionTable
|
||||||
|
}
|
||||||
|
|
||||||
|
export function generateLinkSchema(
|
||||||
|
column:
|
||||||
|
| OneToManyRelationshipFieldMetadata
|
||||||
|
| ManyToOneRelationshipFieldMetadata,
|
||||||
|
table: Table,
|
||||||
|
relatedTable: Table,
|
||||||
|
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
|
||||||
|
) {
|
||||||
|
if (!table.primary || !relatedTable.primary) {
|
||||||
|
throw new Error("Unable to generate link schema, no primary keys")
|
||||||
|
}
|
||||||
|
const isOneSide = type === RelationshipType.ONE_TO_MANY
|
||||||
|
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
|
||||||
|
// generate a foreign key
|
||||||
|
const foreignKey = generateForeignKey(column, relatedTable)
|
||||||
|
column.relationshipType = type
|
||||||
|
column.foreignKey = isOneSide ? foreignKey : primary
|
||||||
|
column.fieldName = isOneSide ? primary : foreignKey
|
||||||
|
return foreignKey
|
||||||
|
}
|
||||||
|
|
||||||
|
export function generateRelatedSchema(
|
||||||
|
linkColumn: RelationshipFieldMetadata,
|
||||||
|
table: Table,
|
||||||
|
relatedTable: Table,
|
||||||
|
columnName: string
|
||||||
|
) {
|
||||||
|
// generate column for other table
|
||||||
|
let relatedSchema: RelationshipFieldMetadata
|
||||||
|
const isMany2Many =
|
||||||
|
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
|
||||||
|
// swap them from the main link
|
||||||
|
if (!isMany2Many && linkColumn.foreignKey) {
|
||||||
|
relatedSchema = cloneDeep(linkColumn) as
|
||||||
|
| OneToManyRelationshipFieldMetadata
|
||||||
|
| ManyToOneRelationshipFieldMetadata
|
||||||
|
relatedSchema.fieldName = linkColumn.foreignKey
|
||||||
|
relatedSchema.foreignKey = linkColumn.fieldName
|
||||||
|
}
|
||||||
|
// is many to many
|
||||||
|
else {
|
||||||
|
const manyToManyCol = linkColumn as ManyToManyRelationshipFieldMetadata
|
||||||
|
relatedSchema = cloneDeep(linkColumn) as ManyToManyRelationshipFieldMetadata
|
||||||
|
// don't need to copy through, already got it
|
||||||
|
relatedSchema.fieldName = manyToManyCol.throughTo!
|
||||||
|
relatedSchema.throughTo = manyToManyCol.throughFrom
|
||||||
|
relatedSchema.throughFrom = manyToManyCol.throughTo
|
||||||
|
}
|
||||||
|
relatedSchema.relationshipType = otherRelationshipType(
|
||||||
|
linkColumn.relationshipType
|
||||||
|
)
|
||||||
|
relatedSchema.tableId = relatedTable._id!
|
||||||
|
relatedSchema.name = columnName
|
||||||
|
table.schema[columnName] = relatedSchema
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isRelationshipSetup(column: RelationshipFieldMetadata) {
|
||||||
|
return (column as any).foreignKey || (column as any).through
|
||||||
|
}
|
|
@ -0,0 +1,124 @@
|
||||||
|
import { context } from "@budibase/backend-core"
|
||||||
|
import {
|
||||||
|
BudibaseInternalDB,
|
||||||
|
getMultiIDParams,
|
||||||
|
getTableParams,
|
||||||
|
} from "../../../db/utils"
|
||||||
|
import {
|
||||||
|
breakExternalTableId,
|
||||||
|
isExternalTable,
|
||||||
|
isSQL,
|
||||||
|
} from "../../../integrations/utils"
|
||||||
|
import {
|
||||||
|
AllDocsResponse,
|
||||||
|
Database,
|
||||||
|
Table,
|
||||||
|
TableResponse,
|
||||||
|
TableViewsResponse,
|
||||||
|
} from "@budibase/types"
|
||||||
|
import datasources from "../datasources"
|
||||||
|
import sdk from "../../../sdk"
|
||||||
|
|
||||||
|
function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
|
||||||
|
return docs.rows.map((tableDoc: any) => ({
|
||||||
|
...tableDoc.doc,
|
||||||
|
type: "internal",
|
||||||
|
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getAllInternalTables(db?: Database): Promise<Table[]> {
|
||||||
|
if (!db) {
|
||||||
|
db = context.getAppDB()
|
||||||
|
}
|
||||||
|
const internalTables = await db.allDocs<Table[]>(
|
||||||
|
getTableParams(null, {
|
||||||
|
include_docs: true,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
return processInternalTables(internalTables)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getAllExternalTables(): Promise<Table[]> {
|
||||||
|
const datasources = await sdk.datasources.fetch({ enriched: true })
|
||||||
|
const allEntities = datasources.map(datasource => datasource.entities)
|
||||||
|
let final: Table[] = []
|
||||||
|
for (let entities of allEntities) {
|
||||||
|
if (entities) {
|
||||||
|
final = final.concat(Object.values(entities))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return final
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Looks up a single table by name within an external datasource.
 *
 * NOTE(review): this indexes the entity map directly — if `tableName` is not
 * present the function resolves to undefined despite the `Promise<Table>`
 * return type; callers appear to guard/assert existence. Confirm.
 */
export async function getExternalTable(
  datasourceId: string,
  tableName: string
): Promise<Table> {
  const entities = await getExternalTablesInDatasource(datasourceId)
  return entities[tableName]
}
|
||||||
|
|
||||||
|
export async function getTable(tableId: string): Promise<Table> {
|
||||||
|
const db = context.getAppDB()
|
||||||
|
if (isExternalTable(tableId)) {
|
||||||
|
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||||
|
const datasource = await datasources.get(datasourceId!)
|
||||||
|
const table = await getExternalTable(datasourceId!, tableName!)
|
||||||
|
return { ...table, sql: isSQL(datasource) }
|
||||||
|
} else {
|
||||||
|
return db.get(tableId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getAllTables() {
|
||||||
|
const [internal, external] = await Promise.all([
|
||||||
|
getAllInternalTables(),
|
||||||
|
getAllExternalTables(),
|
||||||
|
])
|
||||||
|
return [...internal, ...external]
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getExternalTablesInDatasource(
|
||||||
|
datasourceId: string
|
||||||
|
): Promise<Record<string, Table>> {
|
||||||
|
const datasource = await datasources.get(datasourceId, { enriched: true })
|
||||||
|
if (!datasource || !datasource.entities) {
|
||||||
|
throw new Error("Datasource is not configured fully.")
|
||||||
|
}
|
||||||
|
return datasource.entities
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getTables(tableIds: string[]): Promise<Table[]> {
|
||||||
|
const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
|
||||||
|
internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
|
||||||
|
let tables: Table[] = []
|
||||||
|
if (externalTableIds.length) {
|
||||||
|
const externalTables = await getAllExternalTables()
|
||||||
|
tables = tables.concat(
|
||||||
|
externalTables.filter(
|
||||||
|
table => externalTableIds.indexOf(table._id!) !== -1
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (internalTableIds.length) {
|
||||||
|
const db = context.getAppDB()
|
||||||
|
const internalTableDocs = await db.allDocs<Table[]>(
|
||||||
|
getMultiIDParams(internalTableIds)
|
||||||
|
)
|
||||||
|
tables = tables.concat(processInternalTables(internalTableDocs))
|
||||||
|
}
|
||||||
|
return tables
|
||||||
|
}
|
||||||
|
|
||||||
|
export function enrichViewSchemas(table: Table): TableResponse {
|
||||||
|
return {
|
||||||
|
...table,
|
||||||
|
views: Object.values(table.views ?? [])
|
||||||
|
.map(v => sdk.views.enrichSchema(v, table.schema))
|
||||||
|
.reduce((p, v) => {
|
||||||
|
p[v.name!] = v
|
||||||
|
return p
|
||||||
|
}, {} as TableViewsResponse),
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,95 +1,11 @@
|
||||||
import { context } from "@budibase/backend-core"
|
|
||||||
import { BudibaseInternalDB, getTableParams } from "../../../db/utils"
|
|
||||||
import {
|
|
||||||
breakExternalTableId,
|
|
||||||
isExternalTable,
|
|
||||||
isSQL,
|
|
||||||
} from "../../../integrations/utils"
|
|
||||||
import {
|
|
||||||
Database,
|
|
||||||
Table,
|
|
||||||
TableResponse,
|
|
||||||
TableViewsResponse,
|
|
||||||
} from "@budibase/types"
|
|
||||||
import datasources from "../datasources"
|
|
||||||
import { populateExternalTableSchemas } from "./validation"
|
import { populateExternalTableSchemas } from "./validation"
|
||||||
import sdk from "../../../sdk"
|
import * as getters from "./getters"
|
||||||
|
import * as updates from "./update"
|
||||||
async function getAllInternalTables(db?: Database): Promise<Table[]> {
|
import * as utils from "./utils"
|
||||||
if (!db) {
|
|
||||||
db = context.getAppDB()
|
|
||||||
}
|
|
||||||
const internalTables = await db.allDocs(
|
|
||||||
getTableParams(null, {
|
|
||||||
include_docs: true,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
return internalTables.rows.map((tableDoc: any) => ({
|
|
||||||
...tableDoc.doc,
|
|
||||||
type: "internal",
|
|
||||||
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getAllExternalTables(
|
|
||||||
datasourceId: any
|
|
||||||
): Promise<Record<string, Table>> {
|
|
||||||
const datasource = await datasources.get(datasourceId, { enriched: true })
|
|
||||||
if (!datasource || !datasource.entities) {
|
|
||||||
throw "Datasource is not configured fully."
|
|
||||||
}
|
|
||||||
return datasource.entities
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getExternalTable(
|
|
||||||
datasourceId: any,
|
|
||||||
tableName: any
|
|
||||||
): Promise<Table> {
|
|
||||||
const entities = await getAllExternalTables(datasourceId)
|
|
||||||
return entities[tableName]
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getTable(tableId: any): Promise<Table> {
|
|
||||||
const db = context.getAppDB()
|
|
||||||
if (isExternalTable(tableId)) {
|
|
||||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
|
||||||
const datasource = await datasources.get(datasourceId!)
|
|
||||||
const table = await getExternalTable(datasourceId, tableName)
|
|
||||||
return { ...table, sql: isSQL(datasource) }
|
|
||||||
} else {
|
|
||||||
return db.get(tableId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function enrichViewSchemas(table: Table): TableResponse {
|
|
||||||
return {
|
|
||||||
...table,
|
|
||||||
views: Object.values(table.views ?? [])
|
|
||||||
.map(v => sdk.views.enrichSchema(v, table.schema))
|
|
||||||
.reduce((p, v) => {
|
|
||||||
p[v.name] = v
|
|
||||||
return p
|
|
||||||
}, {} as TableViewsResponse),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Persists a table definition. External tables are embedded as entities
 * inside their datasource document, so that document is fetched, mutated
 * and re-written; internal tables are stored as standalone app-DB docs.
 */
async function saveTable(table: Table) {
  const db = context.getAppDB()
  if (isExternalTable(table._id!)) {
    // external: update the entity inside the owning datasource doc
    const datasource = await sdk.datasources.get(table.sourceId!)
    datasource.entities![table.name] = table
    await db.put(datasource)
  } else {
    await db.put(table)
  }
}
|
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
getAllInternalTables,
|
|
||||||
getAllExternalTables,
|
|
||||||
getExternalTable,
|
|
||||||
getTable,
|
|
||||||
populateExternalTableSchemas,
|
populateExternalTableSchemas,
|
||||||
enrichViewSchemas,
|
...updates,
|
||||||
saveTable,
|
...getters,
|
||||||
|
...utils,
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,172 @@
|
||||||
|
import {
|
||||||
|
RenameColumn,
|
||||||
|
Table,
|
||||||
|
ViewStatisticsSchema,
|
||||||
|
ViewV2,
|
||||||
|
Row,
|
||||||
|
ContextUser,
|
||||||
|
} from "@budibase/types"
|
||||||
|
import {
|
||||||
|
hasTypeChanged,
|
||||||
|
TableSaveFunctions,
|
||||||
|
} from "../../../../api/controllers/table/utils"
|
||||||
|
import { FieldTypes } from "../../../../constants"
|
||||||
|
import { EventType, updateLinks } from "../../../../db/linkedRows"
|
||||||
|
import { cloneDeep } from "lodash/fp"
|
||||||
|
import isEqual from "lodash/isEqual"
|
||||||
|
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
|
||||||
|
import { context } from "@budibase/backend-core"
|
||||||
|
import { getTable } from "../getters"
|
||||||
|
import { checkAutoColumns } from "./utils"
|
||||||
|
import * as viewsSdk from "../../views"
|
||||||
|
import sdk from "../../../index"
|
||||||
|
import { getRowParams } from "../../../../db/utils"
|
||||||
|
import { quotas } from "@budibase/pro"
|
||||||
|
import env from "../../../../environment"
|
||||||
|
import { cleanupAttachments } from "../../../../utilities/rowProcessor"
|
||||||
|
|
||||||
|
/**
 * Saves (creates or updates) an internal table, running the full save
 * pipeline: type-change validation, auto-column subtype checks, the
 * TableSaveFunctions before/mid/after hooks, column-rename propagation,
 * view schema syncing, linked-row updates and static formula checks.
 *
 * @param table the table definition to persist.
 * @param opts  optional context: acting user, existing tableId (signals an
 *              update), rows to import, and a pending column rename.
 * @returns an object containing the persisted table (with fresh _rev).
 * @throws Error when a column type changed, or a linked column is renamed.
 */
export async function save(
  table: Table,
  opts?: {
    user?: ContextUser
    tableId?: string
    rowsToImport?: Row[]
    renaming?: RenameColumn
  }
) {
  const db = context.getAppDB()

  // if the table obj had an _id then it will have been retrieved
  let oldTable: Table | undefined
  if (opts?.tableId) {
    oldTable = await getTable(opts.tableId)
  }

  // check all types are correct
  if (hasTypeChanged(table, oldTable)) {
    throw new Error("A column type has changed.")
  }
  // check that subtypes have been maintained
  table = checkAutoColumns(table, oldTable)

  // saving a table is a complex operation, involving many different steps, this
  // has been broken out into a utility to make it more obvious/easier to manipulate
  const tableSaveFunctions = new TableSaveFunctions({
    user: opts?.user,
    oldTable,
    importRows: opts?.rowsToImport,
  })
  table = await tableSaveFunctions.before(table)

  // a rename to the same name is a no-op — drop it so later steps skip it
  let renaming = opts?.renaming
  if (renaming && renaming.old === renaming.updated) {
    renaming = undefined
  }

  // rename row fields when table column is renamed
  if (renaming && table.schema[renaming.updated].type === FieldTypes.LINK) {
    throw new Error("Cannot rename a linked column.")
  }

  table = await tableSaveFunctions.mid(table, renaming)

  // update schema of non-statistics views when new columns are added
  for (let view in table.views) {
    const tableView = table.views[view]
    if (!tableView) continue

    // V2 views get a proper schema sync (including rename handling)
    if (viewsSdk.isV2(tableView)) {
      table.views[view] = viewsSdk.syncSchema(
        oldTable!.views![view] as ViewV2,
        table.schema,
        renaming
      )
      continue
    }

    // legacy statistics views (group/field set) keep their own schema
    if (
      (tableView.schema as ViewStatisticsSchema).group ||
      tableView.schema.field
    )
      continue
    tableView.schema = table.schema
  }

  // update linked rows
  const linkResp: any = await updateLinks({
    eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
    table: table,
    oldTable: oldTable,
  })
  if (linkResp != null && linkResp._rev) {
    table._rev = linkResp._rev
  }

  // don't perform any updates until relationships have been
  // checked by the updateLinks function
  const updatedRows = tableSaveFunctions.getUpdatedRows()
  if (updatedRows && updatedRows.length !== 0) {
    await db.bulkDocs(updatedRows)
  }
  let result = await db.put(table)
  table._rev = result.rev
  // snapshot so we can detect changes made by the after-hook below
  const savedTable = cloneDeep(table)

  table = await tableSaveFunctions.after(table)
  // the table may be updated as part of the table save after functionality - need to write it
  if (!isEqual(savedTable, table)) {
    result = await db.put(table)
    table._rev = result.rev
  }
  // has to run after, make sure it has _id
  await runStaticFormulaChecks(table, { oldTable, deletion: false })
  return { table }
}
|
||||||
|
|
||||||
|
/**
 * Deletes a table and all of its rows, then performs the associated
 * cleanup: quota accounting, linked-row updates, search-index removal,
 * static formula checks and attachment cleanup.
 *
 * @param table the table document to delete (must carry _id and _rev).
 * @returns an object containing the deleted table.
 */
export async function destroy(table: Table) {
  const db = context.getAppDB()
  const tableId = table._id!

  // Delete all rows for that table
  const rowsData = await db.allDocs(
    getRowParams(tableId, null, {
      include_docs: true,
    })
  )
  await db.bulkDocs(
    rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
  )
  // release the row quota consumed by the deleted rows
  await quotas.removeRows(rowsData.rows.length, {
    tableId,
  })

  // update linked rows
  await updateLinks({
    eventType: EventType.TABLE_DELETE,
    table: table,
  })

  // don't remove the table itself until very end
  await db.remove(tableId, table._rev)

  // remove table search index
  // skipped in tests unless a real CouchDB URL is configured
  if (!env.isTest() || env.COUCH_DB_URL) {
    const currentIndexes = await db.getIndexes()
    const existingIndex = currentIndexes.indexes.find(
      (existing: any) => existing.name === `search:${tableId}`
    )
    if (existingIndex) {
      await db.deleteIndex(existingIndex)
    }
  }

  // has to run after, make sure it has _id
  await runStaticFormulaChecks(table, {
    deletion: true,
  })
  await cleanupAttachments(table, {
    rows: rowsData.rows.map((row: any) => row.doc),
  })

  return { table }
}
|
|
@ -0,0 +1,20 @@
|
||||||
|
import { Table } from "@budibase/types"
|
||||||
|
import { fixAutoColumnSubType } from "../../../../utilities/rowProcessor"
|
||||||
|
|
||||||
|
export function checkAutoColumns(table: Table, oldTable?: Table) {
|
||||||
|
if (!table.schema) {
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
for (let [key, schema] of Object.entries(table.schema)) {
|
||||||
|
if (!schema.autocolumn || schema.subtype) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const oldSchema = oldTable && oldTable.schema[key]
|
||||||
|
if (oldSchema && oldSchema.subtype) {
|
||||||
|
table.schema[key].subtype = oldSchema.subtype
|
||||||
|
} else {
|
||||||
|
table.schema[key] = fixAutoColumnSubType(schema)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return table
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue