Replacing all plural TS enumerations with singular names - the largest of these is the rename of DocumentTypes to DocumentType.

mike12345567 2022-08-11 13:50:05 +01:00
parent 0efa0d375d
commit b803a3fd93
73 changed files with 502 additions and 539 deletions
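A minimal sketch of the rename pattern the commit applies, assuming file contents beyond what the diffs below show; the member values are the ones visible in the constants diff, everything else is elided. Only the enum name moves from plural to singular, and every call site swaps the identifier to match.

// Before
export enum DocumentTypes {
  USER = "us",
  GROUP = "gr",
  WORKSPACE = "workspace",
  // ...remaining members unchanged
}

// After
export enum DocumentType {
  USER = "us",
  GROUP = "gr",
  WORKSPACE = "workspace",
  // ...remaining members unchanged
}

// Call sites change only the enum name, e.g.
// db.get(DocumentTypes.APP_METADATA)  ->  db.get(DocumentType.APP_METADATA)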

View File

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const { DocumentType } = require("../db/constants")
const AppState = {
INVALID: "invalid",
@ -14,7 +14,7 @@ const populateFromDB = async appId => {
return doWithDB(
appId,
db => {
return db.get(DocumentTypes.APP_METADATA)
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)

View File

@ -1,4 +1,4 @@
export enum ContextKeys {
export enum ContextKey {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",

View File

@ -1,11 +1,11 @@
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "../db/constants"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import {
updateUsing,
closeWithUsing,
@ -33,8 +33,8 @@ export const closeTenancy = async () => {
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
cls.setOnContext(ContextKey.TENANT_ID, null)
cls.setOnContext(ContextKey.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
@ -54,7 +54,7 @@ export const getTenantIDFromAppID = (appId: string) => {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
@ -83,14 +83,14 @@ export const doInTenant = (tenantId: string | null, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
@ -108,7 +108,7 @@ export const doInAppContext = (appId: string, task: any) => {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
// preserve the identity
if (identity) {
@ -118,14 +118,14 @@ export const doInAppContext = (appId: string, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
@ -135,7 +135,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
@ -146,27 +146,27 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.IDENTITY)
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
return cls.getFromContext(ContextKey.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
@ -176,7 +176,7 @@ export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
@ -189,12 +189,12 @@ export const updateAppId = async (appId: string) => {
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
cls.setOnContext(ContextKey.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
@ -202,7 +202,7 @@ export const getGlobalDB = () => {
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
return !!tenantId
}
@ -210,7 +210,7 @@ export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
@ -218,7 +218,7 @@ export const getTenantId = () => {
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
const foundId = cls.getFromContext(ContextKey.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
@ -231,7 +231,7 @@ export const getAppId = () => {
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
return getContextDB(ContextKey.CURRENT_DB, opts)
}
/**
@ -239,7 +239,7 @@ export const getAppDB = (opts?: any) => {
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKeys.PROD_DB, opts)
return getContextDB(ContextKey.PROD_DB, opts)
}
/**
@ -247,5 +247,5 @@ export const getProdAppDB = (opts?: any) => {
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKeys.DEV_DB, opts)
return getContextDB(ContextKey.DEV_DB, opts)
}

View File

@ -6,7 +6,7 @@ import {
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
@ -47,17 +47,13 @@ export const setAppTenantId = (appId: string) => {
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
@ -68,16 +64,16 @@ export async function closeAppDBs() {
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
if (cls.getFromContext(ContextKey.APP_ID)) {
cls.setOnContext(ContextKey.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
if (cls.getFromContext(ContextKey.DB_OPTS)) {
cls.setOnContext(ContextKey.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
@ -88,13 +84,13 @@ export function getContextDB(key: string, opts: any) {
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
case ContextKey.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
case ContextKey.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
case ContextKey.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}

View File

@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0"
/**
* Can be used to create a few different forms of querying a view.
*/
export enum AutomationViewModes {
export enum AutomationViewMode {
ALL = "all",
AUTOMATION = "automation",
STATUS = "status",
}
export enum ViewNames {
export enum ViewName {
USER_BY_APP = "by_app",
USER_BY_EMAIL = "by_email2",
BY_API_KEY = "by_api_key",
@ -21,13 +21,13 @@ export enum ViewNames {
}
export const DeprecatedViews = {
[ViewNames.USER_BY_EMAIL]: [
[ViewName.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",
],
}
export enum DocumentTypes {
export enum DocumentType {
USER = "us",
GROUP = "gr",
WORKSPACE = "workspace",
@ -62,6 +62,6 @@ export const StaticDatabases = {
},
}
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
export const APP_PREFIX = DocumentType.APP + SEPARATOR
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
export const APP_DEV_PREFIX = APP_DEV

View File

@ -1,7 +1,7 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
@ -58,7 +58,7 @@ export function getDocParams(
/**
* Retrieve the correct index for a view based on default design DB.
*/
export function getQueryIndex(viewName: ViewNames) {
export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
@ -67,7 +67,7 @@ export function getQueryIndex(viewName: ViewNames) {
* @returns {string} The new workspace ID which the workspace doc can be stored under.
*/
export function generateWorkspaceID() {
return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}`
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
}
/**
@ -76,8 +76,8 @@ export function generateWorkspaceID() {
export function getWorkspaceParams(id = "", otherProps = {}) {
return {
...otherProps,
startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
}
}
@ -86,7 +86,7 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
* @returns {string} The new user ID which the user doc can be stored under.
*/
export function generateGlobalUserID(id?: any) {
return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}`
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
}
/**
@ -102,8 +102,8 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
// need to include this incase pagination
startkey: startkey
? startkey
: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
: `${DocumentType.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
}
}
@ -121,7 +121,7 @@ export function getUsersByAppParams(appId: any, otherProps: any = {}) {
* @param ownerId The owner/user of the template, this could be global or a workspace level.
*/
export function generateTemplateID(ownerId: any) {
return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}
export function generateAppUserID(prodAppId: string, userId: string) {
@ -143,7 +143,7 @@ export function getTemplateParams(
if (templateId) {
final = templateId
} else {
final = `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
}
return {
...otherProps,
@ -157,14 +157,14 @@ export function getTemplateParams(
* @returns {string} The new role ID which the role doc can be stored under.
*/
export function generateRoleID(id: any) {
return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}`
return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
}
/**
* Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
*/
export function getRoleParams(roleId = null, otherProps = {}) {
return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
return getDocParams(DocumentType.ROLE, roleId, otherProps)
}
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
@ -211,9 +211,9 @@ export async function getAllDbs(opts = { efficient: false }) {
await addDbs(couchUrl)
} else {
// get prod apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
// get dev apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
// add global db name
dbs.push(getGlobalDBName(tenantId))
}
@ -235,12 +235,12 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
const appDbNames = dbs.filter((dbName: any) => {
const split = dbName.split(SEPARATOR)
// it is an app, check the tenantId
if (split[0] === DocumentTypes.APP) {
if (split[0] === DocumentType.APP) {
// tenantId is always right before the UUID
const possibleTenantId = split[split.length - 2]
const noTenantId =
split.length === 2 || possibleTenantId === DocumentTypes.DEV
split.length === 2 || possibleTenantId === DocumentType.DEV
return (
(tenantId === DEFAULT_TENANT_ID && noTenantId) ||
@ -326,7 +326,7 @@ export async function dbExists(dbName: any) {
export const generateConfigID = ({ type, workspace, user }: any) => {
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
}
/**
@ -340,8 +340,8 @@ export const getConfigParams = (
return {
...otherProps,
startkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
}
}
@ -350,7 +350,7 @@ export const getConfigParams = (
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
*/
export const generateDevInfoID = (userId: any) => {
return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}`
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
/**

View File

@ -1,6 +1,6 @@
const {
DocumentTypes,
ViewNames,
DocumentType,
ViewName,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
@ -44,14 +44,14 @@ exports.createNewUserEmailView = async () => {
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_EMAIL]: view,
[ViewName.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
@ -68,7 +68,7 @@ exports.createUserAppView = async () => {
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}") && doc.roles) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
@ -78,7 +78,7 @@ exports.createUserAppView = async () => {
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_APP]: view,
[ViewName.USER_BY_APP]: view,
}
await db.put(designDoc)
}
@ -93,14 +93,14 @@ exports.createApiKeyView = async () => {
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.DEV_INFO}") && doc.apiKey) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.BY_API_KEY]: view,
[ViewName.BY_API_KEY]: view,
}
await db.put(designDoc)
}
@ -123,17 +123,17 @@ exports.createUserBuildersView = async () => {
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_BUILDERS]: view,
[ViewName.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewNames.USER_BY_APP]: exports.createUserAppView,
[ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewName.BY_API_KEY]: exports.createApiKeyView,
[ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewName.USER_BY_APP]: exports.createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {

View File

@ -1,4 +1,5 @@
import errors from "./errors"
const errorClasses = errors.errors
import * as events from "./events"
import * as migrations from "./migrations"

View File

@ -4,7 +4,7 @@ import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR } from "../db/constants"
import { ViewNames } from "../db/utils"
import { ViewName } from "../db/utils"
import { queryGlobalView } from "../db/views"
import { getGlobalDB, doInTenant } from "../tenancy"
import { decrypt } from "../security/encryption"
@ -43,7 +43,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
const db = getGlobalDB()
// api key is encrypted in the database
const userId = await queryGlobalView(
ViewNames.BY_API_KEY,
ViewName.BY_API_KEY,
{
key: apiKey,
},

View File

@ -1,6 +1,6 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { doWithDB } from "../db"
import { DocumentTypes, StaticDatabases } from "../db/constants"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
import {
@ -21,10 +21,10 @@ import {
export const getMigrationsDoc = async (db: any) => {
// get the migrations doc
try {
return await db.get(DocumentTypes.MIGRATIONS)
return await db.get(DocumentType.MIGRATIONS)
} catch (err: any) {
if (err.status && err.status === 404) {
return { _id: DocumentTypes.MIGRATIONS }
return { _id: DocumentType.MIGRATIONS }
} else {
console.error(err)
throw err

View File

@ -3,7 +3,7 @@ const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions")
const {
generateRoleID,
getRoleParams,
DocumentTypes,
DocumentType,
SEPARATOR,
} = require("../db/utils")
const { getAppDB } = require("../context")
@ -338,7 +338,7 @@ class AccessController {
* Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
*/
exports.getDBRoleID = roleId => {
if (roleId.startsWith(DocumentTypes.ROLE)) {
if (roleId.startsWith(DocumentType.ROLE)) {
return roleId
}
return generateRoleID(roleId)
@ -349,8 +349,8 @@ exports.getDBRoleID = roleId => {
*/
exports.getExternalRoleID = roleId => {
// for built in roles we want to remove the DB role ID element (role_)
if (roleId.startsWith(DocumentTypes.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentTypes.ROLE}${SEPARATOR}`)[1]
if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
}
return roleId
}

View File

@ -1,5 +1,5 @@
const {
ViewNames,
ViewName,
getUsersByAppParams,
getProdAppID,
generateAppUserID,
@ -18,7 +18,7 @@ exports.getGlobalUserByEmail = async email => {
throw "Must supply an email address to view"
}
return await queryGlobalView(ViewNames.USER_BY_EMAIL, {
return await queryGlobalView(ViewName.USER_BY_EMAIL, {
key: email.toLowerCase(),
include_docs: true,
})
@ -32,7 +32,7 @@ exports.searchGlobalUsersByApp = async (appId, opts) => {
include_docs: true,
})
params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
let response = await queryGlobalView(ViewNames.USER_BY_APP, params)
let response = await queryGlobalView(ViewName.USER_BY_APP, params)
if (!response) {
response = []
}
@ -56,7 +56,7 @@ exports.searchGlobalUsersByEmail = async (email, opts) => {
const lcEmail = email.toLowerCase()
// handle if passing up startkey for pagination
const startkey = opts && opts.startkey ? opts.startkey : lcEmail
let response = await queryGlobalView(ViewNames.USER_BY_EMAIL, {
let response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
...opts,
startkey,
endkey: `${lcEmail}${UNICODE_MAX}`,

View File

@ -1,9 +1,4 @@
const {
DocumentTypes,
SEPARATOR,
ViewNames,
getAllApps,
} = require("./db/utils")
const { DocumentType, SEPARATOR, ViewName, getAllApps } = require("./db/utils")
const jwt = require("jsonwebtoken")
const { options } = require("./middleware/passport/jwt")
const { queryGlobalView } = require("./db/views")
@ -17,7 +12,7 @@ const {
const events = require("./events")
const tenancy = require("./tenancy")
const APP_PREFIX = DocumentTypes.APP + SEPARATOR
const APP_PREFIX = DocumentType.APP + SEPARATOR
const PROD_APP_PREFIX = "/app/"
function confirmAppId(possibleAppId) {
@ -154,7 +149,7 @@ exports.isClient = ctx => {
}
const getBuilders = async () => {
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
const builders = await queryGlobalView(ViewName.USER_BY_BUILDERS, {
include_docs: false,
})

View File

@ -1,6 +1,6 @@
import { events } from "@budibase/backend-core"
import { AnalyticsPingRequest, PingSource } from "@budibase/types"
import { DocumentTypes, isDevAppID } from "../../db/utils"
import { DocumentType, isDevAppID } from "../../db/utils"
import { context } from "@budibase/backend-core"
export const isEnabled = async (ctx: any) => {
@ -15,7 +15,7 @@ export const ping = async (ctx: any) => {
switch (body.source) {
case PingSource.APP: {
const db = context.getAppDB({ skip_setup: true })
const appInfo = await db.get(DocumentTypes.APP_METADATA)
const appInfo = await db.get(DocumentType.APP_METADATA)
let appId = context.getAppId()
if (isDevAppID(appId)) {

View File

@ -15,7 +15,7 @@ import {
getLayoutParams,
getScreenParams,
generateDevAppID,
DocumentTypes,
DocumentType,
AppStatus,
} from "../../db/utils"
const {
@ -206,7 +206,7 @@ export const fetchAppDefinition = async (ctx: any) => {
export const fetchAppPackage = async (ctx: any) => {
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const application = await db.get(DocumentType.APP_METADATA)
const layouts = await getLayouts()
let screens = await getScreens()
@ -248,13 +248,13 @@ const performAppCreate = async (ctx: any) => {
let _rev
try {
// if template there will be an existing doc
const existing = await db.get(DocumentTypes.APP_METADATA)
const existing = await db.get(DocumentType.APP_METADATA)
_rev = existing._rev
} catch (err) {
// nothing to do
}
const newApplication: App = {
_id: DocumentTypes.APP_METADATA,
_id: DocumentType.APP_METADATA,
_rev,
appId: instance._id,
type: "app",
@ -383,7 +383,7 @@ export const update = async (ctx: any) => {
export const updateClient = async (ctx: any) => {
// Get current app version
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const application = await db.get(DocumentType.APP_METADATA)
const currentVersion = application.version
// Update client library and manifest
@ -407,7 +407,7 @@ export const updateClient = async (ctx: any) => {
export const revertClient = async (ctx: any) => {
// Check app can be reverted
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const application = await db.get(DocumentType.APP_METADATA)
if (!application.revertableVersion) {
ctx.throw(400, "There is no version to revert to")
}
@ -439,7 +439,7 @@ const destroyApp = async (ctx: any) => {
}
const db = isUnpublish ? context.getProdAppDB() : context.getAppDB()
const app = await db.get(DocumentTypes.APP_METADATA)
const app = await db.get(DocumentType.APP_METADATA)
const result = await db.destroy()
if (isUnpublish) {
@ -526,7 +526,7 @@ export const sync = async (ctx: any, next: any) => {
try {
await replication.replicate({
filter: function (doc: any) {
return doc._id !== DocumentTypes.APP_METADATA
return doc._id !== DocumentType.APP_METADATA
},
})
} catch (err) {
@ -550,7 +550,7 @@ export const sync = async (ctx: any, next: any) => {
const updateAppPackage = async (appPackage: any, appId: any) => {
return context.doInAppContext(appId, async () => {
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const application = await db.get(DocumentType.APP_METADATA)
const newAppPackage = { ...application, ...appPackage }
if (appPackage._rev !== application._rev) {

View File

@ -3,7 +3,7 @@ const triggers = require("../../automations/triggers")
const {
getAutomationParams,
generateAutomationID,
DocumentTypes,
DocumentType,
} = require("../../db/utils")
const {
checkForWebhooks,
@ -201,7 +201,7 @@ exports.clearLogError = async function (ctx) {
const { automationId, appId } = ctx.request.body
await doInAppContext(appId, async () => {
const db = getProdAppDB()
const metadata = await db.get(DocumentTypes.APP_METADATA)
const metadata = await db.get(DocumentType.APP_METADATA)
if (!automationId) {
delete metadata.automationErrors
} else if (

View File

@ -1,6 +1,6 @@
const { streamBackup } = require("../../utilities/fileSystem")
const { events, context } = require("@budibase/backend-core")
const { DocumentTypes } = require("../../db/utils")
const { DocumentType } = require("../../db/utils")
exports.exportAppDump = async function (ctx) {
let { appId, excludeRows } = ctx.query
@ -12,7 +12,7 @@ exports.exportAppDump = async function (ctx) {
await context.doInAppContext(appId, async () => {
const appDb = context.getAppDB()
const app = await appDb.get(DocumentTypes.APP_METADATA)
const app = await appDb.get(DocumentType.APP_METADATA)
await events.app.exported(app)
})
}

View File

@ -11,7 +11,7 @@ const {
getGlobalDB,
} = require("@budibase/backend-core/tenancy")
const { create } = require("./application")
const { getDocParams, DocumentTypes, isDevAppID } = require("../../db/utils")
const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
async function createApp(appName, appImport) {
const ctx = {
@ -31,7 +31,7 @@ exports.exportApps = async ctx => {
}
const apps = await getAllApps({ all: true })
const globalDBString = await exportDB(getGlobalDBName(), {
filter: doc => !doc._id.startsWith(DocumentTypes.USER),
filter: doc => !doc._id.startsWith(DocumentType.USER),
})
let allDBs = {
global: globalDBString,
@ -97,7 +97,7 @@ exports.importApps = async ctx => {
}
// if there are any users make sure to remove them
let users = await getAllDocType(globalDb, DocumentTypes.USER)
let users = await getAllDocType(globalDb, DocumentType.USER)
let userDeletionPromises = []
for (let user of users) {
userDeletionPromises.push(globalDb.remove(user._id, user._rev))

View File

@ -1,10 +1,10 @@
const { DocumentTypes } = require("../../db/utils")
const { DocumentType } = require("../../db/utils")
const { getComponentLibraryManifest } = require("../../utilities/fileSystem")
const { getAppDB } = require("@budibase/backend-core/context")
exports.fetchAppComponentDefinitions = async function (ctx) {
const db = getAppDB()
const app = await db.get(DocumentTypes.APP_METADATA)
const app = await db.get(DocumentType.APP_METADATA)
let componentManifests = await Promise.all(
app.componentLibraries.map(async library => {

View File

@ -2,7 +2,7 @@ const {
generateDatasourceID,
getDatasourceParams,
getQueryParams,
DocumentTypes,
DocumentType,
BudibaseInternalDB,
getTableParams,
} = require("../../db/utils")
@ -132,7 +132,7 @@ exports.save = async function (ctx) {
const datasource = {
_id: generateDatasourceID({ plus }),
type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
...ctx.request.body.datasource,
}

View File

@ -4,7 +4,7 @@ import {
getProdAppID,
getDevelopmentAppID,
} from "@budibase/backend-core/db"
import { DocumentTypes, getAutomationParams } from "../../../db/utils"
import { DocumentType, getAutomationParams } from "../../../db/utils"
import {
disableAllCrons,
enableCronTrigger,
@ -52,9 +52,9 @@ async function storeDeploymentHistory(deployment: any) {
let deploymentDoc
try {
// theres only one deployment doc per app database
deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
} catch (err) {
deploymentDoc = { _id: DocumentTypes.DEPLOYMENTS, history: {} }
deploymentDoc = { _id: DocumentType.DEPLOYMENTS, history: {} }
}
const deploymentId = deploymentJSON._id
@ -115,7 +115,7 @@ async function deployApp(deployment: any) {
await replication.replicate()
console.log("replication complete.. replacing app meta doc")
const db = getProdAppDB()
const appDoc = await db.get(DocumentTypes.APP_METADATA)
const appDoc = await db.get(DocumentType.APP_METADATA)
deployment.appUrl = appDoc.url
@ -146,7 +146,7 @@ async function deployApp(deployment: any) {
export async function fetchDeployments(ctx: any) {
try {
const db = getAppDB()
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
const { updated, deployments } = await checkAllDeployments(deploymentDoc)
if (updated) {
await db.put(deployments)
@ -160,7 +160,7 @@ export async function fetchDeployments(ctx: any) {
export async function deploymentProgress(ctx: any) {
try {
const db = getAppDB()
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
ctx.body = deploymentDoc[ctx.params.deploymentId]
} catch (err) {
ctx.throw(
@ -173,7 +173,7 @@ export async function deploymentProgress(ctx: any) {
const isFirstDeploy = async () => {
try {
const db = getProdAppDB()
await db.get(DocumentTypes.APP_METADATA)
await db.get(DocumentType.APP_METADATA)
} catch (e: any) {
if (e.status === 404) {
return true

View File

@ -4,7 +4,7 @@ const { checkSlashesInUrl } = require("../../utilities")
const { request } = require("../../utilities/workerRequests")
const { clearLock } = require("../../utilities/redis")
const { Replication, getProdAppID } = require("@budibase/backend-core/db")
const { DocumentTypes } = require("../../db/utils")
const { DocumentType } = require("../../db/utils")
const { app: appCache } = require("@budibase/backend-core/cache")
const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core")
@ -87,7 +87,7 @@ exports.revert = async ctx => {
if (info.error) {
throw info.error
}
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
if (
!deploymentDoc.history ||
Object.keys(deploymentDoc.history).length === 0
@ -110,7 +110,7 @@ exports.revert = async ctx => {
// update appID in reverted app to be dev version again
const db = getAppDB()
const appDoc = await db.get(DocumentTypes.APP_METADATA)
const appDoc = await db.get(DocumentType.APP_METADATA)
appDoc.appId = appId
appDoc.instance._id = appId
await db.put(appDoc)

View File

@ -1,6 +1,6 @@
const { cloneDeep } = require("lodash")
const { definitions } = require("../../integrations")
const { SourceNames } = require("../../definitions/datasource")
const { SourceName } = require("@budibase/types")
const googlesheets = require("../../integrations/googlesheets")
const { featureFlags } = require("@budibase/backend-core")
@ -10,7 +10,7 @@ exports.fetch = async function (ctx) {
// for google sheets integration google verification
if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
defs[SourceNames.GOOGLE_SHEETS] = googlesheets.schema
defs[SourceName.GOOGLE_SHEETS] = googlesheets.schema
}
ctx.body = defs

View File

@ -1,6 +1,6 @@
const { getAppDB } = require("@budibase/backend-core/context")
import { isExternalTable } from "../../../integrations/utils"
import { APP_PREFIX, DocumentTypes } from "../../../db/utils"
import { APP_PREFIX, DocumentType } from "../../../db/utils"
export async function addRev(
body: { _id?: string; _rev?: string },
@ -11,7 +11,7 @@ export async function addRev(
}
let id = body._id
if (body._id.startsWith(APP_PREFIX)) {
id = DocumentTypes.APP_METADATA
id = DocumentType.APP_METADATA
}
const db = getAppDB()
const dbDoc = await db.get(id)

View File

@ -1,5 +1,5 @@
import { ImportInfo } from "./base"
import { Query, QueryParameter } from "../../../../../definitions/datasource"
import { Query, QueryParameter } from "@budibase/types"
import { OpenAPIV2 } from "openapi-types"
import { OpenAPISource } from "./base/openapi"
import { URL } from "url"

View File

@ -1,5 +1,5 @@
import { ImportInfo } from "./base"
import { Query, QueryParameter } from "../../../../../definitions/datasource"
import { Query, QueryParameter } from "@budibase/types"
import { OpenAPIV3 } from "openapi-types"
import { OpenAPISource } from "./base/openapi"
import { URL } from "url"

View File

@ -1,18 +1,16 @@
import {
FilterTypes,
IncludeRelationships,
FilterType,
IncludeRelationship,
Operation,
PaginationJson,
RelationshipsJson,
SearchFilters,
SortJson,
} from "../../../definitions/datasource"
import {
Datasource,
FieldSchema,
Row,
Table,
} from "../../../definitions/common"
} from "@budibase/types"
import {
breakRowIdField,
generateRowIdField,
@ -128,7 +126,7 @@ module External {
if (
typeof filter !== "object" ||
Object.keys(filter).length === 0 ||
key === FilterTypes.ONE_OF
key === FilterType.ONE_OF
) {
continue
}
@ -634,7 +632,7 @@ module External {
*/
buildFields(
table: Table,
includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
includeRelations: IncludeRelationship = IncludeRelationship.INCLUDE
) {
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)

View File

@ -3,7 +3,7 @@ const {
generateRowID,
getRowParams,
getTableIDFromRowID,
DocumentTypes,
DocumentType,
InternalTables,
} = require("../../../db/utils")
const { dangerousGetDB } = require("@budibase/backend-core/db")
@ -183,7 +183,7 @@ exports.fetchView = async ctx => {
const viewName = ctx.params.viewName
// if this is a table view being looked for just transfer to that
if (viewName.startsWith(DocumentTypes.TABLE)) {
if (viewName.startsWith(DocumentType.TABLE)) {
ctx.params.tableId = viewName
return exports.fetch(ctx)
}

View File

@ -14,7 +14,7 @@ const env = require("../../../environment")
const { clientLibraryPath } = require("../../../utilities")
const { upload } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentTypes } = require("../../../db/utils")
const { DocumentType } = require("../../../db/utils")
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
const { setCookie, clearCookie } = require("@budibase/backend-core/utils")
const AWS = require("aws-sdk")
@ -99,7 +99,7 @@ export const uploadFile = async function (ctx: any) {
export const serveApp = async function (ctx: any) {
const db = getAppDB({ skip_setup: true })
const appInfo = await db.get(DocumentTypes.APP_METADATA)
const appInfo = await db.get(DocumentType.APP_METADATA)
let appId = getAppId()
if (!env.isJest()) {

View File

@ -7,7 +7,7 @@ const { getTable } = require("../table/utils")
const { FieldTypes } = require("../../../constants")
const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core")
const { DocumentTypes } = require("../../../db/utils")
const { DocumentType } = require("../../../db/utils")
const { cloneDeep, isEqual } = require("lodash")
exports.fetch = async ctx => {
@ -181,7 +181,7 @@ exports.exportView = async ctx => {
ctx.attachment(filename)
ctx.body = apiFileReturn(exporter(headers, rows))
if (viewName.startsWith(DocumentTypes.TABLE)) {
if (viewName.startsWith(DocumentType.TABLE)) {
await events.table.exported(table, format)
} else {
await events.view.exported(table, format)

View File

@ -2,7 +2,7 @@ const {
ViewNames,
generateMemoryViewID,
getMemoryViewParams,
DocumentTypes,
DocumentType,
SEPARATOR,
} = require("../../../db/utils")
const env = require("../../../environment")
@ -16,7 +16,7 @@ exports.getView = async viewName => {
return designDoc.views[viewName]
} else {
// This is a table view, don't read the view from the DB
if (viewName.startsWith(DocumentTypes.TABLE + SEPARATOR)) {
if (viewName.startsWith(DocumentType.TABLE + SEPARATOR)) {
return null
}

View File

@ -1,6 +1,6 @@
const newid = require("./newid")
const {
DocumentTypes: CoreDocTypes,
DocumentType: CoreDocTypes,
getRoleParams,
generateRoleID,
APP_DEV_PREFIX,
@ -23,7 +23,7 @@ const AppStatus = {
DEPLOYED: "published",
}
const DocumentTypes = {
const DocumentType = {
...CoreDocTypes,
TABLE: "ta",
ROW: "ro",
@ -66,12 +66,12 @@ exports.APP_PREFIX = APP_PREFIX
exports.APP_DEV_PREFIX = APP_DEV_PREFIX
exports.isDevAppID = isDevAppID
exports.isProdAppID = isProdAppID
exports.USER_METDATA_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.LINK_USER_METADATA_PREFIX = `${DocumentTypes.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.TABLE_ROW_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${DocumentTypes.TABLE}`
exports.USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
exports.ViewNames = ViewNames
exports.InternalTables = InternalTables
exports.DocumentTypes = DocumentTypes
exports.DocumentType = DocumentType
exports.SEPARATOR = SEPARATOR
exports.UNICODE_MAX = UNICODE_MAX
exports.SearchIndexes = SearchIndexes
@ -114,7 +114,7 @@ exports.getDocParams = getDocParams
* Gets parameters for retrieving tables, this is a utility function for the getDocParams function.
*/
exports.getTableParams = (tableId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.TABLE, tableId, otherProps)
return getDocParams(DocumentType.TABLE, tableId, otherProps)
}
/**
@ -122,7 +122,7 @@ exports.getTableParams = (tableId = null, otherProps = {}) => {
* @returns {string} The new table ID which the table doc can be stored under.
*/
exports.generateTableID = () => {
return `${DocumentTypes.TABLE}${SEPARATOR}${newid()}`
return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
}
/**
@ -135,12 +135,12 @@ exports.generateTableID = () => {
*/
exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => {
if (tableId == null) {
return getDocParams(DocumentTypes.ROW, null, otherProps)
return getDocParams(DocumentType.ROW, null, otherProps)
}
const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
return getDocParams(DocumentType.ROW, endOfKey, otherProps)
}
/**
@ -150,9 +150,9 @@ exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => {
*/
exports.getTableIDFromRowID = rowId => {
const components = rowId
.split(DocumentTypes.TABLE + SEPARATOR)[1]
.split(DocumentType.TABLE + SEPARATOR)[1]
.split(SEPARATOR)
return `${DocumentTypes.TABLE}${SEPARATOR}${components[0]}`
return `${DocumentType.TABLE}${SEPARATOR}${components[0]}`
}
/**
@ -163,7 +163,7 @@ exports.getTableIDFromRowID = rowId => {
*/
exports.generateRowID = (tableId, id = null) => {
id = id || newid()
return `${DocumentTypes.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
}
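// Worked example for the two ID helpers above - a sketch only, assuming SEPARATOR is "_"
// and using hypothetical IDs; DocumentType.ROW = "ro" and DocumentType.TABLE = "ta" per
// the DocumentType map earlier in this file's diff.
//   generateRowID("ta_a1b2c3")                 -> "ro_ta_a1b2c3_<newid()>"
//   getTableIDFromRowID("ro_ta_a1b2c3_d4e5f6") -> "ta_a1b2c3"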
/**
@ -186,7 +186,7 @@ exports.generateUserMetadataID = globalId => {
* Breaks up the ID to get the global ID.
*/
exports.getGlobalIDFromUserMetadataID = id => {
const prefix = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
if (!id || !id.includes(prefix)) {
return id
}
@ -197,7 +197,7 @@ exports.getGlobalIDFromUserMetadataID = id => {
* Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
*/
exports.getAutomationParams = (automationId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.AUTOMATION, automationId, otherProps)
return getDocParams(DocumentType.AUTOMATION, automationId, otherProps)
}
/**
@ -205,7 +205,7 @@ exports.getAutomationParams = (automationId = null, otherProps = {}) => {
* @returns {string} The new automation ID which the automation doc can be stored under.
*/
exports.generateAutomationID = () => {
return `${DocumentTypes.AUTOMATION}${SEPARATOR}${newid()}`
return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}`
}
/**
@ -230,14 +230,14 @@ exports.generateLinkID = (
const tables = `${SEPARATOR}${tableId1}${SEPARATOR}${tableId2}`
const rows = `${SEPARATOR}${rowId1}${SEPARATOR}${rowId2}`
const fields = `${SEPARATOR}${fieldName1}${SEPARATOR}${fieldName2}`
return `${DocumentTypes.LINK}${tables}${rows}${fields}`
return `${DocumentType.LINK}${tables}${rows}${fields}`
}
/**
* Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
*/
exports.getLinkParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.LINK, null, otherProps)
return getDocParams(DocumentType.LINK, null, otherProps)
}
/**
@ -245,14 +245,14 @@ exports.getLinkParams = (otherProps = {}) => {
* @returns {string} The new layout ID which the layout doc can be stored under.
*/
exports.generateLayoutID = id => {
return `${DocumentTypes.LAYOUT}${SEPARATOR}${id || newid()}`
return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}`
}
/**
* Gets parameters for retrieving layout, this is a utility function for the getDocParams function.
*/
exports.getLayoutParams = (layoutId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.LAYOUT, layoutId, otherProps)
return getDocParams(DocumentType.LAYOUT, layoutId, otherProps)
}
/**
@ -260,14 +260,14 @@ exports.getLayoutParams = (layoutId = null, otherProps = {}) => {
* @returns {string} The new screen ID which the screen doc can be stored under.
*/
exports.generateScreenID = () => {
return `${DocumentTypes.SCREEN}${SEPARATOR}${newid()}`
return `${DocumentType.SCREEN}${SEPARATOR}${newid()}`
}
/**
* Gets parameters for retrieving screens, this is a utility function for the getDocParams function.
*/
exports.getScreenParams = (screenId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.SCREEN, screenId, otherProps)
return getDocParams(DocumentType.SCREEN, screenId, otherProps)
}
/**
@ -275,14 +275,14 @@ exports.getScreenParams = (screenId = null, otherProps = {}) => {
* @returns {string} The new webhook ID which the webhook doc can be stored under.
*/
exports.generateWebhookID = () => {
return `${DocumentTypes.WEBHOOK}${SEPARATOR}${newid()}`
return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}`
}
/**
* Gets parameters for retrieving a webhook, this is a utility function for the getDocParams function.
*/
exports.getWebhookParams = (webhookId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.WEBHOOK, webhookId, otherProps)
return getDocParams(DocumentType.WEBHOOK, webhookId, otherProps)
}
/**
@ -291,7 +291,7 @@ exports.getWebhookParams = (webhookId = null, otherProps = {}) => {
*/
exports.generateDatasourceID = ({ plus = false } = {}) => {
return `${
plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE
plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE
}${SEPARATOR}${newid()}`
}
@ -299,7 +299,7 @@ exports.generateDatasourceID = ({ plus = false } = {}) => {
* Gets parameters for retrieving a datasource, this is a utility function for the getDocParams function.
*/
exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.DATASOURCE, datasourceId, otherProps)
return getDocParams(DocumentType.DATASOURCE, datasourceId, otherProps)
}
/**
@ -308,7 +308,7 @@ exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => {
*/
exports.generateQueryID = datasourceId => {
return `${
DocumentTypes.QUERY
DocumentType.QUERY
}${SEPARATOR}${datasourceId}${SEPARATOR}${newid()}`
}
@ -317,14 +317,14 @@ exports.generateQueryID = datasourceId => {
* automations etc.
*/
exports.generateAutomationMetadataID = automationId => {
return `${DocumentTypes.AUTOMATION_METADATA}${SEPARATOR}${automationId}`
return `${DocumentType.AUTOMATION_METADATA}${SEPARATOR}${automationId}`
}
/**
* Retrieve all automation metadata in an app database.
*/
exports.getAutomationMetadataParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.AUTOMATION_METADATA, null, otherProps)
return getDocParams(DocumentType.AUTOMATION_METADATA, null, otherProps)
}
/**
@ -332,11 +332,11 @@ exports.getAutomationMetadataParams = (otherProps = {}) => {
*/
exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
if (datasourceId == null) {
return getDocParams(DocumentTypes.QUERY, null, otherProps)
return getDocParams(DocumentType.QUERY, null, otherProps)
}
return getDocParams(
DocumentTypes.QUERY,
DocumentType.QUERY,
`${datasourceId}${SEPARATOR}`,
otherProps
)
@ -347,11 +347,11 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
* @returns {string} The ID of the flag document that was generated.
*/
exports.generateUserFlagID = userId => {
return `${DocumentTypes.USER_FLAG}${SEPARATOR}${userId}`
return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}`
}
exports.generateMetadataID = (type, entityId) => {
return `${DocumentTypes.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}`
return `${DocumentType.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}`
}
exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
@ -359,15 +359,15 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
if (entityId != null) {
docId += entityId
}
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
return getDocParams(DocumentType.METADATA, docId, otherProps)
}
exports.generateMemoryViewID = viewName => {
return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
return `${DocumentType.MEM_VIEW}${SEPARATOR}${viewName}`
}
exports.getMemoryViewParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
return getDocParams(DocumentType.MEM_VIEW, null, otherProps)
}
/**

View File

@ -1,11 +1,11 @@
const { getAppDB } = require("@budibase/backend-core/context")
const {
DocumentTypes,
DocumentType,
SEPARATOR,
ViewNames,
SearchIndexes,
} = require("../utils")
const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR
const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
/**************************************************
* INFORMATION *

View File

@ -5,14 +5,14 @@ import {
Document,
} from "@budibase/types"
export enum LoopStepTypes {
export enum LoopStepType {
ARRAY = "Array",
STRING = "String",
}
export interface LoopStep extends AutomationStep {
inputs: {
option: LoopStepTypes
option: LoopStepType
[key: string]: any
}
}

View File

@ -1,5 +1,13 @@
export { Query, Datasource } from "./datasource"
export { FieldSchema, TableSchema, Table, Document, Row } from "@budibase/types"
import { Document } from "@budibase/types"
export {
Query,
Datasource,
FieldSchema,
TableSchema,
Table,
Document,
Row,
} from "@budibase/types"
export interface Application extends Document {
_id: string

View File

@ -1,32 +1,3 @@
import { SortDirection, Operation, SourceNames } from "@budibase/types"
// export everything that used to be exported from here
export {
Operation,
SortDirection,
QueryTypes,
DatasourceFieldTypes,
SourceNames,
IncludeRelationships,
FilterTypes,
QueryDefinition,
ExtraQueryConfig,
Integration,
IntegrationBase,
QueryParameter,
PaginationConfig,
PaginationValues,
RestQueryFields,
Query,
Datasource,
SearchFilters,
SortJson,
PaginationJson,
RenameColumn,
RelationshipsJson,
QueryJson,
SqlQuery,
} from "@budibase/types"
/********************************************
* This file contains structures which are *
* internal to the server and don't need to *

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
@ -21,61 +21,61 @@ module AirtableModule {
type: "Spreadsheet",
datasource: {
apiKey: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
default: "enter api key",
required: true,
},
base: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "mybase",
required: true,
},
},
query: {
create: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
read: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
view: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
numRecords: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
default: 10,
},
},
},
update: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
id: {
display: "Record ID",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
delete: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
},
}

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
@ -24,35 +24,35 @@ module ArangoModule {
"ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
datasource: {
url: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "http://localhost:8529",
required: true,
},
username: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "root",
required: true,
},
password: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
required: true,
},
databaseName: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "_system",
required: true,
},
collection: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
query: {
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
create: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
},
}

View File

@ -2,12 +2,12 @@ import { Knex, knex } from "knex"
import {
Operation,
QueryJson,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString, SqlClients } from "../utils"
} from "@budibase/types"
import { QueryOptions } from "../../definitions/datasource"
import { isIsoDateString, SqlClient } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
import environment from "../../environment"
@ -27,14 +27,14 @@ function likeKey(client: string, key: string): string {
}
let start: string, end: string
switch (client) {
case SqlClients.MY_SQL:
case SqlClient.MY_SQL:
start = end = "`"
break
case SqlClients.ORACLE:
case SqlClients.POSTGRES:
case SqlClient.ORACLE:
case SqlClient.POSTGRES:
start = end = '"'
break
case SqlClients.MS_SQL:
case SqlClient.MS_SQL:
start = "["
end = "]"
break
@ -102,7 +102,7 @@ function generateSelectStatement(
if (
columnName &&
schema?.[columnName] &&
knex.client.config.client === SqlClients.POSTGRES
knex.client.config.client === SqlClient.POSTGRES
) {
const externalType = schema[columnName].externalType
if (externalType?.includes("money")) {
@ -146,7 +146,7 @@ class InternalBuilder {
const like = (key: string, value: any) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClients.POSTGRES) {
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
@ -173,7 +173,7 @@ class InternalBuilder {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClients.POSTGRES) {
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
@ -229,7 +229,7 @@ class InternalBuilder {
if (filters.contains) {
const fnc = allOr ? "orWhere" : "where"
const rawFnc = `${fnc}Raw`
if (this.client === SqlClients.POSTGRES) {
if (this.client === SqlClient.POSTGRES) {
iterate(filters.contains, (key: string, value: any) => {
const fieldNames = key.split(/\./g)
const tableName = fieldNames[0]
@ -242,7 +242,7 @@ class InternalBuilder {
`"${tableName}"."${columnName}"::jsonb @> '[${value}]'`
)
})
} else if (this.client === SqlClients.MY_SQL) {
} else if (this.client === SqlClient.MY_SQL) {
iterate(filters.contains, (key: string, value: any) => {
if (typeof value === "string") {
value = `"${value}"`
@ -265,7 +265,7 @@ class InternalBuilder {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(`${table?.name}.${key}`, direction)
}
} else if (this.client === SqlClients.MS_SQL && paginate?.limit) {
} else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
// @ts-ignore
query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
}
@ -414,7 +414,7 @@ class InternalBuilder {
[tableName]: query,
}).select(selectStatement)
// have to add after as well (this breaks MS-SQL)
if (this.client !== SqlClients.MS_SQL) {
if (this.client !== SqlClient.MS_SQL) {
preQuery = this.addSorting(preQuery, json)
}
// handle joins
@ -565,9 +565,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
let id
if (sqlClient === SqlClients.MS_SQL) {
if (sqlClient === SqlClient.MS_SQL) {
id = results?.[0].id
} else if (sqlClient === SqlClients.MY_SQL) {
} else if (sqlClient === SqlClient.MY_SQL) {
id = results?.insertId
}
row = processFn(

View File

@ -1,10 +1,5 @@
import { Knex, knex } from "knex"
import { Table } from "../../definitions/common"
import {
Operation,
QueryJson,
RenameColumn,
} from "../../definitions/datasource"
import { Operation, QueryJson, RenameColumn, Table } from "@budibase/types"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder

View File

@ -1,5 +1,4 @@
import { QueryJson } from "../../definitions/datasource"
import { Datasource } from "../../definitions/common"
import { QueryJson, Datasource } from "@budibase/types"
module DatasourceUtils {
const { integrations } = require("../index")

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
@ -21,30 +21,30 @@ module CouchDBModule {
"Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
datasource: {
url: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
default: "http://localhost:5984",
},
database: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
query: {
create: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
read: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
update: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
delete: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
id: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
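
As a quick illustration of how a schema shaped like the one above is consumed after the rename, a small sketch that is not from the commit; the schema literal and its field names are assumptions standing in for the CouchDB definition:

import { DatasourceFieldType, QueryType } from "@budibase/types"

// hypothetical schema fragment mirroring the structure in this diff
const schema = {
  datasource: {
    url: { type: DatasourceFieldType.STRING, required: true },
    database: { type: DatasourceFieldType.STRING, required: true },
  },
  query: {
    read: { type: QueryType.JSON },
  },
}

// e.g. collect the required fields to validate a datasource config form
const requiredFields = Object.entries(schema.datasource)
  .filter(([, field]) => field.required)
  .map(([name]) => name) // ["url", "database"]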

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
@ -24,101 +24,101 @@ module DynamoModule {
type: "Non-relational",
datasource: {
region: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
default: "us-east-1",
},
accessKeyId: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
required: true,
},
secretAccessKey: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
required: true,
},
endpoint: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: false,
default: "https://dynamodb.us-east-1.amazonaws.com",
},
},
query: {
create: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
read: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
readable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
},
},
},
scan: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
readable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
},
},
},
describe: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
readable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
get: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
readable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
update: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
delete: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
table: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
@ -20,55 +20,55 @@ module ElasticsearchModule {
type: "Non-relational",
datasource: {
url: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
default: "http://localhost:9200",
},
},
query: {
create: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
read: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
update: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
customisable: true,
fields: {
id: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
delete: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
index: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
id: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},

View File

@ -1,7 +1,7 @@
import {
DatasourceFieldTypes,
DatasourceFieldType,
Integration,
QueryTypes,
QueryType,
IntegrationBase,
} from "@budibase/types"
import { Firestore, WhereFilterOp } from "@google-cloud/firestore"
@ -21,46 +21,46 @@ module Firebase {
"Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.",
datasource: {
email: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
privateKey: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
projectId: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
query: {
create: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
read: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
update: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
delete: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
},
extra: {
collection: {
displayName: "Collection",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
filterField: {
displayName: "Filter field",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: false,
},
filter: {
displayName: "Filter comparison",
type: DatasourceFieldTypes.LIST,
type: DatasourceFieldType.LIST,
required: false,
data: {
read: [
@ -79,7 +79,7 @@ module Firebase {
},
filterValue: {
displayName: "Filter value",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: false,
},
},

View File

@ -1,7 +1,7 @@
import {
DatasourceFieldTypes,
DatasourceFieldType,
Integration,
QueryTypes,
QueryType,
Table,
TableSchema,
QueryJson,
@ -54,59 +54,59 @@ module GoogleSheetsModule {
datasource: {
spreadsheetId: {
display: "Google Sheet URL",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
query: {
create: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
row: {
type: QueryTypes.JSON,
type: QueryType.JSON,
required: true,
},
},
},
read: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
update: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
rowIndex: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
row: {
type: QueryTypes.JSON,
type: QueryType.JSON,
required: true,
},
},
},
delete: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
rowIndex: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
required: true,
},
},

View File

@ -13,54 +13,54 @@ const googlesheets = require("./googlesheets")
const firebase = require("./firebase")
const redis = require("./redis")
const snowflake = require("./snowflake")
const { SourceNames } = require("../definitions/datasource")
const { SourceName } = require("@budibase/types")
const environment = require("../environment")
const DEFINITIONS = {
[SourceNames.POSTGRES]: postgres.schema,
[SourceNames.DYNAMODB]: dynamodb.schema,
[SourceNames.MONGODB]: mongodb.schema,
[SourceNames.ELASTICSEARCH]: elasticsearch.schema,
[SourceNames.COUCHDB]: couchdb.schema,
[SourceNames.SQL_SERVER]: sqlServer.schema,
[SourceNames.S3]: s3.schema,
[SourceNames.AIRTABLE]: airtable.schema,
[SourceNames.MYSQL]: mysql.schema,
[SourceNames.ARANGODB]: arangodb.schema,
[SourceNames.REST]: rest.schema,
[SourceNames.FIRESTORE]: firebase.schema,
[SourceNames.REDIS]: redis.schema,
[SourceNames.SNOWFLAKE]: snowflake.schema,
[SourceName.POSTGRES]: postgres.schema,
[SourceName.DYNAMODB]: dynamodb.schema,
[SourceName.MONGODB]: mongodb.schema,
[SourceName.ELASTICSEARCH]: elasticsearch.schema,
[SourceName.COUCHDB]: couchdb.schema,
[SourceName.SQL_SERVER]: sqlServer.schema,
[SourceName.S3]: s3.schema,
[SourceName.AIRTABLE]: airtable.schema,
[SourceName.MYSQL]: mysql.schema,
[SourceName.ARANGODB]: arangodb.schema,
[SourceName.REST]: rest.schema,
[SourceName.FIRESTORE]: firebase.schema,
[SourceName.REDIS]: redis.schema,
[SourceName.SNOWFLAKE]: snowflake.schema,
}
const INTEGRATIONS = {
[SourceNames.POSTGRES]: postgres.integration,
[SourceNames.DYNAMODB]: dynamodb.integration,
[SourceNames.MONGODB]: mongodb.integration,
[SourceNames.ELASTICSEARCH]: elasticsearch.integration,
[SourceNames.COUCHDB]: couchdb.integration,
[SourceNames.SQL_SERVER]: sqlServer.integration,
[SourceNames.S3]: s3.integration,
[SourceNames.AIRTABLE]: airtable.integration,
[SourceNames.MYSQL]: mysql.integration,
[SourceNames.ARANGODB]: arangodb.integration,
[SourceNames.REST]: rest.integration,
[SourceNames.FIRESTORE]: firebase.integration,
[SourceNames.GOOGLE_SHEETS]: googlesheets.integration,
[SourceNames.REDIS]: redis.integration,
[SourceNames.FIREBASE]: firebase.integration,
[SourceNames.SNOWFLAKE]: snowflake.integration,
[SourceName.POSTGRES]: postgres.integration,
[SourceName.DYNAMODB]: dynamodb.integration,
[SourceName.MONGODB]: mongodb.integration,
[SourceName.ELASTICSEARCH]: elasticsearch.integration,
[SourceName.COUCHDB]: couchdb.integration,
[SourceName.SQL_SERVER]: sqlServer.integration,
[SourceName.S3]: s3.integration,
[SourceName.AIRTABLE]: airtable.integration,
[SourceName.MYSQL]: mysql.integration,
[SourceName.ARANGODB]: arangodb.integration,
[SourceName.REST]: rest.integration,
[SourceName.FIRESTORE]: firebase.integration,
[SourceName.GOOGLE_SHEETS]: googlesheets.integration,
[SourceName.REDIS]: redis.integration,
[SourceName.FIREBASE]: firebase.integration,
[SourceName.SNOWFLAKE]: snowflake.integration,
}
// optionally add oracle integration if the oracle binary can be installed
if (process.arch && !process.arch.startsWith("arm")) {
const oracle = require("./oracle")
DEFINITIONS[SourceNames.ORACLE] = oracle.schema
INTEGRATIONS[SourceNames.ORACLE] = oracle.integration
DEFINITIONS[SourceName.ORACLE] = oracle.schema
INTEGRATIONS[SourceName.ORACLE] = oracle.integration
}
if (environment.SELF_HOSTED) {
DEFINITIONS[SourceNames.GOOGLE_SHEETS] = googlesheets.schema
DEFINITIONS[SourceName.GOOGLE_SHEETS] = googlesheets.schema
}
module.exports = {
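
A brief sketch, not from the commit, of how a registry keyed by SourceName like the DEFINITIONS and INTEGRATIONS maps above might be looked up; the helper name and generic are illustrative:

import { SourceName } from "@budibase/types"

function getIntegration<T>(
  source: SourceName,
  registry: Partial<Record<SourceName, T>>
): T {
  const integration = registry[source]
  if (!integration) {
    throw new Error(`no integration registered for source: ${source}`)
  }
  return integration
}

// usage against a map shaped like INTEGRATIONS above:
// const postgres = getIntegration(SourceName.POSTGRES, INTEGRATIONS)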

View File

@ -1,11 +1,11 @@
import {
DatasourceFieldTypes,
DatasourceFieldType,
Integration,
Operation,
Table,
TableSchema,
QueryJson,
QueryTypes,
QueryType,
SqlQuery,
DatasourcePlus,
} from "@budibase/types"
@ -14,7 +14,7 @@ import {
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
SqlClients,
SqlClient,
} from "./utils"
module MSSQLModule {
@ -48,48 +48,48 @@ module MSSQLModule {
type: "Relational",
datasource: {
user: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
default: "localhost",
},
password: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
required: true,
},
server: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "localhost",
},
port: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
required: false,
default: 1433,
},
database: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "root",
},
schema: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: DEFAULT_SCHEMA,
},
encrypt: {
type: DatasourceFieldTypes.BOOLEAN,
type: DatasourceFieldType.BOOLEAN,
default: true,
},
},
query: {
create: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
update: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
delete: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
},
}
@ -112,7 +112,7 @@ module MSSQLModule {
"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
constructor(config: MSSQLConfig) {
super(SqlClients.MS_SQL)
super(SqlClient.MS_SQL)
this.config = config
const clientCfg = {
...this.config,

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
IntegrationBase,
} from "@budibase/types"
import {
@ -29,38 +29,38 @@ module MongoDBModule {
"MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
datasource: {
connectionString: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
default: "mongodb://localhost:27017",
},
db: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
query: {
create: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
read: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
update: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
delete: {
type: QueryTypes.JSON,
type: QueryType.JSON,
},
},
extra: {
collection: {
displayName: "Collection",
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
actionTypes: {
displayName: "Action Types",
type: DatasourceFieldTypes.LIST,
type: DatasourceFieldType.LIST,
required: true,
data: {
read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"],

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
QueryJson,
SqlQuery,
Table,
@ -10,7 +10,7 @@ import {
} from "@budibase/types"
import {
getSqlQuery,
SqlClients,
SqlClient,
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
@ -42,51 +42,51 @@ module MySQLModule {
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
datasource: {
host: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "localhost",
required: true,
},
port: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
default: 3306,
required: false,
},
user: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "root",
required: true,
},
password: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
default: "root",
required: true,
},
database: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
ssl: {
type: DatasourceFieldTypes.OBJECT,
type: DatasourceFieldType.OBJECT,
required: false,
},
rejectUnauthorized: {
type: DatasourceFieldTypes.BOOLEAN,
type: DatasourceFieldType.BOOLEAN,
default: true,
required: false,
},
},
query: {
create: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
update: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
delete: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
},
}
@ -120,7 +120,7 @@ module MySQLModule {
public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) {
super(SqlClients.MY_SQL)
super(SqlClient.MY_SQL)
this.config = config
if (config.ssl && Object.keys(config.ssl).length === 0) {
delete config.ssl

View File

@ -1,9 +1,9 @@
import {
DatasourceFieldTypes,
DatasourceFieldType,
Integration,
Operation,
QueryJson,
QueryTypes,
QueryType,
SqlQuery,
Table,
DatasourcePlus,
@ -13,7 +13,7 @@ import {
convertSqlType,
finaliseExternalTables,
getSqlQuery,
SqlClients,
SqlClient,
} from "./utils"
import oracledb, {
BindParameters,
@ -45,40 +45,40 @@ module OracleModule {
"Oracle Database is an object-relational database management system developed by Oracle Corporation",
datasource: {
host: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "localhost",
required: true,
},
port: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
required: true,
default: 1521,
},
database: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
user: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
password: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
required: true,
},
},
query: {
create: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
update: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
delete: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
},
}
@ -172,7 +172,7 @@ module OracleModule {
OR cons.status IS NULL)
`
constructor(config: OracleConfig) {
super(SqlClients.ORACLE)
super(SqlClient.ORACLE)
this.config = config
}

View File

@ -1,7 +1,7 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
QueryJson,
SqlQuery,
Table,
@ -12,7 +12,7 @@ import {
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
SqlClients,
SqlClient,
} from "./utils"
module PostgresModule {
@ -52,63 +52,63 @@ module PostgresModule {
"PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
datasource: {
host: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "localhost",
required: true,
},
port: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
required: true,
default: 5432,
},
database: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "postgres",
required: true,
},
user: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "root",
required: true,
},
password: {
type: DatasourceFieldTypes.PASSWORD,
type: DatasourceFieldType.PASSWORD,
default: "root",
required: true,
},
schema: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "public",
required: true,
},
ssl: {
type: DatasourceFieldTypes.BOOLEAN,
type: DatasourceFieldType.BOOLEAN,
default: false,
required: false,
},
rejectUnauthorized: {
type: DatasourceFieldTypes.BOOLEAN,
type: DatasourceFieldType.BOOLEAN,
default: false,
required: false,
},
ca: {
type: DatasourceFieldTypes.LONGFORM,
type: DatasourceFieldType.LONGFORM,
default: false,
required: false,
},
},
query: {
create: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
update: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
delete: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
},
}
@ -133,7 +133,7 @@ module PostgresModule {
`
constructor(config: PostgresConfig) {
super(SqlClients.POSTGRES)
super(SqlClient.POSTGRES)
this.config = config
let newConfig = {

View File

@ -1,4 +1,4 @@
import { DatasourceFieldTypes, Integration, QueryTypes } from "@budibase/types"
import { DatasourceFieldType, Integration, QueryType } from "@budibase/types"
import Redis from "ioredis"
module RedisModule {
@ -36,36 +36,36 @@ module RedisModule {
},
query: {
create: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
key: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
value: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
ttl: {
type: DatasourceFieldTypes.NUMBER,
type: DatasourceFieldType.NUMBER,
},
},
},
read: {
readable: true,
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
key: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
},
delete: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
key: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
required: true,
},
},
@ -73,7 +73,7 @@ module RedisModule {
command: {
readable: true,
displayName: "Redis Command",
type: QueryTypes.JSON,
type: QueryType.JSON,
},
},
}

View File

@ -1,14 +1,14 @@
import {
Integration,
DatasourceFieldTypes,
QueryTypes,
DatasourceFieldType,
QueryType,
PaginationConfig,
IntegrationBase,
PaginationValues,
RestQueryFields as RestQuery,
} from "@budibase/types"
import {
RestConfig,
RestQueryFields as RestQuery,
AuthType,
BasicAuthConfig,
BearerAuthConfig,
@ -26,27 +26,27 @@ const BodyTypes = {
const coreFields = {
path: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
display: "URL",
},
queryString: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
},
headers: {
type: DatasourceFieldTypes.OBJECT,
type: DatasourceFieldType.OBJECT,
},
enabledHeaders: {
type: DatasourceFieldTypes.OBJECT,
type: DatasourceFieldType.OBJECT,
},
requestBody: {
type: DatasourceFieldTypes.JSON,
type: DatasourceFieldType.JSON,
},
bodyType: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
enum: Object.values(BodyTypes),
},
pagination: {
type: DatasourceFieldTypes.OBJECT,
type: DatasourceFieldType.OBJECT,
},
}
@ -69,13 +69,13 @@ module RestModule {
type: "API",
datasource: {
url: {
type: DatasourceFieldTypes.STRING,
type: DatasourceFieldType.STRING,
default: "",
required: false,
deprecated: true,
},
defaultHeaders: {
type: DatasourceFieldTypes.OBJECT,
type: DatasourceFieldType.OBJECT,
required: false,
default: {},
},
@ -84,30 +84,30 @@ module RestModule {
create: {
readable: true,
displayName: "POST",
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: coreFields,
},
read: {
displayName: "GET",
readable: true,
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: coreFields,
},
update: {
displayName: "PUT",
readable: true,
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: coreFields,
},
patch: {
displayName: "PATCH",
readable: true,
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: coreFields,
},
delete: {
displayName: "DELETE",
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: coreFields,
},
},

View File

@ -1,4 +1,4 @@
import { Integration, QueryTypes, IntegrationBase } from "@budibase/types"
import { Integration, QueryType, IntegrationBase } from "@budibase/types"
module S3Module {
const AWS = require("aws-sdk")
@ -43,7 +43,7 @@ module S3Module {
},
query: {
read: {
type: QueryTypes.FIELDS,
type: QueryType.FIELDS,
fields: {
bucket: {
type: "string",

View File

@ -1,4 +1,4 @@
import { Integration, QueryTypes, SqlQuery } from "@budibase/types"
import { Integration, QueryType, SqlQuery } from "@budibase/types"
import { Snowflake } from "snowflake-promise"
module SnowflakeModule {
@ -45,16 +45,16 @@ module SnowflakeModule {
},
query: {
create: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
read: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
update: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
delete: {
type: QueryTypes.SQL,
type: QueryType.SQL,
},
},
}

View File

@ -1,5 +1,5 @@
const Sql = require("../base/sql")
const { SqlClients } = require("../utils")
const { SqlClient } = require("../utils")
const TABLE_NAME = "test"
@ -47,7 +47,7 @@ function generateDeleteJson(table = TABLE_NAME, filters = {}) {
describe("SQL query builder", () => {
const limit = 500
const client = SqlClients.POSTGRES
const client = SqlClient.POSTGRES
let sql
beforeEach(() => {
@ -174,7 +174,7 @@ describe("SQL query builder", () => {
})
it("should work with MS-SQL", () => {
const query = new Sql(SqlClients.MS_SQL, 10)._query(generateReadJson())
const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]`
@ -182,7 +182,7 @@ describe("SQL query builder", () => {
})
it("should work with MySQL", () => {
const query = new Sql(SqlClients.MY_SQL, 10)._query(generateReadJson())
const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) as \`${TABLE_NAME}\``
@ -241,7 +241,7 @@ describe("SQL query builder", () => {
})
it("should use like expression for MS-SQL when filter is contains", () => {
const query = new Sql(SqlClients.MS_SQL, 10)._query(generateReadJson({
const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson({
filters: {
contains: {
age: 20,
@ -256,7 +256,7 @@ describe("SQL query builder", () => {
})
it("should use JSON_CONTAINS expression for MySQL when filter is contains", () => {
const query = new Sql(SqlClients.MY_SQL, 10)._query(generateReadJson({
const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson({
filters: {
contains: {
age: 20,
@ -271,7 +271,7 @@ describe("SQL query builder", () => {
})
it("should use jsonb operator expression for PostgreSQL when filter is contains", () => {
const query = new Sql(SqlClients.POSTGRES, 10)._query(generateReadJson({
const query = new Sql(SqlClient.POSTGRES, 10)._query(generateReadJson({
filters: {
contains: {
age: 20,

View File

@ -1,6 +1,5 @@
import { SourceNames, SqlQuery } from "../definitions/datasource"
import { Datasource, Table } from "../definitions/common"
import { DocumentTypes, SEPARATOR } from "../db/utils"
import { SourceName, SqlQuery, Datasource, Table } from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@ -68,7 +67,7 @@ const SQL_TYPE_MAP = {
...SQL_MISC_TYPE_MAP,
}
export enum SqlClients {
export enum SqlClient {
MS_SQL = "mssql",
POSTGRES = "pg",
MY_SQL = "mysql2",
@ -76,7 +75,7 @@ export enum SqlClients {
}
export function isExternalTable(tableId: string) {
return tableId.includes(DocumentTypes.DATASOURCE)
return tableId.includes(DocumentType.DATASOURCE)
}
export function buildExternalTableId(datasourceId: string, tableName: string) {
@ -169,10 +168,10 @@ export function isSQL(datasource: Datasource): boolean {
return false
}
const SQL = [
SourceNames.POSTGRES,
SourceNames.SQL_SERVER,
SourceNames.MYSQL,
SourceNames.ORACLE,
SourceName.POSTGRES,
SourceName.SQL_SERVER,
SourceName.MYSQL,
SourceName.ORACLE,
]
return SQL.indexOf(datasource.source) !== -1
}
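
A minimal sketch, not part of this commit, of how the SQL source check above might be used by a caller; the helper name is illustrative and the Datasource shape follows the interface shown elsewhere in this diff:

import { SourceName, Datasource } from "@budibase/types"

// only these sources are routed through the SQL query builder
const SQL_SOURCES = [
  SourceName.POSTGRES,
  SourceName.SQL_SERVER,
  SourceName.MYSQL,
  SourceName.ORACLE,
]

function usesSqlBuilder(datasource: Datasource): boolean {
  return SQL_SOURCES.includes(datasource.source)
}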

View File

@ -6,7 +6,7 @@ const {
setDebounce,
} = require("../utilities/redis")
const { doWithDB } = require("@budibase/backend-core/db")
const { DocumentTypes, getGlobalIDFromUserMetadataID } = require("../db/utils")
const { DocumentType, getGlobalIDFromUserMetadataID } = require("../db/utils")
const { PermissionTypes } = require("@budibase/backend-core/permissions")
const { app: appCache } = require("@budibase/backend-core/cache")
@ -49,7 +49,7 @@ async function updateAppUpdatedAt(ctx) {
return
}
await doWithDB(appId, async db => {
const metadata = await db.get(DocumentTypes.APP_METADATA)
const metadata = await db.get(DocumentType.APP_METADATA)
metadata.updatedAt = new Date().toISOString()
metadata.updatedBy = getGlobalIDFromUserMetadataID(ctx.user.userId)

View File

@ -1,4 +1,4 @@
const { DocumentTypes } = require("@budibase/backend-core/db")
const { DocumentType } = require("@budibase/backend-core/db")
import { getAppUrl } from "../../api/controllers/application"
/**
@ -11,7 +11,7 @@ import { getAppUrl } from "../../api/controllers/application"
export const run = async (appDb: any) => {
let metadata
try {
metadata = await appDb.get(DocumentTypes.APP_METADATA)
metadata = await appDb.get(DocumentType.APP_METADATA)
} catch (e) {
// sometimes the metadata document doesn't exist
// exit early instead of failing the migration

View File

@ -62,7 +62,7 @@ export const run = async (appDb: any) => {
await events.backfillCache.start(EVENTS)
let timestamp: string | number = DEFAULT_TIMESTAMP
const app: App = await appDb.get(dbUtils.DocumentTypes.APP_METADATA)
const app: App = await appDb.get(dbUtils.DocumentType.APP_METADATA)
if (app.createdAt) {
timestamp = app.createdAt as string
}

View File

@ -1,6 +1,6 @@
import { events } from "@budibase/backend-core"
import { getQueryParams } from "../../../../db/utils"
import { Query, Datasource, SourceNames } from "@budibase/types"
import { Query, Datasource, SourceName } from "@budibase/types"
const getQueries = async (appDb: any): Promise<Query[]> => {
const response = await appDb.allDocs(
@ -33,7 +33,7 @@ export const backfill = async (appDb: any, timestamp: string | number) => {
datasource = {
type: "unknown",
_id: query.datasourceId,
source: SourceNames.UNKNOWN,
source: SourceName.UNKNOWN,
}
} else {
throw e

View File

@ -4,7 +4,7 @@ import { DEFAULT_TIMESTAMP } from ".."
// manually define user doc params - normally server doesn't read users from the db
const getUserParams = (props: any) => {
return dbUtils.getDocParams(dbUtils.DocumentTypes.USER, null, props)
return dbUtils.getDocParams(dbUtils.DocumentType.USER, null, props)
}
export const getUsers = async (globalDb: any): Promise<User[]> => {

View File

@ -1,4 +1,4 @@
const { DocumentTypes, doWithDB } = require("@budibase/backend-core/db")
const { DocumentType, doWithDB } = require("@budibase/backend-core/db")
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const migration = require("../appUrls")
@ -15,11 +15,11 @@ describe("run", () => {
it("runs successfully", async () => {
const app = await config.createApp("testApp")
const metadata = await doWithDB(app.appId, async db => {
const metadataDoc = await db.get(DocumentTypes.APP_METADATA)
const metadataDoc = await db.get(DocumentType.APP_METADATA)
delete metadataDoc.url
await db.put(metadataDoc)
await migration.run(db)
return await db.get(DocumentTypes.APP_METADATA)
return await db.get(DocumentType.APP_METADATA)
})
expect(metadata.url).toEqual("/testapp")
})

View File

@ -2,7 +2,7 @@ import {
events,
migrations,
tenancy,
DocumentTypes,
DocumentType,
context,
db,
} from "@budibase/backend-core"
@ -17,7 +17,7 @@ const timestamp = mocks.date.MOCK_DATE.toISOString()
const clearMigrations = async () => {
const dbs = [context.getDevAppDB(), context.getProdAppDB()]
for (const db of dbs) {
const doc = await db.get(DocumentTypes.MIGRATIONS)
const doc = await db.get(DocumentType.MIGRATIONS)
const newDoc = { _id: doc._id, _rev: doc._rev }
await db.put(newDoc)
}

View File

@ -25,7 +25,7 @@ const newid = require("../../db/newid")
const context = require("@budibase/backend-core/context")
const { generateDevInfoID, SEPARATOR } = require("@budibase/backend-core/db")
const { encrypt } = require("@budibase/backend-core/encryption")
const { DocumentTypes } = require("../../db/utils")
const { DocumentType } = require("../../db/utils")
const GLOBAL_USER_ID = "us_uuid1"
const EMAIL = "babs@babs.com"
@ -344,7 +344,7 @@ class TestConfiguration {
return context.doInAppContext(prodAppId, async () => {
const db = context.getProdAppDB()
return await db.get(DocumentTypes.APP_METADATA)
return await db.get(DocumentType.APP_METADATA)
})
}

View File

@ -11,7 +11,7 @@ import { storeLog } from "../automations/logging"
import { Automation, AutomationStep, AutomationStatus } from "@budibase/types"
import {
LoopStep,
LoopStepTypes,
LoopStepType,
LoopInput,
AutomationEvent,
TriggerOutput,
@ -35,12 +35,12 @@ function typecastForLooping(loopStep: LoopStep, input: LoopInput) {
}
try {
switch (loopStep.inputs.option) {
case LoopStepTypes.ARRAY:
case LoopStepType.ARRAY:
if (typeof input.binding === "string") {
return JSON.parse(input.binding)
}
break
case LoopStepTypes.STRING:
case LoopStepType.STRING:
if (Array.isArray(input.binding)) {
return input.binding.join(",")
}
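
For clarity, a self-contained sketch, not in the commit, of the typecasting pattern above; only the ARRAY and STRING member names come from the diff, while the enum string values and the function signature are placeholders:

// placeholder values; the real enum lives in the server's automation definitions
enum LoopStepType {
  ARRAY = "array",
  STRING = "string",
}

// coerce a loop binding into the shape the selected option expects
function typecast(option: LoopStepType, binding: string | string[]) {
  switch (option) {
    case LoopStepType.ARRAY:
      return typeof binding === "string" ? JSON.parse(binding) : binding
    case LoopStepType.STRING:
      return Array.isArray(binding) ? binding.join(",") : binding
  }
}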

View File

@ -8,7 +8,7 @@ const {
lowerBuiltinRoleID,
getBuiltinRoles,
} = require("@budibase/backend-core/roles")
const { DocumentTypes } = require("../db/utils")
const { DocumentType } = require("../db/utils")
const CURRENTLY_SUPPORTED_LEVELS = [
PermissionLevels.WRITE,
@ -17,19 +17,19 @@ const CURRENTLY_SUPPORTED_LEVELS = [
]
exports.getPermissionType = resourceId => {
const docType = Object.values(DocumentTypes).filter(docType =>
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
case DocumentTypes.TABLE:
case DocumentTypes.ROW:
case DocumentType.TABLE:
case DocumentType.ROW:
return PermissionTypes.TABLE
case DocumentTypes.AUTOMATION:
case DocumentType.AUTOMATION:
return PermissionTypes.AUTOMATION
case DocumentTypes.WEBHOOK:
case DocumentType.WEBHOOK:
return PermissionTypes.WEBHOOK
case DocumentTypes.QUERY:
case DocumentTypes.DATASOURCE:
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return PermissionTypes.QUERY
default:
// views don't have an ID, will end up here
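
The prefix matching above can be read in isolation with a small sketch that is not part of the commit; the DocumentType values here are assumed purely for illustration:

// illustrative subset; the real values come from the server's db/utils
enum DocumentType {
  TABLE = "ta",
  ROW = "ro",
  AUTOMATION = "au",
}

// find the document type whose prefix the resource ID starts with
function resolveDocType(resourceId: string): DocumentType | undefined {
  return Object.values(DocumentType).find(prefix =>
    resourceId.startsWith(prefix)
  )
}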

View File

@ -1,11 +1,11 @@
import { Document } from "../document"
import { SourceNames } from "../../sdk"
import { SourceName } from "../../sdk"
import { Table } from "./table"
export interface Datasource extends Document {
type: string
name?: string
source: SourceNames
source: SourceName
// the config is defined by the schema
config?: {
[key: string]: string | number | boolean

View File

@ -1,6 +1,6 @@
import { Document } from "../document"
export enum FieldTypes {
export enum FieldType {
STRING = "string",
LONGFORM = "longform",
OPTIONS = "options",

View File

@ -2,3 +2,4 @@ export * from "./account"
export * from "./app"
export * from "./global"
export * from "./platform"
export * from "./document"

View File

@ -16,13 +16,13 @@ export enum SortDirection {
DESCENDING = "DESCENDING",
}
export enum QueryTypes {
export enum QueryType {
SQL = "sql",
JSON = "json",
FIELDS = "fields",
}
export enum DatasourceFieldTypes {
export enum DatasourceFieldType {
STRING = "string",
LONGFORM = "longForm",
BOOLEAN = "boolean",
@ -34,7 +34,7 @@ export enum DatasourceFieldTypes {
FILE = "file",
}
export enum SourceNames {
export enum SourceName {
POSTGRES = "POSTGRES",
DYNAMODB = "DYNAMODB",
MONGODB = "MONGODB",
@ -54,12 +54,12 @@ export enum SourceNames {
UNKNOWN = "unknown",
}
export enum IncludeRelationships {
export enum IncludeRelationship {
INCLUDE = 1,
EXCLUDE = 0,
}
export enum FilterTypes {
export enum FilterType {
STRING = "string",
FUZZY = "fuzzy",
RANGE = "range",
@ -71,7 +71,7 @@ export enum FilterTypes {
}
export interface QueryDefinition {
type: QueryTypes
type: QueryType
displayName?: string
readable?: boolean
customisable?: boolean
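
To show the renamed enums composing, a short sketch that is not in the commit; it simply instantiates the QueryDefinition interface declared above:

import { QueryType } from "@budibase/types"

// mirrors the interface above
interface QueryDefinition {
  type: QueryType
  displayName?: string
  readable?: boolean
  customisable?: boolean
}

const readQuery: QueryDefinition = {
  type: QueryType.SQL,
  displayName: "Read",
  readable: true,
}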

View File

@ -2,7 +2,7 @@ const { getAllRoles } = require("@budibase/backend-core/roles")
const {
getAllApps,
getProdAppID,
DocumentTypes,
DocumentType,
} = require("@budibase/backend-core/db")
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { user: userCache } = require("@budibase/backend-core/cache")
@ -36,7 +36,7 @@ exports.find = async ctx => {
const appId = ctx.params.appId
await doInAppContext(appId, async () => {
const db = getAppDB()
const app = await db.get(DocumentTypes.APP_METADATA)
const app = await db.get(DocumentType.APP_METADATA)
ctx.body = {
roles: await getAllRoles(),
name: app.name,