Merge pull request #8354 from Budibase/feature/app-backups
App backups backend
This commit is contained in:
commit d2a2b09f1f
@@ -26,6 +26,7 @@
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.0.1",
     "bcryptjs": "2.4.3",
+    "bull": "4.10.1",
     "dotenv": "16.0.1",
     "emitter-listener": "1.1.2",
     "ioredis": "4.28.0",
@@ -63,6 +64,7 @@
   },
   "devDependencies": {
     "@types/chance": "1.1.3",
+    "@types/ioredis": "4.28.0",
     "@types/jest": "27.5.1",
     "@types/koa": "2.0.52",
     "@types/lodash": "4.14.180",

@@ -53,6 +53,9 @@ export const getTenantIDFromAppID = (appId: string) => {
   if (!appId) {
     return null
   }
+  if (!isMultiTenant()) {
+    return DEFAULT_TENANT_ID
+  }
   const split = appId.split(SEPARATOR)
   const hasDev = split[1] === DocumentType.DEV
   if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {

@@ -21,6 +21,7 @@ export enum ViewName {
   ACCOUNT_BY_EMAIL = "account_by_email",
   PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
   USER_BY_GROUP = "by_group_user",
+  APP_BACKUP_BY_TRIGGER = "by_trigger",
 }

 export const DeprecatedViews = {
@@ -30,6 +31,10 @@ export const DeprecatedViews = {
   ],
 }

+export enum InternalTable {
+  USER_METADATA = "ta_users",
+}
+
 export enum DocumentType {
   USER = "us",
   GROUP = "gr",
@@ -46,9 +51,23 @@ export enum DocumentType {
   AUTOMATION_LOG = "log_au",
   ACCOUNT_METADATA = "acc_metadata",
   PLUGIN = "plg",
-  TABLE = "ta",
   DATASOURCE = "datasource",
   DATASOURCE_PLUS = "datasource_plus",
+  APP_BACKUP = "backup",
+  TABLE = "ta",
+  ROW = "ro",
+  AUTOMATION = "au",
+  LINK = "li",
+  WEBHOOK = "wh",
+  INSTANCE = "inst",
+  LAYOUT = "layout",
+  SCREEN = "screen",
+  QUERY = "query",
+  DEPLOYMENTS = "deployments",
+  METADATA = "metadata",
+  MEM_VIEW = "view",
+  USER_FLAG = "flag",
+  AUTOMATION_METADATA = "meta_au",
 }

 export const StaticDatabases = {

@@ -1,8 +1,11 @@
-const pouch = require("./pouch")
-const env = require("../environment")
+import pouch from "./pouch"
+import env from "../environment"
+import { checkSlashesInUrl } from "../helpers"
+import fetch from "node-fetch"
+import { PouchOptions, CouchFindOptions } from "@budibase/types"

-const openDbs = []
-let PouchDB
+const openDbs: string[] = []
+let PouchDB: any
 let initialised = false
 const dbList = new Set()

@@ -14,8 +17,8 @@ if (env.MEMORY_LEAK_CHECK) {
 }

 const put =
-  dbPut =>
-  async (doc, options = {}) => {
+  (dbPut: any) =>
+  async (doc: any, options = {}) => {
     if (!doc.createdAt) {
       doc.createdAt = new Date().toISOString()
     }
@@ -29,7 +32,7 @@ const checkInitialised = () => {
   }
 }

-exports.init = opts => {
+export async function init(opts?: PouchOptions) {
   PouchDB = pouch.getPouch(opts)
   initialised = true
 }
@@ -37,7 +40,7 @@ exports.init = opts => {
 // NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
 // this function is prone to leaks, should only be used
 // in situations that using the function doWithDB does not work
-exports.dangerousGetDB = (dbName, opts) => {
+export function dangerousGetDB(dbName: string, opts?: any) {
   checkInitialised()
   if (env.isTest()) {
     dbList.add(dbName)
@@ -53,7 +56,7 @@ exports.dangerousGetDB = (dbName, opts) => {

 // use this function if you have called dangerousGetDB - close
 // the databases you've opened once finished
-exports.closeDB = async db => {
+export async function closeDB(db: PouchDB.Database) {
   if (!db || env.isTest()) {
     return
   }
@@ -71,21 +74,59 @@ exports.closeDB = async db => {
 // we have to use a callback for this so that we can close
 // the DB when we're done, without this manual requests would
 // need to close the database when done with it to avoid memory leaks
-exports.doWithDB = async (dbName, cb, opts = {}) => {
-  const db = exports.dangerousGetDB(dbName, opts)
+export async function doWithDB(dbName: string, cb: any, opts = {}) {
+  const db = dangerousGetDB(dbName, opts)
   // need this to be async so that we can correctly close DB after all
   // async operations have been completed
   try {
     return await cb(db)
   } finally {
-    await exports.closeDB(db)
+    await closeDB(db)
   }
 }

-exports.allDbs = () => {
+export function allDbs() {
   if (!env.isTest()) {
     throw new Error("Cannot be used outside test environment.")
   }
   checkInitialised()
   return [...dbList]
 }
+
+export async function directCouchQuery(
+  path: string,
+  method: string = "GET",
+  body?: any
+) {
+  let { url, cookie } = pouch.getCouchInfo()
+  const couchUrl = `${url}/${path}`
+  const params: any = {
+    method: method,
+    headers: {
+      Authorization: cookie,
+    },
+  }
+  if (body && method !== "GET") {
+    params.body = JSON.stringify(body)
+    params.headers["Content-Type"] = "application/json"
+  }
+  const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
+  if (response.status < 300) {
+    return await response.json()
+  } else {
+    throw "Cannot connect to CouchDB instance"
+  }
+}
+
+export async function directCouchAllDbs(queryString?: string) {
+  let couchPath = "/_all_dbs"
+  if (queryString) {
+    couchPath += `?${queryString}`
+  }
+  return await directCouchQuery(couchPath)
+}
+
+export async function directCouchFind(dbName: string, opts: CouchFindOptions) {
+  const json = await directCouchQuery(`${dbName}/_find`, "POST", opts)
+  return { rows: json.docs, bookmark: json.bookmark }
+}

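Usage sketch (not part of the diff): the doWithDB callback pattern above exists so connections are always closed. A minimal caller, assuming the module's exports are imported from "./index", could look like this.

import { doWithDB, directCouchFind } from "./index"

// open the app database, do the work, and let doWithDB close the handle
// in its finally block - no manual closeDB call needed
async function countDocs(appId: string) {
  return doWithDB(appId, async (db: any) => {
    const info = await db.info()
    return info.doc_count
  })
}

// directCouchFind issues a Mango /_find query straight at CouchDB;
// the empty selector here is illustrative only
// const { rows, bookmark } = await directCouchFind(appId, { selector: {} })
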
@@ -1,14 +1,17 @@
 import { newid } from "../hashing"
 import { DEFAULT_TENANT_ID, Configs } from "../constants"
 import env from "../environment"
-import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
+import {
+  SEPARATOR,
+  DocumentType,
+  UNICODE_MAX,
+  ViewName,
+  InternalTable,
+} from "./constants"
 import { getTenantId, getGlobalDB } from "../context"
 import { getGlobalDBName } from "./tenancy"
-import fetch from "node-fetch"
-import { doWithDB, allDbs } from "./index"
-import { getCouchInfo } from "./pouch"
+import { doWithDB, allDbs, directCouchAllDbs } from "./index"
 import { getAppMetadata } from "../cache/appMetadata"
-import { checkSlashesInUrl } from "../helpers"
 import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
 import { APP_PREFIX } from "./constants"
 import * as events from "../events"
@@ -43,8 +46,8 @@ export const generateAppID = (tenantId = null) => {
  * @returns {object} Parameters which can then be used with an allDocs request.
  */
 export function getDocParams(
-  docType: any,
-  docId: any = null,
+  docType: string,
+  docId?: string | null,
   otherProps: any = {}
 ) {
   if (docId == null) {
@@ -57,6 +60,28 @@ export function getDocParams(
   }
 }

+/**
+ * Gets the DB allDocs/query params for retrieving a row.
+ * @param {string|null} tableId The table in which the rows have been stored.
+ * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
+ * left null to get all the rows in the table.
+ * @param {object} otherProps Any other properties to add to the request.
+ * @returns {object} Parameters which can then be used with an allDocs request.
+ */
+export function getRowParams(
+  tableId?: string | null,
+  rowId?: string | null,
+  otherProps = {}
+) {
+  if (tableId == null) {
+    return getDocParams(DocumentType.ROW, null, otherProps)
+  }
+
+  const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
+
+  return getDocParams(DocumentType.ROW, endOfKey, otherProps)
+}
+
 /**
  * Retrieve the correct index for a view based on default design DB.
  */
@@ -64,6 +89,17 @@ export function getQueryIndex(viewName: ViewName) {
   return `database/${viewName}`
 }

+/**
+ * Gets a new row ID for the specified table.
+ * @param {string} tableId The table which the row is being created for.
+ * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
+ * @returns {string} The new ID which a row doc can be stored under.
+ */
+export function generateRowID(tableId: string, id?: string) {
+  id = id || newid()
+  return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
+}
+
 /**
  * Check if a given ID is that of a table.
  * @returns {boolean}
@@ -131,6 +167,33 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
   }
 }

+/**
+ * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
+ */
+export function getUserMetadataParams(userId?: string, otherProps = {}) {
+  return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
+}
+
+/**
+ * Generates a new user ID based on the passed in global ID.
+ * @param {string} globalId The ID of the global user.
+ * @returns {string} The new user ID which the user doc can be stored under.
+ */
+export function generateUserMetadataID(globalId: string) {
+  return generateRowID(InternalTable.USER_METADATA, globalId)
+}
+
+/**
+ * Breaks up the ID to get the global ID.
+ */
+export function getGlobalIDFromUserMetadataID(id: string) {
+  const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
+  if (!id || !id.includes(prefix)) {
+    return id
+  }
+  return id.split(prefix)[1]
+}
+
 export function getUsersByAppParams(appId: any, otherProps: any = {}) {
   const prodAppId = getProdAppID(appId)
   return {
@@ -191,9 +254,9 @@ export function getRoleParams(roleId = null, otherProps = {}) {
   return getDocParams(DocumentType.ROLE, roleId, otherProps)
 }

-export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
+export function getStartEndKeyURL(baseKey: any, tenantId = null) {
   const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
-  return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
+  return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
 }

 /**
@@ -209,22 +272,10 @@ export async function getAllDbs(opts = { efficient: false }) {
     return allDbs()
   }
   let dbs: any[] = []
-  let { url, cookie } = getCouchInfo()
-  async function addDbs(couchUrl: string) {
-    const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), {
-      method: "GET",
-      headers: {
-        Authorization: cookie,
-      },
-    })
-    if (response.status === 200) {
-      let json = await response.json()
-      dbs = dbs.concat(json)
-    } else {
-      throw "Cannot connect to CouchDB instance"
-    }
+  async function addDbs(queryString?: string) {
+    const json = await directCouchAllDbs(queryString)
+    dbs = dbs.concat(json)
   }
-  let couchUrl = `${url}/_all_dbs`
   let tenantId = getTenantId()
   if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
     // just get all DBs when:
@@ -232,12 +283,12 @@ export async function getAllDbs(opts = { efficient: false }) {
     // - default tenant
     // - apps dbs don't contain tenant id
     // - non-default tenant dbs are filtered out application side in getAllApps
-    await addDbs(couchUrl)
+    await addDbs()
   } else {
     // get prod apps
-    await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
+    await addDbs(getStartEndKeyURL(DocumentType.APP, tenantId))
     // get dev apps
-    await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
+    await addDbs(getStartEndKeyURL(DocumentType.APP_DEV, tenantId))
     // add global db name
     dbs.push(getGlobalDBName(tenantId))
   }

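Worked example of the new key helpers (illustrative; the ROW and USER_METADATA values come from the constants hunk above, while SEPARATOR = "_", UNICODE_MAX = "\ufff0", and DocumentType.APP = "app" are assumed):

// generateRowID(InternalTable.USER_METADATA, "us_abc123")
//   => "ro_ta_users_us_abc123"
// getGlobalIDFromUserMetadataID("ro_ta_users_us_abc123")
//   => "us_abc123"
//
// getStartEndKeyURL now returns only the query string, which
// directCouchAllDbs appends to the /_all_dbs path:
// getStartEndKeyURL(DocumentType.APP, "tenant1")
//   => startkey="app_tenant1"&endkey="app_tenant1\ufff0"
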
@@ -0,0 +1,12 @@
+import { AppBackup, AppBackupRestoreEvent, Event } from "@budibase/types"
+import { publishEvent } from "../events"
+
+export async function appBackupRestored(backup: AppBackup) {
+  const properties: AppBackupRestoreEvent = {
+    appId: backup.appId,
+    backupName: backup.name!,
+    backupCreatedAt: backup.timestamp,
+  }
+
+  await publishEvent(Event.APP_BACKUP_RESTORED, properties)
+}

@@ -19,3 +19,4 @@ export * as installation from "./installation"
 export * as backfill from "./backfill"
 export * as group from "./group"
 export * as plugin from "./plugin"
+export * as backup from "./backup"

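With the backup namespace exported, a restore flow elsewhere in the codebase could publish the new event with something like the line below (call site assumed, not shown in this diff):

// after a successful restore of `backup: AppBackup`
// await events.backup.appBackupRestored(backup)
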
@@ -19,6 +19,7 @@ import pino from "./pino"
 import * as middleware from "./middleware"
 import plugins from "./plugin"
 import encryption from "./security/encryption"
+import * as queue from "./queue"

 // mimic the outer package exports
 import * as db from "./pkg/db"
@@ -63,6 +64,7 @@ const core = {
   ...errorClasses,
   middleware,
   encryption,
+  queue,
 }

 export = core

@@ -18,11 +18,16 @@ const STATE = {
   bucketCreationPromises: {},
 }

+type ListParams = {
+  ContinuationToken?: string
+}
+
 const CONTENT_TYPE_MAP: any = {
   html: "text/html",
   css: "text/css",
   js: "application/javascript",
   json: "application/json",
+  gz: "application/gzip",
 }
 const STRING_CONTENT_TYPES = [
   CONTENT_TYPE_MAP.html,
@@ -32,16 +37,16 @@ const STRING_CONTENT_TYPES = [
 ]

 // does normal sanitization and then swaps dev apps to apps
-export function sanitizeKey(input: any) {
+export function sanitizeKey(input: string) {
   return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
 }

 // simply handles the dev app to app conversion
-export function sanitizeBucket(input: any) {
+export function sanitizeBucket(input: string) {
   return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
 }

-function publicPolicy(bucketName: any) {
+function publicPolicy(bucketName: string) {
   return {
     Version: "2012-10-17",
     Statement: [
@@ -69,7 +74,7 @@ const PUBLIC_BUCKETS = [
  * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
-export const ObjectStore = (bucket: any) => {
+export const ObjectStore = (bucket: string) => {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -93,7 +98,7 @@ export const ObjectStore = (bucket: any) => {
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export const makeSureBucketExists = async (client: any, bucketName: any) => {
+export const makeSureBucketExists = async (client: any, bucketName: string) => {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -145,7 +150,7 @@ export const upload = async ({
   type,
   metadata,
 }: any) => {
-  const extension = [...filename.split(".")].pop()
+  const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)

   const objectStore = ObjectStore(bucketName)
@@ -168,8 +173,8 @@
  * through to the object store.
  */
 export const streamUpload = async (
-  bucketName: any,
-  filename: any,
+  bucketName: string,
+  filename: string,
   stream: any,
   extra = {}
 ) => {
@@ -202,7 +207,7 @@ export const streamUpload = async (
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export const retrieve = async (bucketName: any, filepath: any) => {
+export const retrieve = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -217,10 +222,38 @@ export const retrieve = async (bucketName: any, filepath: any) => {
   }
 }

+export const listAllObjects = async (bucketName: string, path: string) => {
+  const objectStore = ObjectStore(bucketName)
+  const list = (params: ListParams = {}) => {
+    return objectStore
+      .listObjectsV2({
+        ...params,
+        Bucket: sanitizeBucket(bucketName),
+        Prefix: sanitizeKey(path),
+      })
+      .promise()
+  }
+  let isTruncated = false,
+    token,
+    objects: AWS.S3.Types.Object[] = []
+  do {
+    let params: ListParams = {}
+    if (token) {
+      params.ContinuationToken = token
+    }
+    const response = await list(params)
+    if (response.Contents) {
+      objects = objects.concat(response.Contents)
+    }
+    isTruncated = !!response.IsTruncated
+  } while (isTruncated)
+  return objects
+}
+
 /**
  * Same as retrieval function but puts to a temporary file.
  */
-export const retrieveToTmp = async (bucketName: any, filepath: any) => {
+export const retrieveToTmp = async (bucketName: string, filepath: string) => {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
@@ -229,10 +262,31 @@ export const retrieveToTmp = async (bucketName: any, filepath: any) => {
   return outputPath
 }

+export const retrieveDirectory = async (bucketName: string, path: string) => {
+  let writePath = join(budibaseTempDir(), v4())
+  fs.mkdirSync(writePath)
+  const objects = await listAllObjects(bucketName, path)
+  let fullObjects = await Promise.all(
+    objects.map(obj => retrieve(bucketName, obj.Key!))
+  )
+  let count = 0
+  for (let obj of objects) {
+    const filename = obj.Key!
+    const data = fullObjects[count++]
+    const possiblePath = filename.split("/")
+    if (possiblePath.length > 1) {
+      const dirs = possiblePath.slice(0, possiblePath.length - 1)
+      fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
+    }
+    fs.writeFileSync(join(writePath, ...possiblePath), data)
+  }
+  return writePath
+}
+
 /**
  * Delete a single file.
  */
-export const deleteFile = async (bucketName: any, filepath: any) => {
+export const deleteFile = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -242,7 +296,7 @@ export const deleteFile = async (bucketName: any, filepath: any) => {
   return objectStore.deleteObject(params)
 }

-export const deleteFiles = async (bucketName: any, filepaths: any) => {
+export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -258,8 +312,8 @@ export const deleteFiles = async (bucketName: any, filepaths: any) => {
  * Delete a path, including everything within.
  */
 export const deleteFolder = async (
-  bucketName: any,
-  folder: any
+  bucketName: string,
+  folder: string
 ): Promise<any> => {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
@@ -292,9 +346,9 @@ export const deleteFolder = async (
 }

 export const uploadDirectory = async (
-  bucketName: any,
-  localPath: any,
-  bucketPath: any
+  bucketName: string,
+  localPath: string,
+  bucketPath: string
 ) => {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
@@ -326,7 +380,11 @@ exports.downloadTarballDirect = async (
   await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
 }

-export const downloadTarball = async (url: any, bucketName: any, path: any) => {
+export const downloadTarball = async (
+  url: string,
+  bucketName: string,
+  path: string
+) => {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)

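Usage sketch for the new listing helpers (not part of the diff; the bucket and prefix names are invented, and the "./objectStore" import path is assumed). One caveat worth flagging: the pagination loop keys off IsTruncated, and with the AWS v2 SDK the follow-up page's ContinuationToken would normally be taken from the previous response's NextContinuationToken.

import { listAllObjects, retrieveDirectory } from "./objectStore"

async function example() {
  // enumerate everything under an app's backup prefix
  const objects = await listAllObjects("app-backups", "app_1234/")
  console.log(objects.map(obj => obj.Key))

  // or mirror the whole prefix into a temp directory on disk
  const dir = await retrieveDirectory("app-backups", "app_1234/")
  console.log(`downloaded to ${dir}`)
}
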
@@ -0,0 +1,4 @@
+export enum JobQueue {
+  AUTOMATION = "automationQueue",
+  APP_BACKUP = "appBackupQueue",
+}

@@ -0,0 +1,127 @@
+import events from "events"
+
+/**
+ * Bull works with a Job wrapper around all messages that contains a lot more information about
+ * the state of the message, this object constructor implements the same schema of Bull jobs
+ * for the sake of maintaining API consistency.
+ * @param {string} queue The name of the queue which the message will be carried on.
+ * @param {object} message The JSON message which will be passed back to the consumer.
+ * @returns {Object} A new job which can now be put onto the queue, this is mostly an
+ * internal structure so that an in memory queue can be easily swapped for a Bull queue.
+ */
+function newJob(queue: string, message: any) {
+  return {
+    timestamp: Date.now(),
+    queue: queue,
+    data: message,
+  }
+}
+
+/**
+ * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.
+ * It is relatively simple, using an event emitter internally to register when messages are available
+ * to the consumers - it can support many inputs and many consumers.
+ */
+class InMemoryQueue {
+  _name: string
+  _opts?: any
+  _messages: any[]
+  _emitter: events.EventEmitter
+  /**
+   * The constructor of the queue, exactly the same as that of Bull's.
+   * @param {string} name The name of the queue which is being configured.
+   * @param {object|null} opts This is not used by the in memory queue as there is no real use
+   * case when in memory, but is the same API as Bull
+   */
+  constructor(name: string, opts = null) {
+    this._name = name
+    this._opts = opts
+    this._messages = []
+    this._emitter = new events.EventEmitter()
+  }
+
+  /**
+   * Same callback API as Bull, each callback passed to this will consume messages as they are
+   * available. Please note this is a queue service, not a notification service, so each
+   * consumer will receive different messages.
+   * @param {function<object>} func The callback function which will return a "Job", the same
+   * as the Bull API, within this job the property "data" contains the JSON message. Please
+   * note this is incredibly limited compared to Bull as in reality the Job would contain
+   * a lot more information about the queue and current status of Bull cluster.
+   */
+  process(func: any) {
+    this._emitter.on("message", async () => {
+      if (this._messages.length <= 0) {
+        return
+      }
+      let msg = this._messages.shift()
+      let resp = func(msg)
+      if (resp.then != null) {
+        await resp
+      }
+    })
+  }
+
+  // simply puts a message to the queue and emits to the queue for processing
+  /**
+   * Simple function to replicate the add message functionality of Bull, putting
+   * a new message on the queue. This then emits an event which will be used to
+   * return the message to a consumer (if one is attached).
+   * @param {object} msg A message to be transported over the queue, this should be
+   * a JSON message as this is required by Bull.
+   * @param {boolean} repeat serves no purpose for the import queue.
+   */
+  // eslint-disable-next-line no-unused-vars
+  add(msg: any, repeat: boolean) {
+    if (typeof msg !== "object") {
+      throw "Queue only supports carrying JSON."
+    }
+    this._messages.push(newJob(this._name, msg))
+    this._emitter.emit("message")
+  }
+
+  /**
+   * replicating the close function from bull, which waits for jobs to finish.
+   */
+  async close() {
+    return []
+  }
+
+  /**
+   * This removes a cron which has been implemented, this is part of Bull API.
+   * @param {string} cronJobId The cron which is to be removed.
+   */
+  removeRepeatableByKey(cronJobId: string) {
+    // TODO: implement for testing
+    console.log(cronJobId)
+  }
+
+  /**
+   * Implemented for tests
+   */
+  getRepeatableJobs() {
+    return []
+  }
+
+  // eslint-disable-next-line no-unused-vars
+  removeJobs(pattern: string) {
+    // no-op
+  }
+
+  /**
+   * Implemented for tests
+   */
+  async clean() {
+    return []
+  }
+
+  async getJob() {
+    return {}
+  }
+
+  on() {
+    // do nothing
+  }
+}
+
+export = InMemoryQueue

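A sketch of the Bull-compatible surface in a test (illustrative; the queue name matches the JobQueue constant above, the message shape is invented):

import InMemoryQueue = require("./inMemoryQueue")

const queue = new InMemoryQueue("appBackupQueue")

// consumer: receives a Bull-shaped job whose "data" field holds the message
queue.process(async (job: any) => {
  console.log(job.data.appId)
})

// producer: only JSON objects are accepted, mirroring Bull
queue.add({ appId: "app_1234" }, false)
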
@@ -0,0 +1,2 @@
+export * from "./queue"
+export * from "./constants"

@@ -0,0 +1,101 @@
+import { Job, JobId, Queue } from "bull"
+import { JobQueue } from "./constants"
+
+export type StalledFn = (job: Job) => Promise<void>
+
+export function addListeners(
+  queue: Queue,
+  jobQueue: JobQueue,
+  removeStalledCb?: StalledFn
+) {
+  logging(queue, jobQueue)
+  if (removeStalledCb) {
+    handleStalled(queue, removeStalledCb)
+  }
+}
+
+function handleStalled(queue: Queue, removeStalledCb?: StalledFn) {
+  queue.on("stalled", async (job: Job) => {
+    if (removeStalledCb) {
+      await removeStalledCb(job)
+    } else if (job.opts.repeat) {
+      const jobId = job.id
+      const repeatJobs = await queue.getRepeatableJobs()
+      for (let repeatJob of repeatJobs) {
+        if (repeatJob.id === jobId) {
+          await queue.removeRepeatableByKey(repeatJob.key)
+        }
+      }
+      console.log(`jobId=${jobId} disabled`)
+    }
+  })
+}
+
+function logging(queue: Queue, jobQueue: JobQueue) {
+  let eventType: string
+  switch (jobQueue) {
+    case JobQueue.AUTOMATION:
+      eventType = "automation-event"
+      break
+    case JobQueue.APP_BACKUP:
+      eventType = "app-backup-event"
+      break
+  }
+  if (process.env.NODE_DEBUG?.includes("bull")) {
+    queue
+      .on("error", (error: any) => {
+        // An error occurred.
+        console.error(`${eventType}=error error=${JSON.stringify(error)}`)
+      })
+      .on("waiting", (jobId: JobId) => {
+        // A Job is waiting to be processed as soon as a worker is idling.
+        console.log(`${eventType}=waiting jobId=${jobId}`)
+      })
+      .on("active", (job: Job, jobPromise: any) => {
+        // A job has started. You can use `jobPromise.cancel()` to abort it.
+        console.log(`${eventType}=active jobId=${job.id}`)
+      })
+      .on("stalled", (job: Job) => {
+        // A job has been marked as stalled. This is useful for debugging job
+        // workers that crash or pause the event loop.
+        console.error(
+          `${eventType}=stalled jobId=${job.id} job=${JSON.stringify(job)}`
+        )
+      })
+      .on("progress", (job: Job, progress: any) => {
+        // A job's progress was updated!
+        console.log(
+          `${eventType}=progress jobId=${job.id} progress=${progress}`
+        )
+      })
+      .on("completed", (job: Job, result) => {
+        // A job successfully completed with a `result`.
+        console.log(`${eventType}=completed jobId=${job.id} result=${result}`)
+      })
+      .on("failed", (job, err: any) => {
+        // A job failed with reason `err`!
+        console.log(`${eventType}=failed jobId=${job.id} error=${err}`)
+      })
+      .on("paused", () => {
+        // The queue has been paused.
+        console.log(`${eventType}=paused`)
+      })
+      .on("resumed", (job: Job) => {
+        // The queue has been resumed.
+        console.log(`${eventType}=resumed jobId=${job.id}`)
+      })
+      .on("cleaned", (jobs: Job[], type: string) => {
+        // Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
+        // jobs, and `type` is the type of jobs cleaned.
+        console.log(`${eventType}=cleaned length=${jobs.length} type=${type}`)
+      })
+      .on("drained", () => {
+        // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)
+        console.log(`${eventType}=drained`)
+      })
+      .on("removed", (job: Job) => {
+        // A job successfully removed.
+        console.log(`${eventType}=removed jobId=${job.id}`)
+      })
+  }
+}

@@ -0,0 +1,51 @@
+import env from "../environment"
+import { getRedisOptions } from "../redis/utils"
+import { JobQueue } from "./constants"
+import InMemoryQueue from "./inMemoryQueue"
+import BullQueue from "bull"
+import { addListeners, StalledFn } from "./listeners"
+const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
+
+const CLEANUP_PERIOD_MS = 60 * 1000
+let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
+let cleanupInterval: NodeJS.Timeout
+
+async function cleanup() {
+  for (let queue of QUEUES) {
+    await queue.clean(CLEANUP_PERIOD_MS, "completed")
+  }
+}
+
+export function createQueue<T>(
+  jobQueue: JobQueue,
+  opts: { removeStalledCb?: StalledFn } = {}
+): BullQueue.Queue<T> {
+  const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
+  let queue: any
+  if (!env.isTest()) {
+    queue = new BullQueue(jobQueue, queueConfig)
+  } else {
+    queue = new InMemoryQueue(jobQueue, queueConfig)
+  }
+  addListeners(queue, jobQueue, opts?.removeStalledCb)
+  QUEUES.push(queue)
+  if (!cleanupInterval) {
+    cleanupInterval = setInterval(cleanup, CLEANUP_PERIOD_MS)
+    // fire off an initial cleanup
+    cleanup().catch(err => {
+      console.error(`Unable to cleanup automation queue initially - ${err}`)
+    })
+  }
+  return queue
+}
+
+exports.shutdown = async () => {
+  if (QUEUES.length) {
+    clearInterval(cleanupInterval)
+    for (let queue of QUEUES) {
+      await queue.close()
+    }
+    QUEUES = []
+  }
+  console.log("Queues shutdown")
+}

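Putting the queue pieces together, a consumer might look like the sketch below (illustrative; the message type and handler body are invented). In tests createQueue transparently returns the in-memory mock, so the same call site works in both environments.

import { Job } from "bull"
import { createQueue, JobQueue } from "./index"

type AppBackupMessage = { appId: string; trigger: string }

const backupQueue = createQueue<AppBackupMessage>(JobQueue.APP_BACKUP, {
  // optional: take over handling of stalled jobs instead of the default
  removeStalledCb: async (job: Job) => {
    console.error(`removing stalled backup job ${job.id}`)
  },
})

backupQueue.process(async (job: Job<AppBackupMessage>) => {
  // perform the backup for job.data.appId here
})
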
@@ -543,6 +543,36 @@
     semver "^7.3.5"
     tar "^6.1.11"

+"@msgpackr-extract/msgpackr-extract-darwin-arm64@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-2.1.2.tgz#9571b87be3a3f2c46de05585470bc4f3af2f6f00"
+  integrity sha512-TyVLn3S/+ikMDsh0gbKv2YydKClN8HaJDDpONlaZR+LVJmsxLFUgA+O7zu59h9+f9gX1aj/ahw9wqa6rosmrYQ==
+
+"@msgpackr-extract/msgpackr-extract-darwin-x64@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-2.1.2.tgz#bfbc6936ede2955218f5621a675679a5fe8e6f4c"
+  integrity sha512-YPXtcVkhmVNoMGlqp81ZHW4dMxK09msWgnxtsDpSiZwTzUBG2N+No2bsr7WMtBKCVJMSD6mbAl7YhKUqkp/Few==
+
+"@msgpackr-extract/msgpackr-extract-linux-arm64@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-2.1.2.tgz#22555e28382af2922e7450634c8a2f240bb9eb82"
+  integrity sha512-vHZ2JiOWF2+DN9lzltGbhtQNzDo8fKFGrf37UJrgqxU0yvtERrzUugnfnX1wmVfFhSsF8OxrfqiNOUc5hko1Zg==
+
+"@msgpackr-extract/msgpackr-extract-linux-arm@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-2.1.2.tgz#ffb6ae1beea7ac572b6be6bf2a8e8162ebdd8be7"
+  integrity sha512-42R4MAFeIeNn+L98qwxAt360bwzX2Kf0ZQkBBucJ2Ircza3asoY4CDbgiu9VWklq8gWJVSJSJBwDI+c/THiWkA==
+
+"@msgpackr-extract/msgpackr-extract-linux-x64@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-2.1.2.tgz#7caf62eebbfb1345de40f75e89666b3d4194755f"
+  integrity sha512-RjRoRxg7Q3kPAdUSC5EUUPlwfMkIVhmaRTIe+cqHbKrGZ4M6TyCA/b5qMaukQ/1CHWrqYY2FbKOAU8Hg0pQFzg==
+
+"@msgpackr-extract/msgpackr-extract-win32-x64@2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-2.1.2.tgz#f2d8b9ddd8d191205ed26ce54aba3dfc5ae3e7c9"
+  integrity sha512-rIZVR48zA8hGkHIK7ED6+ZiXsjRCcAVBJbm8o89OKAMTmEAQ2QvoOxoiu3w2isAaWwzgtQIOFIqHwvZDyLKCvw==
+
 "@shopify/jest-koa-mocks@5.0.1":
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.0.1.tgz#fba490b6b7985fbb571eb9974897d396a3642e94"
@@ -733,6 +763,13 @@
   resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1"
   integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==

+"@types/ioredis@4.28.0":
+  version "4.28.0"
+  resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.0.tgz#609b2ea0d91231df2dd7f67dd77436bc72584911"
+  integrity sha512-HSA/JQivJgV0e+353gvgu6WVoWvGRe0HyHOnAN2AvbVIhUlJBhNnnkP8gEEokrDWrxywrBkwo8NuDZ6TVPL9XA==
+  dependencies:
+    "@types/node" "*"
+
 "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1":
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44"
@@ -1497,6 +1534,21 @@ buffer@^5.5.0, buffer@^5.6.0:
     base64-js "^1.3.1"
     ieee754 "^1.1.13"

+bull@4.10.1:
+  version "4.10.1"
+  resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f"
+  integrity sha512-Fp21tRPb2EaZPVfmM+ONZKVz2RA+to+zGgaTLyCKt3JMSU8OOBqK8143OQrnGuGpsyE5G+9FevFAGhdZZfQP2g==
+  dependencies:
+    cron-parser "^4.2.1"
+    debuglog "^1.0.0"
+    get-port "^5.1.1"
+    ioredis "^4.28.5"
+    lodash "^4.17.21"
+    msgpackr "^1.5.2"
+    p-timeout "^3.2.0"
+    semver "^7.3.2"
+    uuid "^8.3.0"
+
 cache-content-type@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/cache-content-type/-/cache-content-type-1.0.1.tgz#035cde2b08ee2129f4a8315ea8f00a00dba1453c"
@@ -1764,6 +1816,13 @@ core-util-is@~1.0.0:
   resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
   integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==

+cron-parser@^4.2.1:
+  version "4.6.0"
+  resolved "https://registry.yarnpkg.com/cron-parser/-/cron-parser-4.6.0.tgz#404c3fdbff10ae80eef6b709555d577ef2fd2e0d"
+  integrity sha512-guZNLMGUgg6z4+eGhmHGw7ft+v6OQeuHzd1gcLxCo9Yg/qoxmG3nindp2/uwGCLizEisf2H0ptqeVXeoCpP6FA==
+  dependencies:
+    luxon "^3.0.1"
+
 cross-spawn@^7.0.3:
   version "7.0.3"
   resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
@@ -1837,6 +1896,11 @@ debug@~3.1.0:
   dependencies:
     ms "2.0.0"

+debuglog@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
+  integrity sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==
+
 decimal.js@^10.2.1:
   version "10.3.1"
   resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783"
@@ -2318,6 +2382,11 @@ get-package-type@^0.1.0:
   resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a"
   integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==

+get-port@^5.1.1:
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193"
+  integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==
+
 get-stream@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
@@ -2652,6 +2721,23 @@ ioredis@4.28.0:
     redis-parser "^3.0.0"
     standard-as-callback "^2.1.0"

+ioredis@^4.28.5:
+  version "4.28.5"
+  resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f"
+  integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A==
+  dependencies:
+    cluster-key-slot "^1.1.0"
+    debug "^4.3.1"
+    denque "^1.1.0"
+    lodash.defaults "^4.2.0"
+    lodash.flatten "^4.4.0"
+    lodash.isarguments "^3.1.0"
+    p-map "^2.1.0"
+    redis-commands "1.7.0"
+    redis-errors "^1.2.0"
+    redis-parser "^3.0.0"
+    standard-as-callback "^2.1.0"
+
 is-arrayish@^0.2.1:
   version "0.2.1"
   resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
@@ -3725,6 +3811,11 @@ ltgt@2.2.1, ltgt@^2.1.2, ltgt@~2.2.0:
   resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5"
   integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==

+luxon@^3.0.1:
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/luxon/-/luxon-3.0.4.tgz#d179e4e9f05e092241e7044f64aaa54796b03929"
+  integrity sha512-aV48rGUwP/Vydn8HT+5cdr26YYQiUZ42NM6ToMoaGKwYfWbfLeRkEu1wXWMHBZT6+KyLfcbbtVcoQFCbbPjKlw==
+
 make-dir@^3.0.0, make-dir@^3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
@@ -3872,6 +3963,27 @@ ms@^2.1.1, ms@^2.1.3:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

+msgpackr-extract@^2.1.2:
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/msgpackr-extract/-/msgpackr-extract-2.1.2.tgz#56272030f3e163e1b51964ef8b1cd5e7240c03ed"
+  integrity sha512-cmrmERQFb19NX2JABOGtrKdHMyI6RUyceaPBQ2iRz9GnDkjBWFjNJC0jyyoOfZl2U/LZE3tQCCQc4dlRyA8mcA==
+  dependencies:
+    node-gyp-build-optional-packages "5.0.3"
+  optionalDependencies:
+    "@msgpackr-extract/msgpackr-extract-darwin-arm64" "2.1.2"
+    "@msgpackr-extract/msgpackr-extract-darwin-x64" "2.1.2"
+    "@msgpackr-extract/msgpackr-extract-linux-arm" "2.1.2"
+    "@msgpackr-extract/msgpackr-extract-linux-arm64" "2.1.2"
+    "@msgpackr-extract/msgpackr-extract-linux-x64" "2.1.2"
+    "@msgpackr-extract/msgpackr-extract-win32-x64" "2.1.2"
+
+msgpackr@^1.5.2:
+  version "1.7.2"
+  resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.7.2.tgz#68d6debf5999d6b61abb6e7046a689991ebf7261"
+  integrity sha512-mWScyHTtG6TjivXX9vfIy2nBtRupaiAj0HQ2mtmpmYujAmqZmaaEVPaSZ1NKLMvicaMLFzEaMk0ManxMRg8rMQ==
+  optionalDependencies:
+    msgpackr-extract "^2.1.2"
+
 napi-macros@~2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"
@@ -3919,6 +4031,11 @@ node-forge@^0.7.1:
   resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
   integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw==

+node-gyp-build-optional-packages@5.0.3:
+  version "5.0.3"
+  resolved "https://registry.yarnpkg.com/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.0.3.tgz#92a89d400352c44ad3975010368072b41ad66c17"
+  integrity sha512-k75jcVzk5wnnc/FMxsf4udAoTEUv2jY3ycfdSd3yWu6Cnd1oee6/CfZJApyscA4FJOmdoixWwiwOyf16RzD5JA==
+
 node-gyp-build@~4.1.0:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb"
@@ -4075,6 +4192,11 @@ p-cancelable@^1.0.0:
   resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
   integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==

+p-finally@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+  integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
+
 p-limit@^2.2.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
@@ -4094,6 +4216,13 @@ p-map@^2.1.0:
   resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175"
   integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==

+p-timeout@^3.2.0:
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
+  integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
+  dependencies:
+    p-finally "^1.0.0"
+
 p-try@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
@@ -5360,7 +5489,7 @@ uuid@8.1.0:
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d"
   integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg==

-uuid@8.3.2, uuid@^8.3.2:
+uuid@8.3.2, uuid@^8.3.0, uuid@^8.3.2:
   version "8.3.2"
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
   integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==

@@ -21,7 +21,6 @@
   import { API } from "api"
   import { onMount } from "svelte"
   import { apps, auth, admin, templates, licensing } from "stores/portal"
-  import download from "downloadjs"
   import { goto } from "@roxi/routify"
   import AppRow from "components/start/AppRow.svelte"
   import { AppStatus } from "constants"

@@ -140,7 +139,7 @@
   const initiateAppsExport = () => {
     try {
-      download(`/api/cloud/export`)
+      window.location = `/api/cloud/export`
       notifications.success("Apps exported successfully")
     } catch (err) {
       notifications.error(`Error exporting apps: ${err}`)
@@ -2,6 +2,9 @@ import { writable } from "svelte/store"
 import { AppStatus } from "../../constants"
 import { API } from "api"

+// properties that should always come from the dev app, not the deployed
+const DEV_PROPS = ["updatedBy", "updatedAt"]
+
 const extractAppId = id => {
   const split = id?.split("_") || []
   return split.length ? split[split.length - 1] : null

@@ -57,9 +60,19 @@ export function createAppStore() {
         return
       }
+      let devProps = {}
+      if (appMap[id]) {
+        const entries = Object.entries(appMap[id]).filter(
+          ([key]) => DEV_PROPS.indexOf(key) !== -1
+        )
+        entries.forEach(entry => {
+          devProps[entry[0]] = entry[1]
+        })
+      }
       appMap[id] = {
         ...appMap[id],
         ...app,
+        ...devProps,
         prodId: app.appId,
         prodRev: app._rev,
       }
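Note: the devProps merge above exists so that fields owned by the dev app document (its updatedBy/updatedAt) survive when the deployed app's document is spread over the same store entry. A minimal TypeScript sketch of the same idea, assuming the appMap shape used in this store; mergeProdApp is a hypothetical helper, not part of this PR:

// properties that should always come from the dev app, not the deployed
const DEV_PROPS = ["updatedBy", "updatedAt"]

function mergeProdApp(
  existing: Record<string, any> | undefined,
  prodApp: Record<string, any>
): Record<string, any> {
  // keep only the dev-owned properties from the existing entry
  const devProps = Object.fromEntries(
    Object.entries(existing || {}).filter(([key]) => DEV_PROPS.includes(key))
  )
  // spread devProps last so the dev values win over the deployed document
  return { ...existing, ...prodApp, ...devProps, prodId: prodApp.appId, prodRev: prodApp._rev }
}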
@@ -670,6 +670,11 @@ has@^1.0.3:
   dependencies:
     function-bind "^1.1.1"

+html5-qrcode@^2.2.1:
+  version "2.2.3"
+  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.2.3.tgz#5acb826860365e7c7ab91e1e14528ea16a502e8a"
+  integrity sha512-9CtEz5FVT56T76entiQxyrASzBWl8Rm30NHiQH8T163Eml5LS14BoZlYel9igxbikOt7O8KhvrT3awN1Y2HMqw==
+
 htmlparser2@^6.0.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
@@ -86,14 +86,14 @@
     "@bull-board/koa": "3.9.4",
     "@elastic/elasticsearch": "7.10.0",
     "@google-cloud/firestore": "5.0.2",
-    "@koa/router": "8.0.0",
+    "@koa/router": "8.0.8",
     "@sendgrid/mail": "7.1.1",
     "@sentry/node": "6.17.7",
     "airtable": "0.10.1",
     "arangojs": "7.2.0",
     "aws-sdk": "2.1030.0",
     "bcryptjs": "2.4.3",
-    "bull": "4.8.5",
+    "bull": "4.10.1",
     "chmodr": "1.2.0",
     "chokidar": "3.5.3",
     "csvtojson": "2.0.10",

@@ -112,7 +112,7 @@
     "js-yaml": "4.1.0",
     "jsonschema": "1.4.0",
     "knex": "0.95.15",
-    "koa": "2.7.0",
+    "koa": "2.13.4",
     "koa-body": "4.2.0",
     "koa-compress": "4.0.1",
     "koa-connect": "2.1.0",

@@ -159,12 +159,12 @@
     "@jest/test-sequencer": "24.9.0",
     "@types/apidoc": "0.50.0",
     "@types/bson": "4.2.0",
-    "@types/bull": "3.15.8",
     "@types/global-agent": "2.1.1",
     "@types/google-spreadsheet": "3.1.5",
+    "@types/ioredis": "4.28.10",
     "@types/jest": "27.5.1",
     "@types/koa": "2.13.4",
-    "@types/koa__router": "8.0.0",
+    "@types/koa__router": "8.0.11",
     "@types/lodash": "4.14.180",
     "@types/node": "14.18.20",
     "@types/node-fetch": "2.6.1",
@@ -783,6 +783,7 @@
         "type": "string",
         "enum": [
           "string",
+          "barcodeqr",
           "longform",
           "options",
           "number",

@@ -986,6 +987,7 @@
         "type": "string",
         "enum": [
           "string",
+          "barcodeqr",
           "longform",
           "options",
           "number",

@@ -1200,6 +1202,7 @@
         "type": "string",
         "enum": [
           "string",
+          "barcodeqr",
           "longform",
           "options",
           "number",
@@ -579,6 +579,7 @@ components:
             type: string
             enum:
               - string
+              - barcodeqr
               - longform
               - options
               - number

@@ -741,6 +742,7 @@ components:
             type: string
             enum:
               - string
+              - barcodeqr
               - longform
               - options
               - number

@@ -910,6 +912,7 @@ components:
             type: string
             enum:
               - string
+              - barcodeqr
               - longform
               - options
               - number
@@ -5,11 +5,7 @@ import {
   createRoutingView,
   createAllSearchIndex,
 } from "../../db/views/staticViews"
-import {
-  getTemplateStream,
-  createApp,
-  deleteApp,
-} from "../../utilities/fileSystem"
+import { createApp, deleteApp } from "../../utilities/fileSystem"
 import {
   generateAppID,
   getLayoutParams,

@@ -50,6 +46,7 @@ import { errors, events, migrations } from "@budibase/backend-core"
 import { App, Layout, Screen, MigrationType } from "@budibase/types"
 import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import { enrichPluginURLs } from "../../utilities/plugins"
+import sdk from "../../sdk"

 const URL_REGEX_SLASH = /\/|\\/g

@@ -153,11 +150,7 @@ async function createInstance(template: any) {
       throw "Error loading database dump from memory."
     }
   } else if (template && template.useTemplate === "true") {
-    /* istanbul ignore next */
-    const { ok } = await db.load(await getTemplateStream(template))
-    if (!ok) {
-      throw "Error loading database dump from template."
-    }
+    await sdk.backups.importApp(appId, db, template)
   } else {
     // create the users table
     await db.put(USERS_TABLE_SCHEMA)
@@ -1,15 +1,15 @@
-const { streamBackup } = require("../../utilities/fileSystem")
-const { events, context } = require("@budibase/backend-core")
-const { DocumentType } = require("../../db/utils")
-const { isQsTrue } = require("../../utilities")
+import sdk from "../../sdk"
+import { events, context } from "@budibase/backend-core"
+import { DocumentType } from "../../db/utils"
+import { isQsTrue } from "../../utilities"

-exports.exportAppDump = async function (ctx) {
+export async function exportAppDump(ctx: any) {
   let { appId, excludeRows } = ctx.query
   const appName = decodeURI(ctx.query.appname)
   excludeRows = isQsTrue(excludeRows)
-  const backupIdentifier = `${appName}-export-${new Date().getTime()}.txt`
+  const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
   ctx.attachment(backupIdentifier)
-  ctx.body = await streamBackup(appId, excludeRows)
+  ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)

   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
@@ -1,51 +1,30 @@
 const env = require("../../environment")
 const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
-const {
-  exportDB,
-  sendTempFile,
-  readFileSync,
-} = require("../../utilities/fileSystem")
-const { stringToReadStream } = require("../../utilities")
 const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-const { create } = require("./application")
+const { streamFile } = require("../../utilities/fileSystem")
+const { stringToReadStream } = require("../../utilities")
 const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
+const { create } = require("./application")
+const { join } = require("path")
+const sdk = require("../../sdk")

-async function createApp(appName, appImport) {
+async function createApp(appName, appDirectory) {
   const ctx = {
     request: {
       body: {
-        templateString: appImport,
+        useTemplate: true,
         name: appName,
       },
+      files: {
+        templateFile: {
+          path: appDirectory,
+        },
+      },
     },
   }
   return create(ctx)
 }

-exports.exportApps = async ctx => {
-  if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
-    ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
-  }
-  const apps = await getAllApps({ all: true })
-  const globalDBString = await exportDB(getGlobalDBName(), {
-    filter: doc => !doc._id.startsWith(DocumentType.USER),
-  })
-  let allDBs = {
-    global: globalDBString,
-  }
-  for (let app of apps) {
-    const appId = app.appId || app._id
-    // only export the dev apps as they will be the latest, the user can republish the apps
-    // in their self hosted environment
-    if (isDevAppID(appId)) {
-      allDBs[app.name] = await exportDB(appId)
-    }
-  }
-  const filename = `cloud-export-${new Date().getTime()}.txt`
-  ctx.attachment(filename)
-  ctx.body = sendTempFile(JSON.stringify(allDBs))
-}
-
 async function getAllDocType(db, docType) {
   const response = await db.allDocs(
     getDocParams(docType, null, {
|
@ -55,6 +34,28 @@ async function getAllDocType(db, docType) {
|
||||||
return response.rows.map(row => row.doc)
|
return response.rows.map(row => row.doc)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
exports.exportApps = async ctx => {
|
||||||
|
if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
|
||||||
|
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
|
||||||
|
}
|
||||||
|
const apps = await getAllApps({ all: true })
|
||||||
|
const globalDBString = await sdk.backups.exportDB(getGlobalDBName(), {
|
||||||
|
filter: doc => !doc._id.startsWith(DocumentType.USER),
|
||||||
|
})
|
||||||
|
// only export the dev apps as they will be the latest, the user can republish the apps
|
||||||
|
// in their self-hosted environment
|
||||||
|
let appMetadata = apps
|
||||||
|
.filter(app => isDevAppID(app.appId || app._id))
|
||||||
|
.map(app => ({ appId: app.appId || app._id, name: app.name }))
|
||||||
|
const tmpPath = await sdk.backups.exportMultipleApps(
|
||||||
|
appMetadata,
|
||||||
|
globalDBString
|
||||||
|
)
|
||||||
|
const filename = `cloud-export-${new Date().getTime()}.tar.gz`
|
||||||
|
ctx.attachment(filename)
|
||||||
|
ctx.body = streamFile(tmpPath)
|
||||||
|
}
|
||||||
|
|
||||||
async function hasBeenImported() {
|
async function hasBeenImported() {
|
||||||
if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
|
if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
|
||||||
return true
|
return true
|
||||||
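Note: exportApps now streams a .tar.gz containing every dev app database plus the global database (minus user documents), instead of one large JSON body written to a temp file. A minimal TypeScript sketch of a consumer, assuming a cookie with sufficient access; the cookie header and output filename are illustrative only:

import { createWriteStream } from "fs"
import { pipeline } from "stream/promises"
import fetch from "node-fetch"

async function downloadCloudExport(baseUrl: string, cookie: string) {
  const res = await fetch(`${baseUrl}/api/cloud/export`, { headers: { cookie } })
  if (!res.ok || !res.body) {
    throw new Error(`export failed with status ${res.status}`)
  }
  // stream straight to disk - the body is a gzipped tarball, not JSON
  await pipeline(res.body, createWriteStream("cloud-export.tar.gz"))
}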
|
@ -80,17 +81,20 @@ exports.importApps = async ctx => {
|
||||||
"Import file is required and environment must be fresh to import apps."
|
"Import file is required and environment must be fresh to import apps."
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
const importFile = ctx.request.files.importFile
|
if (ctx.request.files.importFile.type !== "application/gzip") {
|
||||||
const importString = readFileSync(importFile.path)
|
ctx.throw(400, "Import file must be a gzipped tarball.")
|
||||||
const dbs = JSON.parse(importString)
|
}
|
||||||
const globalDbImport = dbs.global
|
|
||||||
// remove from the list of apps
|
// initially get all the app databases out of the tarball
|
||||||
delete dbs.global
|
const tmpPath = sdk.backups.untarFile(ctx.request.files.importFile)
|
||||||
|
const globalDbImport = sdk.backups.getGlobalDBFile(tmpPath)
|
||||||
|
const appNames = sdk.backups.getListOfAppsInMulti(tmpPath)
|
||||||
|
|
||||||
const globalDb = getGlobalDB()
|
const globalDb = getGlobalDB()
|
||||||
// load the global db first
|
// load the global db first
|
||||||
await globalDb.load(stringToReadStream(globalDbImport))
|
await globalDb.load(stringToReadStream(globalDbImport))
|
||||||
for (let [appName, appImport] of Object.entries(dbs)) {
|
for (let appName of appNames) {
|
||||||
await createApp(appName, appImport)
|
await createApp(appName, join(tmpPath, appName))
|
||||||
}
|
}
|
||||||
|
|
||||||
// if there are any users make sure to remove them
|
// if there are any users make sure to remove them
|
||||||
|
|
|
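Note: createApp above re-uses the regular application-creation controller by faking the Koa context it expects: useTemplate in the body plus files.templateFile.path pointing at a directory unpacked from the tarball makes create() treat each extracted app as a template import. A sketch of that minimal shape (field names taken from this diff; everything else is illustrative):

// the smallest ctx the application controller needs to import an app from disk
function templateImportCtx(appName: string, appDirectory: string) {
  return {
    request: {
      body: { useTemplate: true, name: appName },
      files: { templateFile: { path: appDirectory } },
    },
  }
}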
@@ -1,23 +1,25 @@
 import Deployment from "./Deployment"
 import {
-  Replication,
-  getProdAppID,
   getDevelopmentAppID,
+  getProdAppID,
+  Replication,
 } from "@budibase/backend-core/db"
 import { DocumentType, getAutomationParams } from "../../../db/utils"
 import {
+  clearMetadata,
   disableAllCrons,
   enableCronTrigger,
-  clearMetadata,
 } from "../../../automations/utils"
 import { app as appCache } from "@budibase/backend-core/cache"
 import {
-  getAppId,
   getAppDB,
-  getProdAppDB,
+  getAppId,
   getDevAppDB,
+  getProdAppDB,
 } from "@budibase/backend-core/context"
 import { events } from "@budibase/backend-core"
+import { backups } from "@budibase/pro"
+import { AppBackupTrigger } from "@budibase/types"

 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000

@@ -98,13 +100,24 @@ async function initDeployedApp(prodAppId: any) {
   console.log("Enabled cron triggers for deployed app..")
 }

-async function deployApp(deployment: any) {
+async function deployApp(deployment: any, userId: string) {
   let replication
   try {
     const appId = getAppId()
     const devAppId = getDevelopmentAppID(appId)
     const productionAppId = getProdAppID(appId)
+
+    // don't try this if feature isn't allowed, will error
+    if (await backups.isEnabled()) {
+      // trigger backup initially
+      await backups.triggerAppBackup(
+        productionAppId,
+        AppBackupTrigger.PUBLISH,
+        {
+          createdBy: userId,
+        }
+      )
+    }
     const config: any = {
       source: devAppId,
       target: productionAppId,

@@ -205,7 +218,7 @@ const _deployApp = async function (ctx: any) {

   console.log("Deploying app...")

-  let app = await deployApp(deployment)
+  let app = await deployApp(deployment, ctx.user._id)

   await events.app.published(app)
   ctx.body = deployment
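Note: publishing now snapshots the production app before replication starts, and the snapshot is gated on backups.isEnabled() because triggering a backup without the licensed feature would error. A condensed TypeScript sketch of the guard, using only names that appear in this diff (error handling is illustrative):

import { backups } from "@budibase/pro"
import { AppBackupTrigger } from "@budibase/types"

async function backupBeforePublish(productionAppId: string, userId: string) {
  // skip silently when the tenant's plan doesn't include app backups
  if (!(await backups.isEnabled())) {
    return
  }
  await backups.triggerAppBackup(productionAppId, AppBackupTrigger.PUBLISH, {
    createdBy: userId,
  })
}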
@@ -5,7 +5,7 @@ require("svelte/register")
 const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const uuid = require("uuid")
-const { ObjectStoreBuckets } = require("../../../constants")
+const { ObjectStoreBuckets, ATTACHMENT_DIR } = require("../../../constants")
 const { processString } = require("@budibase/string-templates")
 const {
   loadHandlebarsFile,

@@ -90,7 +90,7 @@ export const uploadFile = async function (ctx: any) {

     return prepareUpload({
       file,
-      s3Key: `${ctx.appId}/attachments/${processedFileName}`,
+      s3Key: `${ctx.appId}/${ATTACHMENT_DIR}/${processedFileName}`,
       bucket: ObjectStoreBuckets.APPS,
     })
   })
@@ -1,10 +1,11 @@
 const { FieldTypes, FormulaTypes } = require("../../../constants")
-const { getAllInternalTables, clearColumns } = require("./utils")
+const { clearColumns } = require("./utils")
 const { doesContainStrings } = require("@budibase/string-templates")
 const { cloneDeep } = require("lodash/fp")
 const { isEqual, uniq } = require("lodash")
 const { updateAllFormulasInTable } = require("../row/staticFormula")
 const { getAppDB } = require("@budibase/backend-core/context")
+const sdk = require("../../../sdk")

 function isStaticFormula(column) {
   return (

@@ -39,7 +40,7 @@ function getFormulaThatUseColumn(table, columnNames) {
  */
 async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
   // start by retrieving all tables, remove the current table from the list
-  const tables = (await getAllInternalTables()).filter(
+  const tables = (await sdk.tables.getAllInternalTables()).filter(
     tbl => tbl._id !== table._id
   )
   const schemaToUse = oldTable ? oldTable.schema : table.schema

@@ -99,7 +100,7 @@ async function updateRelatedFormulaLinksOnTables(
 ) {
   const db = getAppDB()
   // start by retrieving all tables, remove the current table from the list
-  const tables = (await getAllInternalTables()).filter(
+  const tables = (await sdk.tables.getAllInternalTables()).filter(
     tbl => tbl._id !== table._id
   )
   // clone the tables, so we can compare at end
@@ -3,7 +3,6 @@ const {
   breakExternalTableId,
 } = require("../../../integrations/utils")
 const {
-  getTable,
   generateForeignKey,
   generateJunctionTableName,
   foreignKeyStructure,

@@ -20,6 +19,7 @@ const csvParser = require("../../../utilities/csvParser")
 const { handleRequest } = require("../row/external")
 const { getAppDB } = require("@budibase/backend-core/context")
 const { events } = require("@budibase/backend-core")
+const sdk = require("../../../sdk")

 async function makeTableRequest(
   datasource,

@@ -181,7 +181,7 @@ exports.save = async function (ctx) {

   let oldTable
   if (ctx.request.body && ctx.request.body._id) {
-    oldTable = await getTable(ctx.request.body._id)
+    oldTable = await sdk.tables.getTable(ctx.request.body._id)
   }

   if (hasTypeChanged(tableToSave, oldTable)) {

@@ -281,7 +281,7 @@ exports.save = async function (ctx) {
 }

 exports.destroy = async function (ctx) {
-  const tableToDelete = await getTable(ctx.params.tableId)
+  const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
   if (!tableToDelete || !tableToDelete.created) {
     ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
   }

@@ -303,7 +303,7 @@ exports.destroy = async function (ctx) {
 }

 exports.bulkImport = async function (ctx) {
-  const table = await getTable(ctx.params.tableId)
+  const table = await sdk.tables.getTable(ctx.params.tableId)
   const { dataImport } = ctx.request.body
   if (!dataImport || !dataImport.schema || !dataImport.csvString) {
     ctx.throw(400, "Provided data import information is invalid.")
@@ -4,8 +4,8 @@ const csvParser = require("../../../utilities/csvParser")
 const { isExternalTable, isSQL } = require("../../../integrations/utils")
 const { getDatasourceParams } = require("../../../db/utils")
 const { getAppDB } = require("@budibase/backend-core/context")
-const { getTable, getAllInternalTables } = require("./utils")
 const { events } = require("@budibase/backend-core")
+const sdk = require("../../../sdk")

 function pickApi({ tableId, table }) {
   if (table && !tableId) {

@@ -23,7 +23,7 @@ function pickApi({ tableId, table }) {
 exports.fetch = async function (ctx) {
   const db = getAppDB()

-  const internal = await getAllInternalTables()
+  const internal = await sdk.tables.getAllInternalTables()

   const externalTables = await db.allDocs(
     getDatasourceParams("plus", {

@@ -50,7 +50,7 @@ exports.fetch = async function (ctx) {

 exports.find = async function (ctx) {
   const tableId = ctx.params.tableId
-  ctx.body = await getTable(tableId)
+  ctx.body = await sdk.tables.getTable(tableId)
 }

 exports.save = async function (ctx) {

@@ -101,7 +101,7 @@ exports.validateCSVSchema = async function (ctx) {
   const { csvString, schema = {}, tableId } = ctx.request.body
   let existingTable
   if (tableId) {
-    existingTable = await getTable(tableId)
+    existingTable = await sdk.tables.getTable(tableId)
   }
   let result = await csvParser.parse(csvString, schema)
   if (existingTable) {
@@ -1,12 +1,7 @@
 import { updateLinks, EventType } from "../../../db/linkedRows"
 import { getRowParams, generateTableID } from "../../../db/utils"
 import { FieldTypes } from "../../../constants"
-import {
-  TableSaveFunctions,
-  hasTypeChanged,
-  getTable,
-  handleDataImport,
-} from "./utils"
+import { TableSaveFunctions, hasTypeChanged, handleDataImport } from "./utils"
 const { getAppDB } = require("@budibase/backend-core/context")
 import { isTest } from "../../../environment"
 import {

@@ -19,6 +14,7 @@ import { quotas } from "@budibase/pro"
 import { isEqual } from "lodash"
 import { cloneDeep } from "lodash/fp"
 import env from "../../../environment"
+import sdk from "../../../sdk"

 function checkAutoColumns(table: Table, oldTable: Table) {
   if (!table.schema) {

@@ -188,7 +184,7 @@ export async function destroy(ctx: any) {
 }

 export async function bulkImport(ctx: any) {
-  const table = await getTable(ctx.params.tableId)
+  const table = await sdk.tables.getTable(ctx.params.tableId)
   const { dataImport } = ctx.request.body
   await handleDataImport(ctx.user, table, dataImport)
   return table
@@ -1,11 +1,5 @@
 import { transform } from "../../../utilities/csvParser"
-import {
-  getRowParams,
-  generateRowID,
-  InternalTables,
-  getTableParams,
-  BudibaseInternalDB,
-} from "../../../db/utils"
+import { getRowParams, generateRowID, InternalTables } from "../../../db/utils"
 import { isEqual } from "lodash"
 import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
 import {

@@ -17,11 +11,6 @@ import {
   SwitchableTypes,
   CanSwitchTypes,
 } from "../../../constants"
-import {
-  isExternalTable,
-  breakExternalTableId,
-  isSQL,
-} from "../../../integrations/utils"
 import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 const { getAppDB } = require("@budibase/backend-core/context")

@@ -256,46 +245,6 @@ class TableSaveFunctions {
   }
 }

-export async function getAllInternalTables() {
-  const db = getAppDB()
-  const internalTables = await db.allDocs(
-    getTableParams(null, {
-      include_docs: true,
-    })
-  )
-  return internalTables.rows.map((tableDoc: any) => ({
-    ...tableDoc.doc,
-    type: "internal",
-    sourceId: BudibaseInternalDB._id,
-  }))
-}
-
-export async function getAllExternalTables(datasourceId: any) {
-  const db = getAppDB()
-  const datasource = await db.get(datasourceId)
-  if (!datasource || !datasource.entities) {
-    throw "Datasource is not configured fully."
-  }
-  return datasource.entities
-}
-
-export async function getExternalTable(datasourceId: any, tableName: any) {
-  const entities = await getAllExternalTables(datasourceId)
-  return entities[tableName]
-}
-
-export async function getTable(tableId: any) {
-  const db = getAppDB()
-  if (isExternalTable(tableId)) {
-    let { datasourceId, tableName } = breakExternalTableId(tableId)
-    const datasource = await db.get(datasourceId)
-    const table = await getExternalTable(datasourceId, tableName)
-    return { ...table, sql: isSQL(datasource) }
-  } else {
-    return db.get(tableId)
-  }
-}
-
 export async function checkForViewUpdates(
   table: any,
   rename: any,
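Note: the table helpers deleted above are not gone - every caller in this PR switches to sdk.tables.*. The important behaviour, reconstructed from the removed code, is the internal/external dispatch in getTable; a TypeScript sketch of roughly what the sdk module now provides (not its exact source; import paths shown as they appeared in the deleted file):

import { getAppDB } from "@budibase/backend-core/context"
import { isExternalTable, breakExternalTableId, isSQL } from "../../../integrations/utils"

// internal tables are plain app-DB documents; external ones live on a
// datasource document, keyed by table name
export async function getTable(tableId: any) {
  const db = getAppDB()
  if (isExternalTable(tableId)) {
    let { datasourceId, tableName } = breakExternalTableId(tableId)
    const datasource = await db.get(datasourceId)
    const table = datasource.entities[tableName]
    return { ...table, sql: isSQL(datasource) }
  } else {
    return db.get(tableId)
  }
}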
@@ -3,12 +3,12 @@ const { apiFileReturn } = require("../../../utilities/fileSystem")
 const exporters = require("./exporters")
 const { saveView, getView, getViews, deleteView } = require("./utils")
 const { fetchView } = require("../row")
-const { getTable } = require("../table/utils")
 const { FieldTypes } = require("../../../constants")
 const { getAppDB } = require("@budibase/backend-core/context")
 const { events } = require("@budibase/backend-core")
 const { DocumentType } = require("../../../db/utils")
 const { cloneDeep, isEqual } = require("lodash")
+const sdk = require("../../../sdk")

 exports.fetch = async ctx => {
   ctx.body = await getViews()

@@ -144,7 +144,7 @@ exports.exportView = async ctx => {

   let schema = view && view.meta && view.meta.schema
   const tableId = ctx.params.tableId || view.meta.tableId
-  const table = await getTable(tableId)
+  const table = await sdk.tables.getTable(tableId)
   if (!schema) {
     schema = table.schema
   }
@@ -1,10 +0,0 @@
-const Router = require("@koa/router")
-const controller = require("../controllers/backup")
-const authorized = require("../../middleware/authorized")
-const { BUILDER } = require("@budibase/backend-core/permissions")
-
-const router = new Router()
-
-router.get("/api/backups/export", authorized(BUILDER), controller.exportAppDump)
-
-module.exports = router
@@ -0,0 +1,10 @@
+import Router from "@koa/router"
+import * as controller from "../controllers/backup"
+import authorized from "../../middleware/authorized"
+import { BUILDER } from "@budibase/backend-core/permissions"
+
+const router = new Router()
+
+router.get("/api/backups/export", authorized(BUILDER), controller.exportAppDump)
+
+export default router
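Note: the rewritten route keeps the same path and BUILDER authorization as the file it replaces; only the module syntax changed to ES imports. Exercising it end-to-end looks like the backups test further down - a sketch with supertest, where app and the headers are placeholders:

import request from "supertest"

async function exportAppBackup(app: any, appId: string, appName: string, headers: Record<string, string>) {
  // per the controller change above, the response is now a gzipped tarball
  await request(app)
    .get(`/api/backups/export?appId=${appId}&appname=${encodeURIComponent(appName)}`)
    .set(headers)
    .expect("Content-Type", /gzip/)
    .expect(200)
}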
@@ -25,11 +25,17 @@ import devRoutes from "./dev"
 import cloudRoutes from "./cloud"
 import migrationRoutes from "./migrations"
 import pluginRoutes from "./plugin"
+import Router from "@koa/router"
+import { api } from "@budibase/pro"

 export { default as staticRoutes } from "./static"
 export { default as publicRoutes } from "./public"

-export const mainRoutes = [
+const appBackupRoutes = api.appBackups
+const scheduleRoutes = api.schedules
+export const mainRoutes: Router[] = [
+  appBackupRoutes,
+  backupRoutes,
   authRoutes,
   deployRoutes,
   layoutRoutes,

@@ -49,14 +55,14 @@ export const mainRoutes = [
   permissionRoutes,
   datasourceRoutes,
   queryRoutes,
-  backupRoutes,
   metadataRoutes,
   devRoutes,
   cloudRoutes,
-  // these need to be handled last as they still use /api/:tableId
-  // this could be breaking as koa may recognise other routes as this
-  tableRoutes,
   rowRoutes,
   migrationRoutes,
   pluginRoutes,
+  scheduleRoutes,
+  // these need to be handled last as they still use /api/:tableId
+  // this could be breaking as koa may recognise other routes as this
+  tableRoutes,
 ]
@@ -21,7 +21,7 @@ describe("/backups", () => {
       .set(config.defaultHeaders())
       .expect(200)
     expect(res.text).toBeDefined()
-    expect(res.text.includes(`"db_name":"${config.getAppId()}"`)).toEqual(true)
+    expect(res.headers["content-type"]).toEqual("application/gzip")
     expect(events.app.exported.mock.calls.length).toBe(1)
   })
@@ -1,5 +1,5 @@
-const setup = require("./utilities")
-const { events } = require("@budibase/backend-core")
+import setup from "./utilities"
+import { events } from "@budibase/backend-core"

 describe("/deployments", () => {
   let request = setup.getRequest()

@@ -19,7 +19,7 @@ describe("/deployments", () => {
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
-    expect(events.app.published.mock.calls.length).toBe(1)
+    expect((events.app.published as jest.Mock).mock.calls.length).toBe(1)
     })
   })
 })
@@ -37,6 +37,8 @@ import {
 } from "./utilities/workerRequests"
 import { watch } from "./watch"
 import { initialise as initialiseWebsockets } from "./websocket"
+import sdk from "./sdk"
+import * as pro from "@budibase/pro"

 const app = new Koa()

@@ -102,12 +104,25 @@ server.on("close", async () => {
   }
 })

+const initPro = async () => {
+  await pro.init({
+    backups: {
+      processing: {
+        exportAppFn: sdk.backups.exportApp,
+        importAppFn: sdk.backups.importApp,
+        statsFn: sdk.backups.calculateBackupStats,
+      },
+    },
+  })
+}
+
 module.exports = server.listen(env.PORT || 0, async () => {
   console.log(`Budibase running on ${JSON.stringify(server.address())}`)
   env._set("PORT", server.address().port)
   eventEmitter.emitPort(env.PORT)
   fileSystem.init()
   await redis.init()
+  await initPro()

   // run migrations on startup if not done via http
   // not recommended in a clustered environment
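Note: initPro hands the server's own export/import/stats functions to @budibase/pro at startup rather than having pro import them, keeping the dependency pointing one way (pro never needs to know about the server package). The exact types live in @budibase/pro; the following is only a rough sketch of the contract implied by the call above, with parameter types deliberately left loose:

interface BackupProcessing {
  exportAppFn: (...args: any[]) => Promise<any>
  importAppFn: (...args: any[]) => Promise<any>
  statsFn: (...args: any[]) => Promise<any>
}

interface ProInitOptions {
  backups: { processing: BackupProcessing }
}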
@@ -1,10 +1,10 @@
-const { getTable } = require("../api/controllers/table/utils")
 const {
   findHBSBlocks,
   decodeJSBinding,
   isJSBinding,
   encodeJSBinding,
 } = require("@budibase/string-templates")
+const sdk = require("../sdk")

 /**
  * When values are input to the system generally they will be of type string as this is required for template strings.

@@ -64,7 +64,7 @@ exports.cleanInputValues = (inputs, schema) => {
  * @returns {Promise<Object>} The cleaned up rows object, will should now have all the required primitive types.
  */
 exports.cleanUpRow = async (tableId, row) => {
-  let table = await getTable(tableId)
+  let table = await sdk.tables.getTable(tableId)
   return exports.cleanInputValues(row, { properties: table.schema })
 }
@@ -1,37 +1,17 @@
 const { createBullBoard } = require("@bull-board/api")
 const { BullAdapter } = require("@bull-board/api/bullAdapter")
 const { KoaAdapter } = require("@bull-board/koa")
-const env = require("../environment")
-const Queue = env.isTest()
-  ? require("../utilities/queue/inMemoryQueue")
-  : require("bull")
-const { JobQueues } = require("../constants")
-const { utils } = require("@budibase/backend-core/redis")
-const { opts, redisProtocolUrl } = utils.getRedisOptions()
-const listeners = require("./listeners")
-
-const CLEANUP_PERIOD_MS = 60 * 1000
-const queueConfig = redisProtocolUrl || { redis: opts }
-let cleanupInternal = null
-
-let automationQueue = new Queue(JobQueues.AUTOMATIONS, queueConfig)
-listeners.addListeners(automationQueue)
-
-async function cleanup() {
-  await automationQueue.clean(CLEANUP_PERIOD_MS, "completed")
-}
+const { queue } = require("@budibase/backend-core")
+const automation = require("../threads/automation")
+
+let automationQueue = queue.createQueue(
+  queue.JobQueue.AUTOMATION,
+  automation.removeStalled
+)

 const PATH_PREFIX = "/bulladmin"

 exports.init = () => {
-  // cleanup the events every 5 minutes
-  if (!cleanupInternal) {
-    cleanupInternal = setInterval(cleanup, CLEANUP_PERIOD_MS)
-    // fire off an initial cleanup
-    cleanup().catch(err => {
-      console.error(`Unable to cleanup automation queue initially - ${err}`)
-    })
-  }
   // Set up queues for bull board admin
   const queues = [automationQueue]
   const adapters = []

@@ -48,12 +28,7 @@ exports.init = () => {
 }

 exports.shutdown = async () => {
-  if (automationQueue) {
-    clearInterval(cleanupInternal)
-    await automationQueue.close()
-    automationQueue = null
-  }
-  console.log("Bull shutdown")
+  await queue.shutdown()
 }

-exports.queue = automationQueue
+exports.automationQueue = automationQueue
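Note: queue construction, the in-memory test fallback, event listeners and the periodic completed-job cleanup all moved into backend-core's shared queue module, so this file shrinks to naming the queue and supplying a stalled-job handler. A TypeScript sketch of the caller's side, based on the two arguments used above (createQueue's full signature belongs to backend-core):

import { queue } from "@budibase/backend-core"
import * as automation from "../threads/automation"

// the second argument is invoked by the shared listeners when bull marks a
// job as stalled, mirroring the behaviour of the deleted listeners.ts file
const automationQueue = queue.createQueue(
  queue.JobQueue.AUTOMATION,
  automation.removeStalled
)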
@@ -1,5 +1,5 @@
 const { processEvent } = require("./utils")
-const { queue, shutdown } = require("./bullboard")
+const { automationQueue, shutdown } = require("./bullboard")
 const { TRIGGER_DEFINITIONS, rebootTrigger } = require("./triggers")
 const { ACTION_DEFINITIONS } = require("./actions")

@@ -8,7 +8,7 @@ const { ACTION_DEFINITIONS } = require("./actions")
  */
 exports.init = async function () {
   // this promise will not complete
-  const promise = queue.process(async job => {
+  const promise = automationQueue.process(async job => {
     await processEvent(job)
   })
   // on init we need to trigger any reboot automations

@@ -17,13 +17,13 @@ exports.init = async function () {
 }

 exports.getQueues = () => {
-  return [queue]
+  return [automationQueue]
 }

 exports.shutdown = () => {
   return shutdown()
 }

-exports.queue = queue
+exports.automationQueue = automationQueue
 exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS
 exports.ACTION_DEFINITIONS = ACTION_DEFINITIONS
@@ -1,78 +0,0 @@
-import { Queue, Job, JobId } from "bull"
-import { AutomationEvent } from "../definitions/automations"
-import * as automation from "../threads/automation"
-
-export const addListeners = (queue: Queue) => {
-  logging(queue)
-  handleStalled(queue)
-}
-
-const handleStalled = (queue: Queue) => {
-  queue.on("stalled", async (job: Job) => {
-    await automation.removeStalled(job as AutomationEvent)
-  })
-}
-
-const logging = (queue: Queue) => {
-  if (process.env.NODE_DEBUG?.includes("bull")) {
-    queue
-      .on("error", (error: any) => {
-        // An error occurred.
-        console.error(`automation-event=error error=${JSON.stringify(error)}`)
-      })
-      .on("waiting", (jobId: JobId) => {
-        // A Job is waiting to be processed as soon as a worker is idling.
-        console.log(`automation-event=waiting jobId=${jobId}`)
-      })
-      .on("active", (job: Job, jobPromise: any) => {
-        // A job has started. You can use `jobPromise.cancel()`` to abort it.
-        console.log(`automation-event=active jobId=${job.id}`)
-      })
-      .on("stalled", (job: Job) => {
-        // A job has been marked as stalled. This is useful for debugging job
-        // workers that crash or pause the event loop.
-        console.error(
-          `automation-event=stalled jobId=${job.id} job=${JSON.stringify(job)}`
-        )
-      })
-      .on("progress", (job: Job, progress: any) => {
-        // A job's progress was updated!
-        console.log(
-          `automation-event=progress jobId=${job.id} progress=${progress}`
-        )
-      })
-      .on("completed", (job: Job, result) => {
-        // A job successfully completed with a `result`.
-        console.log(
-          `automation-event=completed jobId=${job.id} result=${result}`
-        )
-      })
-      .on("failed", (job, err: any) => {
-        // A job failed with reason `err`!
-        console.log(`automation-event=failed jobId=${job.id} error=${err}`)
-      })
-      .on("paused", () => {
-        // The queue has been paused.
-        console.log(`automation-event=paused`)
-      })
-      .on("resumed", (job: Job) => {
-        // The queue has been resumed.
-        console.log(`automation-event=paused jobId=${job.id}`)
-      })
-      .on("cleaned", (jobs: Job[], type: string) => {
-        // Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
-        // jobs, and `type` is the type of jobs cleaned.
-        console.log(
-          `automation-event=cleaned length=${jobs.length} type=${type}`
-        )
-      })
-      .on("drained", () => {
-        // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)
-        console.log(`automation-event=drained`)
-      })
-      .on("removed", (job: Job) => {
-        // A job successfully removed.
-        console.log(`automation-event=removed jobId=${job.id}`)
-      })
-  }
-}
@@ -4,7 +4,7 @@ const { coerce } = require("../utilities/rowProcessor")
 const { definitions } = require("./triggerInfo")
 const { isDevAppID } = require("../db/utils")
 // need this to call directly, so we can get a response
-const { queue } = require("./bullboard")
+const { automationQueue } = require("./bullboard")
 const { checkTestFlag } = require("../utilities/redis")
 const utils = require("./utils")
 const env = require("../environment")

@@ -56,7 +56,7 @@ async function queueRelevantRowAutomations(event, eventType) {
         automationTrigger.inputs &&
         automationTrigger.inputs.tableId === event.row.tableId
       ) {
-        await queue.add({ automation, event }, JOB_OPTS)
+        await automationQueue.add({ automation, event }, JOB_OPTS)
       }
     }
   })

@@ -110,7 +110,7 @@ exports.externalTrigger = async function (
   if (getResponses) {
     return utils.processEvent({ data })
   } else {
-    return queue.add(data, JOB_OPTS)
+    return automationQueue.add(data, JOB_OPTS)
   }
 }

@@ -136,7 +136,7 @@ exports.rebootTrigger = async () => {
         timestamp: Date.now(),
       },
     }
-    rebootEvents.push(queue.add(job, JOB_OPTS))
+    rebootEvents.push(automationQueue.add(job, JOB_OPTS))
     }
   }
   await Promise.all(rebootEvents)
@@ -1,7 +1,7 @@
 import { Thread, ThreadType } from "../threads"
 import { definitions } from "./triggerInfo"
 import * as webhooks from "../api/controllers/webhook"
-import { queue } from "./bullboard"
+import { automationQueue } from "./bullboard"
 import newid from "../db/newid"
 import { updateEntityMetadata } from "../utilities"
 import { MetadataTypes, WebhookType } from "../constants"

@@ -79,21 +79,25 @@ export function removeDeprecated(definitions: any) {
 // end the repetition and the job itself
 export async function disableAllCrons(appId: any) {
   const promises = []
-  const jobs = await queue.getRepeatableJobs()
+  const jobs = await automationQueue.getRepeatableJobs()
   for (let job of jobs) {
     if (job.key.includes(`${appId}_cron`)) {
-      promises.push(queue.removeRepeatableByKey(job.key))
+      promises.push(automationQueue.removeRepeatableByKey(job.key))
       if (job.id) {
-        promises.push(queue.removeJobs(job.id))
+        promises.push(automationQueue.removeJobs(job.id))
       }
     }
   }
   return Promise.all(promises)
 }

-export async function disableCron(jobId: string, jobKey: string) {
-  await queue.removeRepeatableByKey(jobKey)
-  await queue.removeJobs(jobId)
+export async function disableCronById(jobId: number | string) {
+  const repeatJobs = await automationQueue.getRepeatableJobs()
+  for (let repeatJob of repeatJobs) {
+    if (repeatJob.id === jobId) {
+      await automationQueue.removeRepeatableByKey(repeatJob.key)
+    }
+  }
   console.log(`jobId=${jobId} disabled`)
 }

@@ -141,7 +145,7 @@ export async function enableCronTrigger(appId: any, automation: Automation) {
   ) {
     // make a job id rather than letting Bull decide, makes it easier to handle on way out
     const jobId = `${appId}_cron_${newid()}`
-    const job: any = await queue.add(
+    const job: any = await automationQueue.add(
       {
         automation,
         event: { appId, timestamp: Date.now() },
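Note on disableCronById above: Bull tracks repeatable (cron) jobs under a separate repeat key, so removing one by job ID means scanning getRepeatableJobs() for the matching entry first. A minimal standalone sketch of the same pattern; the queue name and Redis URL here are placeholder assumptions, not values from this PR (the real queue lives in ./bullboard):

import Bull from "bull"

// Placeholder wiring, for illustration only.
const queue = new Bull("automationQueue", "redis://localhost:6379")

// Remove a repeatable (cron) job when only its job ID is known. Bull keys
// repeatable jobs by a repeat key, so that key has to be looked up first.
async function disableCronById(jobId: number | string) {
  for (const repeatJob of await queue.getRepeatableJobs()) {
    if (repeatJob.id === jobId) {
      await queue.removeRepeatableByKey(repeatJob.key)
    }
  }
  console.log(`jobId=${jobId} disabled`)
}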
@@ -1,10 +1,6 @@
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const { UserStatus } = require("@budibase/backend-core/constants")
-const { ObjectStoreBuckets } = require("@budibase/backend-core/objectStore")
-
-exports.JobQueues = {
-  AUTOMATIONS: "automationQueue",
-}
+const { objectStore } = require("@budibase/backend-core")

 const FilterTypes = {
   STRING: "string",

@@ -211,6 +207,6 @@ exports.AutomationErrors = {
 }

 // pass through the list from the auth/core lib
-exports.ObjectStoreBuckets = ObjectStoreBuckets
+exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets

 exports.MAX_AUTOMATION_RECURRING_ERRORS = 5
@@ -1,6 +1,7 @@
 const newid = require("./newid")
 const {
-  DocumentType: CoreDocTypes,
+  DocumentType: CoreDocType,
+  InternalTable,
   getRoleParams,
   generateRoleID,
   APP_DEV_PREFIX,

@@ -13,6 +14,12 @@ const {
   generateAppID,
   getQueryIndex,
   ViewName,
+  getDocParams,
+  getRowParams,
+  generateRowID,
+  getUserMetadataParams,
+  generateUserMetadataID,
+  getGlobalIDFromUserMetadataID,
 } = require("@budibase/backend-core/db")

 const UNICODE_MAX = "\ufff0"

@@ -23,28 +30,7 @@ const AppStatus = {
   DEPLOYED: "published",
 }

-const DocumentType = {
-  ...CoreDocTypes,
-  TABLE: "ta",
-  ROW: "ro",
-  USER: "us",
-  AUTOMATION: "au",
-  LINK: "li",
-  WEBHOOK: "wh",
-  INSTANCE: "inst",
-  LAYOUT: "layout",
-  SCREEN: "screen",
-  QUERY: "query",
-  DEPLOYMENTS: "deployments",
-  METADATA: "metadata",
-  MEM_VIEW: "view",
-  USER_FLAG: "flag",
-  AUTOMATION_METADATA: "meta_au",
-}
-
-const InternalTables = {
-  USER_METADATA: "ta_users",
-}
+const DocumentType = CoreDocType

 const SearchIndexes = {
   ROWS: "rows",

@@ -64,11 +50,11 @@ exports.APP_PREFIX = APP_PREFIX
 exports.APP_DEV_PREFIX = APP_DEV_PREFIX
 exports.isDevAppID = isDevAppID
 exports.isProdAppID = isProdAppID
-exports.USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
+exports.USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
-exports.LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
+exports.LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
 exports.TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
 exports.ViewName = ViewName
-exports.InternalTables = InternalTables
+exports.InternalTables = InternalTable
 exports.DocumentType = DocumentType
 exports.SEPARATOR = SEPARATOR
 exports.UNICODE_MAX = UNICODE_MAX

@@ -77,36 +63,15 @@ exports.AppStatus = AppStatus
 exports.BudibaseInternalDB = BudibaseInternalDB
 exports.generateAppID = generateAppID
 exports.generateDevAppID = getDevelopmentAppID

 exports.generateRoleID = generateRoleID
 exports.getRoleParams = getRoleParams

 exports.getQueryIndex = getQueryIndex

-/**
- * If creating DB allDocs/query params with only a single top level ID this can be used, this
- * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
- * More complex cases such as link docs and rows which have multiple levels of IDs that their
- * ID consists of need their own functions to build the allDocs parameters.
- * @param {string} docType The type of document which input params are being built for, e.g. user,
- * link, app, table and so on.
- * @param {string|null} docId The ID of the document minus its type - this is only needed if looking
- * for a singular document.
- * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
- * @returns {object} Parameters which can then be used with an allDocs request.
- */
-function getDocParams(docType, docId = null, otherProps = {}) {
-  if (docId == null) {
-    docId = ""
-  }
-  return {
-    ...otherProps,
-    startkey: `${docType}${SEPARATOR}${docId}`,
-    endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,
-  }
-}
-
 exports.getDocParams = getDocParams
+exports.getRowParams = getRowParams
+exports.generateRowID = generateRowID
+exports.getUserMetadataParams = getUserMetadataParams
+exports.generateUserMetadataID = generateUserMetadataID
+exports.getGlobalIDFromUserMetadataID = getGlobalIDFromUserMetadataID

 /**
  * Gets parameters for retrieving tables, this is a utility function for the getDocParams function.

@@ -123,24 +88,6 @@ exports.generateTableID = () => {
   return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
 }

-/**
- * Gets the DB allDocs/query params for retrieving a row.
- * @param {string|null} tableId The table in which the rows have been stored.
- * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
- * left null to get all the rows in the table.
- * @param {object} otherProps Any other properties to add to the request.
- * @returns {object} Parameters which can then be used with an allDocs request.
- */
-exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => {
-  if (tableId == null) {
-    return getDocParams(DocumentType.ROW, null, otherProps)
-  }
-
-  const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
-
-  return getDocParams(DocumentType.ROW, endOfKey, otherProps)
-}
-
 /**
  * Given a row ID this will find the table ID within it (only works for internal tables).
  * @param {string} rowId The ID of the row.

@@ -153,44 +100,6 @@ exports.getTableIDFromRowID = rowId => {
   return `${DocumentType.TABLE}${SEPARATOR}${components[0]}`
 }

-/**
- * Gets a new row ID for the specified table.
- * @param {string} tableId The table which the row is being created for.
- * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
- * @returns {string} The new ID which a row doc can be stored under.
- */
-exports.generateRowID = (tableId, id = null) => {
-  id = id || newid()
-  return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
-}
-
-/**
- * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
- */
-exports.getUserMetadataParams = (userId = null, otherProps = {}) => {
-  return exports.getRowParams(InternalTables.USER_METADATA, userId, otherProps)
-}
-
-/**
- * Generates a new user ID based on the passed in global ID.
- * @param {string} globalId The ID of the global user.
- * @returns {string} The new user ID which the user doc can be stored under.
- */
-exports.generateUserMetadataID = globalId => {
-  return exports.generateRowID(InternalTables.USER_METADATA, globalId)
-}
-
-/**
- * Breaks up the ID to get the global ID.
- */
-exports.getGlobalIDFromUserMetadataID = id => {
-  const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
-  if (!id || !id.includes(prefix)) {
-    return id
-  }
-  return id.split(prefix)[1]
-}
-
 /**
  * Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
  */
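For reference, the getDocParams helper deleted above (now imported from @budibase/backend-core/db) builds CouchDB allDocs key ranges from the document-type prefixes. A rough standalone sketch of the same start/end-key trick, reusing the SEPARATOR and UNICODE_MAX values from this file:

const SEPARATOR = "_"
const UNICODE_MAX = "\ufff0"

// Selects every doc whose _id starts with `${docType}${SEPARATOR}${docId}`;
// appending UNICODE_MAX to the endkey closes the prefix range.
function getDocParams(docType: string, docId = "", otherProps = {}) {
  return {
    ...otherProps,
    startkey: `${docType}${SEPARATOR}${docId}`,
    endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,
  }
}

// e.g. all user-metadata rows (IDs like "ro_ta_users_<globalId>"):
getDocParams("ro", "ta_users_", { include_docs: true })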
@@ -27,18 +27,6 @@ export interface TriggerOutput {
   timestamp?: number
 }

-export interface AutomationEvent {
-  data: {
-    automation: Automation
-    event: any
-  }
-  opts?: {
-    repeat?: {
-      jobId: string
-    }
-  }
-}
-
 export interface AutomationContext extends AutomationResults {
   steps: any[]
   trigger: any
@@ -1,18 +1,11 @@
 import { events } from "@budibase/backend-core"
-import { getTableParams } from "../../../../db/utils"
-import { Table } from "@budibase/types"
-
-const getTables = async (appDb: any): Promise<Table[]> => {
-  const response = await appDb.allDocs(
-    getTableParams(null, {
-      include_docs: true,
-    })
-  )
-  return response.rows.map((row: any) => row.doc)
-}
-
-export const backfill = async (appDb: any, timestamp: string | number) => {
-  const tables = await getTables(appDb)
-
+import sdk from "../../../../sdk"
+
+export const backfill = async (
+  appDb: PouchDB.Database,
+  timestamp: string | number
+) => {
+  const tables = await sdk.tables.getAllInternalTables(appDb)
   for (const table of tables) {
     await events.table.created(table, timestamp)
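A hypothetical call site for the reworked backfill, just to show the new signature; the runner and timestamp plumbing here are assumptions, not part of this PR:

import { backfill } from "./tables"

// Re-emits a table.created event for every internal table, stamped with the
// migration's own timestamp rather than Date.now().
async function runTableBackfill(appDb: PouchDB.Database, timestamp: number) {
  await backfill(appDb, timestamp)
}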
@@ -0,0 +1,2 @@
+export const DB_EXPORT_FILE = "db.txt"
+export const GLOBAL_DB_EXPORT_FILE = "global.txt"
@@ -0,0 +1,171 @@
+import { db as dbCore } from "@budibase/backend-core"
+import { budibaseTempDir } from "../../../utilities/budibaseDir"
+import { retrieveDirectory } from "../../../utilities/fileSystem/utilities"
+import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
+import { ObjectStoreBuckets } from "../../../constants"
+import {
+  LINK_USER_METADATA_PREFIX,
+  TABLE_ROW_PREFIX,
+  USER_METDATA_PREFIX,
+} from "../../../db/utils"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
+import fs from "fs"
+import { join } from "path"
+import env from "../../../environment"
+const uuid = require("uuid/v4")
+const tar = require("tar")
+const MemoryStream = require("memorystream")
+
+type ExportOpts = {
+  filter?: any
+  exportPath?: string
+  tar?: boolean
+  excludeRows?: boolean
+}
+
+function tarFilesToTmp(tmpDir: string, files: string[]) {
+  const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
+  tar.create(
+    {
+      sync: true,
+      gzip: true,
+      file: exportFile,
+      recursive: true,
+      cwd: tmpDir,
+    },
+    files
+  )
+  return exportFile
+}
+
+/**
+ * Exports a DB to either file or a variable (memory).
+ * @param {string} dbName the DB which is to be exported.
+ * @param {object} opts various options for the export, e.g. whether to stream,
+ * a filter function or the name of the export.
+ * @return {*} either a readable stream or a string
+ */
+export async function exportDB(dbName: string, opts: ExportOpts = {}) {
+  return dbCore.doWithDB(dbName, async (db: any) => {
+    // Write the dump to file if required
+    if (opts?.exportPath) {
+      const path = opts?.exportPath
+      const writeStream = fs.createWriteStream(path)
+      await db.dump(writeStream, { filter: opts?.filter })
+      return path
+    } else {
+      // Stringify the dump in memory if required
+      const memStream = new MemoryStream()
+      let appString = ""
+      memStream.on("data", (chunk: any) => {
+        appString += chunk.toString()
+      })
+      await db.dump(memStream, { filter: opts?.filter })
+      return appString
+    }
+  })
+}
+
+function defineFilter(excludeRows?: boolean) {
+  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
+  if (excludeRows) {
+    ids.push(TABLE_ROW_PREFIX)
+  }
+  return (doc: any) =>
+    !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
+}
+
+/**
+ * Local utility to back up the database state for an app, excluding global user
+ * data or user relationships.
+ * @param {string} appId The app to back up
+ * @param {object} config Config to send to export DB/attachment export
+ * @returns {*} either a string or a stream of the backup
+ */
+export async function exportApp(appId: string, config?: ExportOpts) {
+  const prodAppId = dbCore.getProdAppID(appId)
+  const appPath = `${prodAppId}/`
+  // export bucket contents
+  let tmpPath
+  if (!env.isTest()) {
+    tmpPath = await retrieveDirectory(ObjectStoreBuckets.APPS, appPath)
+  } else {
+    tmpPath = createTempFolder(uuid())
+  }
+  const downloadedPath = join(tmpPath, appPath)
+  if (fs.existsSync(downloadedPath)) {
+    const allFiles = fs.readdirSync(downloadedPath)
+    for (let file of allFiles) {
+      const path = join(downloadedPath, file)
+      // move out of app directory, simplify structure
+      fs.renameSync(path, join(downloadedPath, "..", file))
+    }
+    // remove the old app directory created by object export
+    fs.rmdirSync(downloadedPath)
+  }
+  // enforce an export of app DB to the tmp path
+  const dbPath = join(tmpPath, DB_EXPORT_FILE)
+  await exportDB(appId, {
+    ...config,
+    filter: defineFilter(config?.excludeRows),
+    exportPath: dbPath,
+  })
+  // if tar requested, return where the tarball is
+  if (config?.tar) {
+    // now the tmpPath contains both the DB export and attachments, tar this
+    const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
+    // cleanup the tmp export files as tarball returned
+    fs.rmSync(tmpPath, { recursive: true, force: true })
+    return tarPath
+  }
+  // tar not requested, return the directory where the export is
+  else {
+    return tmpPath
+  }
+}
+
+/**
+ * Export all apps + global DB (if supplied) to a single tarball, this includes
+ * the attachments for each app as well.
+ * @param {object[]} appMetadata The IDs and names of apps to export.
+ * @param {string} globalDbContents The contents of the global DB to export as well.
+ * @return {string} The path to the tarball.
+ */
+export async function exportMultipleApps(
+  appMetadata: { appId: string; name: string }[],
+  globalDbContents?: string
+) {
+  const tmpPath = join(budibaseTempDir(), uuid())
+  fs.mkdirSync(tmpPath)
+  let exportPromises: Promise<void>[] = []
+  // export each app to a directory, then move it into the complete export
+  const exportAndMove = async (appId: string, appName: string) => {
+    const path = await exportApp(appId)
+    await fs.promises.rename(path, join(tmpPath, appName))
+  }
+  for (let metadata of appMetadata) {
+    exportPromises.push(exportAndMove(metadata.appId, metadata.name))
+  }
+  // wait for all exports to finish
+  await Promise.all(exportPromises)
+  // add the global DB contents
+  if (globalDbContents) {
+    fs.writeFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), globalDbContents)
+  }
+  const appNames = appMetadata.map(metadata => metadata.name)
+  const tarPath = tarFilesToTmp(tmpPath, [...appNames, GLOBAL_DB_EXPORT_FILE])
+  // clear up the tmp path now tarball generated
+  fs.rmSync(tmpPath, { recursive: true, force: true })
+  return tarPath
+}
+
+/**
+ * Streams a backup of the database state for an app
+ * @param {string} appId The ID of the app which is to be backed up.
+ * @param {boolean} excludeRows Flag to state whether the export should include data.
+ * @returns {*} a readable stream of the backup which is written in real time
+ */
+export async function streamExportApp(appId: string, excludeRows: boolean) {
+  const tmpPath = await exportApp(appId, { excludeRows, tar: true })
+  return streamFile(tmpPath)
+}
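A rough usage sketch for the export helpers above; the import path and wrapper function are illustrative assumptions (the real call sites are the backup controllers, outside this diff):

import { exportApp, streamExportApp } from "./exports"

async function backupApp(appId: string) {
  // Full export (attachments + DB dump) bundled into a .tar.gz on disk
  const tarPath = await exportApp(appId, { tar: true })

  // Or stream the tarball directly (e.g. as an HTTP response body),
  // excluding row data
  const stream = await streamExportApp(appId, true)
  return { tarPath, stream }
}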
@@ -0,0 +1,168 @@
+import { db as dbCore } from "@budibase/backend-core"
+import { TABLE_ROW_PREFIX } from "../../../db/utils"
+import { budibaseTempDir } from "../../../utilities/budibaseDir"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
+import {
+  uploadDirectory,
+  upload,
+} from "../../../utilities/fileSystem/utilities"
+import { downloadTemplate } from "../../../utilities/fileSystem"
+import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
+import { join } from "path"
+import fs from "fs"
+import sdk from "../../"
+import { CouchFindOptions, RowAttachment } from "@budibase/types"
+const uuid = require("uuid/v4")
+const tar = require("tar")
+
+type TemplateType = {
+  file?: {
+    type: string
+    path: string
+  }
+  key?: string
+}
+
+async function updateAttachmentColumns(
+  prodAppId: string,
+  db: PouchDB.Database
+) {
+  // iterate through attachment documents and update them
+  const tables = await sdk.tables.getAllInternalTables(db)
+  for (let table of tables) {
+    const attachmentCols: string[] = []
+    for (let [key, column] of Object.entries(table.schema)) {
+      if (column.type === FieldTypes.ATTACHMENT) {
+        attachmentCols.push(key)
+      }
+    }
+    // no attachment columns, nothing to do
+    if (attachmentCols.length === 0) {
+      continue
+    }
+    // use the CouchDB Mango query API to lookup rows that have attachments
+    const params: CouchFindOptions = {
+      selector: {
+        _id: {
+          $regex: `^${TABLE_ROW_PREFIX}`,
+        },
+      },
+    }
+    attachmentCols.forEach(col => (params.selector[col] = { $exists: true }))
+    const { rows } = await dbCore.directCouchFind(db.name, params)
+    for (let row of rows) {
+      for (let column of attachmentCols) {
+        if (!Array.isArray(row[column])) {
+          continue
+        }
+        row[column] = row[column].map((attachment: RowAttachment) => {
+          // URL looks like: /prod-budi-app-assets/appId/attachments/file.csv
+          const urlParts = attachment.url.split("/")
+          // drop the first empty element
+          urlParts.shift()
+          // get the prefix
+          const prefix = urlParts.shift()
+          // remove the app ID
+          urlParts.shift()
+          // add new app ID
+          urlParts.unshift(prodAppId)
+          const key = urlParts.join("/")
+          return {
+            ...attachment,
+            key,
+            url: `/${prefix}/${key}`,
+          }
+        })
+      }
+    }
+    // write back the updated attachments
+    await db.bulkDocs(rows)
+  }
+}
+
+/**
+ * This function manages temporary template files which are stored by Koa.
+ * @param {Object} template The template object retrieved from the Koa context object.
+ * @returns {Object} Returns a fs read stream which can be loaded into the database.
+ */
+async function getTemplateStream(template: TemplateType) {
+  if (template.file) {
+    return fs.createReadStream(template.file.path)
+  } else if (template.key) {
+    const [type, name] = template.key.split("/")
+    const tmpPath = await downloadTemplate(type, name)
+    return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
+  }
+}
+
+export function untarFile(file: { path: string }) {
+  const tmpPath = join(budibaseTempDir(), uuid())
+  fs.mkdirSync(tmpPath)
+  // extract the tarball
+  tar.extract({
+    sync: true,
+    cwd: tmpPath,
+    file: file.path,
+  })
+  return tmpPath
+}
+
+export function getGlobalDBFile(tmpPath: string) {
+  return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
+}
+
+export function getListOfAppsInMulti(tmpPath: string) {
+  return fs.readdirSync(tmpPath).filter(dir => dir !== GLOBAL_DB_EXPORT_FILE)
+}
+
+export async function importApp(
+  appId: string,
+  db: PouchDB.Database,
+  template: TemplateType
+) {
+  let prodAppId = dbCore.getProdAppID(appId)
+  let dbStream: any
+  const isTar = template.file && template.file.type === "application/gzip"
+  const isDirectory =
+    template.file && fs.lstatSync(template.file.path).isDirectory()
+  if (template.file && (isTar || isDirectory)) {
+    const tmpPath = isTar ? untarFile(template.file) : template.file.path
+    const contents = fs.readdirSync(tmpPath)
+    // have to handle object import
+    if (contents.length) {
+      let promises = []
+      let excludedFiles = [GLOBAL_DB_EXPORT_FILE, DB_EXPORT_FILE]
+      for (let filename of contents) {
+        const path = join(tmpPath, filename)
+        if (excludedFiles.includes(filename)) {
+          continue
+        }
+        filename = join(prodAppId, filename)
+        if (fs.lstatSync(path).isDirectory()) {
+          promises.push(
+            uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
+          )
+        } else {
+          promises.push(
+            upload({
+              bucket: ObjectStoreBuckets.APPS,
+              path,
+              filename,
+            })
+          )
+        }
+      }
+      await Promise.all(promises)
+    }
+    dbStream = fs.createReadStream(join(tmpPath, DB_EXPORT_FILE))
+  } else {
+    dbStream = await getTemplateStream(template)
+  }
+  // @ts-ignore
+  const { ok } = await db.load(dbStream)
+  if (!ok) {
+    throw "Error loading database dump from template."
+  }
+  await updateAttachmentColumns(prodAppId, db)
+  return ok
+}
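The URL rewrite inside updateAttachmentColumns above is easiest to see with a concrete value; a small standalone version of the same string surgery (the sample app IDs are made up, the URL shape follows the comment in the code):

// Rewrites /prod-budi-app-assets/<oldAppId>/attachments/file.csv so the
// attachment points at the imported (prod) app's folder in the object store.
function rewriteAttachmentUrl(url: string, prodAppId: string) {
  const urlParts = url.split("/")
  urlParts.shift()                // drop the empty element before the leading "/"
  const prefix = urlParts.shift() // the bucket prefix, e.g. "prod-budi-app-assets"
  urlParts.shift()                // remove the old app ID
  urlParts.unshift(prodAppId)     // substitute the new app ID
  const key = urlParts.join("/")
  return { key, url: `/${prefix}/${key}` }
}

rewriteAttachmentUrl(
  "/prod-budi-app-assets/app_123/attachments/file.csv",
  "app_456"
)
// => { key: "app_456/attachments/file.csv",
//      url: "/prod-budi-app-assets/app_456/attachments/file.csv" }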
@@ -0,0 +1,9 @@
+import * as exportApps from "./exports"
+import * as importApps from "./imports"
+import * as statistics from "./statistics"
+
+export default {
+  ...exportApps,
+  ...importApps,
+  ...statistics,
+}
@@ -0,0 +1,77 @@
+import { context, db as dbCore } from "@budibase/backend-core"
+import {
+  getDatasourceParams,
+  getTableParams,
+  getAutomationParams,
+  getScreenParams,
+} from "../../../db/utils"
+
+async function runInContext(appId: string, cb: any, db?: PouchDB.Database) {
+  if (db) {
+    return cb(db)
+  } else {
+    const devAppId = dbCore.getDevAppID(appId)
+    return context.doInAppContext(devAppId, () => {
+      const db = context.getAppDB()
+      return cb(db)
+    })
+  }
+}
+
+export async function calculateDatasourceCount(
+  appId: string,
+  db?: PouchDB.Database
+) {
+  return runInContext(
+    appId,
+    async (db: PouchDB.Database) => {
+      const datasourceList = await db.allDocs(getDatasourceParams())
+      const tableList = await db.allDocs(getTableParams())
+      return datasourceList.rows.length + tableList.rows.length
+    },
+    db
+  )
+}
+
+export async function calculateAutomationCount(
+  appId: string,
+  db?: PouchDB.Database
+) {
+  return runInContext(
+    appId,
+    async (db: PouchDB.Database) => {
+      const automationList = await db.allDocs(getAutomationParams())
+      return automationList.rows.length
+    },
+    db
+  )
+}
+
+export async function calculateScreenCount(
+  appId: string,
+  db?: PouchDB.Database
+) {
+  return runInContext(
+    appId,
+    async (db: PouchDB.Database) => {
+      const screenList = await db.allDocs(getScreenParams())
+      return screenList.rows.length
+    },
+    db
+  )
+}
+
+export async function calculateBackupStats(appId: string) {
+  return runInContext(appId, async (db: PouchDB.Database) => {
+    const promises = []
+    promises.push(calculateDatasourceCount(appId, db))
+    promises.push(calculateAutomationCount(appId, db))
+    promises.push(calculateScreenCount(appId, db))
+    const responses = await Promise.all(promises)
+    return {
+      datasources: responses[0],
+      automations: responses[1],
+      screens: responses[2],
+    }
+  })
+}
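Usage is straightforward; calculateBackupStats resolves one DB handle via runInContext and shares it across the three counts. A quick sketch (the wrapper function is illustrative):

import { calculateBackupStats } from "./statistics"

async function logBackupStats(appId: string) {
  const stats = await calculateBackupStats(appId)
  // shape: { datasources: number, automations: number, screens: number }
  console.log(`stats for ${appId}:`, stats)
}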
@@ -0,0 +1,60 @@
+import { getAppDB } from "@budibase/backend-core/context"
+import { BudibaseInternalDB, getTableParams } from "../../../db/utils"
+import {
+  breakExternalTableId,
+  isExternalTable,
+  isSQL,
+} from "../../../integrations/utils"
+import { Table } from "@budibase/types"
+
+async function getAllInternalTables(db?: PouchDB.Database): Promise<Table[]> {
+  if (!db) {
+    db = getAppDB() as PouchDB.Database
+  }
+  const internalTables = await db.allDocs(
+    getTableParams(null, {
+      include_docs: true,
+    })
+  )
+  return internalTables.rows.map((tableDoc: any) => ({
+    ...tableDoc.doc,
+    type: "internal",
+    sourceId: BudibaseInternalDB._id,
+  }))
+}
+
+async function getAllExternalTables(datasourceId: any): Promise<Table[]> {
+  const db = getAppDB()
+  const datasource = await db.get(datasourceId)
+  if (!datasource || !datasource.entities) {
+    throw "Datasource is not configured fully."
+  }
+  return datasource.entities
+}
+
+async function getExternalTable(
+  datasourceId: any,
+  tableName: any
+): Promise<Table> {
+  const entities = await getAllExternalTables(datasourceId)
+  return entities[tableName]
+}
+
+async function getTable(tableId: any): Promise<Table> {
+  const db = getAppDB()
+  if (isExternalTable(tableId)) {
+    let { datasourceId, tableName } = breakExternalTableId(tableId)
+    const datasource = await db.get(datasourceId)
+    const table = await getExternalTable(datasourceId, tableName)
+    return { ...table, sql: isSQL(datasource) }
+  } else {
+    return db.get(tableId)
+  }
+}
+
+export default {
+  getAllInternalTables,
+  getAllExternalTables,
+  getExternalTable,
+  getTable,
+}
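A short sketch of how getTable dispatches: internal table IDs carry the "ta" document prefix while external IDs encode their datasource, so callers don't need to care which kind they hold. The import path and wrapper are assumptions for illustration:

import tables from "./app/tables"

async function describeTable(tableId: string) {
  // Routes to CouchDB for internal tables, or to the datasource's
  // entities for external ones (which also gain a `sql` flag).
  const table = await tables.getTable(tableId)
  console.log(table.name, table.sql ? "(SQL datasource)" : "(internal)")
}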
@@ -0,0 +1,13 @@
+import { default as backups } from "./app/backups"
+import { default as tables } from "./app/tables"
+
+const sdk = {
+  backups,
+  tables,
+}
+
+// default export for TS
+export default sdk
+
+// default export for JS
+module.exports = sdk
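The double default export above is what lets both module systems in the server share one SDK object; roughly (paths and the wrapper are illustrative assumptions):

// TypeScript/ES module callers:
import sdk from "../sdk"

// Legacy CommonJS callers get the identical object:
//   const sdk = require("../sdk")

async function example(appId: string) {
  const tables = await sdk.tables.getAllInternalTables()
  const exportPath = await sdk.backups.exportApp(appId, { excludeRows: true })
  return { tables, exportPath }
}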
@@ -25,7 +25,7 @@ const newid = require("../../db/newid")
 const context = require("@budibase/backend-core/context")
 const { generateDevInfoID, SEPARATOR } = require("@budibase/backend-core/db")
 const { encrypt } = require("@budibase/backend-core/encryption")
-const { DocumentType } = require("../../db/utils")
+const { DocumentType, generateUserMetadataID } = require("../../db/utils")

 const GLOBAL_USER_ID = "us_uuid1"
 const EMAIL = "babs@babs.com"

@@ -95,7 +95,10 @@ class TestConfiguration {

   // use a new id as the name to avoid name collisions
   async init(appName = newid()) {
-    await this.globalUser()
+    this.user = await this.globalUser()
+    this.globalUserId = this.user._id
+    this.userMetadataId = generateUserMetadataID(this.globalUserId)
+
     return this.createApp(appName)
   }
@@ -1,6 +1,11 @@
 import { default as threadUtils } from "./utils"
+import { Job } from "bull"
 threadUtils.threadSetup()
-import { isRecurring, disableCron, isErrorInOutput } from "../automations/utils"
+import {
+  isRecurring,
+  disableCronById,
+  isErrorInOutput,
+} from "../automations/utils"
 import { default as actions } from "../automations/actions"
 import { default as automationUtils } from "../automations/automationUtils"
 import { default as AutomationEmitter } from "../events/AutomationEmitter"

@@ -13,7 +18,6 @@ import {
   LoopStep,
   LoopStepType,
   LoopInput,
-  AutomationEvent,
   TriggerOutput,
   AutomationContext,
   AutomationMetadata,

@@ -73,19 +77,16 @@ class Orchestrator {
   _automation: Automation
   _emitter: any
   _context: AutomationContext
-  _repeat?: { jobId: string; jobKey: string }
+  _job: Job
   executionOutput: AutomationContext

-  constructor(automation: Automation, triggerOutput: TriggerOutput, opts: any) {
+  constructor(job: Job) {
+    let automation = job.data.automation,
+      triggerOutput = job.data.event
     const metadata = triggerOutput.metadata
     this._chainCount = metadata ? metadata.automationChainCount : 0
     this._appId = triggerOutput.appId as string
-    if (opts?.repeat) {
-      this._repeat = {
-        jobId: opts.repeat.jobId,
-        jobKey: opts.repeat.key,
-      }
-    }
+    this._job = job
     const triggerStepId = automation.definition.trigger.stepId
     triggerOutput = this.cleanupTriggerOutputs(triggerStepId, triggerOutput)
     // remove from context

@@ -134,7 +135,7 @@ class Orchestrator {
   }

   async stopCron(reason: string) {
-    if (!this._repeat) {
+    if (!this._job.opts.repeat) {
       return
     }
     logWarn(

@@ -142,7 +143,7 @@ class Orchestrator {
     )
     const automation = this._automation
     const trigger = automation.definition.trigger
-    await disableCron(this._repeat?.jobId, this._repeat?.jobKey)
+    await disableCronById(this._job.id)
     this.updateExecutionOutput(
       trigger.id,
       trigger.stepId,

@@ -156,7 +157,7 @@ class Orchestrator {
   }

   async checkIfShouldStop(metadata: AutomationMetadata): Promise<boolean> {
-    if (!metadata.errorCount || !this._repeat) {
+    if (!metadata.errorCount || !this._job.opts.repeat) {
       return false
     }
     if (metadata.errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {

@@ -475,17 +476,13 @@ class Orchestrator {
   }
 }

-export function execute(input: AutomationEvent, callback: WorkerCallback) {
-  const appId = input.data.event.appId
+export function execute(job: Job, callback: WorkerCallback) {
+  const appId = job.data.event.appId
   if (!appId) {
     throw new Error("Unable to execute, event doesn't contain app ID.")
   }
   doInAppContext(appId, async () => {
-    const automationOrchestrator = new Orchestrator(
-      input.data.automation,
-      input.data.event,
-      input.opts
-    )
+    const automationOrchestrator = new Orchestrator(job)
     try {
       const response = await automationOrchestrator.execute()
       callback(null, response)

@@ -495,17 +492,13 @@ export function execute(job: Job, callback: WorkerCallback) {
   })
 }

-export const removeStalled = async (input: AutomationEvent) => {
-  const appId = input.data.event.appId
+export const removeStalled = async (job: Job) => {
+  const appId = job.data.event.appId
   if (!appId) {
     throw new Error("Unable to execute, event doesn't contain app ID.")
   }
   await doInAppContext(appId, async () => {
-    const automationOrchestrator = new Orchestrator(
-      input.data.automation,
-      input.data.event,
-      input.opts
-    )
+    const automationOrchestrator = new Orchestrator(job)
     await automationOrchestrator.stopCron("stalled")
   })
 }
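With the Orchestrator now consuming a whole Bull Job, the queue side only has to forward jobs: the payload travels on job.data as { automation, event } and cron metadata on job.opts.repeat. A sketch of that wiring under those assumptions (the real codebase routes execution through worker threads, which this simplification skips):

import { Job } from "bull"
import { automationQueue } from "../automations/bullboard"
import { execute } from "../threads/automation"

// Each job carries { automation, event } in job.data; the Orchestrator
// reads job.opts.repeat directly to know whether it is a cron run.
automationQueue.process((job: Job, done) => {
  execute(job, (err: any, result: any) => done(err, result))
})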
@@ -2,17 +2,11 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
-const {
-  doWithDB,
-  dangerousGetDB,
-  closeDB,
-} = require("@budibase/backend-core/db")
 const { ObjectStoreBuckets } = require("../../constants")
 const {
   upload,
   retrieve,
   retrieveToTmp,
-  streamUpload,
   deleteFolder,
   downloadTarball,
   downloadTarballDirect,

@@ -21,12 +15,6 @@ const {
 const { updateClientLibrary } = require("./clientLibrary")
 const { checkSlashesInUrl } = require("../")
 const env = require("../../environment")
-const {
-  USER_METDATA_PREFIX,
-  LINK_USER_METADATA_PREFIX,
-  TABLE_ROW_PREFIX,
-} = require("../../db/utils")
-const MemoryStream = require("memorystream")
 const { getAppId } = require("@budibase/backend-core/context")
 const tar = require("tar")
 const fetch = require("node-fetch")

@@ -86,21 +74,6 @@ exports.checkDevelopmentEnvironment = () => {
   }
 }

-/**
- * This function manages temporary template files which are stored by Koa.
- * @param {Object} template The template object retrieved from the Koa context object.
- * @returns {Object} Returns an fs read stream which can be loaded into the database.
- */
-exports.getTemplateStream = async template => {
-  if (template.file) {
-    return fs.createReadStream(template.file.path)
-  } else {
-    const [type, name] = template.key.split("/")
-    const tmpPath = await exports.downloadTemplate(type, name)
-    return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
-  }
-}
-
 /**
  * Used to retrieve a handlebars file from the system which will be used as a template.
  * This is allowable as the template handlebars files should be static and identical across

@@ -124,98 +97,8 @@ exports.apiFileReturn = contents => {
   return fs.createReadStream(path)
 }

-exports.defineFilter = excludeRows => {
-  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
-  if (excludeRows) {
-    ids.push(TABLE_ROW_PREFIX)
-  }
-  return doc =>
-    !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
-}
-
-/**
- * Local utility to back up the database state for an app, excluding global user
- * data or user relationships.
- * @param {string} appId The app to backup
- * @param {object} config Config to send to export DB
- * @param {boolean} excludeRows Flag to state whether the export should include data.
- * @returns {*} either a string or a stream of the backup
- */
-const backupAppData = async (appId, config, excludeRows) => {
-  return await exports.exportDB(appId, {
-    ...config,
-    filter: exports.defineFilter(excludeRows),
-  })
-}
-
-/**
- * Takes a copy of the database state for an app to the object store.
- * @param {string} appId The ID of the app which is to be backed up.
- * @param {string} backupName The name of the backup located in the object store.
- * @return {*} a readable stream to the completed backup file
- */
-exports.performBackup = async (appId, backupName) => {
-  return await backupAppData(appId, { exportName: backupName })
-}
-
-/**
- * Streams a backup of the database state for an app
- * @param {string} appId The ID of the app which is to be backed up.
- * @param {boolean} excludeRows Flag to state whether the export should include data.
- * @returns {*} a readable stream of the backup which is written in real time
- */
-exports.streamBackup = async (appId, excludeRows) => {
-  return await backupAppData(appId, { stream: true }, excludeRows)
-}
-
-/**
- * Exports a DB to either file or a variable (memory).
- * @param {string} dbName the DB which is to be exported.
- * @param {string} exportName optional - provide a filename to write the backup to a file
- * @param {boolean} stream optional - whether to perform a full backup
- * @param {function} filter optional - a filter function to clear out any un-wanted docs.
- * @return {*} either a readable stream or a string
- */
-exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
-  // streaming a DB dump is a bit more complicated, can't close DB
-  if (stream) {
-    const db = dangerousGetDB(dbName)
-    const memStream = new MemoryStream()
-    memStream.on("end", async () => {
-      await closeDB(db)
-    })
-    db.dump(memStream, { filter })
-    return memStream
-  }
-
-  return doWithDB(dbName, async db => {
-    // Write the dump to file if required
-    if (exportName) {
-      const path = join(budibaseTempDir(), exportName)
-      const writeStream = fs.createWriteStream(path)
-      await db.dump(writeStream, { filter })
-
-      // Upload the dump to the object store if self hosted
-      if (env.SELF_HOSTED) {
-        await streamUpload(
-          ObjectStoreBuckets.BACKUPS,
-          join(dbName, exportName),
-          fs.createReadStream(path)
-        )
-      }
-
-      return fs.createReadStream(path)
-    }
-
-    // Stringify the dump in memory if required
-    const memStream = new MemoryStream()
-    let appString = ""
-    memStream.on("data", chunk => {
-      appString += chunk.toString()
-    })
-    await db.dump(memStream, { filter })
-    return appString
-  })
-}
+exports.streamFile = path => {
+  return fs.createReadStream(path)
+}

 /**
@@ -6,6 +6,7 @@ const {
   streamUpload,
   retrieve,
   retrieveToTmp,
+  retrieveDirectory,
   deleteFolder,
   uploadDirectory,
   downloadTarball,

@@ -27,6 +28,7 @@ exports.upload = upload
 exports.streamUpload = streamUpload
 exports.retrieve = retrieve
 exports.retrieveToTmp = retrieveToTmp
+exports.retrieveDirectory = retrieveDirectory
 exports.deleteFolder = deleteFolder
 exports.uploadDirectory = uploadDirectory
 exports.downloadTarball = downloadTarball
@ -2003,18 +2003,6 @@
|
||||||
resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
|
resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
|
||||||
integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
|
integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
|
||||||
|
|
||||||
"@koa/router@8.0.0":
|
|
||||||
version "8.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@koa/router/-/router-8.0.0.tgz#fd4ffa6f03d8293a04c023cb4a22b612401fbe70"
|
|
||||||
integrity sha512-P70CGOGs6JPu/mnrd9lt6ESzlBXLHT/uTK8+5U4M7Oapt8la/tiZv2c7X9jq0ksFsM59RH3AwJYzKOuavDcjIw==
|
|
||||||
dependencies:
|
|
||||||
debug "^3.1.0"
|
|
||||||
http-errors "^1.3.1"
|
|
||||||
koa-compose "^3.0.0"
|
|
||||||
methods "^1.0.1"
|
|
||||||
path-to-regexp "^1.1.1"
|
|
||||||
urijs "^1.19.0"
|
|
||||||
|
|
||||||
"@koa/router@8.0.8":
|
"@koa/router@8.0.8":
|
||||||
version "8.0.8"
|
version "8.0.8"
|
||||||
resolved "https://registry.yarnpkg.com/@koa/router/-/router-8.0.8.tgz#95f32d11373d03d89dcb63fabe9ac6f471095236"
|
resolved "https://registry.yarnpkg.com/@koa/router/-/router-8.0.8.tgz#95f32d11373d03d89dcb63fabe9ac6f471095236"
|
||||||
|
@ -2668,14 +2656,6 @@
|
||||||
dependencies:
|
dependencies:
|
||||||
bson "*"
|
bson "*"
|
||||||
|
|
||||||
"@types/bull@3.15.8":
|
|
||||||
version "3.15.8"
|
|
||||||
resolved "https://registry.yarnpkg.com/@types/bull/-/bull-3.15.8.tgz#ae2139f94490d740b37c8da5d828ce75dd82ce7c"
|
|
||||||
integrity sha512-8DbSPMSsZH5PWPnGEkAZLYgJEH4ghHJNKF7LB6Wr5R0/v6g+Vs+JoaA7kcvLtHE936xg2WpFPkaoaJgExOmKDw==
|
|
||||||
dependencies:
|
|
||||||
"@types/ioredis" "*"
|
|
||||||
"@types/redis" "^2.8.0"
|
|
||||||
|
|
||||||
"@types/caseless@*":
|
"@types/caseless@*":
|
||||||
version "0.12.2"
|
version "0.12.2"
|
||||||
resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8"
|
resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8"
|
||||||
|
@ -2792,7 +2772,7 @@
|
||||||
resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1"
|
resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1"
|
||||||
integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==
|
integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==
|
||||||
|
|
||||||
"@types/ioredis@*":
|
"@types/ioredis@4.28.10":
|
||||||
version "4.28.10"
|
version "4.28.10"
|
||||||
resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff"
|
resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff"
|
||||||
integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==
|
integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==
|
||||||
|
@ -2865,10 +2845,10 @@
|
||||||
"@types/koa-compose" "*"
|
"@types/koa-compose" "*"
|
||||||
"@types/node" "*"
|
"@types/node" "*"
|
||||||
|
|
||||||
"@types/koa__router@8.0.0":
|
"@types/koa__router@8.0.11":
|
||||||
version "8.0.0"
|
version "8.0.11"
|
||||||
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.0.tgz#057a7254a25df5bc93b42a1acacb2d99cd02d297"
|
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.11.tgz#d7b37e6db934fc072ea1baa2ab92bc8ac4564f3e"
|
||||||
integrity sha512-XaGqudqJyFOmByN+f9BrEIZEgLfBnvVtZlm/beuTxWpbWpMHiA+ZmA+mB5dsrbGemko61wUA+WG0jhUzMSq+JA==
|
integrity sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==
|
||||||
dependencies:
|
dependencies:
|
||||||
"@types/koa" "*"
|
"@types/koa" "*"
|
||||||
|
|
||||||
|
@ -2971,13 +2951,6 @@
|
||||||
dependencies:
|
dependencies:
|
||||||
redis "*"
|
redis "*"
|
||||||
|
|
||||||
"@types/redis@^2.8.0":
|
|
||||||
version "2.8.32"
|
|
||||||
resolved "https://registry.yarnpkg.com/@types/redis/-/redis-2.8.32.tgz#1d3430219afbee10f8cfa389dad2571a05ecfb11"
|
|
||||||
integrity sha512-7jkMKxcGq9p242exlbsVzuJb57KqHRhNl4dHoQu2Y5v9bCAbtIXXH0R3HleSQW4CTOqpHIYUW3t6tpUj4BVQ+w==
|
|
||||||
dependencies:
|
|
||||||
"@types/node" "*"
|
|
||||||
|
|
||||||
"@types/request@^2.48.7":
|
"@types/request@^2.48.7":
|
||||||
version "2.48.8"
|
version "2.48.8"
|
||||||
resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.8.tgz#0b90fde3b655ab50976cb8c5ac00faca22f5a82c"
|
resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.8.tgz#0b90fde3b655ab50976cb8c5ac00faca22f5a82c"
|
||||||
|
@ -3528,7 +3501,7 @@ any-base@^1.1.0:
|
||||||
resolved "https://registry.yarnpkg.com/any-base/-/any-base-1.1.0.tgz#ae101a62bc08a597b4c9ab5b7089d456630549fe"
|
resolved "https://registry.yarnpkg.com/any-base/-/any-base-1.1.0.tgz#ae101a62bc08a597b4c9ab5b7089d456630549fe"
|
||||||
integrity sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==
|
integrity sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==
|
||||||
|
|
||||||
any-promise@^1.0.0, any-promise@^1.1.0:
|
any-promise@^1.0.0:
|
||||||
version "1.3.0"
|
version "1.3.0"
|
||||||
resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f"
|
resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f"
|
||||||
integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==
|
integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==
|
||||||
|
@ -4376,10 +4349,10 @@ buffer@^5.1.0, buffer@^5.2.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0:
|
||||||
base64-js "^1.3.1"
|
base64-js "^1.3.1"
|
||||||
ieee754 "^1.1.13"
|
ieee754 "^1.1.13"
|
||||||
|
|
||||||
bull@4.8.5:
|
bull@4.10.1:
|
||||||
version "4.8.5"
|
version "4.10.1"
|
||||||
resolved "https://registry.yarnpkg.com/bull/-/bull-4.8.5.tgz#eebafddc3249d6d5e8ced1c42b8bfa8efcc274aa"
|
resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f"
|
||||||
integrity sha512-2Z630e4f6VsLJnWMAtfEHwIqJYmND4W3dcG48RIbXeWpvb4UnYtpe/zxEdslJu0PKrltB4IkFj5YtBsdeQRn8w==
|
integrity sha512-Fp21tRPb2EaZPVfmM+ONZKVz2RA+to+zGgaTLyCKt3JMSU8OOBqK8143OQrnGuGpsyE5G+9FevFAGhdZZfQP2g==
|
||||||
dependencies:
|
dependencies:
|
||||||
cron-parser "^4.2.1"
|
cron-parser "^4.2.1"
|
||||||
debuglog "^1.0.0"
|
debuglog "^1.0.0"
|
||||||
|
@ -4906,14 +4879,6 @@ cookiejar@^2.1.0:
|
||||||
resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.3.tgz#fc7a6216e408e74414b90230050842dacda75acc"
|
resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.3.tgz#fc7a6216e408e74414b90230050842dacda75acc"
|
||||||
integrity sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==
|
integrity sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==
|
||||||
|
|
||||||
cookies@~0.7.1:
|
|
||||||
version "0.7.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.7.3.tgz#7912ce21fbf2e8c2da70cf1c3f351aecf59dadfa"
|
|
||||||
integrity sha512-+gixgxYSgQLTaTIilDHAdlNPZDENDQernEMiIcZpYYP14zgHsCt4Ce1FEjFtcp6GefhozebB6orvhAAWx/IS0A==
|
|
||||||
dependencies:
|
|
||||||
depd "~1.1.2"
|
|
||||||
keygrip "~1.0.3"
|
|
||||||
|
|
||||||
cookies@~0.8.0:
|
cookies@~0.8.0:
|
||||||
version "0.8.0"
|
version "0.8.0"
|
||||||
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.8.0.tgz#1293ce4b391740a8406e3c9870e828c4b54f3f90"
|
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.8.0.tgz#1293ce4b391740a8406e3c9870e828c4b54f3f90"
|
||||||
|
@ -5137,13 +5102,6 @@ debug@^3.1.0, debug@^3.2.6, debug@^3.2.7:
|
||||||
dependencies:
|
dependencies:
|
||||||
ms "^2.1.1"
|
ms "^2.1.1"
|
||||||
|
|
||||||
debug@~3.1.0:
|
|
||||||
version "3.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
|
|
||||||
integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
|
|
||||||
dependencies:
|
|
||||||
ms "2.0.0"
|
|
||||||
|
|
||||||
debuglog@^1.0.0:
|
debuglog@^1.0.0:
|
||||||
version "1.0.1"
|
version "1.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
|
resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
|
||||||
|
@@ -5714,11 +5672,6 @@ error-ex@^1.3.1:
   dependencies:
     is-arrayish "^0.2.1"

-error-inject@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/error-inject/-/error-inject-1.0.0.tgz#e2b3d91b54aed672f309d950d154850fa11d4f37"
-  integrity sha512-JM8N6PytDbmIYm1IhPWlo8vr3NtfjhDY/1MhD/a5b/aad/USE8a0+NsqE9d5n+GVGmuNkPQWm4bFQWv18d8tMg==
-
 error-stack-parser@^2.0.6:
   version "2.1.4"
   resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286"

@@ -7400,7 +7353,7 @@ http-errors@2.0.0:
     statuses "2.0.1"
     toidentifier "1.0.1"

-http-errors@^1.3.1, http-errors@^1.6.3, http-errors@^1.7.3, http-errors@~1.8.0:
+http-errors@^1.6.3, http-errors@^1.7.3, http-errors@~1.8.0:
   version "1.8.1"
   resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c"
   integrity sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==

@@ -9274,11 +9227,6 @@ jws@^4.0.0:
     jwa "^2.0.0"
     safe-buffer "^5.0.1"

-keygrip@~1.0.3:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.0.3.tgz#399d709f0aed2bab0a059e0cdd3a5023a053e1dc"
-  integrity sha512-/PpesirAIfaklxUzp4Yb7xBper9MwP6hNRA6BGGUFCgbJ+BM5CKBtsoxinNXkLHAr+GXS1/lSlF2rP7cv5Fl+g==
-
 keygrip@~1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.1.0.tgz#871b1681d5e159c62a445b0c74b615e0917e7226"

@@ -9364,13 +9312,6 @@ koa-body@4.2.0:
     co-body "^5.1.1"
     formidable "^1.1.1"

-koa-compose@^3.0.0:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/koa-compose/-/koa-compose-3.2.1.tgz#a85ccb40b7d986d8e5a345b3a1ace8eabcf54de7"
-  integrity sha512-8gen2cvKHIZ35eDEik5WOo8zbVp9t4cP8p4hW4uE55waxolLRexKKrqfCpwhGVppnB40jWeF8bZeTVg99eZgPw==
-  dependencies:
-    any-promise "^1.1.0"
-
 koa-compose@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/koa-compose/-/koa-compose-4.1.0.tgz#507306b9371901db41121c812e923d0d67d3e877"

@@ -9392,14 +9333,6 @@ koa-connect@2.1.0:
   resolved "https://registry.yarnpkg.com/koa-connect/-/koa-connect-2.1.0.tgz#16bce0a917c4cb24233aaac83fbc5b83804b4a1c"
   integrity sha512-O9pcFafHk0oQsBevlbTBlB9co+2RUQJ4zCzu3qJPmGlGoeEZkne+7gWDkecqDPSbCtED6LmhlQladxs6NjOnMQ==

-koa-convert@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/koa-convert/-/koa-convert-1.2.0.tgz#da40875df49de0539098d1700b50820cebcd21d0"
-  integrity sha512-K9XqjmEDStGX09v3oxR7t5uPRy0jqJdvodHa6wxWTHrTfDq0WUNnYTOOUZN6g8OM8oZQXprQASbiIXG2Ez8ehA==
-  dependencies:
-    co "^4.6.0"
-    koa-compose "^3.0.0"
-
 koa-convert@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/koa-convert/-/koa-convert-2.0.0.tgz#86a0c44d81d40551bae22fee6709904573eea4f5"

@@ -9501,37 +9434,7 @@ koa2-ratelimit@1.1.1:
   resolved "https://registry.yarnpkg.com/koa2-ratelimit/-/koa2-ratelimit-1.1.1.tgz#9c1d8257770e4a0a08063ba2ddcaf690fd457d23"
   integrity sha512-IpxGMdZqEhMykW0yYKGVB4vDEacPvSBH4hNpDL38ABj3W2KHNLujAljGEDg7eEjXvrRbXRSWXzANhV3c9v7nyg==

-koa@2.7.0:
-  version "2.7.0"
-  resolved "https://registry.yarnpkg.com/koa/-/koa-2.7.0.tgz#7e00843506942b9d82c6cc33749f657c6e5e7adf"
-  integrity sha512-7ojD05s2Q+hFudF8tDLZ1CpCdVZw8JQELWSkcfG9bdtoTDzMmkRF6BQBU7JzIzCCOY3xd3tftiy/loHBUYaY2Q==
-  dependencies:
-    accepts "^1.3.5"
-    cache-content-type "^1.0.0"
-    content-disposition "~0.5.2"
-    content-type "^1.0.4"
-    cookies "~0.7.1"
-    debug "~3.1.0"
-    delegates "^1.0.0"
-    depd "^1.1.2"
-    destroy "^1.0.4"
-    error-inject "^1.0.0"
-    escape-html "^1.0.3"
-    fresh "~0.5.2"
-    http-assert "^1.3.0"
-    http-errors "^1.6.3"
-    is-generator-function "^1.0.7"
-    koa-compose "^4.1.0"
-    koa-convert "^1.2.0"
-    koa-is-json "^1.0.0"
-    on-finished "^2.3.0"
-    only "~0.0.2"
-    parseurl "^1.3.2"
-    statuses "^1.5.0"
-    type-is "^1.6.16"
-    vary "^1.1.2"
-
-koa@^2.13.1, koa@^2.13.4:
+koa@2.13.4, koa@^2.13.1, koa@^2.13.4:
   version "2.13.4"
   resolved "https://registry.yarnpkg.com/koa/-/koa-2.13.4.tgz#ee5b0cb39e0b8069c38d115139c774833d32462e"
   integrity sha512-43zkIKubNbnrULWlHdN5h1g3SEKXOEzoAlRsHOTFpnlDu8JlAOZSMJBLULusuXRequboiwJcj5vtYXKB3k7+2g==

@@ -10125,7 +10028,7 @@ merge2@^1.3.0, merge2@^1.4.1:
   resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
   integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==

-methods@^1.0.1, methods@^1.1.1, methods@^1.1.2:
+methods@^1.1.1, methods@^1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
   integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==

@@ -11218,7 +11121,7 @@ path-parser@^6.1.0:
     search-params "3.0.0"
     tslib "^1.10.0"

-path-to-regexp@1.x, path-to-regexp@^1.1.1:
+path-to-regexp@1.x:
   version "1.8.0"
   resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
   integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==

@@ -14226,7 +14129,7 @@ uri-js@^4.2.2:
   dependencies:
     punycode "^2.1.0"

-urijs@^1.19.0, urijs@^1.19.2:
+urijs@^1.19.2:
   version "1.19.11"
   resolved "https://registry.yarnpkg.com/urijs/-/urijs-1.19.11.tgz#204b0d6b605ae80bea54bea39280cdb7c9f923cc"
   integrity sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==

@@ -16,6 +16,7 @@
     "@types/koa": "2.13.4",
     "@types/node": "14.18.20",
     "rimraf": "3.0.2",
-    "typescript": "4.7.3"
+    "typescript": "4.7.3",
+    "@types/pouchdb": "6.4.0"
   }
 }

@@ -0,0 +1,17 @@
+import { AppBackupTrigger, AppBackupType } from "../../../documents"
+
+export interface SearchAppBackupsRequest {
+  trigger: AppBackupTrigger
+  type: AppBackupType
+  startDate: string
+  endDate: string
+  page?: string
+}
+
+export interface CreateAppBackupRequest {
+  name: string
+}
+
+export interface UpdateAppBackupRequest {
+  name: string
+}

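A minimal sketch of how these new request bodies compose on the client side; the field values below are invented for illustration, and re-exporting these names from "@budibase/types" is assumed from the package's barrel exports rather than shown in this hunk:

import {
  AppBackupTrigger,
  AppBackupType,
  SearchAppBackupsRequest,
} from "@budibase/types"

// Hypothetical search body: manually triggered exports inside a date window.
const searchBody: SearchAppBackupsRequest = {
  trigger: AppBackupTrigger.MANUAL,
  type: AppBackupType.BACKUP,
  startDate: "2022-10-01T00:00:00.000Z",
  endDate: "2022-10-31T23:59:59.999Z",
}
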
@@ -0,0 +1 @@
+export * from "./backup"

@@ -1,3 +1,5 @@
 export * from "./analytics"
 export * from "./user"
 export * from "./errors"
+export * from "./schedule"
+export * from "./app"

@@ -0,0 +1,15 @@
+import {
+  ScheduleMetadata,
+  ScheduleRepeatPeriod,
+  ScheduleType,
+} from "../../documents"
+
+export interface CreateScheduleRequest {
+  type: ScheduleType
+  name: string
+  startDate: string
+  repeat: ScheduleRepeatPeriod
+  metadata: ScheduleMetadata
+}
+
+export interface UpdateScheduleRequest extends CreateScheduleRequest {}

@@ -44,3 +44,10 @@ export interface InviteUsersResponse {
   successful: { email: string }[]
   unsuccessful: { email: string; reason: string }[]
 }
+
+export interface SearchUsersRequest {
+  page?: string
+  email?: string
+  appId?: string
+  userIds?: string[]
+}

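A sketch of a body the user search endpoint could now accept with this type; every field is optional, and the IDs below are made up for illustration:

import { SearchUsersRequest } from "@budibase/types"

// Hypothetical lookup of two specific users, scoped to one app.
const body: SearchUsersRequest = {
  appId: "app_dev_example",
  userIds: ["us_one", "us_two"],
}
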
@@ -0,0 +1,68 @@
+import { Document } from "../document"
+import { User } from "../../"
+
+export enum AppBackupType {
+  BACKUP = "backup",
+  RESTORE = "restore",
+}
+
+export enum AppBackupStatus {
+  STARTED = "started",
+  PENDING = "pending",
+  COMPLETE = "complete",
+  FAILED = "failed",
+}
+
+export enum AppBackupTrigger {
+  PUBLISH = "publish",
+  MANUAL = "manual",
+  SCHEDULED = "scheduled",
+  RESTORING = "restoring",
+}
+
+export interface AppBackupContents {
+  datasources: string[]
+  screens: string[]
+  automations: string[]
+}
+
+export interface AppBackupMetadata {
+  appId: string
+  trigger?: AppBackupTrigger
+  type: AppBackupType
+  status: AppBackupStatus
+  name?: string
+  createdBy?: string | User
+  timestamp: string
+  contents?: AppBackupContents
+}
+
+export interface AppBackup extends Document, AppBackupMetadata {
+  filename?: string
+}
+
+export type AppBackupFetchOpts = {
+  trigger?: AppBackupTrigger
+  type?: AppBackupType
+  limit?: number
+  page?: string
+  paginate?: boolean
+  startDate?: string
+  endDate?: string
+}
+
+export interface AppBackupQueueData {
+  appId: string
+  docId: string
+  docRev: string
+  export?: {
+    trigger: AppBackupTrigger
+    name?: string
+    createdBy?: string
+  }
+  import?: {
+    backupId: string
+    nameForBackup: string
+    createdBy?: string
+  }
+}

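A sketch of the queue payload an export job might carry, based only on the shape above; the IDs and revision are invented, and the reading that docId/docRev point at the backup metadata document the worker updates is an assumption from the field names:

import { AppBackupQueueData, AppBackupTrigger } from "@budibase/types"

// Hypothetical publish-triggered export job.
const job: AppBackupQueueData = {
  appId: "app_example",
  docId: "backup_example",
  docRev: "1-abc",
  export: {
    trigger: AppBackupTrigger.PUBLISH,
    createdBy: "us_example",
  },
}
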
@@ -10,3 +10,4 @@ export * from "./view"
 export * from "../document"
 export * from "./row"
 export * from "./user"
+export * from "./backup"

@@ -16,6 +16,14 @@ export enum FieldType {
   INTERNAL = "internal",
 }

+export interface RowAttachment {
+  size: number
+  name: string
+  url: string
+  extension: string
+  key: string
+}
+
 export interface Row extends Document {
   type?: string
   tableId?: string

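A sketch of an attachment value matching the new RowAttachment shape; the values are invented, and the reading that key addresses the object store while url is the served path is an assumption from the field names:

import { RowAttachment } from "@budibase/types"

const attachment: RowAttachment = {
  size: 1024,
  name: "logo.png",
  extension: "png",
  key: "app_example/attachments/logo.png",
  url: "/files/app_example/attachments/logo.png",
}
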
@@ -49,6 +49,7 @@ export interface Table extends Document {
   sourceId?: string
   relatedFormula?: string[]
   constrained?: string[]
+  sql?: boolean
   indexes?: { [key: string]: any }
   dataImport?: { [key: string]: any }
 }

@@ -3,3 +3,4 @@ export * from "./user"
 export * from "./userGroup"
 export * from "./plugin"
 export * from "./quotas"
+export * from "./schedule"

@@ -0,0 +1,32 @@
+import { Document } from "../document"
+
+export enum ScheduleType {
+  APP_BACKUP = "app_backup",
+}
+
+export enum ScheduleRepeatPeriod {
+  DAILY = "daily",
+  WEEKLY = "weekly",
+  MONTHLY = "monthly",
+}
+
+export interface Schedule extends Document {
+  type: ScheduleType
+  name: string
+  startDate: string
+  repeat: ScheduleRepeatPeriod
+  metadata: ScheduleMetadata
+}
+
+export type ScheduleMetadata = AppBackupScheduleMetadata
+
+export const isAppBackupMetadata = (
+  type: ScheduleType,
+  metadata: ScheduleMetadata
+): metadata is AppBackupScheduleMetadata => {
+  return type === ScheduleType.APP_BACKUP
+}
+
+export interface AppBackupScheduleMetadata {
+  apps: string[]
+}

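A sketch of how the isAppBackupMetadata guard narrows a schedule's metadata; the helper function itself is hypothetical:

import { Schedule, isAppBackupMetadata } from "@budibase/types"

// Returns the app IDs a backup schedule covers, or none for other types.
function appsForSchedule(schedule: Schedule): string[] {
  if (isAppBackupMetadata(schedule.type, schedule.metadata)) {
    // metadata is narrowed to AppBackupScheduleMetadata here.
    return schedule.metadata.apps
  }
  return []
}
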
@@ -0,0 +1,22 @@
+export type PouchOptions = {
+  inMemory: boolean
+  replication: boolean
+  onDisk: boolean
+  find: boolean
+}
+
+export enum SortOption {
+  ASCENDING = "asc",
+  DESCENDING = "desc",
+}
+
+export type CouchFindOptions = {
+  selector: PouchDB.Find.Selector
+  fields?: string[]
+  sort?: {
+    [key: string]: SortOption
+  }[]
+  limit?: number
+  skip?: number
+  bookmark?: string
+}

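A sketch of a CouchFindOptions value for a CouchDB Mango query; the selector fields and date strings are illustrative, while $gte/$lte are standard Mango operators:

import { CouchFindOptions, SortOption } from "@budibase/types"

// Hypothetical query: newest-first documents inside a timestamp window,
// capped at 50; bookmark would carry the cursor when paginating.
const findOpts: CouchFindOptions = {
  selector: {
    timestamp: { $gte: "2022-10-01", $lte: "2022-10-31" },
  },
  sort: [{ timestamp: SortOption.DESCENDING }],
  limit: 50,
}
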
@@ -0,0 +1,7 @@
+import { BaseEvent } from "./event"
+
+export interface AppBackupRestoreEvent extends BaseEvent {
+  appId: string
+  backupName: string
+  backupCreatedAt: string
+}

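A sketch of the properties an app:backup:restored event might carry, assuming the inherited BaseEvent fields are optional as they are elsewhere in this package; the values are invented:

import { AppBackupRestoreEvent } from "@budibase/types"

const restoreEvent: AppBackupRestoreEvent = {
  appId: "app_example",
  backupName: "pre-release backup",
  backupCreatedAt: "2022-10-18T10:00:00.000Z",
}
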
@@ -168,6 +168,9 @@ export enum Event {
   PLUGIN_INIT = "plugin:init",
   PLUGIN_IMPORTED = "plugin:imported",
   PLUGIN_DELETED = "plugin:deleted",
+
+  // BACKUP
+  APP_BACKUP_RESTORED = "app:backup:restored",
 }

 // properties added at the final stage of the event pipeline

@@ -20,3 +20,4 @@ export * from "./backfill"
 export * from "./identification"
 export * from "./userGroup"
 export * from "./plugin"
+export * from "./backup"

@@ -8,3 +8,4 @@ export * from "./search"
 export * from "./koa"
 export * from "./auth"
 export * from "./locks"
+export * from "./db"

@@ -1,4 +1,4 @@
-import { Context } from "koa"
+import { Context, Request } from "koa"
 import { User } from "../documents"
 import { License } from "../sdk"

@@ -7,7 +7,11 @@ export interface ContextUser extends User {
   license: License
 }

-export interface BBContext extends Context {
-  user?: ContextUser
+export interface BBRequest extends Request {
   body: any
 }
+
+export interface BBContext extends Context {
+  request: BBRequest
+  user?: ContextUser
+}

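A sketch of a Koa handler typed with the split BBRequest/BBContext; the handler itself is hypothetical:

import { BBContext } from "@budibase/types"

// request.body is typed through BBRequest, and user carries the
// authenticated ContextUser once middleware has populated it.
async function handler(ctx: BBContext) {
  const { name } = ctx.request.body
  ctx.body = { message: `hello ${name}`, requestedBy: ctx.user?.email }
}
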
@@ -1,3 +1,4 @@
 export enum Feature {
   USER_GROUPS = "userGroups",
+  APP_BACKUPS = "appBackups",
 }

@@ -25,6 +25,7 @@ export enum MonthlyQuotaName {

 export enum ConstantQuotaName {
   AUTOMATION_LOG_RETENTION_DAYS = "automationLogRetentionDays",
+  APP_BACKUPS_RETENTION_DAYS = "appBackupRetentionDays",
 }

 export type MeteredQuotaName = StaticQuotaName | MonthlyQuotaName

@@ -76,6 +77,7 @@ export type StaticQuotas = {

 export type ConstantQuotas = {
   [ConstantQuotaName.AUTOMATION_LOG_RETENTION_DAYS]: Quota
+  [ConstantQuotaName.APP_BACKUPS_RETENTION_DAYS]: Quota
 }

 export type Quotas = {

@@ -39,6 +39,13 @@
     "@types/keygrip" "*"
     "@types/node" "*"

+"@types/debug@*":
+  version "4.1.7"
+  resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82"
+  integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==
+  dependencies:
+    "@types/ms" "*"
+
 "@types/express-serve-static-core@^4.17.18":
   version "4.17.29"
   resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz#2a1795ea8e9e9c91b4a4bbe475034b20c1ec711c"

@@ -113,6 +120,11 @@
   resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
   integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==

+"@types/ms@*":
+  version "0.7.31"
+  resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197"
+  integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==
+
 "@types/node@*":
   version "18.0.6"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.6.tgz#0ba49ac517ad69abe7a1508bc9b3a5483df9d5d7"

@@ -123,6 +135,152 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.20.tgz#268f028b36eaf51181c3300252f605488c4f0650"
   integrity sha512-Q8KKwm9YqEmUBRsqJ2GWJDtXltBDxTdC4m5vTdXBolu2PeQh8LX+f6BTwU+OuXPu37fLxoN6gidqBmnky36FXA==

+"@types/pouchdb-adapter-cordova-sqlite@*":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6"
+  integrity sha512-nqlXpW1ho3KBg1mUQvZgH2755y3z/rw4UA7ZJCPMRTHofxGMY8izRVw5rHBL4/7P615or0J2udpRYxgkT3D02g==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-fruitdown@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-fruitdown/-/pouchdb-adapter-fruitdown-6.1.3.tgz#9b140ad9645cc56068728acf08ec19ac0046658e"
+  integrity sha512-Wz1Z1JLOW1hgmFQjqnSkmyyfH7by/iWb4abKn684WMvQfmxx6BxKJpJ4+eulkVPQzzgMMSgU1MpnQOm9FgRkbw==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-http@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-http/-/pouchdb-adapter-http-6.1.3.tgz#6e592d5f48deb6274a21ddac1498dd308096bcf3"
+  integrity sha512-9Z4TLbF/KJWy/D2sWRPBA+RNU0odQimfdvlDX+EY7rGcd3aVoH8qjD/X0Xcd/0dfBH5pKrNIMFFQgW/TylRCmA==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-idb@*":
+  version "6.1.4"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-idb/-/pouchdb-adapter-idb-6.1.4.tgz#cb9a18864585d600820cd325f007614c5c3989cd"
+  integrity sha512-KIAXbkF4uYUz0ZwfNEFLtEkK44mEWopAsD76UhucH92XnJloBysav+TjI4FFfYQyTjoW3S1s6V+Z14CUJZ0F6w==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-leveldb@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-leveldb/-/pouchdb-adapter-leveldb-6.1.3.tgz#17c7e75d75b992050bca15991e97fba575c61bb3"
+  integrity sha512-ex8NFqQGFwEpFi7AaZ5YofmuemfZNsL3nTFZBUCAKYMBkazQij1pe2ILLStSvJr0XS0qxgXjCEW19T5Wqiiskg==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-localstorage@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-localstorage/-/pouchdb-adapter-localstorage-6.1.3.tgz#0dde02ba6b9d6073a295a20196563942ba9a54bd"
+  integrity sha512-oor040tye1KKiGLWYtIy7rRT7C2yoyX3Tf6elEJRpjOA7Ja/H8lKc4LaSh9ATbptIcES6MRqZDxtp7ly9hsW3Q==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-memory@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-memory/-/pouchdb-adapter-memory-6.1.3.tgz#9eabdbc890fcf58960ee8b68b8685f837e75c844"
+  integrity sha512-gVbsIMzDzgZYThFVT4eVNsmuZwVm/4jDxP1sjlgc3qtDIxbtBhGgyNfcskwwz9Zu5Lv1avkDsIWvcxQhnvRlHg==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-node-websql@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-node-websql/-/pouchdb-adapter-node-websql-6.1.3.tgz#aa18bc68af8cf509acd12c400010dcd5fab2243d"
+  integrity sha512-F/P+os6Jsa7CgHtH64+Z0HfwIcj0hIRB5z8gNhF7L7dxPWoAfkopK5H2gydrP3sQrlGyN4WInF+UJW/Zu1+FKg==
+  dependencies:
+    "@types/pouchdb-adapter-websql" "*"
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-adapter-websql@*":
+  version "6.1.4"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-websql/-/pouchdb-adapter-websql-6.1.4.tgz#359fbe42ccac0ac90b492ddb8c32fafd0aa96d79"
+  integrity sha512-zMJQCtXC40hBsIDRn0GhmpeGMK0f9l/OGWfLguvczROzxxcOD7REI+e6SEmX7gJKw5JuMvlfuHzkQwjmvSJbtg==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-browser@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-browser/-/pouchdb-browser-6.1.3.tgz#8f33d6ef58d6817d1f6d36979148a1c7f63244d8"
+  integrity sha512-EdYowrWxW9SWBMX/rux2eq7dbHi5Zeyzz+FF/IAsgQKnUxgeCO5VO2j4zTzos0SDyJvAQU+EYRc11r7xGn5tvA==
+  dependencies:
+    "@types/pouchdb-adapter-http" "*"
+    "@types/pouchdb-adapter-idb" "*"
+    "@types/pouchdb-adapter-websql" "*"
+    "@types/pouchdb-core" "*"
+    "@types/pouchdb-mapreduce" "*"
+    "@types/pouchdb-replication" "*"
+
+"@types/pouchdb-core@*":
+  version "7.0.10"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-core/-/pouchdb-core-7.0.10.tgz#d1ea1549e7fad6cb579f71459b1bc27252e06a5a"
+  integrity sha512-mKhjLlWWXyV3PTTjDhzDV1kc2dolO7VYFa75IoKM/hr8Er9eo8RIbS7mJLfC8r/C3p6ihZu9yZs1PWC1LQ0SOA==
+  dependencies:
+    "@types/debug" "*"
+    "@types/pouchdb-find" "*"
+
+"@types/pouchdb-find@*":
+  version "7.3.0"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-find/-/pouchdb-find-7.3.0.tgz#b917030e9f4bf6e56bf8c3b9fe4b2a25e989009a"
+  integrity sha512-sFPli5tBjGX9UfXioik1jUzPdcN84eV82n0lmEFuoPepWqkLjQcyri0eOa++HYOaNPyMDhKFBqEALEZivK2dRg==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-http@*":
+  version "6.1.3"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-http/-/pouchdb-http-6.1.3.tgz#09576c0d409da1f8dee34ec5b768415e2472ea52"
+  integrity sha512-0e9E5SqNOyPl/3FnEIbENssB4FlJsNYuOy131nxrZk36S+y1R/6qO7ZVRypWpGTqBWSuVd7gCsq2UDwO/285+w==
+  dependencies:
+    "@types/pouchdb-adapter-http" "*"
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-mapreduce@*":
+  version "6.1.7"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-mapreduce/-/pouchdb-mapreduce-6.1.7.tgz#9ab32d1e0f234f1bf6d1e4c5d7e216e9e23ac0a3"
+  integrity sha512-WzBwm7tmO9QhfRzVaWT4v6JQSS/fG2OoUDrWrhX87rPe2Pn6laPvdK5li6myNRxCoI/l5e8Jd+oYBAFnaiFucA==
+  dependencies:
+    "@types/pouchdb-core" "*"
+
+"@types/pouchdb-node@*":
+  version "6.1.4"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-node/-/pouchdb-node-6.1.4.tgz#5214c0169fcfd2237d373380bbd65a934feb5dfb"
+  integrity sha512-wnTCH8X1JOPpNOfVhz8HW0AvmdHh6pt40MuRj0jQnK7QEHsHS79WujsKTKSOF8QXtPwpvCNSsI7ut7H7tfxxJQ==
+  dependencies:
+    "@types/pouchdb-adapter-http" "*"
+    "@types/pouchdb-adapter-leveldb" "*"
+    "@types/pouchdb-core" "*"
+    "@types/pouchdb-mapreduce" "*"
+    "@types/pouchdb-replication" "*"
+
+"@types/pouchdb-replication@*":
+  version "6.4.4"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb-replication/-/pouchdb-replication-6.4.4.tgz#743406c90f13a988fa3e346ea74ce40acd170d00"
+  integrity sha512-BsE5LKpjJK4iAf6Fx5kyrMw+33V+Ip7uWldUnU2BYrrvtR+MLD22dcImm7DZN1st2wPPb91i0XEnQzvP0w1C/Q==
+  dependencies:
+    "@types/pouchdb-core" "*"
+    "@types/pouchdb-find" "*"
+
+"@types/pouchdb@6.4.0":
+  version "6.4.0"
+  resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8"
+  integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w==
+  dependencies:
+    "@types/pouchdb-adapter-cordova-sqlite" "*"
+    "@types/pouchdb-adapter-fruitdown" "*"
+    "@types/pouchdb-adapter-http" "*"
+    "@types/pouchdb-adapter-idb" "*"
+    "@types/pouchdb-adapter-leveldb" "*"
+    "@types/pouchdb-adapter-localstorage" "*"
+    "@types/pouchdb-adapter-memory" "*"
+    "@types/pouchdb-adapter-node-websql" "*"
+    "@types/pouchdb-adapter-websql" "*"
+    "@types/pouchdb-browser" "*"
+    "@types/pouchdb-core" "*"
+    "@types/pouchdb-http" "*"
+    "@types/pouchdb-mapreduce" "*"
+    "@types/pouchdb-node" "*"
+    "@types/pouchdb-replication" "*"
+
 "@types/qs@*":
   version "6.9.7"
   resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"

@@ -72,7 +72,6 @@
   "devDependencies": {
     "@types/jest": "26.0.23",
     "@types/koa": "2.13.4",
-    "@types/koa-router": "7.4.4",
     "@types/koa__router": "8.0.11",
     "@types/node": "14.18.20",
     "@types/uuid": "8.3.4",

@@ -7,6 +7,7 @@ import {
   CloudAccount,
   InviteUserRequest,
   InviteUsersRequest,
+  SearchUsersRequest,
   User,
 } from "@budibase/types"
 import {

@@ -144,7 +145,8 @@ export const destroy = async (ctx: any) => {
 }

 export const search = async (ctx: any) => {
-  const paginated = await sdk.users.paginatedUsers(ctx.request.body)
+  const body = ctx.request.body as SearchUsersRequest
+  const paginated = await sdk.users.paginatedUsers(body)
   // user hashed password shouldn't ever be returned
   for (let user of paginated.data) {
     if (user) {

@@ -27,6 +27,7 @@ import {
   MigrationType,
   PlatformUserByEmail,
   RowResponse,
+  SearchUsersRequest,
   User,
 } from "@budibase/types"
 import { sendEmail } from "../../utilities/email"

@@ -56,7 +57,8 @@ export const paginatedUsers = async ({
   page,
   email,
   appId,
-}: { page?: string; email?: string; appId?: string } = {}) => {
+  userIds,
+}: SearchUsersRequest = {}) => {
   const db = tenancy.getGlobalDB()
   // get one extra document, to have the next page
   const opts: any = {

@@ -94,16 +96,7 @@ export const paginatedUsers = async ({
  */
 export const getUser = async (userId: string) => {
   const db = tenancy.getGlobalDB()
-  let user
-  try {
-    user = await db.get(userId)
-  } catch (err: any) {
-    // no user found, just return nothing
-    if (err.status === 404) {
-      return {}
-    }
-    throw err
-  }
+  let user = await db.get(userId)
   if (user) {
     delete user.password
   }

@@ -1026,13 +1026,6 @@
   dependencies:
     "@types/koa" "*"

-"@types/koa-router@7.4.4":
-  version "7.4.4"
-  resolved "https://registry.yarnpkg.com/@types/koa-router/-/koa-router-7.4.4.tgz#db72bde3616365d74f00178d5f243c4fce7da572"
-  integrity sha512-3dHlZ6CkhgcWeF6wafEUvyyqjWYfKmev3vy1PtOmr0mBc3wpXPU5E8fBBd4YQo5bRpHPfmwC5yDaX7s4jhIN6A==
-  dependencies:
-    "@types/koa" "*"
-
 "@types/koa@*", "@types/koa@2.13.4":
   version "2.13.4"
   resolved "https://registry.yarnpkg.com/@types/koa/-/koa-2.13.4.tgz#10620b3f24a8027ef5cbae88b393d1b31205726b"

@@ -1,23 +1,23 @@
 import { Screen } from "@budibase/types"
 import { Response } from "node-fetch"
 import InternalAPIClient from "./InternalAPIClient"

-export default class ScreenApi {
-    api: InternalAPIClient
-
-    constructor(apiClient: InternalAPIClient) {
-        this.api = apiClient
-    }
-
-    async create(body: any): Promise<[Response, Screen]> {
-        const response = await this.api.post(`/screens`, { body })
-        const json = await response.json()
-        return [response, json]
-    }
-
-    async delete(screenId: string, rev: string): Promise<[Response, Screen]> {
-        const response = await this.api.del(`/screens/${screenId}/${rev}`)
-        const json = await response.json()
-        return [response, json]
-    }
-}
+export default class ScreenApi {
+  api: InternalAPIClient
+
+  constructor(apiClient: InternalAPIClient) {
+    this.api = apiClient
+  }
+
+  async create(body: any): Promise<[Response, Screen]> {
+    const response = await this.api.post(`/screens`, { body })
+    const json = await response.json()
+    return [response, json]
+  }
+
+  async delete(screenId: string, rev: string): Promise<[Response, Screen]> {
+    const response = await this.api.del(`/screens/${screenId}/${rev}`)
+    const json = await response.json()
+    return [response, json]
+  }
+}

@@ -3,32 +3,32 @@ import generator from "../../generator"
 const randomId = generator.guid()

 const generateScreen = (roleId: string): any => ({
   showNavigation: true,
   width: "Large",
   name: randomId,
   template: "createFromScratch",
   props: {
     _id: randomId,
-    _component:
-      "@budibase/standard-components/container",
+    _component: "@budibase/standard-components/container",
     _styles: {
       normal: {},
       hover: {},
       active: {},
-      selected: {}
-    },
-    _children: [],
-    _instanceName: "New Screen",
-    direction: "column",
-    hAlign: "stretch",
-    vAlign: "top",
-    size: "grow",
-    gap: "M"
-  }, routing: {
-    route: "/test",
-    roleId: roleId,
-    homeScreen: false
+      selected: {},
     },
+    _children: [],
+    _instanceName: "New Screen",
+    direction: "column",
+    hAlign: "stretch",
+    vAlign: "top",
+    size: "grow",
+    gap: "M",
+  },
+  routing: {
+    route: "/test",
+    roleId: roleId,
+    homeScreen: false,
+  },
 })

 export default generateScreen

@@ -5,7 +5,6 @@ import generateApp from "../../../config/internal-api/fixtures/applications"
 import { Screen } from "@budibase/types"
 import generateScreen from "../../../config/internal-api/fixtures/screens"

-
 describe("Internal API - /screens endpoints", () => {
   const api = new InternalAPIClient()
   const config = new TestConfiguration<Screen>(api)

@@ -27,7 +26,9 @@ describe("Internal API - /screens endpoints", () => {
     const roleArray = ["BASIC", "POWER", "ADMIN", "PUBLIC"]
     appConfig.applications.api.appId = app.appId
     for (let role in roleArray) {
-      const [response, screen] = await config.screen.create(generateScreen(roleArray[role]))
+      const [response, screen] = await config.screen.create(
+        generateScreen(roleArray[role])
+      )
       expect(response).toHaveStatusCode(200)
       expect(screen.routing.roleId).toEqual(roleArray[role])
     }

@@ -39,7 +40,9 @@ describe("Internal API - /screens endpoints", () => {

     // Create Screen
     appConfig.applications.api.appId = app.appId
-    const [response, screen] = await config.screen.create(generateScreen("BASIC"))
+    const [response, screen] = await config.screen.create(
+      generateScreen("BASIC")
+    )

     // Check screen exists
     const [routesResponse, routes] = await appConfig.applications.getRoutes()

@@ -53,7 +56,9 @@ describe("Internal API - /screens endpoints", () => {

     // Create Screen
     appConfig.applications.api.appId = app.appId
-    const [screenResponse, screen] = await config.screen.create(generateScreen("BASIC"))
+    const [screenResponse, screen] = await config.screen.create(
+      generateScreen("BASIC")
+    )

     // Delete Screen
     const [response] = await config.screen.delete(screen._id!, screen._rev!)