Refactoring more to TypeScript, adding the ability to use the _find API of CouchDB.

This commit is contained in:
mike12345567 2022-10-12 17:02:23 +01:00
parent 7439ade518
commit 33e31fe2e4
26 changed files with 369 additions and 130 deletions

View File

@ -1,8 +1,11 @@
const pouch = require("./pouch") import pouch from "./pouch"
const env = require("../environment") import env from "../environment"
import { checkSlashesInUrl } from "../helpers"
import fetch from "node-fetch"
import { PouchOptions, CouchFindOptions } from "@budibase/types"
const openDbs = [] const openDbs: string[] = []
let PouchDB let PouchDB: any
let initialised = false let initialised = false
const dbList = new Set() const dbList = new Set()
@ -14,8 +17,8 @@ if (env.MEMORY_LEAK_CHECK) {
} }
const put = const put =
dbPut => (dbPut: any) =>
async (doc, options = {}) => { async (doc: any, options = {}) => {
if (!doc.createdAt) { if (!doc.createdAt) {
doc.createdAt = new Date().toISOString() doc.createdAt = new Date().toISOString()
} }
@ -29,7 +32,7 @@ const checkInitialised = () => {
} }
} }
exports.init = opts => { export async function init(opts: PouchOptions) {
PouchDB = pouch.getPouch(opts) PouchDB = pouch.getPouch(opts)
initialised = true initialised = true
} }
@ -37,7 +40,7 @@ exports.init = opts => {
// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION // NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
// this function is prone to leaks, should only be used // this function is prone to leaks, should only be used
// in situations that using the function doWithDB does not work // in situations that using the function doWithDB does not work
exports.dangerousGetDB = (dbName, opts) => { export async function dangerousGetDB(dbName: string, opts: any) {
checkInitialised() checkInitialised()
if (env.isTest()) { if (env.isTest()) {
dbList.add(dbName) dbList.add(dbName)
@ -53,7 +56,7 @@ exports.dangerousGetDB = (dbName, opts) => {
// use this function if you have called dangerousGetDB - close // use this function if you have called dangerousGetDB - close
// the databases you've opened once finished // the databases you've opened once finished
exports.closeDB = async db => { export async function closeDB(db: PouchDB.Database) {
if (!db || env.isTest()) { if (!db || env.isTest()) {
return return
} }
@ -71,7 +74,7 @@ exports.closeDB = async db => {
// we have to use a callback for this so that we can close // we have to use a callback for this so that we can close
// the DB when we're done, without this manual requests would // the DB when we're done, without this manual requests would
// need to close the database when done with it to avoid memory leaks // need to close the database when done with it to avoid memory leaks
exports.doWithDB = async (dbName, cb, opts = {}) => { export async function doWithDB(dbName: string, cb: any, opts = {}) {
const db = exports.dangerousGetDB(dbName, opts) const db = exports.dangerousGetDB(dbName, opts)
// need this to be async so that we can correctly close DB after all // need this to be async so that we can correctly close DB after all
// async operations have been completed // async operations have been completed
@ -82,10 +85,39 @@ exports.doWithDB = async (dbName, cb, opts = {}) => {
} }
} }
exports.allDbs = () => { export function allDbs() {
if (!env.isTest()) { if (!env.isTest()) {
throw new Error("Cannot be used outside test environment.") throw new Error("Cannot be used outside test environment.")
} }
checkInitialised() checkInitialised()
return [...dbList] return [...dbList]
} }
/**
 * Perform a raw HTTP request against the configured CouchDB instance.
 * @param path relative CouchDB path, e.g. "dbName/_find" or "_all_dbs"
 * @param method HTTP verb, defaults to GET
 * @param body optional JSON-serializable request body (ignored for GET)
 * @returns the parsed JSON response
 * @throws Error when CouchDB responds with a non-2xx status
 */
export async function directCouchQuery(
  path: string,
  method: string = "GET",
  body?: any
) {
  const { url, cookie } = pouch.getCouchInfo()
  const couchUrl = `${url}/${path}`
  const params: any = {
    method: method,
    headers: {
      Authorization: cookie,
    },
  }
  if (body && method !== "GET") {
    // node-fetch does not serialize plain objects - without stringifying,
    // the body would be sent as "[object Object]" and CouchDB would reject it
    params.body = JSON.stringify(body)
    params.headers["Content-Type"] = "application/json"
  }
  const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
  if (response.status < 300) {
    return await response.json()
  } else {
    // throw a real Error (not a string) so callers get a stack trace
    throw new Error("Cannot connect to CouchDB instance")
  }
}
/**
 * Run a Mango (_find) query against the named database.
 * @param dbName the database to query
 * @param opts the Mango query options (selector, sort, limit, etc.)
 * @returns the matching documents plus the pagination bookmark
 */
export async function directCouchFind(dbName: string, opts: CouchFindOptions) {
  const response = await directCouchQuery(`${dbName}/_find`, "POST", opts)
  return { rows: response.docs, bookmark: response.bookmark }
}

View File

@ -4,11 +4,8 @@ import env from "../environment"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants" import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDB } from "../context" import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "./tenancy" import { getGlobalDBName } from "./tenancy"
import fetch from "node-fetch" import { doWithDB, allDbs, directCouchQuery } from "./index"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
import { getAppMetadata } from "../cache/appMetadata" import { getAppMetadata } from "../cache/appMetadata"
import { checkSlashesInUrl } from "../helpers"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions" import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import { APP_PREFIX } from "./constants" import { APP_PREFIX } from "./constants"
import * as events from "../events" import * as events from "../events"
@ -209,22 +206,11 @@ export async function getAllDbs(opts = { efficient: false }) {
return allDbs() return allDbs()
} }
let dbs: any[] = [] let dbs: any[] = []
let { url, cookie } = getCouchInfo() async function addDbs(couchPath: string) {
async function addDbs(couchUrl: string) { const json = await directCouchQuery(couchPath)
const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), { dbs = dbs.concat(json)
method: "GET",
headers: {
Authorization: cookie,
},
})
if (response.status === 200) {
let json = await response.json()
dbs = dbs.concat(json)
} else {
throw "Cannot connect to CouchDB instance"
}
} }
let couchUrl = `${url}/_all_dbs` let couchPath = "/_all_dbs"
let tenantId = getTenantId() let tenantId = getTenantId()
if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) { if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
// just get all DBs when: // just get all DBs when:
@ -232,12 +218,12 @@ export async function getAllDbs(opts = { efficient: false }) {
// - default tenant // - default tenant
// - apps dbs don't contain tenant id // - apps dbs don't contain tenant id
// - non-default tenant dbs are filtered out application side in getAllApps // - non-default tenant dbs are filtered out application side in getAllApps
await addDbs(couchUrl) await addDbs(couchPath)
} else { } else {
// get prod apps // get prod apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId)) await addDbs(getStartEndKeyURL(couchPath, DocumentType.APP, tenantId))
// get dev apps // get dev apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId)) await addDbs(getStartEndKeyURL(couchPath, DocumentType.APP_DEV, tenantId))
// add global db name // add global db name
dbs.push(getGlobalDBName(tenantId)) dbs.push(getGlobalDBName(tenantId))
} }

View File

@ -150,7 +150,7 @@ async function createInstance(template: any) {
throw "Error loading database dump from memory." throw "Error loading database dump from memory."
} }
} else if (template && template.useTemplate === "true") { } else if (template && template.useTemplate === "true") {
await sdk.apps.imports.importApp(appId, db, template) await sdk.backups.importApp(appId, db, template)
} else { } else {
// create the users table // create the users table
await db.put(USERS_TABLE_SCHEMA) await db.put(USERS_TABLE_SCHEMA)

View File

@ -9,7 +9,7 @@ export async function exportAppDump(ctx: any) {
excludeRows = isQsTrue(excludeRows) excludeRows = isQsTrue(excludeRows)
const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz` const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
ctx.attachment(backupIdentifier) ctx.attachment(backupIdentifier)
ctx.body = await sdk.apps.exports.streamExportApp(appId, excludeRows) ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
await context.doInAppContext(appId, async () => { await context.doInAppContext(appId, async () => {
const appDb = context.getAppDB() const appDb = context.getAppDB()

View File

@ -9,7 +9,7 @@ exports.exportApps = async ctx => {
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.") ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
} }
const apps = await getAllApps({ all: true }) const apps = await getAllApps({ all: true })
const globalDBString = await sdk.apps.exports.exportDB(getGlobalDBName(), { const globalDBString = await sdk.backups.exportDB(getGlobalDBName(), {
filter: doc => !doc._id.startsWith(DocumentType.USER), filter: doc => !doc._id.startsWith(DocumentType.USER),
}) })
// only export the dev apps as they will be the latest, the user can republish the apps // only export the dev apps as they will be the latest, the user can republish the apps
@ -17,10 +17,7 @@ exports.exportApps = async ctx => {
let appIds = apps let appIds = apps
.map(app => app.appId || app._id) .map(app => app.appId || app._id)
.filter(appId => isDevAppID(appId)) .filter(appId => isDevAppID(appId))
const tmpPath = await sdk.apps.exports.exportMultipleApps( const tmpPath = await sdk.backups.exportMultipleApps(appIds, globalDBString)
appIds,
globalDBString
)
const filename = `cloud-export-${new Date().getTime()}.tar.gz` const filename = `cloud-export-${new Date().getTime()}.tar.gz`
ctx.attachment(filename) ctx.attachment(filename)
ctx.body = streamFile(tmpPath) ctx.body = streamFile(tmpPath)

View File

@ -1,10 +1,11 @@
const { FieldTypes, FormulaTypes } = require("../../../constants") const { FieldTypes, FormulaTypes } = require("../../../constants")
const { getAllInternalTables, clearColumns } = require("./utils") const { clearColumns } = require("./utils")
const { doesContainStrings } = require("@budibase/string-templates") const { doesContainStrings } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp") const { cloneDeep } = require("lodash/fp")
const { isEqual, uniq } = require("lodash") const { isEqual, uniq } = require("lodash")
const { updateAllFormulasInTable } = require("../row/staticFormula") const { updateAllFormulasInTable } = require("../row/staticFormula")
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
const sdk = require("../../../sdk")
function isStaticFormula(column) { function isStaticFormula(column) {
return ( return (
@ -39,7 +40,7 @@ function getFormulaThatUseColumn(table, columnNames) {
*/ */
async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) { async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
// start by retrieving all tables, remove the current table from the list // start by retrieving all tables, remove the current table from the list
const tables = (await getAllInternalTables()).filter( const tables = (await sdk.tables.getAllInternalTables()).filter(
tbl => tbl._id !== table._id tbl => tbl._id !== table._id
) )
const schemaToUse = oldTable ? oldTable.schema : table.schema const schemaToUse = oldTable ? oldTable.schema : table.schema
@ -99,7 +100,7 @@ async function updateRelatedFormulaLinksOnTables(
) { ) {
const db = getAppDB() const db = getAppDB()
// start by retrieving all tables, remove the current table from the list // start by retrieving all tables, remove the current table from the list
const tables = (await getAllInternalTables()).filter( const tables = (await sdk.tables.getAllInternalTables()).filter(
tbl => tbl._id !== table._id tbl => tbl._id !== table._id
) )
// clone the tables, so we can compare at end // clone the tables, so we can compare at end

View File

@ -3,7 +3,6 @@ const {
breakExternalTableId, breakExternalTableId,
} = require("../../../integrations/utils") } = require("../../../integrations/utils")
const { const {
getTable,
generateForeignKey, generateForeignKey,
generateJunctionTableName, generateJunctionTableName,
foreignKeyStructure, foreignKeyStructure,
@ -20,6 +19,7 @@ const csvParser = require("../../../utilities/csvParser")
const { handleRequest } = require("../row/external") const { handleRequest } = require("../row/external")
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core") const { events } = require("@budibase/backend-core")
const sdk = require("../../../sdk")
async function makeTableRequest( async function makeTableRequest(
datasource, datasource,
@ -181,7 +181,7 @@ exports.save = async function (ctx) {
let oldTable let oldTable
if (ctx.request.body && ctx.request.body._id) { if (ctx.request.body && ctx.request.body._id) {
oldTable = await getTable(ctx.request.body._id) oldTable = await sdk.tables.getTable(ctx.request.body._id)
} }
if (hasTypeChanged(tableToSave, oldTable)) { if (hasTypeChanged(tableToSave, oldTable)) {
@ -281,7 +281,7 @@ exports.save = async function (ctx) {
} }
exports.destroy = async function (ctx) { exports.destroy = async function (ctx) {
const tableToDelete = await getTable(ctx.params.tableId) const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
if (!tableToDelete || !tableToDelete.created) { if (!tableToDelete || !tableToDelete.created) {
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.") ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
} }
@ -303,7 +303,7 @@ exports.destroy = async function (ctx) {
} }
exports.bulkImport = async function (ctx) { exports.bulkImport = async function (ctx) {
const table = await getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body const { dataImport } = ctx.request.body
if (!dataImport || !dataImport.schema || !dataImport.csvString) { if (!dataImport || !dataImport.schema || !dataImport.csvString) {
ctx.throw(400, "Provided data import information is invalid.") ctx.throw(400, "Provided data import information is invalid.")

View File

@ -4,8 +4,8 @@ const csvParser = require("../../../utilities/csvParser")
const { isExternalTable, isSQL } = require("../../../integrations/utils") const { isExternalTable, isSQL } = require("../../../integrations/utils")
const { getDatasourceParams } = require("../../../db/utils") const { getDatasourceParams } = require("../../../db/utils")
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
const { getTable, getAllInternalTables } = require("./utils")
const { events } = require("@budibase/backend-core") const { events } = require("@budibase/backend-core")
const sdk = require("../../../sdk")
function pickApi({ tableId, table }) { function pickApi({ tableId, table }) {
if (table && !tableId) { if (table && !tableId) {
@ -23,7 +23,7 @@ function pickApi({ tableId, table }) {
exports.fetch = async function (ctx) { exports.fetch = async function (ctx) {
const db = getAppDB() const db = getAppDB()
const internal = await getAllInternalTables() const internal = await sdk.tables.getAllInternalTables()
const externalTables = await db.allDocs( const externalTables = await db.allDocs(
getDatasourceParams("plus", { getDatasourceParams("plus", {
@ -50,7 +50,7 @@ exports.fetch = async function (ctx) {
exports.find = async function (ctx) { exports.find = async function (ctx) {
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
ctx.body = await getTable(tableId) ctx.body = await sdk.tables.getTable(tableId)
} }
exports.save = async function (ctx) { exports.save = async function (ctx) {
@ -101,7 +101,7 @@ exports.validateCSVSchema = async function (ctx) {
const { csvString, schema = {}, tableId } = ctx.request.body const { csvString, schema = {}, tableId } = ctx.request.body
let existingTable let existingTable
if (tableId) { if (tableId) {
existingTable = await getTable(tableId) existingTable = await sdk.tables.getTable(tableId)
} }
let result = await csvParser.parse(csvString, schema) let result = await csvParser.parse(csvString, schema)
if (existingTable) { if (existingTable) {

View File

@ -1,12 +1,7 @@
import { updateLinks, EventType } from "../../../db/linkedRows" import { updateLinks, EventType } from "../../../db/linkedRows"
import { getRowParams, generateTableID } from "../../../db/utils" import { getRowParams, generateTableID } from "../../../db/utils"
import { FieldTypes } from "../../../constants" import { FieldTypes } from "../../../constants"
import { import { TableSaveFunctions, hasTypeChanged, handleDataImport } from "./utils"
TableSaveFunctions,
hasTypeChanged,
getTable,
handleDataImport,
} from "./utils"
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
import { isTest } from "../../../environment" import { isTest } from "../../../environment"
import { import {
@ -19,6 +14,7 @@ import { quotas } from "@budibase/pro"
import { isEqual } from "lodash" import { isEqual } from "lodash"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import env from "../../../environment" import env from "../../../environment"
import sdk from "../../../sdk"
function checkAutoColumns(table: Table, oldTable: Table) { function checkAutoColumns(table: Table, oldTable: Table) {
if (!table.schema) { if (!table.schema) {
@ -188,7 +184,7 @@ export async function destroy(ctx: any) {
} }
export async function bulkImport(ctx: any) { export async function bulkImport(ctx: any) {
const table = await getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body const { dataImport } = ctx.request.body
await handleDataImport(ctx.user, table, dataImport) await handleDataImport(ctx.user, table, dataImport)
return table return table

View File

@ -256,46 +256,6 @@ class TableSaveFunctions {
} }
} }
export async function getAllInternalTables() {
const db = getAppDB()
const internalTables = await db.allDocs(
getTableParams(null, {
include_docs: true,
})
)
return internalTables.rows.map((tableDoc: any) => ({
...tableDoc.doc,
type: "internal",
sourceId: BudibaseInternalDB._id,
}))
}
export async function getAllExternalTables(datasourceId: any) {
const db = getAppDB()
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
throw "Datasource is not configured fully."
}
return datasource.entities
}
export async function getExternalTable(datasourceId: any, tableName: any) {
const entities = await getAllExternalTables(datasourceId)
return entities[tableName]
}
export async function getTable(tableId: any) {
const db = getAppDB()
if (isExternalTable(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await db.get(datasourceId)
const table = await getExternalTable(datasourceId, tableName)
return { ...table, sql: isSQL(datasource) }
} else {
return db.get(tableId)
}
}
export async function checkForViewUpdates( export async function checkForViewUpdates(
table: any, table: any,
rename: any, rename: any,

View File

@ -3,12 +3,12 @@ const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters") const exporters = require("./exporters")
const { saveView, getView, getViews, deleteView } = require("./utils") const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row") const { fetchView } = require("../row")
const { getTable } = require("../table/utils")
const { FieldTypes } = require("../../../constants") const { FieldTypes } = require("../../../constants")
const { getAppDB } = require("@budibase/backend-core/context") const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core") const { events } = require("@budibase/backend-core")
const { DocumentType } = require("../../../db/utils") const { DocumentType } = require("../../../db/utils")
const { cloneDeep, isEqual } = require("lodash") const { cloneDeep, isEqual } = require("lodash")
const sdk = require("../../../sdk")
exports.fetch = async ctx => { exports.fetch = async ctx => {
ctx.body = await getViews() ctx.body = await getViews()
@ -144,7 +144,7 @@ exports.exportView = async ctx => {
let schema = view && view.meta && view.meta.schema let schema = view && view.meta && view.meta.schema
const tableId = ctx.params.tableId || view.meta.tableId const tableId = ctx.params.tableId || view.meta.tableId
const table = await getTable(tableId) const table = await sdk.tables.getTable(tableId)
if (!schema) { if (!schema) {
schema = table.schema schema = table.schema
} }

View File

@ -1,10 +1,10 @@
const { getTable } = require("../api/controllers/table/utils")
const { const {
findHBSBlocks, findHBSBlocks,
decodeJSBinding, decodeJSBinding,
isJSBinding, isJSBinding,
encodeJSBinding, encodeJSBinding,
} = require("@budibase/string-templates") } = require("@budibase/string-templates")
const sdk = require("../sdk")
/** /**
* When values are input to the system generally they will be of type string as this is required for template strings. * When values are input to the system generally they will be of type string as this is required for template strings.
@ -64,7 +64,7 @@ exports.cleanInputValues = (inputs, schema) => {
* @returns {Promise<Object>} The cleaned up rows object, will should now have all the required primitive types. * @returns {Promise<Object>} The cleaned up rows object, will should now have all the required primitive types.
*/ */
exports.cleanUpRow = async (tableId, row) => { exports.cleanUpRow = async (tableId, row) => {
let table = await getTable(tableId) let table = await sdk.tables.getTable(tableId)
return exports.cleanInputValues(row, { properties: table.schema }) return exports.cleanInputValues(row, { properties: table.schema })
} }

View File

@ -1,18 +1,11 @@
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
import { getTableParams } from "../../../../db/utils" import sdk from "../../../../sdk"
import { Table } from "@budibase/types"
const getTables = async (appDb: any): Promise<Table[]> => { export const backfill = async (
const response = await appDb.allDocs( appDb: PouchDB.Database,
getTableParams(null, { timestamp: string | number
include_docs: true, ) => {
}) const tables = await sdk.tables.getAllInternalTables(appDb)
)
return response.rows.map((row: any) => row.doc)
}
export const backfill = async (appDb: any, timestamp: string | number) => {
const tables = await getTables(appDb)
for (const table of tables) { for (const table of tables) {
await events.table.created(table, timestamp) await events.table.created(table, timestamp)

View File

@ -1,13 +1,13 @@
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { budibaseTempDir } from "../../utilities/budibaseDir" import { budibaseTempDir } from "../../../utilities/budibaseDir"
import { retrieveDirectory } from "../../utilities/fileSystem/utilities" import { retrieveDirectory } from "../../../utilities/fileSystem/utilities"
import { streamFile } from "../../utilities/fileSystem" import { streamFile } from "../../../utilities/fileSystem"
import { ObjectStoreBuckets } from "../../constants" import { ObjectStoreBuckets } from "../../../constants"
import { import {
LINK_USER_METADATA_PREFIX, LINK_USER_METADATA_PREFIX,
TABLE_ROW_PREFIX, TABLE_ROW_PREFIX,
USER_METDATA_PREFIX, USER_METDATA_PREFIX,
} from "../../db/utils" } from "../../../db/utils"
import { import {
DB_EXPORT_FILE, DB_EXPORT_FILE,
GLOBAL_DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE,

View File

@ -1,10 +1,12 @@
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { budibaseTempDir } from "../../utilities/budibaseDir" import { budibaseTempDir } from "../../../utilities/budibaseDir"
import { DB_EXPORT_FILE, ATTACHMENT_DIR } from "./constants" import { DB_EXPORT_FILE, ATTACHMENT_DIR } from "./constants"
import { uploadDirectory } from "../../utilities/fileSystem/utilities" import { uploadDirectory } from "../../../utilities/fileSystem/utilities"
import { ObjectStoreBuckets } from "../../constants" import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
import { join } from "path" import { join } from "path"
import fs from "fs" import fs from "fs"
import sdk from "../../"
import { CouchFindOptions, Row } from "@budibase/types"
const uuid = require("uuid/v4") const uuid = require("uuid/v4")
const tar = require("tar") const tar = require("tar")
@ -64,8 +66,29 @@ export async function importApp(
const { ok } = await db.load(dbStream) const { ok } = await db.load(dbStream)
if (!ok) { if (!ok) {
throw "Error loading database dump from template." throw "Error loading database dump from template."
} else { }
// TODO: need to iterate over attachments and update their URLs // iterate through attachment documents and update them
const tables = await sdk.tables.getAllInternalTables(db)
for (let table of tables) {
const attachmentCols: string[] = []
for (let [key, column] of Object.entries(table.schema)) {
if (column.type === FieldTypes.ATTACHMENT) {
attachmentCols.push(key)
}
}
// no attachment columns, nothing to do
if (attachmentCols.length === 0) {
continue
}
// use the CouchDB Mango query API to lookup rows that have attachments
const params: CouchFindOptions = { selector: {} }
attachmentCols.forEach(col => (params.selector[col] = { $exists: true }))
const { rows } = await dbCore.directCouchFind(db.name, params)
for (let row of rows) {
// TODO:
}
// write back the updated attachments
await db.bulkDocs(rows)
} }
return ok return ok
} }

View File

@ -0,0 +1,7 @@
import * as exportApps from "./exports"
import * as importApps from "./imports"
// Aggregate the app export and import helpers into a single "backups"
// namespace, consumed elsewhere as sdk.backups.* (e.g. sdk.backups.importApp).
export default {
  ...exportApps,
  ...importApps,
}

View File

@ -1,2 +0,0 @@
export * as exports from "./exports"
export * as imports from "./imports"

View File

@ -0,0 +1,60 @@
import { getAppDB } from "@budibase/backend-core/context"
import { BudibaseInternalDB, getTableParams } from "../../../db/utils"
import {
breakExternalTableId,
isExternalTable,
isSQL,
} from "../../../integrations/utils"
import { Table } from "@budibase/types"
/**
 * Fetch every internal (Budibase-managed) table definition.
 * @param db optional database handle - defaults to the current app DB
 * @returns all internal tables, tagged with their type and source ID
 */
async function getAllInternalTables(db?: PouchDB.Database): Promise<Table[]> {
  // fall back to the current app context's database when none is supplied
  const database = db || (getAppDB() as PouchDB.Database)
  const result = await database.allDocs(
    getTableParams(null, { include_docs: true })
  )
  // mark each doc as internal and attach the built-in DB as its source
  return result.rows.map((row: any) => ({
    ...row.doc,
    type: "internal",
    sourceId: BudibaseInternalDB._id,
  }))
}
/**
 * Fetch every table defined on an external datasource.
 * @param datasourceId the ID of the datasource document
 * @returns a map of table name -> table definition (the datasource's
 *   "entities" object) - note: a map, not an array, since callers look
 *   tables up by name
 * @throws Error when the datasource is missing or has no entities
 */
async function getAllExternalTables(
  datasourceId: any
): Promise<Record<string, Table>> {
  const db = getAppDB()
  const datasource = await db.get(datasourceId)
  if (!datasource || !datasource.entities) {
    // throw a real Error (not a string) so callers get a stack trace
    throw new Error("Datasource is not configured fully.")
  }
  return datasource.entities
}
/**
 * Fetch a single external table by name from its datasource.
 * @param datasourceId the ID of the datasource document
 * @param tableName the name of the table within the datasource
 * @returns the table definition, or undefined if the name is unknown
 */
async function getExternalTable(
  datasourceId: any,
  tableName: any
): Promise<Table> {
  // look the table up by name in the datasource's entity map
  const allTables = await getAllExternalTables(datasourceId)
  return allTables[tableName]
}
/**
 * Fetch a table definition by ID, handling both internal tables (stored
 * directly in the app DB) and external ones (nested in a datasource doc).
 * @param tableId the table's document ID
 * @returns the table definition; external tables gain a "sql" flag
 */
async function getTable(tableId: any): Promise<Table> {
  const db = getAppDB()
  if (!isExternalTable(tableId)) {
    // internal tables are plain documents in the app database
    return db.get(tableId)
  }
  // external table IDs encode the datasource ID and the table name
  const { datasourceId, tableName } = breakExternalTableId(tableId)
  const datasource = await db.get(datasourceId)
  const table = await getExternalTable(datasourceId, tableName)
  return { ...table, sql: isSQL(datasource) }
}
// Table lookup helpers, exposed elsewhere via the SDK as sdk.tables.*
export default {
  getAllInternalTables,
  getAllExternalTables,
  getExternalTable,
  getTable,
}

View File

@ -1,5 +1,7 @@
import * as apps from "./app" import { default as backups } from "./app/backups"
import { default as tables } from "./app/tables"
export default { export default {
apps, backups,
tables,
} }

View File

@ -16,6 +16,7 @@
"@types/koa": "2.13.4", "@types/koa": "2.13.4",
"@types/node": "14.18.20", "@types/node": "14.18.20",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"typescript": "4.7.3" "typescript": "4.7.3",
"@types/pouchdb": "6.4.0"
} }
} }

View File

@ -0,0 +1,22 @@
// Options controlling how the PouchDB instance is constructed
// (in-memory vs on-disk storage, replication, and the find plugin).
export type PouchOptions = {
  inMemory: boolean
  replication: boolean
  onDisk: boolean
  find: boolean
}
// Sort directions accepted by the CouchDB _find (Mango) API.
export enum SortOption {
  ASCENDING = "asc",
  DESCENDING = "desc",
}
// Subset of the CouchDB Mango (_find) query options supported here -
// selector is required, everything else is optional.
export type CouchFindOptions = {
  selector: PouchDB.Find.Selector
  fields?: string[]
  sort?: {
    [key: string]: SortOption
  }[]
  limit?: number
  skip?: number
  bookmark?: string
}

View File

@ -0,0 +1 @@
export * from "./db"

View File

@ -49,4 +49,5 @@ export interface Table extends Document {
sourceId?: string sourceId?: string
relatedFormula?: string[] relatedFormula?: string[]
constrained?: string[] constrained?: string[]
sql?: boolean
} }

View File

@ -1,3 +1,4 @@
export * from "./documents" export * from "./documents"
export * from "./sdk" export * from "./sdk"
export * from "./api" export * from "./api"
export * from "./core"

View File

@ -39,6 +39,13 @@
"@types/keygrip" "*" "@types/keygrip" "*"
"@types/node" "*" "@types/node" "*"
"@types/debug@*":
version "4.1.7"
resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82"
integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==
dependencies:
"@types/ms" "*"
"@types/express-serve-static-core@^4.17.18": "@types/express-serve-static-core@^4.17.18":
version "4.17.29" version "4.17.29"
resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz#2a1795ea8e9e9c91b4a4bbe475034b20c1ec711c" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz#2a1795ea8e9e9c91b4a4bbe475034b20c1ec711c"
@ -113,6 +120,11 @@
resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==
"@types/ms@*":
version "0.7.31"
resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197"
integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==
"@types/node@*": "@types/node@*":
version "18.0.6" version "18.0.6"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.6.tgz#0ba49ac517ad69abe7a1508bc9b3a5483df9d5d7" resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.6.tgz#0ba49ac517ad69abe7a1508bc9b3a5483df9d5d7"
@ -123,6 +135,152 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.20.tgz#268f028b36eaf51181c3300252f605488c4f0650" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.20.tgz#268f028b36eaf51181c3300252f605488c4f0650"
integrity sha512-Q8KKwm9YqEmUBRsqJ2GWJDtXltBDxTdC4m5vTdXBolu2PeQh8LX+f6BTwU+OuXPu37fLxoN6gidqBmnky36FXA== integrity sha512-Q8KKwm9YqEmUBRsqJ2GWJDtXltBDxTdC4m5vTdXBolu2PeQh8LX+f6BTwU+OuXPu37fLxoN6gidqBmnky36FXA==
"@types/pouchdb-adapter-cordova-sqlite@*":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6"
integrity sha512-nqlXpW1ho3KBg1mUQvZgH2755y3z/rw4UA7ZJCPMRTHofxGMY8izRVw5rHBL4/7P615or0J2udpRYxgkT3D02g==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-fruitdown@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-fruitdown/-/pouchdb-adapter-fruitdown-6.1.3.tgz#9b140ad9645cc56068728acf08ec19ac0046658e"
integrity sha512-Wz1Z1JLOW1hgmFQjqnSkmyyfH7by/iWb4abKn684WMvQfmxx6BxKJpJ4+eulkVPQzzgMMSgU1MpnQOm9FgRkbw==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-http@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-http/-/pouchdb-adapter-http-6.1.3.tgz#6e592d5f48deb6274a21ddac1498dd308096bcf3"
integrity sha512-9Z4TLbF/KJWy/D2sWRPBA+RNU0odQimfdvlDX+EY7rGcd3aVoH8qjD/X0Xcd/0dfBH5pKrNIMFFQgW/TylRCmA==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-idb@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-idb/-/pouchdb-adapter-idb-6.1.4.tgz#cb9a18864585d600820cd325f007614c5c3989cd"
integrity sha512-KIAXbkF4uYUz0ZwfNEFLtEkK44mEWopAsD76UhucH92XnJloBysav+TjI4FFfYQyTjoW3S1s6V+Z14CUJZ0F6w==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-leveldb@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-leveldb/-/pouchdb-adapter-leveldb-6.1.3.tgz#17c7e75d75b992050bca15991e97fba575c61bb3"
integrity sha512-ex8NFqQGFwEpFi7AaZ5YofmuemfZNsL3nTFZBUCAKYMBkazQij1pe2ILLStSvJr0XS0qxgXjCEW19T5Wqiiskg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-localstorage@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-localstorage/-/pouchdb-adapter-localstorage-6.1.3.tgz#0dde02ba6b9d6073a295a20196563942ba9a54bd"
integrity sha512-oor040tye1KKiGLWYtIy7rRT7C2yoyX3Tf6elEJRpjOA7Ja/H8lKc4LaSh9ATbptIcES6MRqZDxtp7ly9hsW3Q==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-memory@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-memory/-/pouchdb-adapter-memory-6.1.3.tgz#9eabdbc890fcf58960ee8b68b8685f837e75c844"
integrity sha512-gVbsIMzDzgZYThFVT4eVNsmuZwVm/4jDxP1sjlgc3qtDIxbtBhGgyNfcskwwz9Zu5Lv1avkDsIWvcxQhnvRlHg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-node-websql@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-node-websql/-/pouchdb-adapter-node-websql-6.1.3.tgz#aa18bc68af8cf509acd12c400010dcd5fab2243d"
integrity sha512-F/P+os6Jsa7CgHtH64+Z0HfwIcj0hIRB5z8gNhF7L7dxPWoAfkopK5H2gydrP3sQrlGyN4WInF+UJW/Zu1+FKg==
dependencies:
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-websql@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-websql/-/pouchdb-adapter-websql-6.1.4.tgz#359fbe42ccac0ac90b492ddb8c32fafd0aa96d79"
integrity sha512-zMJQCtXC40hBsIDRn0GhmpeGMK0f9l/OGWfLguvczROzxxcOD7REI+e6SEmX7gJKw5JuMvlfuHzkQwjmvSJbtg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-browser@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-browser/-/pouchdb-browser-6.1.3.tgz#8f33d6ef58d6817d1f6d36979148a1c7f63244d8"
integrity sha512-EdYowrWxW9SWBMX/rux2eq7dbHi5Zeyzz+FF/IAsgQKnUxgeCO5VO2j4zTzos0SDyJvAQU+EYRc11r7xGn5tvA==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-idb" "*"
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-replication" "*"
"@types/pouchdb-core@*":
version "7.0.10"
resolved "https://registry.yarnpkg.com/@types/pouchdb-core/-/pouchdb-core-7.0.10.tgz#d1ea1549e7fad6cb579f71459b1bc27252e06a5a"
integrity sha512-mKhjLlWWXyV3PTTjDhzDV1kc2dolO7VYFa75IoKM/hr8Er9eo8RIbS7mJLfC8r/C3p6ihZu9yZs1PWC1LQ0SOA==
dependencies:
"@types/debug" "*"
"@types/pouchdb-find" "*"
"@types/pouchdb-find@*":
version "7.3.0"
resolved "https://registry.yarnpkg.com/@types/pouchdb-find/-/pouchdb-find-7.3.0.tgz#b917030e9f4bf6e56bf8c3b9fe4b2a25e989009a"
integrity sha512-sFPli5tBjGX9UfXioik1jUzPdcN84eV82n0lmEFuoPepWqkLjQcyri0eOa++HYOaNPyMDhKFBqEALEZivK2dRg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-http@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-http/-/pouchdb-http-6.1.3.tgz#09576c0d409da1f8dee34ec5b768415e2472ea52"
integrity sha512-0e9E5SqNOyPl/3FnEIbENssB4FlJsNYuOy131nxrZk36S+y1R/6qO7ZVRypWpGTqBWSuVd7gCsq2UDwO/285+w==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce@*":
version "6.1.7"
resolved "https://registry.yarnpkg.com/@types/pouchdb-mapreduce/-/pouchdb-mapreduce-6.1.7.tgz#9ab32d1e0f234f1bf6d1e4c5d7e216e9e23ac0a3"
integrity sha512-WzBwm7tmO9QhfRzVaWT4v6JQSS/fG2OoUDrWrhX87rPe2Pn6laPvdK5li6myNRxCoI/l5e8Jd+oYBAFnaiFucA==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-node@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-node/-/pouchdb-node-6.1.4.tgz#5214c0169fcfd2237d373380bbd65a934feb5dfb"
integrity sha512-wnTCH8X1JOPpNOfVhz8HW0AvmdHh6pt40MuRj0jQnK7QEHsHS79WujsKTKSOF8QXtPwpvCNSsI7ut7H7tfxxJQ==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-leveldb" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-replication" "*"
"@types/pouchdb-replication@*":
version "6.4.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-replication/-/pouchdb-replication-6.4.4.tgz#743406c90f13a988fa3e346ea74ce40acd170d00"
integrity sha512-BsE5LKpjJK4iAf6Fx5kyrMw+33V+Ip7uWldUnU2BYrrvtR+MLD22dcImm7DZN1st2wPPb91i0XEnQzvP0w1C/Q==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-find" "*"
"@types/pouchdb@6.4.0":
version "6.4.0"
resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8"
integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w==
dependencies:
"@types/pouchdb-adapter-cordova-sqlite" "*"
"@types/pouchdb-adapter-fruitdown" "*"
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-idb" "*"
"@types/pouchdb-adapter-leveldb" "*"
"@types/pouchdb-adapter-localstorage" "*"
"@types/pouchdb-adapter-memory" "*"
"@types/pouchdb-adapter-node-websql" "*"
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-browser" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-http" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-node" "*"
"@types/pouchdb-replication" "*"
"@types/qs@*": "@types/qs@*":
version "6.9.7" version "6.9.7"
resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"