Merge branch 'develop' of github.com:Budibase/budibase into feature/auth-core

mike12345567 2022-01-11 15:16:07 +00:00
commit b33523a73d
24 changed files with 322 additions and 93 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "src/index.js",
   "author": "Budibase",

View File

@@ -224,8 +224,15 @@ exports.getAllDbs = async () => {
     }
   }
   let couchUrl = `${exports.getCouchUrl()}/_all_dbs`
-  if (env.MULTI_TENANCY) {
   let tenantId = getTenantId()
+  if (!env.MULTI_TENANCY || tenantId == DEFAULT_TENANT_ID) {
+    // just get all DBs when:
+    // - single tenancy
+    // - default tenant
+    // - apps dbs don't contain tenant id
+    // - non-default tenant dbs are filtered out application side in getAllApps
+    await addDbs(couchUrl)
+  } else {
     // get prod apps
     await addDbs(
       exports.getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId)
@@ -236,9 +243,6 @@ exports.getAllDbs = async () => {
     )
     // add global db name
     dbs.push(getGlobalDBName(tenantId))
-  } else {
-    // just get all DBs in self host
-    await addDbs(couchUrl)
   }
   return dbs
 }
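
Note: the non-default-tenant branch narrows the CouchDB _all_dbs listing with start/end key parameters so only that tenant's app databases are returned. getStartEndKeyURL is defined elsewhere in this package, so the sketch below is an assumed reconstruction of its shape, not the actual implementation:

    // hypothetical shape: CouchDB list endpoints accept startkey/endkey, so a
    // tenant's databases can be ranged by their name prefix (prefix format assumed)
    const getStartEndKeyURL = (baseUrl, docType, tenantId) => {
      const prefix = `${docType}_${tenantId}_`
      // "\ufff0" is the conventional high-sorting sentinel for CouchDB range ends
      return `${baseUrl}?startkey="${prefix}"&endkey="${prefix}\ufff0"`
    }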

View File

@@ -206,6 +206,34 @@ exports.retrieveToTmp = async (bucketName, filepath) => {
   return outputPath
 }
 
+/**
+ * Delete a single file.
+ */
+exports.deleteFile = async (bucketName, filepath) => {
+  const objectStore = exports.ObjectStore(bucketName)
+  await exports.makeSureBucketExists(objectStore, bucketName)
+  const params = {
+    Bucket: bucketName,
+    Key: filepath,
+  }
+  return objectStore.deleteObject(params).promise()
+}
+
+exports.deleteFiles = async (bucketName, filepaths) => {
+  const objectStore = exports.ObjectStore(bucketName)
+  await exports.makeSureBucketExists(objectStore, bucketName)
+  const params = {
+    Bucket: bucketName,
+    Delete: {
+      Objects: filepaths.map(path => ({ Key: path })),
+    },
+  }
+  return objectStore.deleteObjects(params).promise()
+}
+
+/**
+ * Delete a path, including everything within.
+ */
 exports.deleteFolder = async (bucketName, folder) => {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
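
Note: these helpers go through aws-sdk v2, where an operation call such as deleteObjects returns an AWS.Request that is only dispatched once .promise() (or .send()) is invoked, hence the chaining above. A minimal usage sketch; the bucket and key names are illustrative only:

    // remove a batch of objects in a single S3 DeleteObjects round trip
    await deleteFiles("app-attachments", [
      "rows/abc123/photo.png",
      "rows/abc123/report.pdf",
    ])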

View File

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",

View File

@@ -2076,9 +2076,9 @@ postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.27:
     supports-color "^6.1.0"
 
 postcss@^8.2.9:
-  version "8.2.10"
-  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.10.tgz#ca7a042aa8aff494b334d0ff3e9e77079f6f702b"
-  integrity sha512-b/h7CPV7QEdrqIxtAf2j31U5ef05uBDuvoXv6L51Q4rcS1jdlXAVKJv+atCFdUXYl9dyTHGyoMzIepwowRJjFw==
+  version "8.2.13"
+  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.13.tgz#dbe043e26e3c068e45113b1ed6375d2d37e2129f"
+  integrity sha512-FCE5xLH+hjbzRdpbRb1IMCvPv9yZx2QnDarBEYSN0N0HYk+TcXsEhwdFcFb+SRWOKzKGErhIEbBK2ogyLdTtfQ==
   dependencies:
     colorette "^1.2.2"
     nanoid "^3.1.22"

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.27-alpha.7",
-    "@budibase/client": "^1.0.27-alpha.7",
+    "@budibase/bbui": "^1.0.27-alpha.11",
+    "@budibase/client": "^1.0.27-alpha.11",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^1.0.27-alpha.7",
+    "@budibase/string-templates": "^1.0.27-alpha.11",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

View File

@@ -23,10 +23,10 @@ function prepareData(config) {
   return datasource
 }
 
-export async function saveDatasource(config) {
+export async function saveDatasource(config, skipFetch = false) {
   const datasource = prepareData(config)
   // Create datasource
-  const resp = await datasources.save(datasource, datasource.plus)
+  const resp = await datasources.save(datasource, !skipFetch && datasource.plus)
   // update the tables in case data source plus
   await tables.fetch()
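
Note: the extra flag lets a "plus" (relational) datasource be saved without immediately introspecting its tables, while !skipFetch && datasource.plus keeps the old fetching behaviour as the default. Illustrative call sites, assuming the config object the builder already constructs:

    // default: save and, for plus datasources, fetch table schemas
    await saveDatasource(config)

    // skip the potentially slow table fetch, e.g. when the user picks
    // "Skip table fetch" in the creation modal below
    await saveDatasource(config, true)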

View File

@@ -199,7 +199,6 @@
   <Body>
     Tell budibase how your tables are related to get even more smart features.
   </Body>
-{/if}
 {#if relationshipInfo && relationshipInfo.length > 0}
   <Table
     on:click={({ detail }) => openRelationshipModal(detail.from, detail.to)}
@@ -212,6 +211,7 @@
 {:else}
   <Body size="S"><i>No relationships configured.</i></Body>
 {/if}
+{/if}
 
 <style>
   .query-header {

View File

@@ -5,22 +5,28 @@
   import { IntegrationNames } from "constants/backend"
   import cloneDeep from "lodash/cloneDeepWith"
   import { saveDatasource as save } from "builderStore/datasource"
+  import { onMount } from "svelte"
 
   export let integration
   export let modal
 
   // kill the reference so the input isn't saved
   let datasource = cloneDeep(integration)
+  let skipFetch = false
 
   async function saveDatasource() {
     try {
-      const resp = await save(datasource)
+      const resp = await save(datasource, skipFetch)
       $goto(`./datasource/${resp._id}`)
       notifications.success(`Datasource updated successfully.`)
     } catch (err) {
       notifications.error(`Error saving datasource: ${err}`)
     }
   }
+
+  onMount(() => {
+    skipFetch = false
+  })
 </script>
 
 <ModalContent
@@ -28,9 +34,16 @@
   onConfirm={() => saveDatasource()}
   onCancel={() => modal.show()}
   confirmText={datasource.plus
-    ? "Fetch tables from database"
+    ? "Save and fetch tables"
     : "Save and continue to query"}
   cancelText="Back"
+  showSecondaryButton={datasource.plus}
+  secondaryButtonText={datasource.plus ? "Skip table fetch" : undefined}
+  secondaryAction={() => {
+    skipFetch = true
+    saveDatasource()
+    return true
+  }}
   size="L"
 >
   <Layout noPadding>

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.27-alpha.7",
+    "@budibase/bbui": "^1.0.27-alpha.11",
     "@budibase/standard-components": "^0.9.139",
-    "@budibase/string-templates": "^1.0.27-alpha.7",
+    "@budibase/string-templates": "^1.0.27-alpha.11",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"

View File

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {
@@ -70,9 +70,9 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "^10.0.3",
-    "@budibase/backend-core": "^1.0.27-alpha.7",
-    "@budibase/client": "^1.0.27-alpha.7",
-    "@budibase/string-templates": "^1.0.27-alpha.7",
+    "@budibase/backend-core": "^1.0.27-alpha.11",
+    "@budibase/client": "^1.0.27-alpha.11",
+    "@budibase/string-templates": "^1.0.27-alpha.11",
     "@bull-board/api": "^3.7.0",
     "@bull-board/koa": "^3.7.0",
     "@elastic/elasticsearch": "7.10.0",

View File

@@ -10,6 +10,7 @@ const {
 const { BuildSchemaErrors, InvalidColumns } = require("../../constants")
 const { integrations } = require("../../integrations")
 const { getDatasourceAndQuery } = require("./row/utils")
+const { invalidateDynamicVariables } = require("../../threads/utils")
 
 exports.fetch = async function (ctx) {
   const database = new CouchDB(ctx.appId)
@@ -57,10 +58,43 @@ exports.buildSchemaFromDb = async function (ctx) {
   ctx.body = response
 }
 
+/**
+ * Check for variables that have been updated or removed and invalidate them.
+ */
+const invalidateVariables = async (existingDatasource, updatedDatasource) => {
+  const existingVariables = existingDatasource.config.dynamicVariables
+  const updatedVariables = updatedDatasource.config.dynamicVariables
+  const toInvalidate = []
+  if (!existingVariables) {
+    return
+  }
+  if (!updatedVariables) {
+    // invalidate all
+    toInvalidate.push(...existingVariables)
+  } else {
+    // invalidate changed / removed
+    existingVariables.forEach(existing => {
+      const unchanged = updatedVariables.find(
+        updated =>
+          existing.name === updated.name &&
+          existing.queryId === updated.queryId &&
+          existing.value === updated.value
+      )
+      if (!unchanged) {
+        toInvalidate.push(existing)
+      }
+    })
+  }
+  await invalidateDynamicVariables(toInvalidate)
+}
+
 exports.update = async function (ctx) {
   const db = new CouchDB(ctx.appId)
   const datasourceId = ctx.params.datasourceId
   let datasource = await db.get(datasourceId)
+
+  await invalidateVariables(datasource, ctx.request.body)
+
   datasource = { ...datasource, ...ctx.request.body }
 
   const response = await db.put(datasource)
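
Note: to make the diffing rule concrete, a dynamic variable survives the update only if an entry with the same name, queryId and value is still present; everything else is passed to invalidateDynamicVariables. A worked example with assumed values:

    // existing config on the stored datasource
    const existing = [
      { queryId: "q1", name: "variable3", value: "{{ data.0.[value] }}" },
      { queryId: "q1", name: "variable4", value: "{{ data.0.[id] }}" },
    ]
    // incoming config from ctx.request.body
    const updated = [
      // same name/queryId but a different value -> treated as changed
      { queryId: "q1", name: "variable3", value: "{{ data.1.[value] }}" },
    ]
    // invalidateVariables would invalidate both variable3 (value changed)
    // and variable4 (removed)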

View File

@@ -11,6 +11,7 @@ const {
   inputProcessing,
   outputProcessing,
   processAutoColumn,
+  cleanupAttachments,
 } = require("../../../utilities/rowProcessor")
 const { FieldTypes } = require("../../../constants")
 const { isEqual } = require("lodash")
@@ -25,6 +26,7 @@ const {
   getFromDesignDoc,
   getFromMemoryDoc,
 } = require("../view/utils")
+const { cloneDeep } = require("lodash/fp")
 
 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -109,14 +111,14 @@ exports.patch = async ctx => {
   const inputs = ctx.request.body
   const tableId = inputs.tableId
   const isUserTable = tableId === InternalTables.USER_METADATA
-  let dbRow
+  let oldRow
   try {
-    dbRow = await db.get(inputs._id)
+    oldRow = await db.get(inputs._id)
   } catch (err) {
     if (isUserTable) {
       // don't include the rev, it'll be the global rev
       // this time
-      dbRow = {
+      oldRow = {
         _id: inputs._id,
       }
     } else {
@@ -125,13 +127,14 @@ exports.patch = async ctx => {
   }
   let dbTable = await db.get(tableId)
   // need to build up full patch fields before coerce
+  let combinedRow = cloneDeep(oldRow)
   for (let key of Object.keys(inputs)) {
     if (!dbTable.schema[key]) continue
-    dbRow[key] = inputs[key]
+    combinedRow[key] = inputs[key]
   }
 
   // this returns the table and row in case they have been updated
-  let { table, row } = inputProcessing(ctx.user, dbTable, dbRow)
+  let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow)
   const validateResult = await validate({
     row,
     table,
@@ -149,6 +152,8 @@ exports.patch = async ctx => {
     tableId: row.tableId,
     table,
   })
+  // check if any attachments removed
+  await cleanupAttachments(appId, table, { oldRow, row })
 
   if (isUserTable) {
     // the row has been updated, need to put it into the ctx
@@ -295,6 +300,8 @@ exports.destroy = async function (ctx) {
     row,
     tableId: row.tableId,
   })
+  // remove any attachments that were on the row from object storage
+  await cleanupAttachments(appId, table, { row })
 
   let response
   if (ctx.params.tableId === InternalTables.USER_METADATA) {
@@ -341,6 +348,8 @@ exports.bulkDestroy = async ctx => {
   } else {
     await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
   }
+  // remove any attachments that were on the rows from object storage
+  await cleanupAttachments(appId, table, { rows })
   await Promise.all(updates)
   return { response: { ok: true }, rows }
 }
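
Note: the cloneDeep here is what makes the attachment diff possible — previously the fetched row was patched in place, which would have destroyed the old attachment state before cleanupAttachments could compare it. A small sketch of the aliasing problem, under assumed row data:

    // without the clone, mutating the patched row would also mutate oldRow
    const oldRow = { photo: [{ key: "a.png" }] }
    const combinedRow = cloneDeep(oldRow)
    combinedRow.photo = [] // user cleared the attachment column

    // oldRow.photo still holds [{ key: "a.png" }], so the removed key
    // "a.png" can be detected and deleted from object storage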

View File

@@ -4,6 +4,7 @@ let setup = require("./utilities")
 let { basicDatasource } = setup.structures
 let { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const pg = require("pg")
+const { checkCacheForDynamicVariable } = require("../../../threads/utils")
 
 describe("/datasources", () => {
   let request = setup.getRequest()
@@ -31,6 +32,50 @@ describe("/datasources", () => {
     })
   })
 
+  describe("update", () => {
+    it("should update an existing datasource", async () => {
+      datasource.name = "Updated Test"
+      const res = await request
+        .put(`/api/datasources/${datasource._id}`)
+        .send(datasource)
+        .set(config.defaultHeaders())
+        .expect("Content-Type", /json/)
+        .expect(200)
+
+      expect(res.body.datasource.name).toEqual("Updated Test")
+      expect(res.body.errors).toBeUndefined()
+    })
+
+    describe("dynamic variables", () => {
+      async function preview(datasource, fields) {
+        return config.previewQuery(request, config, datasource, fields)
+      }
+
+      it("should invalidate changed or removed variables", async () => {
+        const { datasource, query } = await config.dynamicVariableDatasource()
+        // preview once to cache variables
+        await preview(datasource, { path: "www.test.com", queryString: "test={{ variable3 }}" })
+        // check variables in cache
+        let contents = await checkCacheForDynamicVariable(query._id, "variable3")
+        expect(contents.rows.length).toEqual(1)
+
+        // update the datasource to remove the variables
+        datasource.config.dynamicVariables = []
+        const res = await request
+          .put(`/api/datasources/${datasource._id}`)
+          .send(datasource)
+          .set(config.defaultHeaders())
+          .expect("Content-Type", /json/)
+          .expect(200)
+        expect(res.body.errors).toBeUndefined()
+
+        // check variables no longer in cache
+        contents = await checkCacheForDynamicVariable(query._id, "variable3")
+        expect(contents).toBe(null)
+      })
+    })
+  })
+
   describe("fetch", () => {
     it("returns all the datasources from the server", async () => {
       const res = await request

View File

@@ -229,52 +229,14 @@ describe("/queries", () => {
     })
   })
 
-  describe("test variables", () => {
-    async function restDatasource(cfg) {
-      return await config.createDatasource({
-        datasource: {
-          ...basicDatasource().datasource,
-          source: "REST",
-          config: cfg || {},
-        },
-      })
-    }
-
-    async function dynamicVariableDatasource() {
-      const datasource = await restDatasource()
-      const basedOnQuery = await config.createQuery({
-        ...basicQuery(datasource._id),
-        fields: {
-          path: "www.google.com",
-        },
-      })
-      await config.updateDatasource({
-        ...datasource,
-        config: {
-          dynamicVariables: [
-            { queryId: basedOnQuery._id, name: "variable3", value: "{{ data.0.[value] }}" },
-          ],
-        },
-      })
-      return { datasource, query: basedOnQuery }
-    }
-
+  describe("variables", () => {
     async function preview(datasource, fields) {
-      return await request
-        .post(`/api/queries/preview`)
-        .send({
-          datasourceId: datasource._id,
-          parameters: {},
-          fields,
-          queryVerb: "read",
-        })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
+      return config.previewQuery(request, config, datasource, fields)
     }
 
     it("should work with static variables", async () => {
-      const datasource = await restDatasource({
+      const datasource = await config.restDatasource({
         staticVariables: {
           variable: "google",
           variable2: "1",
@@ -290,7 +252,7 @@ describe("/queries", () => {
     })
 
     it("should work with dynamic variables", async () => {
-      const { datasource } = await dynamicVariableDatasource()
+      const { datasource } = await config.dynamicVariableDatasource()
       const res = await preview(datasource, {
         path: "www.google.com",
         queryString: "test={{ variable3 }}",
@@ -300,7 +262,7 @@ describe("/queries", () => {
     })
 
     it("check that it automatically retries on fail with cached dynamics", async () => {
-      const { datasource, query: base } = await dynamicVariableDatasource()
+      const { datasource, query: base } = await config.dynamicVariableDatasource()
       // preview once to cache
       await preview(datasource, { path: "www.google.com", queryString: "test={{ variable3 }}" })
       // check it's in cache
@@ -313,5 +275,24 @@ describe("/queries", () => {
       expect(res.body.schemaFields).toEqual(["fails", "url", "opts"])
       expect(res.body.rows[0].fails).toEqual(1)
     })
+
+    it("deletes variables when linked query is deleted", async () => {
+      const { datasource, query: base } = await config.dynamicVariableDatasource()
+      // preview once to cache
+      await preview(datasource, { path: "www.google.com", queryString: "test={{ variable3 }}" })
+      // check it's in cache
+      let contents = await checkCacheForDynamicVariable(base._id, "variable3")
+      expect(contents.rows.length).toEqual(1)
+
+      // delete the query
+      await request
+        .delete(`/api/queries/${base._id}/${base._rev}`)
+        .set(config.defaultHeaders())
+        .expect(200)
+
+      // check variables no longer in cache
+      contents = await checkCacheForDynamicVariable(base._id, "variable3")
+      expect(contents).toBe(null)
+    })
   })
 })

View File

@@ -36,7 +36,7 @@ function generateSchema(
     case FieldTypes.STRING:
     case FieldTypes.OPTIONS:
     case FieldTypes.LONGFORM:
-      schema.string(key)
+      schema.text(key)
       break
     case FieldTypes.NUMBER:
       // if meta is specified then this is a junction table entry
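
Note: this schema builder appears to wrap knex, where string() maps to VARCHAR(255) on most dialects while text() maps to an unbounded TEXT column, so long-form values no longer risk truncation. Roughly:

    // knex equivalents of the two branches
    table.string("description") // VARCHAR(255) on most SQL dialects
    table.text("description")   // TEXT, no practical length limit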

View File

@@ -326,6 +326,53 @@ class TestConfiguration {
     return this.datasource
   }
 
+  async restDatasource(cfg) {
+    return this.createDatasource({
+      datasource: {
+        ...basicDatasource().datasource,
+        source: "REST",
+        config: cfg || {},
+      },
+    })
+  }
+
+  async dynamicVariableDatasource() {
+    let datasource = await this.restDatasource()
+    const basedOnQuery = await this.createQuery({
+      ...basicQuery(datasource._id),
+      fields: {
+        path: "www.google.com",
+      },
+    })
+    datasource = await this.updateDatasource({
+      ...datasource,
+      config: {
+        dynamicVariables: [
+          {
+            queryId: basedOnQuery._id,
+            name: "variable3",
+            value: "{{ data.0.[value] }}",
+          },
+        ],
+      },
+    })
+    return { datasource, query: basedOnQuery }
+  }
+
+  async previewQuery(request, config, datasource, fields) {
+    return request
+      .post(`/api/queries/preview`)
+      .send({
+        datasourceId: datasource._id,
+        parameters: {},
+        fields,
+        queryVerb: "read",
+      })
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+  }
+
   async createQuery(config = null) {
     if (!this.datasource && !config) {
       throw "No data source created for query."

View File

@@ -46,7 +46,10 @@ class QueryRunner {
 
     // transform as required
     if (transformer) {
-      const runner = new ScriptRunner(transformer, { data: rows })
+      const runner = new ScriptRunner(transformer, {
+        data: rows,
+        params: parameters,
+      })
       rows = runner.execute()
     }
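
Note: transformers now receive the query parameters alongside the row data, so a transformer script can reference params directly. A sketch of a transformer body, assuming the sandbox exposes the context keys by name as it already does for data:

    // transformer script: trim and annotate rows using query parameters
    const limit = parseInt(params.limit || "10", 10)
    return data.slice(0, limit).map(row => ({ ...row, source: params.source }))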

View File

@@ -2,6 +2,7 @@ const {
   ObjectStore,
   makeSureBucketExists,
   upload,
+  deleteFiles,
   streamUpload,
   retrieve,
   retrieveToTmp,
@@ -28,3 +29,4 @@ exports.retrieveToTmp = retrieveToTmp
 exports.deleteFolder = deleteFolder
 exports.uploadDirectory = uploadDirectory
 exports.downloadTarball = downloadTarball
+exports.deleteFiles = deleteFiles

View File

@@ -3,6 +3,10 @@ const { cloneDeep } = require("lodash/fp")
 const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
 const { attachmentsRelativeURL } = require("../index")
 const { processFormulas } = require("./utils")
+const { deleteFiles } = require("../../utilities/fileSystem/utilities")
+const { ObjectStoreBuckets } = require("../../constants")
+const { isProdAppID, getDeployedAppID, dbExists } = require("@budibase/auth/db")
+const CouchDB = require("../../db")
 
 const BASE_AUTO_ID = 1
 
@@ -95,6 +99,23 @@ const TYPE_TRANSFORM_MAP = {
   },
 }
 
+/**
+ * Given the old state of the row and the new one after an update, this will
+ * find the keys that have been removed in the updated row.
+ */
+function getRemovedAttachmentKeys(oldRow, row, attachmentKey) {
+  if (!oldRow[attachmentKey]) {
+    return []
+  }
+  const oldKeys = oldRow[attachmentKey].map(attachment => attachment.key)
+  // no attachments in new row, all removed
+  if (!row[attachmentKey]) {
+    return oldKeys
+  }
+  const newKeys = row[attachmentKey].map(attachment => attachment.key)
+  return oldKeys.filter(key => newKeys.indexOf(key) === -1)
+}
+
 /**
  * This will update any auto columns that are found on the row/table with the correct information based on
  * time now and the current logged in user making the request.
@@ -272,3 +293,45 @@ exports.outputProcessing = async (
   }
   return wasArray ? enriched : enriched[0]
 }
+
+/**
+ * Clean up any attachments that were attached to a row.
+ * @param {string} appId The ID of the app from which a row is being deleted.
+ * @param {object} table The table from which a row is being removed.
+ * @param {any} row optional - the row being removed.
+ * @param {any} rows optional - if multiple rows being deleted can do this in bulk.
+ * @param {any} oldRow optional - if updating a row this will determine the difference.
+ * @return {Promise<void>} When all attachments have been removed this will return.
+ */
+exports.cleanupAttachments = async (appId, table, { row, rows, oldRow }) => {
+  if (!isProdAppID(appId)) {
+    const prodAppId = getDeployedAppID(appId)
+    // if prod exists, then don't allow deleting
+    const exists = await dbExists(CouchDB, prodAppId)
+    if (exists) {
+      return
+    }
+  }
+  let files = []
+  function addFiles(row, key) {
+    if (row[key]) {
+      files = files.concat(row[key].map(attachment => attachment.key))
+    }
+  }
+  for (let [key, schema] of Object.entries(table.schema)) {
+    if (schema.type !== FieldTypes.ATTACHMENT) {
+      continue
+    }
+    // if updating, need to manage the differences
+    if (oldRow && row) {
+      files = files.concat(getRemovedAttachmentKeys(oldRow, row, key))
+    } else if (row) {
+      addFiles(row, key)
+    } else if (rows) {
+      rows.forEach(row => addFiles(row, key))
+    }
+  }
+  if (files.length > 0) {
+    return deleteFiles(ObjectStoreBuckets.APPS, files)
+  }
+}
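
Note: cleanup is deliberately skipped for a dev app whose production counterpart exists, since both share attachment objects. The key diffing itself is straightforward; a worked example with assumed rows:

    const oldRow = { files: [{ key: "a.png" }, { key: "b.png" }] }
    const row = { files: [{ key: "b.png" }] }

    getRemovedAttachmentKeys(oldRow, row, "files") // -> ["a.png"]
    getRemovedAttachmentKeys(oldRow, {}, "files")  // -> ["a.png", "b.png"]
    getRemovedAttachmentKeys({}, row, "files")     // -> []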

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",

View File

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "1.0.27-alpha.7",
+  "version": "1.0.27-alpha.11",
   "description": "Budibase background service",
   "main": "src/index.js",
   "repository": {
@@ -29,8 +29,8 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "dependencies": {
-    "@budibase/backend-core": "^1.0.27-alpha.7",
-    "@budibase/string-templates": "^1.0.27-alpha.7",
+    "@budibase/backend-core": "^1.0.27-alpha.11",
+    "@budibase/string-templates": "^1.0.27-alpha.11",
     "@koa/router": "^8.0.0",
     "@sentry/node": "^6.0.0",
     "@techpass/passport-openidconnect": "^0.3.0",