TypeScript conversion of the table controllers.

mike12345567 2022-11-22 13:56:01 +00:00
parent 4cdcafac36
commit 8a4da7d4ce
13 changed files with 304 additions and 254 deletions

View File

@@ -1,10 +1,5 @@
 import { BuiltinPermissionID, PermissionLevel } from "./permissions"
-import {
-  generateRoleID,
-  getRoleParams,
-  DocumentType,
-  SEPARATOR,
-} from "../db/utils"
+import { generateRoleID, getRoleParams, DocumentType, SEPARATOR } from "../db"
 import { getAppDB } from "../context"
 import { doWithDB } from "../db"
 import { Screen, Role as RoleDoc } from "@budibase/types"
@@ -30,20 +25,17 @@ const EXTERNAL_BUILTIN_ROLE_IDS = [
   BUILTIN_IDS.PUBLIC,
 ]

-export class Role {
+export class Role implements RoleDoc {
   _id: string
   name: string
-  permissionId?: string
+  permissionId: string
   inherits?: string
+  permissions = {}

-  constructor(id: string, name: string) {
+  constructor(id: string, name: string, permissionId: string) {
     this._id = id
     this.name = name
-  }
-
-  addPermission(permissionId: string) {
     this.permissionId = permissionId
-    return this
   }

   addInheritance(inherits: string) {
@@ -53,24 +45,26 @@ export class Role {
 }

 const BUILTIN_ROLES = {
-  ADMIN: new Role(BUILTIN_IDS.ADMIN, "Admin")
-    .addPermission(BuiltinPermissionID.ADMIN)
-    .addInheritance(BUILTIN_IDS.POWER),
-  POWER: new Role(BUILTIN_IDS.POWER, "Power")
-    .addPermission(BuiltinPermissionID.POWER)
-    .addInheritance(BUILTIN_IDS.BASIC),
-  BASIC: new Role(BUILTIN_IDS.BASIC, "Basic")
-    .addPermission(BuiltinPermissionID.WRITE)
-    .addInheritance(BUILTIN_IDS.PUBLIC),
-  PUBLIC: new Role(BUILTIN_IDS.PUBLIC, "Public").addPermission(
-    BuiltinPermissionID.PUBLIC
-  ),
-  BUILDER: new Role(BUILTIN_IDS.BUILDER, "Builder").addPermission(
-    BuiltinPermissionID.ADMIN
-  ),
+  ADMIN: new Role(
+    BUILTIN_IDS.ADMIN,
+    "Admin",
+    BuiltinPermissionID.ADMIN
+  ).addInheritance(BUILTIN_IDS.POWER),
+  POWER: new Role(
+    BUILTIN_IDS.POWER,
+    "Power",
+    BuiltinPermissionID.POWER
+  ).addInheritance(BUILTIN_IDS.BASIC),
+  BASIC: new Role(
+    BUILTIN_IDS.BASIC,
+    "Basic",
+    BuiltinPermissionID.WRITE
+  ).addInheritance(BUILTIN_IDS.PUBLIC),
+  PUBLIC: new Role(BUILTIN_IDS.PUBLIC, "Public", BuiltinPermissionID.PUBLIC),
+  BUILDER: new Role(BUILTIN_IDS.BUILDER, "Builder", BuiltinPermissionID.ADMIN),
 }

-export function getBuiltinRoles() {
+export function getBuiltinRoles(): { [key: string]: RoleDoc } {
   return cloneDeep(BUILTIN_ROLES)
 }
@@ -104,7 +98,7 @@ export function builtinRoleToNumber(id?: string) {
     if (!role) {
       break
     }
-    role = builtins[role.inherits]
+    role = builtins[role.inherits!]
     count++
   } while (role !== null)
   return count
@@ -129,12 +123,12 @@ export async function roleToNumber(id?: string) {
 /**
  * Returns whichever builtin roleID is lower.
  */
-export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string) {
+export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
   if (!roleId1) {
-    return roleId2
+    return roleId2 as string
   }
   if (!roleId2) {
-    return roleId1
+    return roleId1 as string
   }
   return builtinRoleToNumber(roleId1) > builtinRoleToNumber(roleId2)
     ? roleId2
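For callers of this module, the practical effect of the Role change above is that the permission ID moves from a chained .addPermission() call into the constructor. A minimal sketch, not part of the commit (the custom role ID is illustrative only):

// before this commit:
// new Role("CUSTOM_ROLE_ID", "Custom").addPermission(BuiltinPermissionID.WRITE)
// after this commit, the permission ID is a required constructor argument:
const customRole = new Role(
  "CUSTOM_ROLE_ID", // hypothetical role ID, not a builtin
  "Custom",
  BuiltinPermissionID.WRITE
).addInheritance(BUILTIN_IDS.BASIC)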

View File

@@ -1,13 +1,14 @@
-const { FieldTypes, FormulaTypes } = require("../../../constants")
-const { clearColumns } = require("./utils")
-const { doesContainStrings } = require("@budibase/string-templates")
-const { cloneDeep } = require("lodash/fp")
-const { isEqual, uniq } = require("lodash")
-const { updateAllFormulasInTable } = require("../row/staticFormula")
-const { getAppDB } = require("@budibase/backend-core/context")
-const sdk = require("../../../sdk")
+import { FieldTypes, FormulaTypes } from "../../../constants"
+import { clearColumns } from "./utils"
+import { doesContainStrings } from "@budibase/string-templates"
+import { cloneDeep } from "lodash/fp"
+import { isEqual, uniq } from "lodash"
+import { updateAllFormulasInTable } from "../row/staticFormula"
+import { context } from "@budibase/backend-core"
+import { FieldSchema, Table } from "@budibase/types"
+import sdk from "../../../sdk"

-function isStaticFormula(column) {
+function isStaticFormula(column: FieldSchema) {
   return (
     column.type === FieldTypes.FORMULA &&
     column.formulaType === FormulaTypes.STATIC
@@ -18,8 +19,8 @@ function isStaticFormula(column) {
  * This retrieves the formula columns from a table schema that use a specified column name
  * in the formula.
  */
-function getFormulaThatUseColumn(table, columnNames) {
-  let formula = []
+function getFormulaThatUseColumn(table: Table, columnNames: string[] | string) {
+  let formula: string[] = []
   columnNames = Array.isArray(columnNames) ? columnNames : [columnNames]
   for (let column of Object.values(table.schema)) {
     // not a static formula, or doesn't contain a relationship
@@ -38,7 +39,10 @@ function getFormulaThatUseColumn(table, columnNames) {
  * This functions checks for when a related table, column or related column is deleted, if any
  * tables need to have the formula column removed.
  */
-async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
+async function checkIfFormulaNeedsCleared(
+  table: Table,
+  { oldTable, deletion }: { oldTable?: Table; deletion?: boolean }
+) {
   // start by retrieving all tables, remove the current table from the list
   const tables = (await sdk.tables.getAllInternalTables()).filter(
     tbl => tbl._id !== table._id
@@ -49,11 +53,14 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
   )
   // remove any formula columns that used related columns
   for (let removed of removedColumns) {
-    let tableToUse = table
+    let tableToUse: Table | undefined = table
     // if relationship, get the related table
     if (removed.type === FieldTypes.LINK) {
       tableToUse = tables.find(table => table._id === removed.tableId)
     }
+    if (!tableToUse) {
+      continue
+    }
     const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name)
     if (columnsToDelete.length > 0) {
       await clearColumns(table, columnsToDelete)
@@ -71,11 +78,11 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
     // look to see if the column was used in a relationship formula,
     // relationships won't be used for this
     if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
-      let relatedFormulaToRemove = []
+      let relatedFormulaToRemove: string[] = []
       for (let column of relatedColumns) {
         relatedFormulaToRemove = relatedFormulaToRemove.concat(
           getFormulaThatUseColumn(relatedTable, [
-            column.fieldName,
+            column.fieldName!,
             removed.name,
           ])
         )
@@ -95,13 +102,14 @@ async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
  * specifically only for static formula.
  */
 async function updateRelatedFormulaLinksOnTables(
-  table,
-  { deletion } = { deletion: false }
+  table: Table,
+  { deletion }: { deletion?: boolean } = {}
 ) {
-  const db = getAppDB()
+  const tableId: string = table._id!
+  const db = context.getAppDB()
   // start by retrieving all tables, remove the current table from the list
   const tables = (await sdk.tables.getAllInternalTables()).filter(
-    tbl => tbl._id !== table._id
+    tbl => tbl._id !== tableId
   )
   // clone the tables, so we can compare at end
   const initialTables = cloneDeep(tables)
@@ -114,7 +122,7 @@ async function updateRelatedFormulaLinksOnTables(
     if (!otherTable.relatedFormula) {
       continue
     }
-    const index = otherTable.relatedFormula.indexOf(table._id)
+    const index = otherTable.relatedFormula.indexOf(tableId)
     if (index !== -1) {
       otherTable.relatedFormula.splice(index, 1)
     }
@@ -133,11 +141,11 @@ async function updateRelatedFormulaLinksOnTables(
       if (
         relatedTable &&
         (!relatedTable.relatedFormula ||
-          !relatedTable.relatedFormula.includes(table._id))
+          !relatedTable.relatedFormula.includes(tableId))
       ) {
         relatedTable.relatedFormula = relatedTable.relatedFormula
-          ? [...relatedTable.relatedFormula, table._id]
-          : [table._id]
+          ? [...relatedTable.relatedFormula, tableId]
+          : [tableId]
       }
     }
   }
@@ -150,7 +158,10 @@ async function updateRelatedFormulaLinksOnTables(
   }
 }

-async function checkIfFormulaUpdated(table, { oldTable }) {
+async function checkIfFormulaUpdated(
+  table: Table,
+  { oldTable }: { oldTable?: Table }
+) {
   // look to see if any formula values have changed
   const shouldUpdate = Object.values(table.schema).find(
     column =>
@@ -165,7 +176,10 @@ async function checkIfFormulaUpdated(table, { oldTable }) {
   }
 }

-exports.runStaticFormulaChecks = async (table, { oldTable, deletion }) => {
+export async function runStaticFormulaChecks(
+  table: Table,
+  { oldTable, deletion }: { oldTable?: Table; deletion?: boolean }
+) {
   await updateRelatedFormulaLinksOnTables(table, { deletion })
   await checkIfFormulaNeedsCleared(table, { oldTable, deletion })
   if (!deletion) {
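The pattern used above, an option object typed inline with optional fields, recurs throughout this conversion. A small sketch of the idea, using a hypothetical helper rather than code from the diff:

async function exampleFormulaCheck(
  table: Table,
  { oldTable, deletion }: { oldTable?: Table; deletion?: boolean } = {}
) {
  // callers may pass either option, both, or omit the options object entirely
  return deletion ? [] : Object.keys((oldTable || table).schema)
}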

View File

@@ -1,38 +1,47 @@
-const {
+import {
   buildExternalTableId,
   breakExternalTableId,
-} = require("../../../integrations/utils")
-const {
+} from "../../../integrations/utils"
+import {
   generateForeignKey,
   generateJunctionTableName,
   foreignKeyStructure,
   hasTypeChanged,
-} = require("./utils")
-const {
+} from "./utils"
+import {
   DataSourceOperation,
   FieldTypes,
   RelationshipTypes,
-} = require("../../../constants")
-const { makeExternalQuery } = require("../../../integrations/base/query")
+} from "../../../constants"
+import { makeExternalQuery } from "../../../integrations/base/query"
+import csvParser from "../../../utilities/csvParser"
+import { handleRequest } from "../row/external"
+import { events, context } from "@budibase/backend-core"
+import {
+  Datasource,
+  Table,
+  QueryJson,
+  Operation,
+  RenameColumn,
+  FieldSchema,
+  BBContext,
+  TableRequest,
+} from "@budibase/types"
+import sdk from "../../../sdk"
 const { cloneDeep } = require("lodash/fp")
-const csvParser = require("../../../utilities/csvParser")
-const { handleRequest } = require("../row/external")
-const { getAppDB } = require("@budibase/backend-core/context")
-const { events } = require("@budibase/backend-core")
-const sdk = require("../../../sdk")

 async function makeTableRequest(
-  datasource,
-  operation,
-  table,
-  tables,
-  oldTable = null,
-  renamed = null
+  datasource: Datasource,
+  operation: Operation,
+  table: Table,
+  tables: Record<string, Table>,
+  oldTable?: Table,
+  renamed?: RenameColumn
 ) {
-  const json = {
+  const json: QueryJson = {
     endpoint: {
-      datasourceId: datasource._id,
-      entityId: table._id,
+      datasourceId: datasource._id!,
+      entityId: table._id!,
       operation,
     },
     meta: {
@@ -41,15 +50,19 @@ async function makeTableRequest(
     table,
   }
   if (oldTable) {
-    json.meta.table = oldTable
+    json.meta!.table = oldTable
   }
   if (renamed) {
-    json.meta.renamed = renamed
+    json.meta!.renamed = renamed
   }
   return makeExternalQuery(datasource, json)
 }

-function cleanupRelationships(table, tables, oldTable = null) {
+function cleanupRelationships(
+  table: Table,
+  tables: Record<string, Table>,
+  oldTable?: Table
+) {
   const tableToIterate = oldTable ? oldTable : table
   // clean up relationships in couch table schemas
   for (let [key, schema] of Object.entries(tableToIterate.schema)) {
@@ -78,7 +91,7 @@ function cleanupRelationships(table, tables, oldTable = null) {
   }
 }

-function getDatasourceId(table) {
+function getDatasourceId(table: Table) {
   if (!table) {
     throw "No table supplied"
   }
@@ -88,7 +101,7 @@ function getDatasourceId(table) {
   return breakExternalTableId(table._id).datasourceId
 }

-function otherRelationshipType(type) {
+function otherRelationshipType(type?: string) {
   if (type === RelationshipTypes.MANY_TO_MANY) {
     return RelationshipTypes.MANY_TO_MANY
   }
@@ -97,13 +110,21 @@ function otherRelationshipType(type) {
     : RelationshipTypes.ONE_TO_MANY
 }

-function generateManyLinkSchema(datasource, column, table, relatedTable) {
+function generateManyLinkSchema(
+  datasource: Datasource,
+  column: FieldSchema,
+  table: Table,
+  relatedTable: Table
+): Table {
+  if (!table.primary || !relatedTable.primary) {
+    throw new Error("Unable to generate many link schema, no primary keys")
+  }
   const primary = table.name + table.primary[0]
   const relatedPrimary = relatedTable.name + relatedTable.primary[0]
   const jcTblName = generateJunctionTableName(column, table, relatedTable)
   // first create the new table
   const junctionTable = {
-    _id: buildExternalTableId(datasource._id, jcTblName),
+    _id: buildExternalTableId(datasource._id!, jcTblName),
     name: jcTblName,
     primary: [primary, relatedPrimary],
     constrained: [primary, relatedPrimary],
@@ -125,7 +146,15 @@ function generateManyLinkSchema(datasource, column, table, relatedTable) {
   return junctionTable
 }

-function generateLinkSchema(column, table, relatedTable, type) {
+function generateLinkSchema(
+  column: FieldSchema,
+  table: Table,
+  relatedTable: Table,
+  type: string
+) {
+  if (!table.primary || !relatedTable.primary) {
+    throw new Error("Unable to generate link schema, no primary keys")
+  }
   const isOneSide = type === RelationshipTypes.ONE_TO_MANY
   const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
   // generate a foreign key
@@ -136,7 +165,12 @@ function generateLinkSchema(column, table, relatedTable, type) {
   return foreignKey
 }

-function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
+function generateRelatedSchema(
+  linkColumn: FieldSchema,
+  table: Table,
+  relatedTable: Table,
+  columnName: string
+) {
   // generate column for other table
   const relatedSchema = cloneDeep(linkColumn)
   // swap them from the main link
@@ -159,21 +193,21 @@ function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
   table.schema[columnName] = relatedSchema
 }

-function isRelationshipSetup(column) {
+function isRelationshipSetup(column: FieldSchema) {
   return column.foreignKey || column.through
 }

-exports.save = async function (ctx) {
-  const table = ctx.request.body
-  const { _rename: renamed } = table
+export async function save(ctx: BBContext) {
+  const table: TableRequest = ctx.request.body
+  const renamed = table?._rename
   // can't do this right now
   delete table.dataImport
-  const datasourceId = getDatasourceId(ctx.request.body)
+  const datasourceId = getDatasourceId(ctx.request.body)!
   // table doesn't exist already, note that it is created
   if (!table._id) {
     table.created = true
   }
-  let tableToSave = {
+  let tableToSave: TableRequest = {
     type: "table",
     _id: buildExternalTableId(datasourceId, table.name),
     ...table,
@@ -188,10 +222,10 @@ exports.save = async function (ctx) {
     ctx.throw(400, "A column type has changed.")
   }

-  const db = getAppDB()
+  const db = context.getAppDB()
   const datasource = await db.get(datasourceId)
   const oldTables = cloneDeep(datasource.entities)
-  const tables = datasource.entities
+  const tables: Record<string, Table> = datasource.entities

   const extraTablesToUpdate = []
@@ -203,8 +237,11 @@ exports.save = async function (ctx) {
     const relatedTable = Object.values(tables).find(
       table => table._id === schema.tableId
     )
-    const relatedColumnName = schema.fieldName
-    const relationType = schema.relationshipType
+    if (!relatedTable) {
+      continue
+    }
+    const relatedColumnName = schema.fieldName!
+    const relationType = schema.relationshipType!
     if (relationType === RelationshipTypes.MANY_TO_MANY) {
       const junctionTable = generateManyLinkSchema(
         datasource,
@@ -244,9 +281,7 @@ exports.save = async function (ctx) {
   cleanupRelationships(tableToSave, tables, oldTable)

-  const operation = oldTable
-    ? DataSourceOperation.UPDATE_TABLE
-    : DataSourceOperation.CREATE_TABLE
+  const operation = oldTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
   await makeTableRequest(
     datasource,
     operation,
@@ -258,9 +293,7 @@ exports.save = async function (ctx) {
   // update any extra tables (like foreign keys in other tables)
   for (let extraTable of extraTablesToUpdate) {
     const oldExtraTable = oldTables[extraTable.name]
-    let op = oldExtraTable
-      ? DataSourceOperation.UPDATE_TABLE
-      : DataSourceOperation.CREATE_TABLE
+    let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
     await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
   }
@@ -280,18 +313,20 @@ exports.save = async function (ctx) {
   return tableToSave
 }

-exports.destroy = async function (ctx) {
-  const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)
+export async function destroy(ctx: BBContext) {
+  const tableToDelete: TableRequest = await sdk.tables.getTable(
+    ctx.params.tableId
+  )
   if (!tableToDelete || !tableToDelete.created) {
     ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
   }
   const datasourceId = getDatasourceId(tableToDelete)

-  const db = getAppDB()
+  const db = context.getAppDB()
   const datasource = await db.get(datasourceId)
   const tables = datasource.entities

-  const operation = DataSourceOperation.DELETE_TABLE
+  const operation = Operation.DELETE_TABLE
   await makeTableRequest(datasource, operation, tableToDelete, tables)

   cleanupRelationships(tableToDelete, tables)
@@ -302,7 +337,7 @@ exports.destroy = async function (ctx) {
   return tableToDelete
 }

-exports.bulkImport = async function (ctx) {
+export async function bulkImport(ctx: BBContext) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { dataImport } = ctx.request.body
   if (!dataImport || !dataImport.schema || !dataImport.csvString) {
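The controller-level change repeated in this file (and in the index controller below) is the move from CommonJS-style exports.x = async function (ctx) to typed named exports taking a BBContext. A minimal sketch of the shape, with a hypothetical handler name:

// before: exports.fetchExample = async function (ctx) { ... }
export async function fetchExample(ctx: BBContext) {
  // params and request body keep their Koa shape, now typed through BBContext
  ctx.body = await sdk.tables.getTable(ctx.params.tableId)
}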

View File

@@ -1,13 +1,13 @@
-const internal = require("./internal")
-const external = require("./external")
-const csvParser = require("../../../utilities/csvParser")
-const { isExternalTable, isSQL } = require("../../../integrations/utils")
-const { getDatasourceParams } = require("../../../db/utils")
-const { getAppDB } = require("@budibase/backend-core/context")
-const { events } = require("@budibase/backend-core")
-const sdk = require("../../../sdk")
+import * as internal from "./internal"
+import * as external from "./external"
+import csvParser from "../../../utilities/csvParser"
+import { isExternalTable, isSQL } from "../../../integrations/utils"
+import { getDatasourceParams } from "../../../db/utils"
+import { context, events } from "@budibase/backend-core"
+import { Table, BBContext } from "@budibase/types"
+import sdk from "../../../sdk"

-function pickApi({ tableId, table }) {
+function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && !tableId) {
     tableId = table._id
   }
@@ -20,8 +20,8 @@ function pickApi({ tableId, table }) {
 }

 // covers both internal and external
-exports.fetch = async function (ctx) {
-  const db = getAppDB()
+export async function fetch(ctx: BBContext) {
+  const db = context.getAppDB()

   const internal = await sdk.tables.getAllInternalTables()
@@ -34,7 +34,7 @@ exports.fetch = async function (ctx) {
   const external = externalTables.rows.flatMap(tableDoc => {
     let entities = tableDoc.doc.entities
     if (entities) {
-      return Object.values(entities).map(entity => ({
+      return Object.values(entities).map((entity: any) => ({
         ...entity,
         type: "external",
         sourceId: tableDoc.doc._id,
@@ -48,12 +48,12 @@ exports.fetch = async function (ctx) {
   ctx.body = [...internal, ...external]
 }

-exports.find = async function (ctx) {
+export async function find(ctx: BBContext) {
   const tableId = ctx.params.tableId
   ctx.body = await sdk.tables.getTable(tableId)
 }

-exports.save = async function (ctx) {
+export async function save(ctx: BBContext) {
   const appId = ctx.appId
   const table = ctx.request.body
   const importFormat =
@@ -74,7 +74,7 @@ exports.save = async function (ctx) {
   ctx.body = savedTable
 }

-exports.destroy = async function (ctx) {
+export async function destroy(ctx: BBContext) {
   const appId = ctx.appId
   const tableId = ctx.params.tableId
   const deletedTable = await pickApi({ tableId }).destroy(ctx)
@@ -86,7 +86,7 @@ exports.destroy = async function (ctx) {
   ctx.body = { message: `Table ${tableId} deleted.` }
 }

-exports.bulkImport = async function (ctx) {
+export async function bulkImport(ctx: BBContext) {
   const tableId = ctx.params.tableId
   await pickApi({ tableId }).bulkImport(ctx)
   // right now we don't trigger anything for bulk import because it
@@ -96,7 +96,7 @@ exports.bulkImport = async function (ctx) {
   ctx.body = { message: `Bulk rows created.` }
 }

-exports.validateCSVSchema = async function (ctx) {
+export async function validateCSVSchema(ctx: BBContext) {
   // tableId being specified means its an import to an existing table
   const { csvString, schema = {}, tableId } = ctx.request.body
   let existingTable

View File

@@ -13,28 +13,28 @@ import {
 } from "../../../constants"
 import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
-const { getAppDB } = require("@budibase/backend-core/context")
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { events } from "@budibase/backend-core"
+import { events, context } from "@budibase/backend-core"
+import { Database } from "@budibase/types"

 export async function clearColumns(table: any, columnNames: any) {
-  const db = getAppDB()
+  const db: Database = context.getAppDB()
   const rows = await db.allDocs(
     getRowParams(table._id, null, {
       include_docs: true,
     })
   )
-  return db.bulkDocs(
+  return (await db.bulkDocs(
     rows.rows.map(({ doc }: any) => {
       columnNames.forEach((colName: any) => delete doc[colName])
       return doc
     })
-  )
+  )) as { id: string; _rev?: string }[]
 }

 export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
-  const db = getAppDB()
+  const db = context.getAppDB()
   let updatedRows = []
   const rename = updatedTable._rename
   let deletedColumns: any = []
@@ -133,7 +133,7 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
     return table
   }

-  const db = getAppDB()
+  const db = context.getAppDB()
   // Populate the table with rows imported from CSV in a bulk update
   const data = await transform({
     ...dataImport,
@@ -150,7 +150,7 @@ export async function handleDataImport(user: any, table: any, dataImport: any) {
 }

 export async function handleSearchIndexes(table: any) {
-  const db = getAppDB()
+  const db = context.getAppDB()
   // create relevant search indexes
   if (table.indexes && table.indexes.length > 0) {
     const currentIndexes = await db.getIndexes()
@@ -214,7 +214,7 @@ class TableSaveFunctions {
   rows: any

   constructor({ user, oldTable, dataImport }: any) {
-    this.db = getAppDB()
+    this.db = context.getAppDB()
     this.user = user
     this.oldTable = oldTable
     this.dataImport = dataImport
@@ -338,7 +338,7 @@ export function generateJunctionTableName(
   return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
 }

-export function foreignKeyStructure(keyName: any, meta = null) {
+export function foreignKeyStructure(keyName: any, meta?: any) {
   const structure: any = {
     type: FieldTypes.NUMBER,
     constraints: {},

View File

@@ -1,17 +1,20 @@
-const env = require("../environment")
-const { OBJ_STORE_DIRECTORY } = require("../constants")
-const { sanitizeKey } = require("@budibase/backend-core/objectStore")
-const { generateMetadataID } = require("../db/utils")
-const Readable = require("stream").Readable
-const { getAppDB } = require("@budibase/backend-core/context")
+import env from "../environment"
+import { OBJ_STORE_DIRECTORY } from "../constants"
+import { objectStore, context } from "@budibase/backend-core"
+import { generateMetadataID } from "../db/utils"
+import { Document } from "@budibase/types"
+import stream from "stream"
+const Readable = stream.Readable

-exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
+export function wait(ms: number) {
+  return new Promise(resolve => setTimeout(resolve, ms))
+}

-exports.isDev = env.isDev
+export const isDev = env.isDev

-exports.NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
+export const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g

-exports.removeFromArray = (array, element) => {
+export function removeFromArray(array: any[], element: any) {
   const index = array.indexOf(element)
   if (index !== -1) {
     array.splice(index, 1)
@@ -25,7 +28,7 @@ exports.removeFromArray = (array, element) => {
  * @param {string} url The URL to test and remove any extra double slashes.
  * @return {string} The updated url.
  */
-exports.checkSlashesInUrl = url => {
+export function checkSlashesInUrl(url: string) {
   return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
 }
@@ -33,7 +36,7 @@ exports.checkSlashesInUrl = url => {
  * Gets the address of the object store, depending on whether self hosted or in cloud.
  * @return {string} The base URL of the object store (MinIO or S3).
  */
-exports.objectStoreUrl = () => {
+export function objectStoreUrl() {
   if (env.SELF_HOSTED || env.MINIO_URL) {
     // can use a relative url for this as all goes through the proxy (this is hosted in minio)
     return OBJ_STORE_DIRECTORY
@@ -52,9 +55,9 @@ exports.objectStoreUrl = () => {
  * @return {string} The URL to be inserted into appPackage response or server rendered
  * app index file.
  */
-exports.clientLibraryPath = (appId, version) => {
+export function clientLibraryPath(appId: string, version: string) {
   if (env.isProd()) {
-    let url = `${exports.objectStoreUrl()}/${sanitizeKey(
+    let url = `${objectStoreUrl()}/${objectStore.sanitizeKey(
       appId
     )}/budibase-client.js`
@@ -68,18 +71,19 @@ exports.clientLibraryPath = (appId, version) => {
   }
 }

-exports.attachmentsRelativeURL = attachmentKey => {
-  return exports.checkSlashesInUrl(
-    `${exports.objectStoreUrl()}/${attachmentKey}`
-  )
+export function attachmentsRelativeURL(attachmentKey: string) {
+  return checkSlashesInUrl(`${objectStoreUrl()}/${attachmentKey}`)
 }

-exports.updateEntityMetadata = async (type, entityId, updateFn) => {
-  const db = getAppDB()
+export async function updateEntityMetadata(
+  type: string,
+  entityId: string,
+  updateFn: any
+) {
+  const db = context.getAppDB()
   const id = generateMetadataID(type, entityId)
   // read it to see if it exists, we'll overwrite it no matter what
-  let rev,
-    metadata = {}
+  let rev, metadata: Document
   try {
     const oldMetadata = await db.get(id)
     rev = oldMetadata._rev
@@ -100,14 +104,18 @@ exports.updateEntityMetadata = async (type, entityId, updateFn) => {
   }
 }

-exports.saveEntityMetadata = async (type, entityId, metadata) => {
-  return exports.updateEntityMetadata(type, entityId, () => {
+export async function saveEntityMetadata(
+  type: string,
+  entityId: string,
+  metadata: Document
+) {
+  return updateEntityMetadata(type, entityId, () => {
     return metadata
   })
 }

-exports.deleteEntityMetadata = async (type, entityId) => {
-  const db = getAppDB()
+export async function deleteEntityMetadata(type: string, entityId: string) {
+  const db = context.getAppDB()
   const id = generateMetadataID(type, entityId)
   let rev
   try {
@@ -123,7 +131,7 @@ exports.deleteEntityMetadata = async (type, entityId) => {
   }
 }

-exports.escapeDangerousCharacters = string => {
+export function escapeDangerousCharacters(string: string) {
   return string
     .replace(/[\\]/g, "\\\\")
     .replace(/[\b]/g, "\\b")
@@ -133,7 +141,7 @@ exports.escapeDangerousCharacters = string => {
     .replace(/[\t]/g, "\\t")
 }

-exports.stringToReadStream = string => {
+export function stringToReadStream(string: string) {
   return new Readable({
     read() {
       this.push(string)
@@ -142,7 +150,7 @@ exports.stringToReadStream = string => {
   })
 }

-exports.formatBytes = bytes => {
+export function formatBytes(bytes: string) {
   const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
   const byteIncrements = 1024
   let unit = 0
@@ -153,7 +161,7 @@ exports.formatBytes = bytes => {
   return `${size.toFixed(size < 10 && unit > 0 ? 1 : 0)}${units[unit]}`
 }

-exports.convertBookmark = bookmark => {
+export function convertBookmark(bookmark: string) {
   const IS_NUMBER = /^\d+\.?\d*$/
   if (typeof bookmark === "string" && bookmark.match(IS_NUMBER)) {
     return parseFloat(bookmark)
@@ -161,7 +169,7 @@ exports.convertBookmark = bookmark => {
   return bookmark
 }

-exports.isQsTrue = param => {
+export function isQsTrue(param: string) {
   if (typeof param === "string") {
     return param.toLowerCase() === "true"
   } else {

View File

@@ -8,6 +8,7 @@ import { InternalTables } from "../../db/utils"
 import { TYPE_TRANSFORM_MAP } from "./map"
 import { Row, User, Table } from "@budibase/types"
 const { cloneDeep } = require("lodash/fp")
+export * from "./utils"

 type AutoColumnProcessingOpts = {
   reprocessing?: boolean
@@ -229,7 +230,7 @@ export async function cleanupAttachments(
     rows,
     oldRow,
     oldTable,
-  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable: Table }
+  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable?: Table }
 ): Promise<any> {
   const appId = context.getAppId()
   if (!dbCore.isProdAppID(appId)) {

View File

@@ -1,71 +0,0 @@
const {
PermissionLevel,
PermissionType,
getBuiltinPermissionByID,
isPermissionLevelHigherThanRead,
} = require("@budibase/backend-core/permissions")
const {
lowerBuiltinRoleID,
getBuiltinRoles,
} = require("@budibase/backend-core/roles")
const { DocumentType } = require("../db/utils")
const CURRENTLY_SUPPORTED_LEVELS = [
PermissionLevel.WRITE,
PermissionLevel.READ,
PermissionLevel.EXECUTE,
]
exports.getPermissionType = resourceId => {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
case DocumentType.TABLE:
case DocumentType.ROW:
return PermissionType.TABLE
case DocumentType.AUTOMATION:
return PermissionType.AUTOMATION
case DocumentType.WEBHOOK:
return PermissionType.WEBHOOK
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return PermissionType.QUERY
default:
// views don't have an ID, will end up here
return PermissionType.VIEW
}
}
/**
* works out the basic permissions based on builtin roles for a resource, using its ID
* @param resourceId
* @returns {{}}
*/
exports.getBasePermissions = resourceId => {
const type = exports.getPermissionType(resourceId)
const permissions = {}
for (let [roleId, role] of Object.entries(getBuiltinRoles())) {
if (!role.permissionId) {
continue
}
const perms = getBuiltinPermissionByID(role.permissionId)
const typedPermission = perms.permissions.find(perm => perm.type === type)
if (
typedPermission &&
CURRENTLY_SUPPORTED_LEVELS.indexOf(typedPermission.level) !== -1
) {
const level = typedPermission.level
permissions[level] = lowerBuiltinRoleID(permissions[level], roleId)
if (isPermissionLevelHigherThanRead(level)) {
permissions[PermissionLevel.READ] = lowerBuiltinRoleID(
permissions[PermissionLevel.READ],
roleId
)
}
}
}
return permissions
}
exports.CURRENTLY_SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS

View File

@@ -0,0 +1,65 @@
import { permissions, roles } from "@budibase/backend-core"
import { DocumentType } from "../db/utils"
export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
permissions.PermissionLevel.WRITE,
permissions.PermissionLevel.READ,
permissions.PermissionLevel.EXECUTE,
]
export function getPermissionType(resourceId: string) {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
case DocumentType.TABLE:
case DocumentType.ROW:
return permissions.PermissionType.TABLE
case DocumentType.AUTOMATION:
return permissions.PermissionType.AUTOMATION
case DocumentType.WEBHOOK:
return permissions.PermissionType.WEBHOOK
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return permissions.PermissionType.QUERY
default:
// views don't have an ID, will end up here
return permissions.PermissionType.VIEW
}
}
/**
* works out the basic permissions based on builtin roles for a resource, using its ID
*/
export function getBasePermissions(resourceId: string) {
const type = getPermissionType(resourceId)
const basePermissions: { [key: string]: string } = {}
for (let [roleId, role] of Object.entries(roles.getBuiltinRoles())) {
if (!role.permissionId) {
continue
}
const perms = permissions.getBuiltinPermissionByID(role.permissionId)
if (!perms) {
continue
}
const typedPermission = perms.permissions.find(perm => perm.type === type)
if (
typedPermission &&
CURRENTLY_SUPPORTED_LEVELS.indexOf(typedPermission.level) !== -1
) {
const level = typedPermission.level
basePermissions[level] = roles.lowerBuiltinRoleID(
basePermissions[level],
roleId
)
if (permissions.isPermissionLevelHigherThanRead(level)) {
basePermissions[permissions.PermissionLevel.READ] =
roles.lowerBuiltinRoleID(
basePermissions[permissions.PermissionLevel.READ],
roleId
)
}
}
}
return basePermissions
}
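A brief usage sketch for the rewritten module (the table ID below is made up): getBasePermissions derives the permission type from the resource ID's document prefix, then walks the builtin roles to find the lowest role allowed for each supported level.

const basePermissions = getBasePermissions("ta_my_table") // hypothetical table resource ID
// basePermissions maps a permission level (e.g. read/write) to the lowest builtin role ID that grants it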

View File

@@ -1,12 +1,11 @@
-const { InternalTables } = require("../db/utils")
-const { getGlobalUser } = require("../utilities/global")
-const { getAppDB } = require("@budibase/backend-core/context")
-const { getProdAppID } = require("@budibase/backend-core/db")
-const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
+import { InternalTables } from "../db/utils"
+import { getGlobalUser } from "./global"
+import { context, db as dbCore, roles } from "@budibase/backend-core"
+import { BBContext } from "@budibase/types"

-exports.getFullUser = async (ctx, userId) => {
+export async function getFullUser(ctx: BBContext, userId: string) {
   const global = await getGlobalUser(userId)
-  let metadata = {}
+  let metadata: any = {}
   // always prefer the user metadata _id and _rev
   delete global._id
@@ -14,7 +13,7 @@ exports.getFullUser = async (ctx, userId) => {
   try {
     // this will throw an error if the db doesn't exist, or there is no appId
-    const db = getAppDB()
+    const db = context.getAppDB()
     metadata = await db.get(userId)
   } catch (err) {
     // it is fine if there is no user metadata yet
@@ -23,14 +22,14 @@ exports.getFullUser = async (ctx, userId) => {
   return {
     ...metadata,
     ...global,
-    roleId: global.roleId || BUILTIN_ROLE_IDS.PUBLIC,
+    roleId: global.roleId || roles.BUILTIN_ROLE_IDS.PUBLIC,
     tableId: InternalTables.USER_METADATA,
     // make sure the ID is always a local ID, not a global one
     _id: userId,
   }
 }

-exports.publicApiUserFix = ctx => {
+export function publicApiUserFix(ctx: BBContext) {
   if (!ctx.request.body) {
     return ctx
   }
@@ -40,10 +39,9 @@ exports.publicApiUserFix = ctx => {
   if (!ctx.request.body.roles) {
     ctx.request.body.roles = {}
   } else {
-    const newRoles = {}
+    const newRoles: { [key: string]: any } = {}
     for (let [appId, role] of Object.entries(ctx.request.body.roles)) {
-      // @ts-ignore
-      newRoles[getProdAppID(appId)] = role
+      newRoles[dbCore.getProdAppID(appId)] = role
     }
     ctx.request.body.roles = newRoles
   }

View File

@@ -2,6 +2,6 @@ import { Document } from "../document"

 export interface Role extends Document {
   permissionId: string
-  inherits: string
+  inherits?: string
   permissions: { [key: string]: string[] }
 }

View File

@@ -1,5 +1,6 @@
 import { Document } from "../document"
 import { View } from "./view"
+import { RenameColumn } from "../../sdk"

 export interface FieldSchema {
   // TODO: replace with field types enum when done
@@ -55,3 +56,8 @@ export interface Table extends Document {
   indexes?: { [key: string]: any }
   dataImport?: { [key: string]: any }
 }
+
+export interface TableRequest extends Table {
+  _rename?: RenameColumn
+  created?: boolean
+}
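For context, a sketch of how the new TableRequest shape is used by the table controllers above (field values are illustrative, and this assumes RenameColumn carries the old and updated column names):

const request: TableRequest = {
  ...someExistingTable, // any existing Table document, hypothetical variable
  _rename: { old: "name", updated: "title" },
  created: true,
}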

View File

@@ -72,7 +72,7 @@ export interface QueryJson {
     operation: Operation
     schema?: string
   }
-  resource: {
+  resource?: {
     fields: string[]
   }
   filters?: SearchFilters
@@ -83,7 +83,7 @@ export interface QueryJson {
   meta?: {
     table?: Table
     tables?: Record<string, Table>
-    renamed: RenameColumn
+    renamed?: RenameColumn
   }
   extra?: {
     idFilter?: SearchFilters