Some TypeScript conversions, moving a few imports around.

mike12345567 2022-11-21 18:33:34 +00:00
parent f192fc72b3
commit ee4ebbfb56
26 changed files with 336 additions and 362 deletions
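The recurring pattern in this commit: deep requires into @budibase/backend-core submodules become named imports from the package root, and `exports.foo = ...` assignments become `export` statements. A minimal before/after sketch of that conversion (the file contents and function name below are illustrative, not taken from the diff):

// Before: CommonJS, deep submodule require
// const { getAppDB } = require("@budibase/backend-core/context")
// exports.fetchMetadata = async (ctx: any) => { ... }

// After: TypeScript, single barrel import
import { context } from "@budibase/backend-core"

export async function fetchMetadata(ctx: any) {
  // getAppDB() resolves the current app database from the async context
  const db = context.getAppDB()
  ctx.body = await db.get("app_metadata") // document id is illustrative
}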


@@ -390,7 +390,7 @@ export const uploadDirectory = async (
   return files
 }
-exports.downloadTarballDirect = async (
+export const downloadTarballDirect = async (
   url: string,
   path: string,
   headers = {}


@@ -17,13 +17,9 @@ const { clientLibraryPath } = require("../../../utilities")
 const { upload, deleteFiles } = require("../../../utilities/fileSystem")
 const { attachmentsRelativeURL } = require("../../../utilities")
 const { DocumentType } = require("../../../db/utils")
-const { getAppDB, getAppId } = require("@budibase/backend-core/context")
-const { setCookie, clearCookie } = require("@budibase/backend-core/utils")
+const { context, objectStore, utils } = require("@budibase/backend-core")
 const AWS = require("aws-sdk")
 const fs = require("fs")
-const {
-  downloadTarballDirect,
-} = require("../../../utilities/fileSystem/utilities")
 async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
   const response = await upload({
@@ -48,7 +44,7 @@ export const toggleBetaUiFeature = async function (ctx: any) {
   const cookieName = `beta:${ctx.params.feature}`
   if (ctx.cookies.get(cookieName)) {
-    clearCookie(ctx, cookieName)
+    utils.clearCookie(ctx, cookieName)
     ctx.body = {
       message: `${ctx.params.feature} disabled`,
     }
@@ -61,11 +57,11 @@ export const toggleBetaUiFeature = async function (ctx: any) {
   if (!fs.existsSync(builderPath)) {
     fs.mkdirSync(builderPath)
   }
-  await downloadTarballDirect(
+  await objectStore.downloadTarballDirect(
     "https://cdn.budi.live/beta:design_ui/new_ui.tar.gz",
     builderPath
   )
-  setCookie(ctx, {}, cookieName)
+  utils.setCookie(ctx, {}, cookieName)
   ctx.body = {
     message: `${ctx.params.feature} enabled`,
@@ -103,9 +99,9 @@ export const deleteObjects = async function (ctx: any) {
 }
 export const serveApp = async function (ctx: any) {
-  const db = getAppDB({ skip_setup: true })
+  const db = context.getAppDB({ skip_setup: true })
   const appInfo = await db.get(DocumentType.APP_METADATA)
-  let appId = getAppId()
+  let appId = context.getAppId()
   if (!env.isJest()) {
     const App = require("./templates/BudibaseApp.svelte").default
@@ -134,11 +130,11 @@ export const serveApp = async function (ctx: any) {
 }
 export const serveBuilderPreview = async function (ctx: any) {
-  const db = getAppDB({ skip_setup: true })
+  const db = context.getAppDB({ skip_setup: true })
   const appInfo = await db.get(DocumentType.APP_METADATA)
   if (!env.isJest()) {
-    let appId = getAppId()
+    let appId = context.getAppId()
     const previewHbs = loadHandlebarsFile(`${__dirname}/templates/preview.hbs`)
     ctx.body = await processString(previewHbs, {
       clientLibPath: clientLibraryPath(appId, appInfo.version, ctx),
@@ -156,7 +152,7 @@ export const serveClientLibrary = async function (ctx: any) {
 }
 export const getSignedUploadURL = async function (ctx: any) {
-  const database = getAppDB()
+  const database = context.getAppDB()
   // Ensure datasource is valid
   let datasource


@@ -4,7 +4,7 @@ import authorized from "../../middleware/authorized"
 import { BUILDER } from "@budibase/backend-core/permissions"
 import { applicationValidator } from "./utils/validators"
-const router = new Router()
+const router: Router = new Router()
 router
   .post("/api/applications/:appId/sync", authorized(BUILDER), controller.sync)


@@ -1,7 +1,7 @@
 import Router from "@koa/router"
 import * as controller from "../controllers/auth"
-const router = new Router()
+const router: Router = new Router()
 router.get("/api/self", controller.fetchSelf)


@@ -3,7 +3,7 @@ import * as controller from "../controllers/backup"
 import authorized from "../../middleware/authorized"
 import { BUILDER } from "@budibase/backend-core/permissions"
-const router = new Router()
+const router: Router = new Router()
 router.get("/api/backups/export", authorized(BUILDER), controller.exportAppDump)


@@ -1,17 +0,0 @@
const Router = require("@koa/router")
const controller = require("../controllers/deploy")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/backend-core/permissions")
const router = new Router()
router
.get("/api/deployments", authorized(BUILDER), controller.fetchDeployments)
.get(
"/api/deploy/:deploymentId",
authorized(BUILDER),
controller.deploymentProgress
)
.post("/api/deploy", authorized(BUILDER), controller.deployApp)
module.exports = router


@@ -0,0 +1,21 @@
import Router from "@koa/router"
import * as controller from "../controllers/deploy"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
const router: Router = new Router()
router
.get(
"/api/deployments",
authorized(permissions.BUILDER),
controller.fetchDeployments
)
.get(
"/api/deploy/:deploymentId",
authorized(permissions.BUILDER),
controller.deploymentProgress
)
.post("/api/deploy", authorized(permissions.BUILDER), controller.deployApp)
export default router
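The rewritten module now exports a typed Router as its default export. A hedged sketch of how such a router is typically mounted on a Koa app (the wiring below is illustrative; the server's real route registration lives elsewhere):

import Koa from "koa"
import deployRoutes from "./api/routes/deploy" // path is illustrative

const app = new Koa()

// @koa/router exposes the matched handlers and 405/501 handling separately
app.use(deployRoutes.routes())
app.use(deployRoutes.allowedMethods())

app.listen(4001) // port is illustrative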


@@ -3,7 +3,7 @@ import * as controller from "../controllers/plugin"
 import authorized from "../../middleware/authorized"
 import { BUILDER } from "@budibase/backend-core/permissions"
-const router = new Router()
+const router: Router = new Router()
 router
   .post("/api/plugin/upload", authorized(BUILDER), controller.upload)


@@ -8,7 +8,7 @@ const {
 } = require("@budibase/backend-core/permissions")
 const { internalSearchValidator } = require("./utils/validators")
-const router = new Router()
+const router: Router = new Router()
 router
   /**


@@ -10,7 +10,7 @@ import {
 import * as env from "../../environment"
 import { paramResource } from "../../middleware/resourceId"
-const router = new Router()
+const router: Router = new Router()
 /* istanbul ignore next */
 router.param("file", async (file: any, ctx: any, next: any) => {


@@ -1,20 +1,16 @@
-const { joiValidator } = require("@budibase/backend-core/auth")
-const { DataSourceOperation } = require("../../../constants")
-const {
-  BuiltinPermissionID,
-  PermissionLevel,
-} = require("@budibase/backend-core/permissions")
-const { WebhookActionType } = require("@budibase/types")
-const Joi = require("joi")
+import { auth, permissions } from "@budibase/backend-core"
+import { DataSourceOperation } from "../../../constants"
+import { WebhookActionType } from "@budibase/types"
+import Joi from "joi"
 const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
 const OPTIONAL_NUMBER = Joi.number().optional().allow(null)
 const OPTIONAL_BOOLEAN = Joi.boolean().optional().allow(null)
 const APP_NAME_REGEX = /^[\w\s]+$/
-exports.tableValidator = () => {
+export function tableValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     _id: OPTIONAL_STRING,
     _rev: OPTIONAL_STRING,
     type: OPTIONAL_STRING.valid("table", "internal", "external"),
@@ -26,16 +22,16 @@ exports.tableValidator = () => {
   }).unknown(true))
 }
-exports.nameValidator = () => {
+export function nameValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     name: OPTIONAL_STRING,
   }))
 }
-exports.datasourceValidator = () => {
+export function datasourceValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     _id: Joi.string(),
     _rev: Joi.string(),
     type: OPTIONAL_STRING.allow("datasource_plus"),
@@ -64,9 +60,9 @@ function filterObject() {
   }).unknown(true)
 }
-exports.internalSearchValidator = () => {
+export function internalSearchValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     tableId: OPTIONAL_STRING,
     query: filterObject(),
     limit: OPTIONAL_NUMBER,
@@ -78,8 +74,8 @@ exports.internalSearchValidator = () => {
   }))
 }
-exports.externalSearchValidator = () => {
-  return joiValidator.body(
+export function externalSearchValidator() {
+  return auth.joiValidator.body(
     Joi.object({
       query: filterObject(),
       paginate: Joi.boolean().optional(),
@@ -96,9 +92,9 @@ exports.externalSearchValidator = () => {
   )
 }
-exports.datasourceQueryValidator = () => {
+export function datasourceQueryValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     endpoint: Joi.object({
       datasourceId: Joi.string().required(),
       operation: Joi.string().required().valid(...Object.values(DataSourceOperation)),
@@ -117,9 +113,9 @@ exports.datasourceQueryValidator = () => {
   }))
 }
-exports.webhookValidator = () => {
+export function webhookValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     live: Joi.bool(),
     _id: OPTIONAL_STRING,
     _rev: OPTIONAL_STRING,
@@ -132,15 +128,15 @@ exports.webhookValidator = () => {
   }).unknown(true))
 }
-exports.roleValidator = () => {
-  const permLevelArray = Object.values(PermissionLevel)
+export function roleValidator() {
+  const permLevelArray = Object.values(permissions.PermissionLevel)
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     _id: OPTIONAL_STRING,
     _rev: OPTIONAL_STRING,
     name: Joi.string().required(),
     // this is the base permission ID (for now a built in)
-    permissionId: Joi.string().valid(...Object.values(BuiltinPermissionID)).required(),
+    permissionId: Joi.string().valid(...Object.values(permissions.BuiltinPermissionID)).required(),
     permissions: Joi.object()
       .pattern(/.*/, [Joi.string().valid(...permLevelArray)])
       .optional(),
@@ -148,19 +144,19 @@ exports.roleValidator = () => {
   }).unknown(true))
 }
-exports.permissionValidator = () => {
-  const permLevelArray = Object.values(PermissionLevel)
+export function permissionValidator() {
+  const permLevelArray = Object.values(permissions.PermissionLevel)
   // prettier-ignore
-  return joiValidator.params(Joi.object({
+  return auth.joiValidator.params(Joi.object({
     level: Joi.string().valid(...permLevelArray).required(),
     resourceId: Joi.string(),
     roleId: Joi.string(),
   }).unknown(true))
 }
-exports.screenValidator = () => {
+export function screenValidator() {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     name: Joi.string().required(),
     showNavigation: OPTIONAL_BOOLEAN,
     width: OPTIONAL_STRING,
@@ -181,7 +177,7 @@ exports.screenValidator = () => {
   }).unknown(true))
 }
-function generateStepSchema(allowStepTypes) {
+function generateStepSchema(allowStepTypes: string[]) {
   // prettier-ignore
   return Joi.object({
     stepId: Joi.string().required(),
@@ -196,9 +192,9 @@ function generateStepSchema(allowStepTypes) {
   }).unknown(true)
 }
-exports.automationValidator = (existing = false) => {
+export function automationValidator(existing = false) {
   // prettier-ignore
-  return joiValidator.body(Joi.object({
+  return auth.joiValidator.body(Joi.object({
     _id: existing ? Joi.string().required() : OPTIONAL_STRING,
     _rev: existing ? Joi.string().required() : OPTIONAL_STRING,
     name: Joi.string().required(),
@@ -210,9 +206,9 @@ exports.automationValidator = (existing = false) => {
   }).unknown(true))
 }
-exports.applicationValidator = (opts = { isCreate: true }) => {
+export function applicationValidator(opts = { isCreate: true }) {
   // prettier-ignore
-  const base = {
+  const base: any = {
     _id: OPTIONAL_STRING,
     _rev: OPTIONAL_STRING,
     url: OPTIONAL_STRING,
@@ -230,7 +226,7 @@ exports.applicationValidator = (opts = { isCreate: true }) => {
     base.name = appNameValidator.optional()
   }
-  return joiValidator.body(
+  return auth.joiValidator.body(
     Joi.object({
       _id: OPTIONAL_STRING,
       _rev: OPTIONAL_STRING,
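Each validator still returns Koa middleware (now built via auth.joiValidator from the barrel import), so call sites keep the same shape. A hedged usage sketch, mirroring the route modules above (the endpoint and controller module are illustrative):

import Router from "@koa/router"
import { permissions } from "@budibase/backend-core"
import authorized from "../../middleware/authorized"
import { tableValidator } from "./utils/validators"
import * as controller from "../controllers/table" // illustrative controller module

const router: Router = new Router()

// the validator middleware rejects malformed bodies before the controller runs
router.post(
  "/api/tables",
  authorized(permissions.BUILDER),
  tableValidator(),
  controller.save
)

export default router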


@@ -5,7 +5,7 @@ import { permissions } from "@budibase/backend-core"
 import { webhookValidator } from "./utils/validators"
 const BUILDER = permissions.BUILDER
-const router = new Router()
+const router: Router = new Router()
 router
   .get("/api/webhooks", authorized(BUILDER), controller.fetch)


@@ -1,23 +1,13 @@
-import {
-  getUserRoleHierarchy,
-  getRequiredResourceRole,
-  BUILTIN_ROLE_IDS,
-} from "@budibase/backend-core/roles"
-const {
-  PermissionType,
-  PermissionLevel,
-  doesHaveBasePermission,
-} = require("@budibase/backend-core/permissions")
-const builderMiddleware = require("./builder")
-const { isWebhookEndpoint } = require("./utils")
-const { buildCsrfMiddleware } = require("@budibase/backend-core/auth")
-const { getAppId } = require("@budibase/backend-core/context")
+import { roles, permissions, auth, context } from "@budibase/backend-core"
+import { Role } from "@budibase/types"
+import builderMiddleware from "./builder"
+import { isWebhookEndpoint } from "./utils"
 function hasResource(ctx: any) {
   return ctx.resourceId != null
 }
-const csrf = buildCsrfMiddleware()
+const csrf = auth.buildCsrfMiddleware()
 /**
  * Apply authorization to the requested resource:
@@ -33,7 +23,7 @@ const checkAuthorized = async (
 ) => {
   // check if this is a builder api and the user is not a builder
   const isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
-  const isBuilderApi = permType === PermissionType.BUILDER
+  const isBuilderApi = permType === permissions.PermissionType.BUILDER
   if (isBuilderApi && !isBuilder) {
     return ctx.throw(403, "Not Authorized")
   }
@@ -51,10 +41,10 @@ const checkAuthorizedResource = async (
   permLevel: any
 ) => {
   // get the user's roles
-  const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC
-  const userRoles = (await getUserRoleHierarchy(roleId, {
+  const roleId = ctx.roleId || roles.BUILTIN_ROLE_IDS.PUBLIC
+  const userRoles = (await roles.getUserRoleHierarchy(roleId, {
     idOnly: false,
-  })) as { _id: string }[]
+  })) as Role[]
   const permError = "User does not have permission"
   // check if the user has the required role
   if (resourceRoles.length > 0) {
@@ -66,7 +56,9 @@ const checkAuthorizedResource = async (
      ctx.throw(403, permError)
    }
    // fallback to the base permissions when no resource roles are found
-  } else if (!doesHaveBasePermission(permType, permLevel, userRoles)) {
+  } else if (
+    !permissions.doesHaveBasePermission(permType, permLevel, userRoles)
+  ) {
     ctx.throw(403, permError)
   }
 }
@@ -91,21 +83,22 @@ export = (permType: any, permLevel: any = null, opts = { schema: false }) =>
     let resourceRoles: any = []
     let otherLevelRoles: any = []
     const otherLevel =
-      permLevel === PermissionLevel.READ
-        ? PermissionLevel.WRITE
-        : PermissionLevel.READ
-    const appId = getAppId()
+      permLevel === permissions.PermissionLevel.READ
+        ? permissions.PermissionLevel.WRITE
+        : permissions.PermissionLevel.READ
+    const appId = context.getAppId()
     if (appId && hasResource(ctx)) {
-      resourceRoles = await getRequiredResourceRole(permLevel, ctx)
+      resourceRoles = await roles.getRequiredResourceRole(permLevel, ctx)
       if (opts && opts.schema) {
-        otherLevelRoles = await getRequiredResourceRole(otherLevel, ctx)
+        otherLevelRoles = await roles.getRequiredResourceRole(otherLevel, ctx)
      }
    }
    // if the resource is public, proceed
    if (
-      resourceRoles.includes(BUILTIN_ROLE_IDS.PUBLIC) ||
-      (otherLevelRoles && otherLevelRoles.includes(BUILTIN_ROLE_IDS.PUBLIC))
+      resourceRoles.includes(roles.BUILTIN_ROLE_IDS.PUBLIC) ||
+      (otherLevelRoles &&
+        otherLevelRoles.includes(roles.BUILTIN_ROLE_IDS.PUBLIC))
    ) {
      return next()
    }
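The middleware factory keeps its signature, a permission type plus an optional level, but now reads both enums off the permissions namespace. A hedged usage sketch (the route is hypothetical; PermissionType.QUERY and the level values are the ones exercised elsewhere in this commit):

import Router from "@koa/router"
import { permissions } from "@budibase/backend-core"
import authorized from "../../middleware/authorized" // path is illustrative

const router: Router = new Router()

router.get(
  "/api/things/:id", // hypothetical endpoint
  authorized(permissions.PermissionType.QUERY, permissions.PermissionLevel.READ),
  async (ctx: any) => {
    ctx.body = { id: ctx.params.id }
  }
)

export default router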


@@ -9,8 +9,7 @@ jest.mock("../../environment", () => ({
 )
 const authorizedMiddleware = require("../authorized")
 const env = require("../../environment")
-const { PermissionType, PermissionLevel } = require("@budibase/backend-core/permissions")
-const { doInAppContext } = require("@budibase/backend-core/context")
+const { permissions } = require("@budibase/backend-core")
 const APP_ID = ""
@@ -113,7 +112,7 @@ describe("Authorization middleware", () => {
   it("throws if the user does not have builder permissions", async () => {
     config.setEnvironment(false)
-    config.setMiddlewareRequiredPermission(PermissionType.BUILDER)
+    config.setMiddlewareRequiredPermission(permissions.PermissionType.BUILDER)
     config.setUser({
       role: {
         _id: ""
@@ -125,13 +124,13 @@ describe("Authorization middleware", () => {
   })
   it("passes on to next() middleware if the user has resource permission", async () => {
-    config.setResourceId(PermissionType.QUERY)
+    config.setResourceId(permissions.PermissionType.QUERY)
     config.setUser({
       role: {
         _id: ""
       }
     })
-    config.setMiddlewareRequiredPermission(PermissionType.QUERY)
+    config.setMiddlewareRequiredPermission(permissions.PermissionType.QUERY)
     await config.executeMiddleware()
     expect(config.next).toHaveBeenCalled()


@@ -1,6 +1,5 @@
-import { db as dbCore } from "@budibase/backend-core"
+import { db as dbCore, objectStore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
-import { retrieveDirectory } from "../../../utilities/fileSystem/utilities"
 import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
 import {
@@ -88,7 +87,10 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // export bucket contents
   let tmpPath
   if (!env.isTest()) {
-    tmpPath = await retrieveDirectory(ObjectStoreBuckets.APPS, appPath)
+    tmpPath = await objectStore.retrieveDirectory(
+      ObjectStoreBuckets.APPS,
+      appPath
+    )
   } else {
     tmpPath = createTempFolder(uuid())
   }


@@ -1,12 +1,8 @@
-import { db as dbCore } from "@budibase/backend-core"
+import { db as dbCore, objectStore } from "@budibase/backend-core"
 import { Database } from "@budibase/types"
 import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
-import {
-  upload,
-  uploadDirectory,
-} from "../../../utilities/fileSystem/utilities"
 import { downloadTemplate } from "../../../utilities/fileSystem"
 import { FieldTypes, ObjectStoreBuckets } from "../../../constants"
 import { join } from "path"
@@ -174,11 +170,11 @@ export async function importApp(
     filename = join(prodAppId, filename)
     if (fs.lstatSync(path).isDirectory()) {
       promises.push(
-        uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
+        objectStore.uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
       )
     } else {
       promises.push(
-        upload({
+        objectStore.upload({
           bucket: ObjectStoreBuckets.APPS,
           path,
           filename,


@@ -1,5 +1,4 @@
 require("../../db").init()
-const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const env = require("../../environment")
 const {
   basicTable,
@@ -13,18 +12,21 @@ const {
   basicWebhook,
   TENANT_ID,
 } = require("./structures")
+const {
+  constants,
+  tenancy,
+  sessions,
+  cache,
+  context,
+  db: dbCore,
+  encryption,
+  auth,
+  roles,
+} = require("@budibase/backend-core")
 const controllers = require("./controllers")
 const supertest = require("supertest")
 const { cleanup } = require("../../utilities/fileSystem")
-const { Cookie, Header } = require("@budibase/backend-core/constants")
-const { jwt } = require("@budibase/backend-core/auth")
-const { doInTenant, doWithGlobalDB } = require("@budibase/backend-core/tenancy")
-const { createASession } = require("@budibase/backend-core/sessions")
-const { user: userCache } = require("@budibase/backend-core/cache")
 const newid = require("../../db/newid")
-const context = require("@budibase/backend-core/context")
-const { generateDevInfoID, SEPARATOR } = require("@budibase/backend-core/db")
-const { encrypt } = require("@budibase/backend-core/encryption")
 const { DocumentType, generateUserMetadataID } = require("../../db/utils")
 const { startup } = require("../../startup")
@@ -83,7 +85,7 @@ class TestConfiguration {
     if (!appId) {
       appId = this.appId
     }
-    return doInTenant(TENANT_ID, () => {
+    return tenancy.doInTenant(TENANT_ID, () => {
       // check if already in a context
       if (context.getAppId() == null && appId !== null) {
         return context.doInAppContext(appId, async () => {
@@ -155,7 +157,7 @@ class TestConfiguration {
     email = EMAIL,
     roles,
   } = {}) {
-    return doWithGlobalDB(TENANT_ID, async db => {
+    return tenancy.doWithGlobalDB(TENANT_ID, async db => {
       let existing
       try {
         existing = await db.get(id)
@@ -170,7 +172,7 @@ class TestConfiguration {
       firstName,
       lastName,
     }
-    await createASession(id, {
+    await sessions.createASession(id, {
       sessionId: "sessionid",
       tenantId: TENANT_ID,
       csrfToken: CSRF_TOKEN,
@@ -212,7 +214,7 @@ class TestConfiguration {
       admin,
       roles,
     })
-    await userCache.invalidateUser(globalId)
+    await cache.user.invalidateUser(globalId)
     return {
       ...resp,
       globalId,
@@ -227,19 +229,19 @@ class TestConfiguration {
       throw "Server has not been opened, cannot login."
     }
     // make sure the user exists in the global DB
-    if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) {
+    if (roleId !== roles.BUILTIN_ROLE_IDS.PUBLIC) {
       await this.globalUser({
         id: userId,
        builder,
        roles: { [this.prodAppId]: roleId },
      })
    }
-    await createASession(userId, {
+    await sessions.createASession(userId, {
      sessionId: "sessionid",
      tenantId: TENANT_ID,
    })
    // have to fake this
-    const auth = {
+    const authObj = {
      userId,
      sessionId: "sessionid",
      tenantId: TENANT_ID,
@@ -248,45 +250,45 @@ class TestConfiguration {
       roleId: roleId,
       appId,
     }
-    const authToken = jwt.sign(auth, env.JWT_SECRET)
-    const appToken = jwt.sign(app, env.JWT_SECRET)
+    const authToken = auth.jwt.sign(authObj, env.JWT_SECRET)
+    const appToken = auth.jwt.sign(app, env.JWT_SECRET)
     // returning necessary request headers
-    await userCache.invalidateUser(userId)
+    await cache.user.invalidateUser(userId)
     return {
       Accept: "application/json",
       Cookie: [
-        `${Cookie.Auth}=${authToken}`,
-        `${Cookie.CurrentApp}=${appToken}`,
+        `${constants.Cookie.Auth}=${authToken}`,
+        `${constants.Cookie.CurrentApp}=${appToken}`,
      ],
-      [Header.APP_ID]: appId,
+      [constants.Header.APP_ID]: appId,
    }
  })
 }
 defaultHeaders(extras = {}) {
-    const auth = {
+    const authObj = {
      userId: GLOBAL_USER_ID,
      sessionId: "sessionid",
      tenantId: TENANT_ID,
    }
    const app = {
-      roleId: BUILTIN_ROLE_IDS.ADMIN,
+      roleId: roles.BUILTIN_ROLE_IDS.ADMIN,
      appId: this.appId,
    }
-    const authToken = jwt.sign(auth, env.JWT_SECRET)
-    const appToken = jwt.sign(app, env.JWT_SECRET)
+    const authToken = auth.jwt.sign(authObj, env.JWT_SECRET)
+    const appToken = auth.jwt.sign(app, env.JWT_SECRET)
    const headers = {
      Accept: "application/json",
      Cookie: [
-        `${Cookie.Auth}=${authToken}`,
-        `${Cookie.CurrentApp}=${appToken}`,
+        `${constants.Cookie.Auth}=${authToken}`,
+        `${constants.Cookie.CurrentApp}=${appToken}`,
      ],
-      [Header.CSRF_TOKEN]: CSRF_TOKEN,
+      [constants.Header.CSRF_TOKEN]: CSRF_TOKEN,
      ...extras,
    }
    if (this.appId) {
-      headers[Header.APP_ID] = this.appId
+      headers[constants.Header.APP_ID] = this.appId
    }
    return headers
  }
@@ -298,14 +300,14 @@ class TestConfiguration {
       Accept: "application/json",
     }
     if (appId) {
-      headers[Header.APP_ID] = appId
+      headers[constants.Header.APP_ID] = appId
     }
     return headers
   }
   async roleHeaders({
     email = EMAIL,
-    roleId = BUILTIN_ROLE_IDS.ADMIN,
+    roleId = roles.BUILTIN_ROLE_IDS.ADMIN,
     builder = false,
     prodApp = true,
   } = {}) {
@@ -315,15 +317,17 @@ class TestConfiguration {
   // API
   async generateApiKey(userId = GLOBAL_USER_ID) {
-    return doWithGlobalDB(TENANT_ID, async db => {
-      const id = generateDevInfoID(userId)
+    return tenancy.doWithGlobalDB(TENANT_ID, async db => {
+      const id = dbCore.generateDevInfoID(userId)
       let devInfo
       try {
         devInfo = await db.get(id)
       } catch (err) {
         devInfo = { _id: id, userId }
       }
-      devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
+      devInfo.apiKey = encryption.encrypt(
+        `${TENANT_ID}${dbCore.SEPARATOR}${newid()}`
+      )
       await db.put(devInfo)
       return devInfo.apiKey
     })
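The header helpers now sign their JWTs through auth.jwt and read cookie/header names from constants, but tests consume them the same way. A rough sketch of that usage, assuming a supertest agent pointed at the test server (the server address and construction details are illustrative):

const supertest = require("supertest")
const TestConfiguration = require("./TestConfiguration") // path is illustrative

const config = new TestConfiguration()
const request = supertest("http://localhost:4001") // illustrative test server address

async function fetchSelf() {
  // defaultHeaders() returns Accept, the auth/app cookies, and the CSRF + app id headers
  const res = await request
    .get("/api/self")
    .set(config.defaultHeaders())
    .expect(200)
  return res.body
}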


@@ -1,5 +1,4 @@
-const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
-const { BuiltinPermissionID } = require("@budibase/backend-core/permissions")
+const { roles, permissions } = require("@budibase/backend-core")
 const { createHomeScreen } = require("../../constants/screens")
 const { EMPTY_LAYOUT } = require("../../constants/layouts")
 const { cloneDeep } = require("lodash/fp")
@@ -134,8 +133,8 @@ exports.basicLinkedRow = (tableId, linkedRowId, linkField = "link") => {
 exports.basicRole = () => {
   return {
     name: "NewRole",
-    inherits: BUILTIN_ROLE_IDS.BASIC,
-    permissionId: BuiltinPermissionID.READ_ONLY,
+    inherits: roles.BUILTIN_ROLE_IDS.BASIC,
+    permissionId: permissions.BuiltinPermissionID.READ_ONLY,
   }
 }


@@ -1,7 +1,7 @@
 const { join } = require("path")
 const { ObjectStoreBuckets } = require("../../constants")
 const fs = require("fs")
-const { upload, retrieveToTmp, streamUpload } = require("./utilities")
+const { objectStore } = require("@budibase/backend-core")
 const { resolve } = require("../centralPath")
 const env = require("../../environment")
 const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
@@ -38,13 +38,13 @@ exports.backupClientLibrary = async appId => {
   let tmpManifestPath
   try {
     // Try to load the manifest from the new file location
-    tmpManifestPath = await retrieveToTmp(
+    tmpManifestPath = await objectStore.retrieveToTmp(
       ObjectStoreBuckets.APPS,
       join(appId, "manifest.json")
     )
   } catch (error) {
     // Fallback to loading it from the old location for old apps
-    tmpManifestPath = await retrieveToTmp(
+    tmpManifestPath = await objectStore.retrieveToTmp(
       ObjectStoreBuckets.APPS,
       join(
         appId,
@@ -58,19 +58,19 @@ exports.backupClientLibrary = async appId => {
   }
   // Copy existing client lib to tmp
-  const tmpClientPath = await retrieveToTmp(
+  const tmpClientPath = await objectStore.retrieveToTmp(
     ObjectStoreBuckets.APPS,
     join(appId, "budibase-client.js")
   )
   // Upload manifest and client library as backups
-  const manifestUpload = upload({
+  const manifestUpload = objectStore.upload({
     bucket: ObjectStoreBuckets.APPS,
     filename: join(appId, "manifest.json.bak"),
     path: tmpManifestPath,
     type: "application/json",
   })
-  const clientUpload = upload({
+  const clientUpload = objectStore.upload({
     bucket: ObjectStoreBuckets.APPS,
     filename: join(appId, "budibase-client.js.bak"),
     path: tmpClientPath,
@@ -99,7 +99,7 @@ exports.updateClientLibrary = async appId => {
   }
   // Upload latest manifest and client library
-  const manifestUpload = streamUpload(
+  const manifestUpload = objectStore.streamUpload(
     ObjectStoreBuckets.APPS,
     join(appId, "manifest.json"),
     fs.createReadStream(manifest),
@@ -107,7 +107,7 @@ exports.updateClientLibrary = async appId => {
       ContentType: "application/json",
     }
   )
-  const clientUpload = streamUpload(
+  const clientUpload = objectStore.streamUpload(
     ObjectStoreBuckets.APPS,
     join(appId, "budibase-client.js"),
     fs.createReadStream(client),
@@ -126,25 +126,25 @@ exports.updateClientLibrary = async appId => {
  */
 exports.revertClientLibrary = async appId => {
   // Copy backups manifest to tmp directory
-  const tmpManifestPath = await retrieveToTmp(
+  const tmpManifestPath = await objectStore.retrieveToTmp(
     ObjectStoreBuckets.APPS,
     join(appId, "manifest.json.bak")
   )
   // Copy backup client lib to tmp
-  const tmpClientPath = await retrieveToTmp(
+  const tmpClientPath = await objectStore.retrieveToTmp(
     ObjectStoreBuckets.APPS,
     join(appId, "budibase-client.js.bak")
   )
   // Upload backups as new versions
-  const manifestUpload = upload({
+  const manifestUpload = objectStore.upload({
     bucket: ObjectStoreBuckets.APPS,
     filename: join(appId, "manifest.json"),
     path: tmpManifestPath,
     type: "application/json",
   })
-  const clientUpload = upload({
+  const clientUpload = objectStore.upload({
     bucket: ObjectStoreBuckets.APPS,
     filename: join(appId, "budibase-client.js"),
     path: tmpClientPath,


@@ -2,7 +2,13 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
+const { context, objectStore } = require("@budibase/backend-core")
 const { ObjectStoreBuckets } = require("../../constants")
+const { updateClientLibrary } = require("./clientLibrary")
+const { checkSlashesInUrl } = require("../")
+const env = require("../../environment")
+const tar = require("tar")
+const fetch = require("node-fetch")
 const {
   upload,
   retrieve,
@@ -11,13 +17,7 @@ const {
   downloadTarball,
   downloadTarballDirect,
   deleteFiles,
-} = require("./utilities")
-const { updateClientLibrary } = require("./clientLibrary")
-const { checkSlashesInUrl } = require("../")
-const env = require("../../environment")
-const { getAppId } = require("@budibase/backend-core/context")
-const tar = require("tar")
-const fetch = require("node-fetch")
+} = objectStore
 const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
 const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@@ -165,7 +165,7 @@ exports.downloadTemplate = async (type, name) => {
  * Retrieves component libraries from object store (or tmp symlink if in local)
  */
 exports.getComponentLibraryManifest = async library => {
-  const appId = getAppId()
+  const appId = context.getAppId()
   const filename = "manifest.json"
   /* istanbul ignore next */
   // when testing in cypress and so on we need to get the package


@@ -1,36 +0,0 @@
const {
ObjectStore,
makeSureBucketExists,
upload,
deleteFiles,
streamUpload,
retrieve,
retrieveToTmp,
retrieveDirectory,
deleteFolder,
uploadDirectory,
downloadTarball,
downloadTarballDirect,
} = require("@budibase/backend-core/objectStore")
/***********************************
* NOTE *
* This file purely exists so that *
* the object store functionality *
* can easily be mocked out of *
* the server without mocking the *
* entire core library. *
***********************************/
exports.ObjectStore = ObjectStore
exports.makeSureBucketExists = makeSureBucketExists
exports.upload = upload
exports.streamUpload = streamUpload
exports.retrieve = retrieve
exports.retrieveToTmp = retrieveToTmp
exports.retrieveDirectory = retrieveDirectory
exports.deleteFolder = deleteFolder
exports.uploadDirectory = uploadDirectory
exports.downloadTarball = downloadTarball
exports.downloadTarballDirect = downloadTarballDirect
exports.deleteFiles = deleteFiles
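The deleted file existed only so tests could mock the object store without mocking all of backend-core. With it gone, the same isolation can be had by mocking just the objectStore member of the barrel package — a hedged jest sketch (the mocked return values are illustrative):

jest.mock("@budibase/backend-core", () => {
  // keep the real library, swap out only the object store namespace
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    objectStore: {
      ...core.objectStore,
      upload: jest.fn().mockResolvedValue({}),
      deleteFiles: jest.fn().mockResolvedValue([]),
      retrieveDirectory: jest.fn().mockResolvedValue("/tmp/fake-dir"),
    },
  }
})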


@@ -1,129 +1,40 @@
-const linkRows = require("../../db/linkedRows")
+import linkRows from "../../db/linkedRows"
+import { FieldTypes, AutoFieldSubTypes } from "../../constants"
+import { attachmentsRelativeURL } from "../index"
+import { processFormulas, fixAutoColumnSubType } from "./utils"
+import { ObjectStoreBuckets } from "../../constants"
+import { context, db as dbCore, objectStore } from "@budibase/backend-core"
+import { InternalTables } from "../../db/utils"
+import { TYPE_TRANSFORM_MAP } from "./map"
+import { Row, User, Table } from "@budibase/types"
 const { cloneDeep } = require("lodash/fp")
-const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
-const { attachmentsRelativeURL } = require("../index")
-const { processFormulas, fixAutoColumnSubType } = require("./utils")
-const { deleteFiles } = require("../../utilities/fileSystem/utilities")
-const { ObjectStoreBuckets } = require("../../constants")
-const {
-  isProdAppID,
-  getProdAppID,
-  dbExists,
-} = require("@budibase/backend-core/db")
-const { getAppId } = require("@budibase/backend-core/context")
-const { InternalTables } = require("../../db/utils")
+type AutoColumnProcessingOpts = {
+  reprocessing?: boolean
+  noAutoRelationships?: boolean
+}
 const BASE_AUTO_ID = 1
/**
* A map of how we convert various properties in rows to each other based on the row type.
*/
const TYPE_TRANSFORM_MAP = {
[FieldTypes.LINK]: {
"": [],
[null]: [],
[undefined]: undefined,
parse: link => {
if (Array.isArray(link) && typeof link[0] === "object") {
return link.map(el => (el && el._id ? el._id : el))
}
if (typeof link === "string") {
return [link]
}
return link
},
},
[FieldTypes.OPTIONS]: {
"": null,
[null]: null,
[undefined]: undefined,
},
[FieldTypes.ARRAY]: {
"": [],
[null]: [],
[undefined]: undefined,
},
[FieldTypes.STRING]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.BARCODEQR]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.FORMULA]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.LONGFORM]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.NUMBER]: {
"": null,
[null]: null,
[undefined]: undefined,
parse: n => parseFloat(n),
},
[FieldTypes.DATETIME]: {
"": null,
[undefined]: undefined,
[null]: null,
parse: date => {
if (date instanceof Date) {
return date.toISOString()
}
return date
},
},
[FieldTypes.ATTACHMENT]: {
"": [],
[null]: [],
[undefined]: undefined,
},
[FieldTypes.BOOLEAN]: {
"": null,
[null]: null,
[undefined]: undefined,
true: true,
false: false,
},
[FieldTypes.AUTO]: {
parse: () => undefined,
},
[FieldTypes.JSON]: {
parse: input => {
try {
if (input === "") {
return undefined
}
return JSON.parse(input)
} catch (err) {
return input
}
},
},
}
 /**
  * Given the old state of the row and the new one after an update, this will
  * find the keys that have been removed in the updated row.
  */
-function getRemovedAttachmentKeys(oldRow, row, attachmentKey) {
+function getRemovedAttachmentKeys(
+  oldRow: Row,
+  row: Row,
+  attachmentKey: string
+) {
   if (!oldRow[attachmentKey]) {
     return []
   }
-  const oldKeys = oldRow[attachmentKey].map(attachment => attachment.key)
+  const oldKeys = oldRow[attachmentKey].map((attachment: any) => attachment.key)
   // no attachments in new row, all removed
   if (!row[attachmentKey]) {
     return oldKeys
   }
-  const newKeys = row[attachmentKey].map(attachment => attachment.key)
-  return oldKeys.filter(key => newKeys.indexOf(key) === -1)
+  const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
+  return oldKeys.filter((key: any) => newKeys.indexOf(key) === -1)
 }
 /**
@@ -136,11 +47,11 @@ function getRemovedAttachmentKeys(oldRow, row, attachmentKey) {
  * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
  * for automatic ID purposes.
  */
-function processAutoColumn(
-  user,
-  table,
-  row,
-  opts = { reprocessing: false, noAutoRelationships: false }
+export function processAutoColumn(
+  user: User,
+  table: Table,
+  row: Row,
+  opts: AutoColumnProcessingOpts
 ) {
   let noUser = !user || !user.userId
   let isUserTable = table._id === InternalTables.USER_METADATA
@@ -186,9 +97,6 @@ function processAutoColumn(
   }
   return { table, row }
 }
-exports.processAutoColumn = processAutoColumn
-exports.fixAutoColumnSubType = fixAutoColumnSubType
-exports.processFormulas = processFormulas
 /**
  * This will coerce a value to the correct types based on the type transform map
@@ -196,15 +104,17 @@ exports.processFormulas = processFormulas
  * @param {object} type The type fo coerce to
  * @returns {object} The coerced value
  */
-exports.coerce = (row, type) => {
+export function coerce(row: any, type: any) {
   // no coercion specified for type, skip it
   if (!TYPE_TRANSFORM_MAP[type]) {
     return row
   }
   // eslint-disable-next-line no-prototype-builtins
   if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(row)) {
+    // @ts-ignore
     return TYPE_TRANSFORM_MAP[type][row]
   } else if (TYPE_TRANSFORM_MAP[type].parse) {
+    // @ts-ignore
     return TYPE_TRANSFORM_MAP[type].parse(row)
   }
@@ -220,12 +130,12 @@ exports.coerce = (row, type) => {
  * @param {object} opts some input processing options (like disabling auto-column relationships).
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-exports.inputProcessing = (
-  user = {},
-  table,
-  row,
-  opts = { noAutoRelationships: false }
-) => {
+export function inputProcessing(
+  user: User,
+  table: Table,
+  row: Row,
+  opts: AutoColumnProcessingOpts
+) {
   let clonedRow = cloneDeep(row)
   // need to copy the table so it can be differenced on way out
   const copiedTable = cloneDeep(table)
@@ -245,7 +155,7 @@ exports.inputProcessing = (
     }
     // otherwise coerce what is there to correct types
     else {
-      clonedRow[key] = exports.coerce(value, field.type)
+      clonedRow[key] = coerce(value, field.type)
     }
   }
@@ -267,7 +177,11 @@ exports.inputProcessing = (
  * @param {object} opts used to set some options for the output, such as disabling relationship squashing.
  * @returns {object[]|object} the enriched rows will be returned.
  */
-exports.outputProcessing = async (table, rows, opts = { squash: true }) => {
+export async function outputProcessing(
+  table: Table,
+  rows: Row[],
+  opts = { squash: true }
+) {
   let wasArray = true
   if (!(rows instanceof Array)) {
     rows = [rows]
@@ -286,7 +200,7 @@ exports.outputProcessing = async (table, rows, opts = { squash: true }) => {
     if (row[property] == null || !Array.isArray(row[property])) {
       continue
     }
-    row[property].forEach(attachment => {
+    row[property].forEach((attachment: any) => {
       attachment.url = attachmentsRelativeURL(attachment.key)
     })
   }
@@ -308,20 +222,28 @@ exports.outputProcessing = async (table, rows, opts = { squash: true }) => {
  * deleted attachment columns.
  * @return {Promise<void>} When all attachments have been removed this will return.
  */
-exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => {
-  const appId = getAppId()
-  if (!isProdAppID(appId)) {
-    const prodAppId = getProdAppID(appId)
+export async function cleanupAttachments(
+  table: Table,
+  {
+    row,
+    rows,
+    oldRow,
+    oldTable,
+  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable: Table }
+): Promise<any> {
+  const appId = context.getAppId()
+  if (!dbCore.isProdAppID(appId)) {
+    const prodAppId = dbCore.getProdAppID(appId!)
     // if prod exists, then don't allow deleting
-    const exists = await dbExists(prodAppId)
+    const exists = await dbCore.dbExists(prodAppId)
     if (exists) {
       return
     }
   }
-  let files = []
-  function addFiles(row, key) {
+  let files: string[] = []
+  function addFiles(row: Row, key: string) {
     if (row[key]) {
-      files = files.concat(row[key].map(attachment => attachment.key))
+      files = files.concat(row[key].map((attachment: any) => attachment.key))
     }
   }
   const schemaToUse = oldTable ? oldTable.schema : table.schema
@@ -330,7 +252,7 @@ exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => {
       continue
     }
     // old table had this column, new table doesn't - delete it
-    if (oldTable && !table.schema[key]) {
+    if (rows && oldTable && !table.schema[key]) {
       rows.forEach(row => addFiles(row, key))
     } else if (oldRow && row) {
       // if updating, need to manage the differences
@@ -342,6 +264,6 @@ exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => {
     }
   }
   if (files.length > 0) {
-    return deleteFiles(ObjectStoreBuckets.APPS, files)
+    return objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
   }
 }
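coerce first checks the transform map for a fixed mapping of the incoming value and only then falls back to the type's parse function, so a couple of worked cases (using the map reproduced in the next file) look like this — the import path is illustrative:

import { coerce } from "./rowProcessor" // illustrative path to the module above
import { FieldTypes } from "../../constants"

coerce("", FieldTypes.NUMBER) // "" has a fixed mapping for numbers -> null
coerce("42.5", FieldTypes.NUMBER) // no fixed mapping -> parse -> parseFloat -> 42.5
coerce("ro_abc123", FieldTypes.LINK) // single string id -> parse wraps it -> ["ro_abc123"]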


@@ -0,0 +1,95 @@
const { FieldTypes } = require("../../constants")
/**
* A map of how we convert various properties in rows to each other based on the row type.
*/
exports.TYPE_TRANSFORM_MAP = {
[FieldTypes.LINK]: {
"": [],
[null]: [],
[undefined]: undefined,
parse: link => {
if (Array.isArray(link) && typeof link[0] === "object") {
return link.map(el => (el && el._id ? el._id : el))
}
if (typeof link === "string") {
return [link]
}
return link
},
},
[FieldTypes.OPTIONS]: {
"": null,
[null]: null,
[undefined]: undefined,
},
[FieldTypes.ARRAY]: {
"": [],
[null]: [],
[undefined]: undefined,
},
[FieldTypes.STRING]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.BARCODEQR]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.FORMULA]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.LONGFORM]: {
"": "",
[null]: "",
[undefined]: undefined,
},
[FieldTypes.NUMBER]: {
"": null,
[null]: null,
[undefined]: undefined,
parse: n => parseFloat(n),
},
[FieldTypes.DATETIME]: {
"": null,
[undefined]: undefined,
[null]: null,
parse: date => {
if (date instanceof Date) {
return date.toISOString()
}
return date
},
},
[FieldTypes.ATTACHMENT]: {
"": [],
[null]: [],
[undefined]: undefined,
},
[FieldTypes.BOOLEAN]: {
"": null,
[null]: null,
[undefined]: undefined,
true: true,
false: false,
},
[FieldTypes.AUTO]: {
parse: () => undefined,
},
[FieldTypes.JSON]: {
parse: input => {
try {
if (input === "") {
return undefined
}
return JSON.parse(input)
} catch (err) {
return input
}
},
},
}


@@ -1,16 +1,17 @@
-const {
+import {
   FieldTypes,
   FormulaTypes,
   AutoFieldDefaultNames,
   AutoFieldSubTypes,
-} = require("../../constants")
-const { processStringSync } = require("@budibase/string-templates")
+} from "../../constants"
+import { processStringSync } from "@budibase/string-templates"
+import { FieldSchema, Table, Row } from "@budibase/types"
 /**
  * If the subtype has been lost for any reason this works out what
  * subtype the auto column should be.
  */
-exports.fixAutoColumnSubType = column => {
+export function fixAutoColumnSubType(column: FieldSchema) {
   if (!column.autocolumn || !column.name || column.subtype) {
     return column
   }
@@ -32,11 +33,11 @@ exports.fixAutoColumnSubType = column => {
 /**
  * Looks through the rows provided and finds formulas - which it then processes.
  */
-exports.processFormulas = (
-  table,
-  rows,
-  { dynamic, contextRows } = { dynamic: true }
-) => {
+export function processFormulas(
+  table: Table,
+  rows: Row[],
+  { dynamic, contextRows }: any = { dynamic: true }
+) {
   const single = !Array.isArray(rows)
   if (single) {
     rows = [rows]
@@ -70,7 +71,7 @@ exports.processFormulas = (
  * Processes any date columns and ensures that those without the ignoreTimezones
  * flag set are parsed as UTC rather than local time.
  */
-exports.processDates = (table, rows) => {
+export function processDates(table: Table, rows: Row[]) {
   let datesWithTZ = []
   for (let [column, schema] of Object.entries(table.schema)) {
     if (schema.type !== FieldTypes.DATETIME) {


@@ -19,6 +19,8 @@ export interface FieldSchema {
   formulaType?: string
   main?: boolean
   ignoreTimezones?: boolean
+  timeOnly?: boolean
+  lastID?: number
   meta?: {
     toTable: string
     toKey: string


@@ -17,6 +17,7 @@ export interface User extends Document {
   userGroups?: string[]
   forceResetPassword?: boolean
   dayPassRecordedAt?: string
+  userId?: string
 }
 export interface UserRoles {