Some server TypeScript rewrites; only the automations and some controllers are left.

This commit is contained in:
mike12345567 2022-11-25 15:01:46 +00:00
parent d3c4c99e16
commit 7eac8fafd6
38 changed files with 667 additions and 666 deletions
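
For context, the rewrite applies one mechanical pattern throughout: CommonJS require/exports become ES module syntax with explicit parameter and return types. A minimal sketch of the pattern, with names invented for illustration:

// Before (JavaScript/CommonJS):
//   const { join } = require("path")
//   exports.tempPath = name => join("/tmp", name)
// After (TypeScript/ES modules):
import { join } from "path"
export function tempPath(name: string): string {
  return join("/tmp", name)
}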

View File

@@ -1,7 +1,5 @@
import {
createTempFolder,
downloadTarballDirect,
} from "../../../utilities/fileSystem"
import { createTempFolder } from "../../../utilities/fileSystem"
import { objectStore } from "@budibase/backend-core"
export async function downloadUnzipTarball(
url: string,
@@ -10,7 +8,7 @@ export async function downloadUnzipTarball(
) {
try {
const path = createTempFolder(name)
await downloadTarballDirect(url, path, headers)
await objectStore.downloadTarballDirect(url, path, headers)
return path
} catch (e: any) {

View File

@@ -14,7 +14,6 @@ const {
} = require("../../../utilities/fileSystem")
const env = require("../../../environment")
const { clientLibraryPath } = require("../../../utilities")
const { upload, deleteFiles } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentType } = require("../../../db/utils")
const { context, objectStore, utils } = require("@budibase/backend-core")
@@ -22,7 +21,7 @@ const AWS = require("aws-sdk")
const fs = require("fs")
async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
const response = await upload({
const response = await objectStore.upload({
bucket,
metadata,
filename: s3Key,
@@ -95,7 +94,10 @@ export const uploadFile = async function (ctx: any) {
}
export const deleteObjects = async function (ctx: any) {
ctx.body = await deleteFiles(ObjectStoreBuckets.APPS, ctx.request.body.keys)
ctx.body = await objectStore.deleteFiles(
ObjectStoreBuckets.APPS,
ctx.request.body.keys
)
}
export const serveApp = async function (ctx: any) {

View File

@@ -1,206 +0,0 @@
const { objectStore, roles, constants } = require("@budibase/backend-core")
const FilterTypes = {
STRING: "string",
FUZZY: "fuzzy",
RANGE: "range",
EQUAL: "equal",
NOT_EQUAL: "notEqual",
EMPTY: "empty",
NOT_EMPTY: "notEmpty",
CONTAINS: "contains",
NOT_CONTAINS: "notContains",
ONE_OF: "oneOf",
}
exports.FilterTypes = FilterTypes
exports.NoEmptyFilterStrings = [
FilterTypes.STRING,
FilterTypes.FUZZY,
FilterTypes.EQUAL,
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
]
exports.FieldTypes = {
STRING: "string",
BARCODEQR: "barcodeqr",
LONGFORM: "longform",
OPTIONS: "options",
NUMBER: "number",
BOOLEAN: "boolean",
ARRAY: "array",
DATETIME: "datetime",
ATTACHMENT: "attachment",
LINK: "link",
FORMULA: "formula",
AUTO: "auto",
JSON: "json",
INTERNAL: "internal",
}
exports.CanSwitchTypes = [
[exports.FieldTypes.JSON, exports.FieldTypes.ARRAY],
[
exports.FieldTypes.STRING,
exports.FieldTypes.OPTIONS,
exports.FieldTypes.LONGFORM,
exports.FieldTypes.BARCODEQR,
],
[exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER],
]
exports.SwitchableTypes = exports.CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current
)
exports.RelationshipTypes = {
ONE_TO_MANY: "one-to-many",
MANY_TO_ONE: "many-to-one",
MANY_TO_MANY: "many-to-many",
}
exports.FormulaTypes = {
STATIC: "static",
DYNAMIC: "dynamic",
}
exports.AuthTypes = {
APP: "app",
BUILDER: "builder",
EXTERNAL: "external",
}
exports.DataSourceOperation = {
CREATE: "CREATE",
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
BULK_CREATE: "BULK_CREATE",
CREATE_TABLE: "CREATE_TABLE",
UPDATE_TABLE: "UPDATE_TABLE",
DELETE_TABLE: "DELETE_TABLE",
}
exports.DatasourceAuthTypes = {
GOOGLE: "google",
}
exports.SortDirection = {
ASCENDING: "ASCENDING",
DESCENDING: "DESCENDING",
}
exports.USERS_TABLE_SCHEMA = {
_id: "ta_users",
type: "table",
views: {},
name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
schema: {
email: {
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
email: true,
length: {
maximum: "",
},
presence: true,
},
fieldName: "email",
name: "email",
},
firstName: {
name: "firstName",
fieldName: "firstName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
lastName: {
name: "lastName",
fieldName: "lastName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
roleId: {
fieldName: "roleId",
name: "roleId",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(roles.BUILTIN_ROLE_IDS),
},
},
status: {
fieldName: "status",
name: "status",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(constants.UserStatus),
},
},
},
primaryDisplay: "email",
}
exports.AutoFieldSubTypes = {
CREATED_BY: "createdBy",
CREATED_AT: "createdAt",
UPDATED_BY: "updatedBy",
UPDATED_AT: "updatedAt",
AUTO_ID: "autoID",
}
exports.AutoFieldDefaultNames = {
CREATED_BY: "Created By",
CREATED_AT: "Created At",
UPDATED_BY: "Updated By",
UPDATED_AT: "Updated At",
AUTO_ID: "Auto ID",
}
exports.OBJ_STORE_DIRECTORY = "/prod-budi-app-assets"
exports.BaseQueryVerbs = {
CREATE: "create",
READ: "read",
UPDATE: "update",
DELETE: "delete",
}
exports.MetadataTypes = {
AUTOMATION_TEST_INPUT: "automationTestInput",
AUTOMATION_TEST_HISTORY: "automationTestHistory",
}
exports.InvalidColumns = {
ID: "_id",
REV: "_rev",
TABLE_ID: "tableId",
}
exports.BuildSchemaErrors = {
NO_KEY: "no_key",
INVALID_COLUMN: "invalid_column",
}
exports.AutomationErrors = {
INCORRECT_TYPE: "INCORRECT_TYPE",
MAX_ITERATIONS: "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION: "FAILURE_CONDITION_MET",
}
// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
exports.MAX_AUTOMATION_RECURRING_ERRORS = 5

View File

@@ -0,0 +1,204 @@
import { objectStore, roles, constants } from "@budibase/backend-core"
export enum FilterTypes {
STRING = "string",
FUZZY = "fuzzy",
RANGE = "range",
EQUAL = "equal",
NOT_EQUAL = "notEqual",
EMPTY = "empty",
NOT_EMPTY = "notEmpty",
CONTAINS = "contains",
NOT_CONTAINS = "notContains",
ONE_OF = "oneOf",
}
export const NoEmptyFilterStrings = [
FilterTypes.STRING,
FilterTypes.FUZZY,
FilterTypes.EQUAL,
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
]
export enum FieldTypes {
STRING = "string",
BARCODEQR = "barcodeqr",
LONGFORM = "longform",
OPTIONS = "options",
NUMBER = "number",
BOOLEAN = "boolean",
ARRAY = "array",
DATETIME = "datetime",
ATTACHMENT = "attachment",
LINK = "link",
FORMULA = "formula",
AUTO = "auto",
JSON = "json",
INTERNAL = "internal",
}
export const CanSwitchTypes = [
[exports.FieldTypes.JSON, exports.FieldTypes.ARRAY],
[
exports.FieldTypes.STRING,
exports.FieldTypes.OPTIONS,
exports.FieldTypes.LONGFORM,
exports.FieldTypes.BARCODEQR,
],
[exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER],
]
export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current
)
export enum RelationshipTypes {
ONE_TO_MANY = "one-to-many",
MANY_TO_ONE = "many-to-one",
MANY_TO_MANY = "many-to-many",
}
export enum FormulaTypes {
STATIC = "static",
DYNAMIC = "dynamic",
}
export enum AuthTypes {
APP = "app",
BUILDER = "builder",
EXTERNAL = "external",
}
export enum DataSourceOperation {
CREATE = "CREATE",
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
BULK_CREATE = "BULK_CREATE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}
export enum DatasourceAuthTypes {
GOOGLE = "google",
}
export enum SortDirection {
ASCENDING = "ASCENDING",
DESCENDING = "DESCENDING",
}
export const USERS_TABLE_SCHEMA = {
_id: "ta_users",
type: "table",
views: {},
name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
schema: {
email: {
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
email: true,
length: {
maximum: "",
},
presence: true,
},
fieldName: "email",
name: "email",
},
firstName: {
name: "firstName",
fieldName: "firstName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
lastName: {
name: "lastName",
fieldName: "lastName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
roleId: {
fieldName: "roleId",
name: "roleId",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(roles.BUILTIN_ROLE_IDS),
},
},
status: {
fieldName: "status",
name: "status",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(constants.UserStatus),
},
},
},
primaryDisplay: "email",
}
export enum AutoFieldSubTypes {
CREATED_BY = "createdBy",
CREATED_AT = "createdAt",
UPDATED_BY = "updatedBy",
UPDATED_AT = "updatedAt",
AUTO_ID = "autoID",
}
export enum AutoFieldDefaultNames {
CREATED_BY = "Created By",
CREATED_AT = "Created At",
UPDATED_BY = "Updated By",
UPDATED_AT = "Updated At",
AUTO_ID = "Auto ID",
}
export const OBJ_STORE_DIRECTORY = "/prod-budi-app-assets"
export enum BaseQueryVerbs {
CREATE = "create",
READ = "read",
UPDATE = "update",
DELETE = "delete",
}
export enum MetadataTypes {
AUTOMATION_TEST_INPUT = "automationTestInput",
AUTOMATION_TEST_HISTORY = "automationTestHistory",
}
export enum InvalidColumns {
ID = "_id",
REV = "_rev",
TABLE_ID = "tableId",
}
export enum BuildSchemaErrors {
NO_KEY = "no_key",
INVALID_COLUMN = "invalid_column",
}
export enum AutomationErrors {
INCORRECT_TYPE = "INCORRECT_TYPE",
MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION = "FAILURE_CONDITION_MET",
}
// pass through the list from the auth/core lib
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5
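
A note on the SwitchableTypes reduce above: it just flattens the CanSwitchTypes groups into a single array of mutually convertible field types. An equivalent sketch, written out:

// Equivalent to the reduce: flatten the nested groups one level.
const SwitchableTypesFlat = ([] as FieldTypes[]).concat(...CanSwitchTypes)
// -> [JSON, ARRAY, STRING, OPTIONS, LONGFORM, BARCODEQR, BOOLEAN, NUMBER]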

View File

@@ -1,9 +1,9 @@
const BASE_LAYOUT_PROP_IDS = {
export const BASE_LAYOUT_PROP_IDS = {
PRIVATE: "layout_private_master",
PUBLIC: "layout_public_master",
}
const EMPTY_LAYOUT = {
export const EMPTY_LAYOUT = {
componentLibraries: ["@budibase/standard-components"],
title: "{{ name }}",
favicon: "./_shared/favicon.png",
@@ -48,7 +48,7 @@ const EMPTY_LAYOUT = {
},
}
const BASE_LAYOUTS = [
export const BASE_LAYOUTS = [
{
_id: BASE_LAYOUT_PROP_IDS.PRIVATE,
componentLibraries: ["@budibase/standard-components"],
@@ -145,9 +145,3 @@ const BASE_LAYOUTS = [
},
},
]
module.exports = {
BASE_LAYOUTS,
BASE_LAYOUT_PROP_IDS,
EMPTY_LAYOUT,
}

View File

@@ -1,46 +0,0 @@
const { roles } = require("@budibase/backend-core")
const { BASE_LAYOUT_PROP_IDS } = require("./layouts")
exports.createHomeScreen = () => ({
description: "",
url: "",
layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE,
props: {
_id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59",
_component: "@budibase/standard-components/container",
_styles: {
normal: {},
hover: {},
active: {},
selected: {},
},
_transition: "fade",
_children: [
{
_id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7",
_component: "@budibase/standard-components/heading",
_styles: {
hover: {},
active: {},
selected: {},
},
text: "Welcome to your Budibase App 👋",
size: "M",
align: "left",
_instanceName: "Heading",
_children: [],
},
],
_instanceName: "Home",
direction: "column",
hAlign: "stretch",
vAlign: "top",
size: "grow",
gap: "M",
},
routing: {
route: "/",
roleId: roles.BUILTIN_ROLE_IDS.BASIC,
},
name: "home-screen",
})

View File

@@ -0,0 +1,48 @@
import { roles } from "@budibase/backend-core"
import { BASE_LAYOUT_PROP_IDS } from "./layouts"
export function createHomeScreen() {
return {
description: "",
url: "",
layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE,
props: {
_id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59",
_component: "@budibase/standard-components/container",
_styles: {
normal: {},
hover: {},
active: {},
selected: {},
},
_transition: "fade",
_children: [
{
_id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7",
_component: "@budibase/standard-components/heading",
_styles: {
hover: {},
active: {},
selected: {},
},
text: "Welcome to your Budibase App 👋",
size: "M",
align: "left",
_instanceName: "Heading",
_children: [],
},
],
_instanceName: "Home",
direction: "column",
hAlign: "stretch",
vAlign: "top",
size: "grow",
gap: "M",
},
routing: {
route: "/",
roleId: roles.BUILTIN_ROLE_IDS.BASIC,
},
name: "home-screen",
}
}

View File

@@ -1,31 +1,32 @@
const {
import {
FieldTypes,
AutoFieldSubTypes,
RelationshipTypes,
} = require("../../constants/index")
const { importToRows } = require("../../api/controllers/table/utils")
const { cloneDeep } = require("lodash/fp")
const LinkDocument = require("../linkedRows/LinkDocument")
const { inventoryImport } = require("./inventoryImport")
const { employeeImport } = require("./employeeImport")
const { jobsImport } = require("./jobsImport")
const { expensesImport } = require("./expensesImport")
const { db: dbCore } = require("@budibase/backend-core")
} from "../../constants"
import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument"
import { inventoryImport } from "./inventoryImport"
import { employeeImport } from "./employeeImport"
import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core"
import { Table, Row } from "@budibase/types"
exports.DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
exports.DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
exports.DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
exports.DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
exports.DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
exports.DEFAULT_BB_DATASOURCE = {
_id: this.DEFAULT_BB_DATASOURCE_ID,
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
export const DEFAULT_BB_DATASOURCE = {
_id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE,
name: "Sample Data",
source: "BUDIBASE",
config: {},
}
const syncLastIds = (table, rowCount) => {
function syncLastIds(table: Table, rowCount: number) {
Object.keys(table.schema).forEach(key => {
const entry = table.schema[key]
if (entry.autocolumn && entry.subtype == "autoID") {
@@ -34,7 +35,7 @@ const syncLastIds = (table, rowCount) => {
})
}
const tableImport = (table, data) => {
function tableImport(table: Table, data: Row) {
const cloneTable = cloneDeep(table)
const rowDocs = importToRows(data, cloneTable)
syncLastIds(cloneTable, rowDocs.length)
@@ -77,11 +78,11 @@ const AUTO_COLUMNS = {
},
}
exports.DEFAULT_INVENTORY_TABLE_SCHEMA = {
_id: this.DEFAULT_INVENTORY_TABLE_ID,
export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
_id: DEFAULT_INVENTORY_TABLE_ID,
type: "internal",
views: {},
sourceId: exports.DEFAULT_BB_DATASOURCE_ID,
sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Item Name",
name: "Inventory",
schema: {
@@ -186,12 +187,12 @@ exports.DEFAULT_INVENTORY_TABLE_SCHEMA = {
},
}
exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
_id: this.DEFAULT_EMPLOYEE_TABLE_ID,
export const DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
_id: DEFAULT_EMPLOYEE_TABLE_ID,
type: "internal",
views: {},
name: "Employees",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID,
sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "First Name",
schema: {
"First Name": {
@@ -300,7 +301,7 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
fieldName: "Assigned",
name: "Jobs",
relationshipType: RelationshipTypes.MANY_TO_MANY,
tableId: this.DEFAULT_JOBS_TABLE_ID,
tableId: DEFAULT_JOBS_TABLE_ID,
},
"Start Date": {
type: FieldTypes.DATETIME,
@@ -334,11 +335,11 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
},
}
exports.DEFAULT_JOBS_TABLE_SCHEMA = {
_id: this.DEFAULT_JOBS_TABLE_ID,
export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
_id: DEFAULT_JOBS_TABLE_ID,
type: "internal",
name: "Jobs",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID,
sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Job ID",
schema: {
"Job ID": {
@@ -456,7 +457,7 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = {
Assigned: {
name: "Assigned",
type: FieldTypes.LINK,
tableId: this.DEFAULT_EMPLOYEE_TABLE_ID,
tableId: DEFAULT_EMPLOYEE_TABLE_ID,
fieldName: "Jobs",
relationshipType: RelationshipTypes.MANY_TO_MANY,
// sortable: true,
@@ -491,12 +492,12 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = {
},
}
exports.DEFAULT_EXPENSES_TABLE_SCHEMA = {
_id: this.DEFAULT_EXPENSES_TABLE_ID,
export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
_id: DEFAULT_EXPENSES_TABLE_ID,
type: "internal",
views: {},
name: "Expenses",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID,
sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Expense ID",
schema: {
"Expense ID": {
@@ -601,38 +602,40 @@ exports.DEFAULT_EXPENSES_TABLE_SCHEMA = {
},
}
exports.buildDefaultDocs = () => {
export function buildDefaultDocs() {
const inventoryData = tableImport(
this.DEFAULT_INVENTORY_TABLE_SCHEMA,
DEFAULT_INVENTORY_TABLE_SCHEMA,
inventoryImport
)
const employeeData = tableImport(
this.DEFAULT_EMPLOYEE_TABLE_SCHEMA,
DEFAULT_EMPLOYEE_TABLE_SCHEMA,
employeeImport
)
const jobData = tableImport(this.DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
const expensesData = tableImport(
this.DEFAULT_EXPENSES_TABLE_SCHEMA,
DEFAULT_EXPENSES_TABLE_SCHEMA,
expensesImport
)
// Build one link doc for each employee/job
const jobEmployeeLinks = employeeData.rows.map((employee, index) => {
const jobEmployeeLinks = employeeData.rows.map(
(employee: any, index: any) => {
return new LinkDocument(
employeeData.table._id,
employeeData.table._id!,
"Jobs",
employeeData.rows[index]._id,
jobData.table._id,
jobData.table._id!,
"Assigned",
jobData.rows[index]._id
)
})
}
)
return [
this.DEFAULT_BB_DATASOURCE,
DEFAULT_BB_DATASOURCE,
inventoryData.table,
employeeData.table,
jobData.table,

View File

@@ -1,4 +1,4 @@
exports.employeeImport = [
export const employeeImport = [
{
"First Name": "Julie",
"Last Name": "Jimenez",

View File

@@ -1,4 +1,4 @@
exports.expensesImport = [
export const expensesImport = [
{
"Date Paid": "2022-11-12T12:00:00.000",
"Payment Due": "2022-11-01T12:00:00.000",

View File

@@ -1,4 +1,4 @@
exports.inventoryImport = [
export const inventoryImport = [
{
Status: ["Available"],
"Item Name": "Little Blue Van",

View File

@@ -1,4 +1,4 @@
exports.jobsImport = [
export const jobsImport = [
{
"Works End": "2023-01-28T12:00:00.000",
"Customer Email": "susie.peterson@example.com",

View File

@@ -1,8 +1,7 @@
let { merge } = require("lodash")
let env = require("../environment")
import { merge } from "lodash"
import env from "../environment"
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
exports.AWS_REGION = AWS_REGION
export const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
const TableInfo = {
API_KEYS: {
@@ -16,10 +15,36 @@ const TableInfo = {
},
}
let docClient = null
let docClient: any = null
type GetOpts = {
primary: string
sort?: string
otherProps?: any
}
type UpdateOpts = {
primary: string
sort?: string
expression?: string
condition?: string
names?: string[]
values?: any[]
exists?: boolean
otherProps?: any
}
type PutOpts = {
item: any
otherProps?: any
}
class Table {
constructor(tableInfo) {
_name: string
_primary: string
_sort?: string
constructor(tableInfo: { name: string; primary: string; sort?: string }) {
if (!tableInfo.name || !tableInfo.primary) {
throw "Table info must specify a name and a primary key"
}
@@ -28,7 +53,7 @@ class Table {
this._sort = tableInfo.sort
}
async get({ primary, sort, otherProps }) {
async get({ primary, sort, otherProps }: GetOpts) {
let params = {
TableName: this._name,
Key: {
@@ -54,8 +79,8 @@
values,
exists,
otherProps,
}) {
let params = {
}: UpdateOpts) {
let params: any = {
TableName: this._name,
Key: {
[this._primary]: primary,
@@ -83,7 +108,7 @@ class Table {
return docClient.update(params).promise()
}
async put({ item, otherProps }) {
async put({ item, otherProps }: PutOpts) {
if (
item[this._primary] == null ||
(this._sort && item[this._sort] == null)
@@ -101,9 +126,9 @@
}
}
exports.init = endpoint => {
export function init(endpoint: string) {
let AWS = require("aws-sdk")
let docClientParams = {
let docClientParams: any = {
correctClockSkew: true,
region: AWS_REGION,
}
@@ -115,13 +140,8 @@ exports.init = endpoint => {
docClient = new AWS.DynamoDB.DocumentClient(docClientParams)
}
exports.apiKeyTable = new Table(TableInfo.API_KEYS)
exports.userTable = new Table(TableInfo.USERS)
if (env.isProd()) {
exports.init(`https://dynamodb.${AWS_REGION}.amazonaws.com`)
} else {
if (!env.isProd()) {
env._set("AWS_ACCESS_KEY_ID", "KEY_ID")
env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY")
exports.init("http://localhost:8333")
init("http://localhost:8333")
}
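
A hedged usage sketch of the typed Table wrapper above; the primary key name "pk" is invented (the real key names live in the truncated TableInfo), and the tables are assumed to still be exported:

// Hypothetical usage of the DynamoDB Table wrapper (key name assumed):
async function dynamoExample() {
  await apiKeyTable.put({ item: { pk: "api_key_abc", value: "secret" } })
  const record = await apiKeyTable.get({ primary: "api_key_abc" })
  console.log(record)
}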

View File

@@ -1,11 +1,17 @@
const newid = require("./newid")
import newid from "./newid"
import { Row, View, Document } from "@budibase/types"
// bypass the main application db config
// use in memory pouchdb directly
const { db: dbCore } = require("@budibase/backend-core")
import { db as dbCore } from "@budibase/backend-core"
const Pouch = dbCore.getPouch({ inMemory: true })
exports.runView = async (view, calculation, group, data) => {
export async function runView(
view: View,
calculation: boolean,
group: string,
data: Row[]
) {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
@@ -18,16 +24,16 @@ exports.runView = async (view, calculation, group, data) => {
_rev: undefined,
}))
)
let fn = (doc, emit) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
const queryFns = {
let fn = (doc: Document, emit: any) => emit(doc._id)
eval("fn = " + view?.map?.replace("function (doc)", "function (doc, emit)"))
const queryFns: any = {
meta: view.meta,
map: fn,
}
if (view.reduce) {
queryFns.reduce = view.reduce
}
const response = await db.query(queryFns, {
const response: { rows: Row[] } = await db.query(queryFns, {
include_docs: !calculation,
group: !!group,
})
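
A hedged sketch of calling the typed runView above; the view carries a stringified map function, matching the eval trick in the code:

// Hypothetical invocation (types loosened for illustration):
async function runViewExample(rows: any[]) {
  const view: any = { map: "function (doc) { emit(doc._id) }", meta: {} }
  // loads the rows into a throwaway in-memory Pouch DB and queries the view
  return runView(view, false, "", rows)
}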

View File

@@ -1,16 +0,0 @@
const core = require("@budibase/backend-core")
const env = require("../environment")
exports.init = () => {
const dbConfig = {
replication: true,
find: true,
}
if (env.isTest() && !env.COUCH_DB_URL) {
dbConfig.inMemory = true
dbConfig.allDbs = true
}
core.init({ db: dbConfig })
}

View File

@@ -0,0 +1,16 @@
import { init as coreInit } from "@budibase/backend-core"
import env = require("../environment")
export function init() {
const dbConfig: any = {
replication: true,
find: true,
}
if (env.isTest() && !env.COUCH_DB_URL) {
dbConfig.inMemory = true
dbConfig.allDbs = true
}
coreInit({ db: dbConfig })
}

View File

@@ -1,5 +1,5 @@
const { v4 } = require("uuid")
module.exports = function () {
export = function (): string {
return v4().replace(/-/g, "")
}

View File

@@ -1,5 +1,6 @@
const { context } = require("@budibase/backend-core")
const { DocumentType, SEPARATOR, ViewName, SearchIndexes } = require("../utils")
import { context } from "@budibase/backend-core"
import { DocumentType, SEPARATOR, ViewName, SearchIndexes } from "../utils"
import { LinkDocument, Row } from "@budibase/types"
const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
/**************************************************
@@ -19,16 +20,17 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
* @returns {Promise<void>} The view now exists, please note that the next query of this view will actually build it,
* so it may be slow.
*/
exports.createLinkView = async () => {
export async function createLinkView() {
const db = context.getAppDB()
const designDoc = await db.get("_design/database")
const view = {
map: function (doc) {
map: function (doc: LinkDocument) {
// everything in this must remain constant as it's going to Pouch, no external variables
if (doc.type === "link") {
let doc1 = doc.doc1
let doc2 = doc.doc2
// eslint-disable-next-line no-undef
// @ts-ignore
emit([doc1.tableId, doc1.rowId], {
id: doc2.rowId,
thisId: doc1.rowId,
@@ -37,6 +39,7 @@ exports.createLinkView = async () => {
// if linking to same table can't emit twice
if (doc1.tableId !== doc2.tableId) {
// eslint-disable-next-line no-undef
// @ts-ignore
emit([doc2.tableId, doc2.rowId], {
id: doc1.rowId,
thisId: doc2.rowId,
@@ -53,7 +56,7 @@ exports.createLinkView = async () => {
await db.put(designDoc)
}
exports.createRoutingView = async () => {
export async function createRoutingView() {
const db = context.getAppDB()
const designDoc = await db.get("_design/database")
const view = {
@@ -74,7 +77,7 @@ exports.createRoutingView = async () => {
await db.put(designDoc)
}
async function searchIndex(indexName, fnString) {
async function searchIndex(indexName: string, fnString: string) {
const db = context.getAppDB()
const designDoc = await db.get("_design/database")
designDoc.indexes = {
@@ -86,11 +89,11 @@ async function searchIndex(indexName, fnString) {
await db.put(designDoc)
}
exports.createAllSearchIndex = async () => {
export async function createAllSearchIndex() {
await searchIndex(
SearchIndexes.ROWS,
function (doc) {
function idx(input, prev) {
function (doc: Row) {
function idx(input: Row, prev?: string) {
for (let key of Object.keys(input)) {
let idxKey = prev != null ? `${prev}.${key}` : key
idxKey = idxKey.replace(/ /g, "_")
@@ -98,6 +101,7 @@ exports.createAllSearchIndex = async () => {
for (let val of input[key]) {
if (typeof val !== "object") {
// eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, val, { store: true })
}
}
@@ -106,17 +110,20 @@ exports.createAllSearchIndex = async () => {
}
if (typeof input[key] === "string") {
// eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key].toLowerCase(), { store: true })
} else if (typeof input[key] !== "object") {
// eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key], { store: true })
} else {
idx(input[key], idxKey)
}
}
}
if (doc._id.startsWith("ro_")) {
if (doc._id!.startsWith("ro_")) {
// eslint-disable-next-line no-undef
// @ts-ignore
index("default", doc._id)
idx(doc)
}
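
The pattern in this file is worth spelling out: the map and index functions are serialised onto the design document before being sent to CouchDB/Pouch, which is why they must not close over outer variables, and why the CouchDB-provided emit/index globals need the @ts-ignore comments. A minimal sketch of that serialisation, under the same assumptions:

// Sketch: a view function becomes a string on the design document.
declare const emit: (key: any, value?: any) => void // provided by CouchDB/Pouch at query time
const exampleView = {
  map: function (doc: any) {
    if (doc.type === "link") {
      emit(doc._id)
    }
  }.toString(),
}
// designDoc.views = { ...designDoc.views, example: exampleView }; await db.put(designDoc)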

View File

@@ -1,6 +1,7 @@
const { rowEmission, tableEmission } = require("./utils")
const mainEmitter = require("./index")
const env = require("../environment")
import { rowEmission, tableEmission } from "./utils"
import mainEmitter from "./index"
import env from "../environment"
import { Table, Row } from "@budibase/types"
// max number of automations that can chain on top of each other
// TODO: in future make this configurable at the automation level
@@ -13,14 +14,17 @@ const MAX_AUTOMATION_CHAIN = env.SELF_HOSTED ? 5 : 0
* from getting stuck endlessly chaining.
*/
class AutomationEmitter {
constructor(chainCount) {
chainCount: number
metadata: { automationChainCount: number }
constructor(chainCount: number) {
this.chainCount = chainCount
this.metadata = {
automationChainCount: chainCount,
}
}
emitRow(eventName, appId, row, table = null) {
emitRow(eventName: string, appId: string, row: Row, table?: Table) {
// don't emit the event if we've reached the max automation chain
if (this.chainCount >= MAX_AUTOMATION_CHAIN) {
return
@@ -35,11 +39,12 @@
})
}
emitTable(eventName, appId, table = null) {
emitTable(eventName: string, appId: string, table?: Table) {
// don't emit the event if we've reached the max automation chain
if (this.chainCount > MAX_AUTOMATION_CHAIN) {
return
}
tableEmission({
emitter: mainEmitter,
eventName,
@@ -50,4 +55,4 @@
}
}
module.exports = AutomationEmitter
export = AutomationEmitter

View File

@@ -1,5 +1,6 @@
const EventEmitter = require("events").EventEmitter
const { rowEmission, tableEmission } = require("./utils")
import { EventEmitter } from "events"
import { rowEmission, tableEmission } from "./utils"
import { Table, Row } from "@budibase/types"
/**
* keeping event emitter in one central location as it might be used for things other than
@@ -12,19 +13,17 @@ const { rowEmission, tableEmission } = require("./utils")
* This is specifically quite important for template strings used in automations.
*/
class BudibaseEmitter extends EventEmitter {
emitRow(eventName, appId, row, table = null) {
emitRow(eventName: string, appId: string, row: Row, table?: Table) {
rowEmission({ emitter: this, eventName, appId, row, table })
}
emitTable(eventName, appId, table = null) {
emitTable(eventName: string, appId: string, table?: Table) {
tableEmission({ emitter: this, eventName, appId, table })
}
emitPort(portNumber) {
emitPort(portNumber: number) {
this.emit("internal:port", portNumber)
}
}
const emitter = new BudibaseEmitter()
module.exports = emitter
export = BudibaseEmitter

View File

@@ -0,0 +1,5 @@
import BudibaseEmitter from "./BudibaseEmitter"
const emitter = new BudibaseEmitter()
export = emitter
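
The emitter module is now split in two: the class lives in BudibaseEmitter and this small index constructs the shared singleton. A hedged consumption sketch (the import path is assumed):

// Hypothetical consumer of the shared emitter singleton:
import emitter = require("../events")

emitter.on("row:save", event => console.log(event))
emitter.emitRow("row:save", "app_123", { tableId: "ta_abc" } as any)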

View File

@@ -1,38 +0,0 @@
exports.rowEmission = ({ emitter, eventName, appId, row, table, metadata }) => {
let event = {
row,
appId,
tableId: row.tableId,
}
if (table) {
event.table = table
}
event.id = row._id
if (row._rev) {
event.revision = row._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}
exports.tableEmission = ({ emitter, eventName, appId, table, metadata }) => {
const tableId = table._id
let event = {
table: {
...table,
tableId: tableId,
},
appId,
tableId: tableId,
}
event.id = tableId
if (table._rev) {
event.revision = table._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}

View File

@@ -0,0 +1,78 @@
import { Table, Row } from "@budibase/types"
import BudibaseEmitter from "./BudibaseEmitter"
type BBEventOpts = {
emitter: BudibaseEmitter
eventName: string
appId: string
table?: Table
row?: Row
metadata?: any
}
interface BBEventTable extends Table {
tableId?: string
}
type BBEvent = {
appId: string
tableId?: string
row?: Row
table?: BBEventTable
id?: string
revision?: string
metadata?: any
}
export function rowEmission({
emitter,
eventName,
appId,
row,
table,
metadata,
}: BBEventOpts) {
let event: BBEvent = {
row,
appId,
tableId: row?.tableId,
}
if (table) {
event.table = table
}
event.id = row?._id
if (row?._rev) {
event.revision = row._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}
export function tableEmission({
emitter,
eventName,
appId,
table,
metadata,
}: BBEventOpts) {
const tableId = table?._id
const inputTable: BBEventTable | undefined = table
if (inputTable) {
inputTable.tableId = tableId
}
let event: BBEvent = {
table: inputTable,
appId,
tableId: tableId,
}
event.id = tableId
if (table?._rev) {
event.revision = table._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}
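
For clarity, the event object assembled by rowEmission above ends up shaped like this (values invented; BBEvent itself is not exported):

// Illustrative BBEvent produced by rowEmission:
const exampleEvent = {
  row: { _id: "ro_1", _rev: "1-abc", tableId: "ta_abc" },
  appId: "app_123",
  tableId: "ta_abc",
  id: "ro_1",
  revision: "1-abc",
}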

View File

@@ -1,4 +1,4 @@
const path = require("path")
import path from "path"
// this simply runs all of our path join and resolve functions through
// a central location in case we need to add some protection to file paths
@@ -8,7 +8,7 @@ const path = require("path")
* @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use
*/
exports.join = function (...args) {
export function join(...args: any) {
return path.join(...args)
}
@@ -17,6 +17,6 @@ exports.join = function (...args) {
* @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use
*/
exports.resolve = function (...args) {
export function resolve(...args: any) {
return path.resolve(...args)
}

View File

@@ -1,18 +1,25 @@
const csv = require("csvtojson")
const { FieldTypes } = require("../constants")
import { FieldSchema, Table } from "@budibase/types"
import csv from "csvtojson"
import { FieldTypes } from "../constants"
type CsvParseOpts = {
schema?: { [key: string]: any }
existingTable: Table
csvString?: string
}
const VALIDATORS = {
[FieldTypes.STRING]: () => true,
[FieldTypes.OPTIONS]: () => true,
[FieldTypes.BARCODEQR]: () => true,
[FieldTypes.NUMBER]: attribute => {
[FieldTypes.NUMBER]: (attribute?: string) => {
// allow not to be present
if (!attribute) {
return true
}
return !isNaN(Number(attribute))
},
[FieldTypes.DATETIME]: attribute => {
[FieldTypes.DATETIME]: (attribute?: string) => {
// allow not to be present
if (!attribute) {
return true
@@ -22,13 +29,13 @@ const VALIDATORS = {
}
const PARSERS = {
[FieldTypes.NUMBER]: attribute => {
[FieldTypes.NUMBER]: (attribute?: string) => {
if (!attribute) {
return attribute
}
return Number(attribute)
},
[FieldTypes.DATETIME]: attribute => {
[FieldTypes.DATETIME]: (attribute?: string) => {
if (!attribute) {
return attribute
}
@@ -36,10 +43,10 @@ const PARSERS = {
},
}
function parse(csvString, parsers) {
export function parse(csvString: string, parsers: any) {
const result = csv().fromString(csvString)
const schema = {}
const schema: Record<string, any> = {}
return new Promise((resolve, reject) => {
result.on("header", headers => {
@@ -77,16 +84,22 @@ function parse(csvString, parsers) {
})
}
function updateSchema({ schema, existingTable }) {
export function updateSchema({
schema,
existingTable,
}: {
schema?: Record<string, any>
existingTable: Table
}) {
if (!schema) {
return schema
}
const finalSchema = {}
const schemaKeyMap = {}
const finalSchema: Record<string, FieldSchema> = {}
const schemaKeyMap: Record<string, any> = {}
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
for (let [key, field] of Object.entries(existingTable.schema)) {
const lcKey = key.toLowerCase()
const foundKey = schemaKeyMap[lcKey]
const foundKey: string = schemaKeyMap[lcKey]
if (foundKey) {
finalSchema[key] = schema[foundKey]
finalSchema[key].type = field.type
@@ -95,15 +108,22 @@ function updateSchema({ schema, existingTable }) {
return finalSchema
}
async function transform({ schema, csvString, existingTable }) {
const colParser = {}
export async function transform({
schema,
csvString,
existingTable,
}: CsvParseOpts) {
if (!schema || !csvString) {
throw new Error("Unable to transform CSV without schema")
}
const colParser: any = {}
// make sure the table has all the columns required for import
if (existingTable) {
schema = updateSchema({ schema, existingTable })
}
for (let [key, field] of Object.entries(schema)) {
for (let [key, field] of Object.entries(schema || {})) {
// don't import data to auto columns
if (!field.autocolumn) {
colParser[key] = PARSERS[field.type] || field.type
@@ -112,8 +132,10 @@ async function transform({ schema, csvString, existingTable }) {
try {
const data = await csv({ colParser }).fromString(csvString)
const schemaKeyMap = {}
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
const schemaKeyMap: any = {}
Object.keys(schema || {}).forEach(
key => (schemaKeyMap[key.toLowerCase()] = key)
)
for (let element of data) {
if (!data) {
continue
@@ -137,9 +159,3 @@ async function transform({ schema, csvString, existingTable }) {
throw err
}
}
module.exports = {
parse,
transform,
updateSchema,
}
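
A hedged usage sketch of the CSV helpers above; the table object is cut down to the fields the code actually reads:

// Hypothetical CSV import flow:
async function csvImportExample() {
  const csvString = "name,age\nBudi,7"
  const existingTable: any = {
    schema: {
      name: { type: "string", name: "name" },
      age: { type: "number", name: "age" },
    },
  }
  const schema = { name: { type: "string" }, age: { type: "number" } }
  // "age" is run through the number parser; keys are matched case-insensitively
  return transform({ schema, csvString, existingTable })
}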

View File

@@ -1,9 +1,9 @@
const { join } = require("path")
const { ObjectStoreBuckets } = require("../../constants")
const fs = require("fs")
const { objectStore } = require("@budibase/backend-core")
const { resolve } = require("../centralPath")
const env = require("../../environment")
import { join } from "path"
import { ObjectStoreBuckets } from "../../constants"
import fs from "fs"
import { objectStore } from "@budibase/backend-core"
import { resolve } from "../centralPath"
import env from "../../environment"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
/**
@@ -33,7 +33,7 @@ const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
* @param appId The app ID to backup
* @returns {Promise<void>}
*/
exports.backupClientLibrary = async appId => {
export async function backupClientLibrary(appId: string) {
// Copy existing manifest to tmp
let tmpManifestPath
try {
@@ -85,7 +85,7 @@
* @param appId The app ID to update
* @returns {Promise<void>}
*/
exports.updateClientLibrary = async appId => {
export async function updateClientLibrary(appId: string) {
let manifest, client
if (env.isDev()) {
@@ -124,7 +124,7 @@
* @param appId The app ID to revert
* @returns {Promise<void>}
*/
exports.revertClientLibrary = async appId => {
export async function revertClientLibrary(appId: string) {
// Copy backups manifest to tmp directory
const tmpManifestPath = await objectStore.retrieveToTmp(
ObjectStoreBuckets.APPS,

View File

@@ -1,26 +1,17 @@
const { budibaseTempDir } = require("../budibaseDir")
const fs = require("fs")
const { join } = require("path")
import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { context, objectStore } from "@budibase/backend-core"
import { ObjectStoreBuckets } from "../../constants"
import { updateClientLibrary } from "./clientLibrary"
import { checkSlashesInUrl } from "../"
import env from "../../environment"
import fetch from "node-fetch"
const uuid = require("uuid/v4")
const { context, objectStore } = require("@budibase/backend-core")
const { ObjectStoreBuckets } = require("../../constants")
const { updateClientLibrary } = require("./clientLibrary")
const { checkSlashesInUrl } = require("../")
const env = require("../../environment")
const tar = require("tar")
const fetch = require("node-fetch")
const {
upload,
retrieve,
retrieveToTmp,
deleteFolder,
downloadTarball,
downloadTarballDirect,
deleteFiles,
} = objectStore
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/**
@@ -34,14 +25,14 @@ const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/**
* Upon first startup of an instance there may not be everything we need in the tmp directory, so set it up.
*/
exports.init = () => {
export function init() {
const tempDir = budibaseTempDir()
if (!fs.existsSync(tempDir)) {
// some test cases fire this quickly enough that
// synchronous cases can end up here at the same time
try {
fs.mkdirSync(tempDir)
} catch (err) {
} catch (err: any) {
if (!err || err.code !== "EEXIST") {
throw err
}
@@ -81,7 +72,7 @@ exports.checkDevelopmentEnvironment = () => {
* @param {string} path The path to the handlebars file which is to be loaded.
* @returns {string} The loaded handlebars file as a string - loaded as utf8.
*/
exports.loadHandlebarsFile = path => {
export function loadHandlebarsFile(path: string) {
return fs.readFileSync(path, "utf8")
}
@@ -91,13 +82,13 @@ exports.loadHandlebarsFile = path => {
* @param {string} contents the contents of the file which is to be returned from the API.
* @return {Object} the read stream which can be put into the koa context body.
*/
exports.apiFileReturn = contents => {
export function apiFileReturn(contents: string) {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, contents)
return fs.createReadStream(path)
}
exports.streamFile = path => {
export function streamFile(path: string) {
return fs.createReadStream(path)
}
@@ -106,7 +97,7 @@ exports.streamFile = path => {
* @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file.
*/
exports.storeTempFile = fileContents => {
export function storeTempFile(fileContents: string) {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents)
return path
@@ -116,7 +107,7 @@ exports.storeTempFile = fileContents => {
* Utility function for getting a file read stream - a simple in memory buffered read
* stream doesn't work for pouchdb.
*/
exports.stringToFileStream = contents => {
export function stringToFileStream(contents: string) {
const path = exports.storeTempFile(contents)
return fs.createReadStream(path)
}
@@ -125,7 +116,7 @@
* Creates a temp file and returns it from the API.
* @param {string} fileContents the contents to be returned in file.
*/
exports.sendTempFile = fileContents => {
export function sendTempFile(fileContents: string) {
const path = exports.storeTempFile(fileContents)
return fs.createReadStream(path)
}
@@ -135,7 +126,7 @@
* @param {string} appId The ID of the app which is being created.
* @return {Promise<void>} once promise completes app resources should be ready in object store.
*/
exports.createApp = async appId => {
export async function createApp(appId: string) {
await updateClientLibrary(appId)
}
@@ -144,8 +135,8 @@
* @param {string} appId The ID of the app which is being deleted.
* @return {Promise<void>} once promise completes the app resources will be removed from object store.
*/
exports.deleteApp = async appId => {
await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
export async function deleteApp(appId: string) {
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
}
/**
@@ -154,17 +145,21 @@
* @param name
* @return {Promise<*>}
*/
exports.downloadTemplate = async (type, name) => {
export async function downloadTemplate(type: string, name: string) {
const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
return objectStore.downloadTarball(
templateUrl,
ObjectStoreBuckets.TEMPLATES,
type
)
}
/**
* Retrieves component libraries from object store (or tmp symlink if in local)
*/
exports.getComponentLibraryManifest = async library => {
export async function getComponentLibraryManifest(library: string) {
const appId = context.getAppId()
const filename = "manifest.json"
/* istanbul ignore next */
@@ -182,12 +177,16 @@ exports.getComponentLibraryManifest = async library => {
return require(path)
}
if (!appId) {
throw new Error("No app ID found - cannot get component libraries")
}
let resp
let path
try {
// Try to load the manifest from the new file location
path = join(appId, filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
} catch (error) {
console.error(
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
@ -195,7 +194,7 @@ exports.getComponentLibraryManifest = async library => {
)
// Fallback to loading it from the old location for old apps
path = join(appId, "node_modules", library, "package", filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {
resp = resp.toString("utf8")
@@ -207,14 +206,17 @@
* All file reads come through here just to make sure all of them make sense
* allows a centralised location to check logic is all good.
*/
exports.readFileSync = (filepath, options = "utf8") => {
return fs.readFileSync(filepath, options)
export function readFileSync(
filepath: string,
options: BufferEncoding = "utf8"
) {
return fs.readFileSync(filepath, { encoding: options })
}
/**
* Given a set of app IDs makes sure file system is cleared of any of their temp info.
*/
exports.cleanup = appIds => {
export function cleanup(appIds: string[]) {
for (let appId of appIds) {
const path = join(budibaseTempDir(), appId)
if (fs.existsSync(path)) {
@@ -223,7 +225,7 @@ }
}
}
const createTempFolder = item => {
export function createTempFolder(item: string) {
const path = join(budibaseTempDir(), item)
try {
// remove old tmp directories automatically - don't combine
@@ -231,24 +233,22 @@ const createTempFolder = item => {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
} catch (err) {
} catch (err: any) {
throw new Error(`Path cannot be created: ${err.message}`)
}
return path
}
exports.createTempFolder = createTempFolder
const extractTarball = async (fromFilePath, toPath) => {
export async function extractTarball(fromFilePath: string, toPath: string) {
await tar.extract({
file: fromFilePath,
C: toPath,
})
}
exports.extractTarball = extractTarball
const getPluginMetadata = async path => {
let metadata = {}
export async function getPluginMetadata(path: string) {
let metadata: { schema?: any; package?: any } = {}
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
@@ -265,7 +265,7 @@ const getPluginMetadata = async path => {
"package.json is missing one of 'name', 'version' or 'description'."
)
}
} catch (err) {
} catch (err: any) {
throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}`
)
@@ -273,9 +273,12 @@ const getPluginMetadata = async path => {
return { metadata, directory: path }
}
exports.getPluginMetadata = getPluginMetadata
exports.getDatasourcePlugin = async (name, url, hash) => {
export async function getDatasourcePlugin(
name: string,
url: string,
hash: string
) {
if (!fs.existsSync(DATASOURCE_PATH)) {
fs.mkdirSync(DATASOURCE_PATH)
}
@@ -311,7 +314,7 @@ exports.getDatasourcePlugin = async (name, url, hash) => {
/**
* Find a file recursively from the start path, applying the filter; return the first match
*/
exports.findFileRec = (startPath, filter) => {
export function findFileRec(startPath: string, filter: any) {
if (!fs.existsSync(startPath)) {
return
}
@@ -332,21 +335,10 @@ exports.findFileRec = (startPath, filter) => {
/**
* Remove a folder which is not empty from the file system
*/
exports.deleteFolderFileSystem = path => {
export function deleteFolderFileSystem(path: string) {
if (!fs.existsSync(path)) {
return
}
fs.rmSync(path, { recursive: true, force: true })
}
/**
* Full function definition for below can be found in the utilities.
*/
exports.upload = upload
exports.retrieve = retrieve
exports.retrieveToTmp = retrieveToTmp
exports.deleteFiles = deleteFiles
exports.downloadTarballDirect = downloadTarballDirect
exports.TOP_LEVEL_PATH = TOP_LEVEL_PATH
exports.NODE_MODULES_PATH = NODE_MODULES_PATH
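
A small sketch tying the temp-file helpers above together:

// Write contents to a uuid-named temp file, then stream it back out.
const tmpPath = storeTempFile("some file contents")
const readStream = streamFile(tmpPath)
readStream.pipe(process.stdout)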

View File

@@ -1,21 +1,19 @@
const jimp = require("jimp")
import jimp from "jimp"
const FORMATS = {
IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
}
function processImage(file) {
function processImage(file: { path: string }) {
// this will overwrite the temp file
return jimp.read(file.path).then(img => {
return img.resize(300, jimp.AUTO).write(file.path)
})
}
async function process(file) {
export async function process(file: { extension: string; path: string }) {
if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
await processImage(file)
}
return file
}
exports.process = process

View File

@@ -1,8 +1,9 @@
const env = require("../environment")
const { plugins: ProPlugins } = require("@budibase/pro")
const { objectStore } = require("@budibase/backend-core")
import env from "../environment"
import { plugins as ProPlugins } from "@budibase/pro"
import { objectStore } from "@budibase/backend-core"
import { Plugin } from "@budibase/types"
exports.enrichPluginURLs = plugins => {
export function enrichPluginURLs(plugins: Plugin[]) {
if (!plugins || !plugins.length) {
return []
}

View File

@@ -1,122 +0,0 @@
let events = require("events")
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about
* the state of the message; this object constructor implements the same schema of Bull jobs
* for the sake of maintaining API consistency.
* @param {string} queue The name of the queue which the message will be carried on.
* @param {object} message The JSON message which will be passed back to the consumer.
* @returns {Object} A new job which can now be put onto the queue, this is mostly an
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
*/
function newJob(queue, message) {
return {
timestamp: Date.now(),
queue: queue,
data: message,
}
}
/**
* This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.
* It is relatively simple, using an event emitter internally to register when messages are available
* to the consumers - it can support many inputs and many consumers.
*/
class InMemoryQueue {
/**
* The constructor of the queue, exactly the same as that of Bull's.
* @param {string} name The name of the queue which is being configured.
* @param {object|null} opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
constructor(name, opts = null) {
this._name = name
this._opts = opts
this._messages = []
this._emitter = new events.EventEmitter()
}
/**
* Same callback API as Bull, each callback passed to this will consume messages as they are
* available. Please note this is a queue service, not a notification service, so each
* consumer will receive different messages.
* @param {function<object>} func The callback function which will return a "Job", the same
* as the Bull API, within this job the property "data" contains the JSON message. Please
* note this is incredibly limited compared to Bull as in reality the Job would contain
* a lot more information about the queue and current status of Bull cluster.
*/
process(func) {
this._emitter.on("message", async () => {
if (this._messages.length <= 0) {
return
}
let msg = this._messages.shift()
let resp = func(msg)
if (resp.then != null) {
await resp
}
})
}
// simply puts a message to the queue and emits to the queue for processing
/**
* Simple function to replicate the add message functionality of Bull, putting
* a new message on the queue. This then emits an event which will be used to
* return the message to a consumer (if one is attached).
* @param {object} msg A message to be transported over the queue, this should be
* a JSON message as this is required by Bull.
*/
// eslint-disable-next-line no-unused-vars
add(msg, repeat) {
if (typeof msg !== "object") {
throw "Queue only supports carrying JSON."
}
this._messages.push(newJob(this._name, msg))
this._emitter.emit("message")
}
/**
* replicating the close function from bull, which waits for jobs to finish.
*/
async close() {
return []
}
/**
* This removes a cron which has been implemented, this is part of Bull API.
* @param {string} cronJobId The cron which is to be removed.
*/
removeRepeatableByKey(cronJobId) {
// TODO: implement for testing
console.log(cronJobId)
}
/**
* Implemented for tests
*/
getRepeatableJobs() {
return []
}
// eslint-disable-next-line no-unused-vars
removeJobs(pattern) {
// no-op
}
/**
* Implemented for tests
*/
async clean() {
return []
}
async getJob() {
return {}
}
on() {
// do nothing
}
}
module.exports = InMemoryQueue
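
A usage sketch of the Bull-style API this mock replicates, per its own doc comments:

// Producer/consumer round trip on the in-memory queue:
const queue = new InMemoryQueue("automationQueue")
queue.process(async job => {
  // job.data carries the JSON message, mirroring Bull's Job shape
  console.log(job.data)
})
queue.add({ automation: "run", appId: "app_123" })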

View File

@@ -1,4 +1,5 @@
const { FieldTypes } = require("../../constants")
// @ts-nocheck
import { FieldTypes } from "../../constants"
/**
* A map of how we convert various properties in rows to each other based on the row type.

View File

@@ -1,9 +1,13 @@
const fetch = require("node-fetch")
const { VM, VMScript } = require("vm2")
import fetch from "node-fetch"
import { VM, VMScript } from "vm2"
const JS_TIMEOUT_MS = 1000
class ScriptRunner {
constructor(script, context) {
vm: VM
results: { out: string }
script: VMScript
constructor(script: string, context: any) {
const code = `let fn = () => {\n${script}\n}; results.out = fn();`
this.vm = new VM({
timeout: JS_TIMEOUT_MS,
@@ -21,4 +25,4 @@
}
}
module.exports = ScriptRunner
export = ScriptRunner
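
A hedged usage sketch of ScriptRunner; the execute method is truncated out of this hunk, so its name and behaviour here are assumed from the constructor's results.out wiring:

// Hypothetical: run a user-supplied snippet inside the vm2 sandbox.
const runner = new ScriptRunner("return input.a + 1", { input: { a: 41 } })
const out = runner.execute() // assumed to run the VMScript and return results.out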

View File

@@ -1,4 +1,4 @@
module.exports = {
export = {
OK: 200,
UNAUTHORIZED: 401,
FORBIDDEN: 403,

View File

@@ -7,7 +7,7 @@ function getNewQuotaReset() {
return Date.now() + 2592000000
}
function resetQuotasIfRequired(quota) {
function resetQuotasIfRequired(quota: { quotaReset: number; usageQuota: any }) {
// Check if the quota needs reset
if (Date.now() >= quota.quotaReset) {
quota.quotaReset = getNewQuotaReset()
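
For the record, the magic number above is just 30 days expressed in milliseconds:

// 30 days * 24 h * 60 min * 60 s * 1000 ms = 2592000000
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000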

View File

@@ -12,3 +12,4 @@ export * from "./row"
export * from "./user"
export * from "./backup"
export * from "./webhook"
export * from "./links"

View File

@@ -0,0 +1,13 @@
export interface LinkDocument {
type: string
doc1: {
rowId: string
fieldName: string
tableId: string
}
doc2: {
rowId: string
fieldName: string
tableId: string
}
}
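
An illustrative instance of the new interface, with IDs invented but field names taken from the sample-data schemas elsewhere in this commit:

// Example LinkDocument joining an employee row to a job row:
const link: LinkDocument = {
  type: "link",
  doc1: { rowId: "ro_employee_1", fieldName: "Jobs", tableId: "ta_bb_employee" },
  doc2: { rowId: "ro_job_1", fieldName: "Assigned", tableId: "ta_bb_jobs" },
}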

View File

@@ -8,10 +8,12 @@ export interface FieldSchema {
externalType?: string
fieldName?: string
name: string
sortable?: boolean
tableId?: string
relationshipType?: string
through?: string
foreignKey?: string
icon?: string
autocolumn?: boolean
subtype?: string
throughFrom?: string
@@ -22,6 +24,7 @@ export interface FieldSchema {
ignoreTimezones?: boolean
timeOnly?: boolean
lastID?: number
useRichText?: boolean | null
meta?: {
toTable: string
toKey: string
@@ -31,10 +34,22 @@
email?: boolean
inclusion?: string[]
length?: {
minimum?: string | number
maximum?: string | number
minimum?: string | number | null
maximum?: string | number | null
}
numericality?: {
greaterThanOrEqualTo: string | null
lessThanOrEqualTo: string | null
}
presence?:
| boolean
| {
allowEmpty?: boolean
}
datetime?: {
latest: string
earliest: string
}
presence?: boolean
}
}
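
To make the widened constraint types concrete, a hedged example of a constraints object that now type-checks:

// Illustrative constraints under the updated FieldSchema:
const exampleConstraints = {
  type: "string",
  presence: { allowEmpty: false }, // boolean and object forms are both allowed now
  length: { minimum: null, maximum: 100 },
}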

View File

@@ -5,6 +5,9 @@ export interface View {
filters: ViewFilter[]
schema: ViewSchema
calculation?: ViewCalculation
map?: string
reduce?: any
meta?: Record<string, any>
}
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema