Some server TypeScript rewrites; only the automations and some controllers are left.

This commit is contained in:
mike12345567 2022-11-25 15:01:46 +00:00
parent d3c4c99e16
commit 7eac8fafd6
38 changed files with 667 additions and 666 deletions

View File

@ -1,7 +1,5 @@
import { import { createTempFolder } from "../../../utilities/fileSystem"
createTempFolder, import { objectStore } from "@budibase/backend-core"
downloadTarballDirect,
} from "../../../utilities/fileSystem"
export async function downloadUnzipTarball( export async function downloadUnzipTarball(
url: string, url: string,
@ -10,7 +8,7 @@ export async function downloadUnzipTarball(
) { ) {
try { try {
const path = createTempFolder(name) const path = createTempFolder(name)
await downloadTarballDirect(url, path, headers) await objectStore.downloadTarballDirect(url, path, headers)
return path return path
} catch (e: any) { } catch (e: any) {

View File

@ -14,7 +14,6 @@ const {
} = require("../../../utilities/fileSystem") } = require("../../../utilities/fileSystem")
const env = require("../../../environment") const env = require("../../../environment")
const { clientLibraryPath } = require("../../../utilities") const { clientLibraryPath } = require("../../../utilities")
const { upload, deleteFiles } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities") const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentType } = require("../../../db/utils") const { DocumentType } = require("../../../db/utils")
const { context, objectStore, utils } = require("@budibase/backend-core") const { context, objectStore, utils } = require("@budibase/backend-core")
@ -22,7 +21,7 @@ const AWS = require("aws-sdk")
const fs = require("fs") const fs = require("fs")
async function prepareUpload({ s3Key, bucket, metadata, file }: any) { async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
const response = await upload({ const response = await objectStore.upload({
bucket, bucket,
metadata, metadata,
filename: s3Key, filename: s3Key,
@ -95,7 +94,10 @@ export const uploadFile = async function (ctx: any) {
} }
export const deleteObjects = async function (ctx: any) { export const deleteObjects = async function (ctx: any) {
ctx.body = await deleteFiles(ObjectStoreBuckets.APPS, ctx.request.body.keys) ctx.body = await objectStore.deleteFiles(
ObjectStoreBuckets.APPS,
ctx.request.body.keys
)
} }
export const serveApp = async function (ctx: any) { export const serveApp = async function (ctx: any) {

View File

@ -1,206 +0,0 @@
const { objectStore, roles, constants } = require("@budibase/backend-core")
const FilterTypes = {
STRING: "string",
FUZZY: "fuzzy",
RANGE: "range",
EQUAL: "equal",
NOT_EQUAL: "notEqual",
EMPTY: "empty",
NOT_EMPTY: "notEmpty",
CONTAINS: "contains",
NOT_CONTAINS: "notContains",
ONE_OF: "oneOf",
}
exports.FilterTypes = FilterTypes
exports.NoEmptyFilterStrings = [
FilterTypes.STRING,
FilterTypes.FUZZY,
FilterTypes.EQUAL,
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
]
exports.FieldTypes = {
STRING: "string",
BARCODEQR: "barcodeqr",
LONGFORM: "longform",
OPTIONS: "options",
NUMBER: "number",
BOOLEAN: "boolean",
ARRAY: "array",
DATETIME: "datetime",
ATTACHMENT: "attachment",
LINK: "link",
FORMULA: "formula",
AUTO: "auto",
JSON: "json",
INTERNAL: "internal",
}
exports.CanSwitchTypes = [
[exports.FieldTypes.JSON, exports.FieldTypes.ARRAY],
[
exports.FieldTypes.STRING,
exports.FieldTypes.OPTIONS,
exports.FieldTypes.LONGFORM,
exports.FieldTypes.BARCODEQR,
],
[exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER],
]
exports.SwitchableTypes = exports.CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current
)
exports.RelationshipTypes = {
ONE_TO_MANY: "one-to-many",
MANY_TO_ONE: "many-to-one",
MANY_TO_MANY: "many-to-many",
}
exports.FormulaTypes = {
STATIC: "static",
DYNAMIC: "dynamic",
}
exports.AuthTypes = {
APP: "app",
BUILDER: "builder",
EXTERNAL: "external",
}
exports.DataSourceOperation = {
CREATE: "CREATE",
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
BULK_CREATE: "BULK_CREATE",
CREATE_TABLE: "CREATE_TABLE",
UPDATE_TABLE: "UPDATE_TABLE",
DELETE_TABLE: "DELETE_TABLE",
}
exports.DatasourceAuthTypes = {
GOOGLE: "google",
}
exports.SortDirection = {
ASCENDING: "ASCENDING",
DESCENDING: "DESCENDING",
}
exports.USERS_TABLE_SCHEMA = {
_id: "ta_users",
type: "table",
views: {},
name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
schema: {
email: {
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
email: true,
length: {
maximum: "",
},
presence: true,
},
fieldName: "email",
name: "email",
},
firstName: {
name: "firstName",
fieldName: "firstName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
lastName: {
name: "lastName",
fieldName: "lastName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
roleId: {
fieldName: "roleId",
name: "roleId",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(roles.BUILTIN_ROLE_IDS),
},
},
status: {
fieldName: "status",
name: "status",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(constants.UserStatus),
},
},
},
primaryDisplay: "email",
}
exports.AutoFieldSubTypes = {
CREATED_BY: "createdBy",
CREATED_AT: "createdAt",
UPDATED_BY: "updatedBy",
UPDATED_AT: "updatedAt",
AUTO_ID: "autoID",
}
exports.AutoFieldDefaultNames = {
CREATED_BY: "Created By",
CREATED_AT: "Created At",
UPDATED_BY: "Updated By",
UPDATED_AT: "Updated At",
AUTO_ID: "Auto ID",
}
exports.OBJ_STORE_DIRECTORY = "/prod-budi-app-assets"
exports.BaseQueryVerbs = {
CREATE: "create",
READ: "read",
UPDATE: "update",
DELETE: "delete",
}
exports.MetadataTypes = {
AUTOMATION_TEST_INPUT: "automationTestInput",
AUTOMATION_TEST_HISTORY: "automationTestHistory",
}
exports.InvalidColumns = {
ID: "_id",
REV: "_rev",
TABLE_ID: "tableId",
}
exports.BuildSchemaErrors = {
NO_KEY: "no_key",
INVALID_COLUMN: "invalid_column",
}
exports.AutomationErrors = {
INCORRECT_TYPE: "INCORRECT_TYPE",
MAX_ITERATIONS: "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION: "FAILURE_CONDITION_MET",
}
// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
exports.MAX_AUTOMATION_RECURRING_ERRORS = 5

View File

@ -0,0 +1,204 @@
import { objectStore, roles, constants } from "@budibase/backend-core"
// Operators supported when filtering rows in searches and views.
export enum FilterTypes {
  STRING = "string",
  FUZZY = "fuzzy",
  RANGE = "range",
  EQUAL = "equal",
  NOT_EQUAL = "notEqual",
  EMPTY = "empty",
  NOT_EMPTY = "notEmpty",
  CONTAINS = "contains",
  NOT_CONTAINS = "notContains",
  ONE_OF = "oneOf",
}
// Filter types for which an empty filter value carries no meaning and
// should be stripped out before a query is executed.
export const NoEmptyFilterStrings = [
  FilterTypes.STRING,
  FilterTypes.FUZZY,
  FilterTypes.EQUAL,
  FilterTypes.NOT_EQUAL,
  FilterTypes.CONTAINS,
  FilterTypes.NOT_CONTAINS,
]
// All column/field types supported by internal tables.
export enum FieldTypes {
  STRING = "string",
  BARCODEQR = "barcodeqr",
  LONGFORM = "longform",
  OPTIONS = "options",
  NUMBER = "number",
  BOOLEAN = "boolean",
  ARRAY = "array",
  DATETIME = "datetime",
  ATTACHMENT = "attachment",
  LINK = "link",
  FORMULA = "formula",
  AUTO = "auto",
  JSON = "json",
  INTERNAL = "internal",
}
// Groups of field types a column can be safely converted between.
// Fix: reference the local FieldTypes enum directly — the previous
// `exports.FieldTypes.*` lookups are a CommonJS leftover from the JS
// version and `exports` is undefined in an ES module.
export const CanSwitchTypes = [
  [FieldTypes.JSON, FieldTypes.ARRAY],
  [
    FieldTypes.STRING,
    FieldTypes.OPTIONS,
    FieldTypes.LONGFORM,
    FieldTypes.BARCODEQR,
  ],
  [FieldTypes.BOOLEAN, FieldTypes.NUMBER],
]
// Flattened list of every type that participates in some conversion group.
export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) =>
  prev ? prev.concat(current) : current
)
// Cardinality of a link between two tables.
export enum RelationshipTypes {
  ONE_TO_MANY = "one-to-many",
  MANY_TO_ONE = "many-to-one",
  MANY_TO_MANY = "many-to-many",
}
// Whether a formula column is evaluated on save (static) or on read (dynamic).
export enum FormulaTypes {
  STATIC = "static",
  DYNAMIC = "dynamic",
}
// The context a request is authenticated under.
export enum AuthTypes {
  APP = "app",
  BUILDER = "builder",
  EXTERNAL = "external",
}
// Operations that can be performed against an external datasource.
export enum DataSourceOperation {
  CREATE = "CREATE",
  READ = "READ",
  UPDATE = "UPDATE",
  DELETE = "DELETE",
  BULK_CREATE = "BULK_CREATE",
  CREATE_TABLE = "CREATE_TABLE",
  UPDATE_TABLE = "UPDATE_TABLE",
  DELETE_TABLE = "DELETE_TABLE",
}
// Third-party auth providers for datasources.
export enum DatasourceAuthTypes {
  GOOGLE = "google",
}
// Sort order for queries.
export enum SortDirection {
  ASCENDING = "ASCENDING",
  DESCENDING = "DESCENDING",
}
export const USERS_TABLE_SCHEMA = {
_id: "ta_users",
type: "table",
views: {},
name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
schema: {
email: {
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
email: true,
length: {
maximum: "",
},
presence: true,
},
fieldName: "email",
name: "email",
},
firstName: {
name: "firstName",
fieldName: "firstName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
lastName: {
name: "lastName",
fieldName: "lastName",
type: exports.FieldTypes.STRING,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
},
},
roleId: {
fieldName: "roleId",
name: "roleId",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(roles.BUILTIN_ROLE_IDS),
},
},
status: {
fieldName: "status",
name: "status",
type: exports.FieldTypes.OPTIONS,
constraints: {
type: exports.FieldTypes.STRING,
presence: false,
inclusion: Object.values(constants.UserStatus),
},
},
},
primaryDisplay: "email",
}
// Subtypes for auto-generated columns.
export enum AutoFieldSubTypes {
  CREATED_BY = "createdBy",
  CREATED_AT = "createdAt",
  UPDATED_BY = "updatedBy",
  UPDATED_AT = "updatedAt",
  AUTO_ID = "autoID",
}
// Default display names for each auto column subtype.
export enum AutoFieldDefaultNames {
  CREATED_BY = "Created By",
  CREATED_AT = "Created At",
  UPDATED_BY = "Updated By",
  UPDATED_AT = "Updated At",
  AUTO_ID = "Auto ID",
}
// Object-store path under which production app assets are served.
export const OBJ_STORE_DIRECTORY = "/prod-budi-app-assets"
// Base CRUD verbs used by the query layer.
export enum BaseQueryVerbs {
  CREATE = "create",
  READ = "read",
  UPDATE = "update",
  DELETE = "delete",
}
// Keys for per-automation metadata documents.
export enum MetadataTypes {
  AUTOMATION_TEST_INPUT = "automationTestInput",
  AUTOMATION_TEST_HISTORY = "automationTestHistory",
}
// Column names reserved for internal use that external schemas may not define.
export enum InvalidColumns {
  ID = "_id",
  REV = "_rev",
  TABLE_ID = "tableId",
}
// Error codes produced when building an external datasource schema.
export enum BuildSchemaErrors {
  NO_KEY = "no_key",
  INVALID_COLUMN = "invalid_column",
}
// Error codes surfaced by automation runs.
export enum AutomationErrors {
  INCORRECT_TYPE = "INCORRECT_TYPE",
  MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
  FAILURE_CONDITION = "FAILURE_CONDITION_MET",
}
// pass through the list from the auth/core lib
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
// Number of consecutive errors after which a recurring automation stops.
export const MAX_AUTOMATION_RECURRING_ERRORS = 5

View File

@ -1,9 +1,9 @@
const BASE_LAYOUT_PROP_IDS = { export const BASE_LAYOUT_PROP_IDS = {
PRIVATE: "layout_private_master", PRIVATE: "layout_private_master",
PUBLIC: "layout_public_master", PUBLIC: "layout_public_master",
} }
const EMPTY_LAYOUT = { export const EMPTY_LAYOUT = {
componentLibraries: ["@budibase/standard-components"], componentLibraries: ["@budibase/standard-components"],
title: "{{ name }}", title: "{{ name }}",
favicon: "./_shared/favicon.png", favicon: "./_shared/favicon.png",
@ -48,7 +48,7 @@ const EMPTY_LAYOUT = {
}, },
} }
const BASE_LAYOUTS = [ export const BASE_LAYOUTS = [
{ {
_id: BASE_LAYOUT_PROP_IDS.PRIVATE, _id: BASE_LAYOUT_PROP_IDS.PRIVATE,
componentLibraries: ["@budibase/standard-components"], componentLibraries: ["@budibase/standard-components"],
@ -145,9 +145,3 @@ const BASE_LAYOUTS = [
}, },
}, },
] ]
module.exports = {
BASE_LAYOUTS,
BASE_LAYOUT_PROP_IDS,
EMPTY_LAYOUT,
}

View File

@ -1,46 +0,0 @@
const { roles } = require("@budibase/backend-core")
const { BASE_LAYOUT_PROP_IDS } = require("./layouts")
exports.createHomeScreen = () => ({
description: "",
url: "",
layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE,
props: {
_id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59",
_component: "@budibase/standard-components/container",
_styles: {
normal: {},
hover: {},
active: {},
selected: {},
},
_transition: "fade",
_children: [
{
_id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7",
_component: "@budibase/standard-components/heading",
_styles: {
hover: {},
active: {},
selected: {},
},
text: "Welcome to your Budibase App 👋",
size: "M",
align: "left",
_instanceName: "Heading",
_children: [],
},
],
_instanceName: "Home",
direction: "column",
hAlign: "stretch",
vAlign: "top",
size: "grow",
gap: "M",
},
routing: {
route: "/",
roleId: roles.BUILTIN_ROLE_IDS.BASIC,
},
name: "home-screen",
})

View File

@ -0,0 +1,48 @@
import { roles } from "@budibase/backend-core"
import { BASE_LAYOUT_PROP_IDS } from "./layouts"
/**
 * Build the default "home" screen definition for a newly created app:
 * a private-layout screen routed at "/" for the basic role, containing a
 * single container with a welcome heading.
 */
export function createHomeScreen() {
  // Welcome heading shown on a fresh app's home screen.
  const heading = {
    _id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7",
    _component: "@budibase/standard-components/heading",
    _styles: {
      hover: {},
      active: {},
      selected: {},
    },
    text: "Welcome to your Budibase App 👋",
    size: "M",
    align: "left",
    _instanceName: "Heading",
    _children: [],
  }
  // Top-level container that holds the heading.
  const container = {
    _id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59",
    _component: "@budibase/standard-components/container",
    _styles: {
      normal: {},
      hover: {},
      active: {},
      selected: {},
    },
    _transition: "fade",
    _children: [heading],
    _instanceName: "Home",
    direction: "column",
    hAlign: "stretch",
    vAlign: "top",
    size: "grow",
    gap: "M",
  }
  return {
    description: "",
    url: "",
    layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE,
    props: container,
    routing: {
      route: "/",
      roleId: roles.BUILTIN_ROLE_IDS.BASIC,
    },
    name: "home-screen",
  }
}

View File

@ -1,31 +1,32 @@
const { import {
FieldTypes, FieldTypes,
AutoFieldSubTypes, AutoFieldSubTypes,
RelationshipTypes, RelationshipTypes,
} = require("../../constants/index") } from "../../constants"
const { importToRows } = require("../../api/controllers/table/utils") import { importToRows } from "../../api/controllers/table/utils"
const { cloneDeep } = require("lodash/fp") import { cloneDeep } from "lodash/fp"
const LinkDocument = require("../linkedRows/LinkDocument") import LinkDocument from "../linkedRows/LinkDocument"
const { inventoryImport } = require("./inventoryImport") import { inventoryImport } from "./inventoryImport"
const { employeeImport } = require("./employeeImport") import { employeeImport } from "./employeeImport"
const { jobsImport } = require("./jobsImport") import { jobsImport } from "./jobsImport"
const { expensesImport } = require("./expensesImport") import { expensesImport } from "./expensesImport"
const { db: dbCore } = require("@budibase/backend-core") import { db as dbCore } from "@budibase/backend-core"
import { Table, Row } from "@budibase/types"
exports.DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
exports.DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
exports.DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses" export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
exports.DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee" export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
exports.DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
exports.DEFAULT_BB_DATASOURCE = { export const DEFAULT_BB_DATASOURCE = {
_id: this.DEFAULT_BB_DATASOURCE_ID, _id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE, type: dbCore.BUDIBASE_DATASOURCE_TYPE,
name: "Sample Data", name: "Sample Data",
source: "BUDIBASE", source: "BUDIBASE",
config: {}, config: {},
} }
const syncLastIds = (table, rowCount) => { function syncLastIds(table: Table, rowCount: number) {
Object.keys(table.schema).forEach(key => { Object.keys(table.schema).forEach(key => {
const entry = table.schema[key] const entry = table.schema[key]
if (entry.autocolumn && entry.subtype == "autoID") { if (entry.autocolumn && entry.subtype == "autoID") {
@ -34,7 +35,7 @@ const syncLastIds = (table, rowCount) => {
}) })
} }
const tableImport = (table, data) => { function tableImport(table: Table, data: Row) {
const cloneTable = cloneDeep(table) const cloneTable = cloneDeep(table)
const rowDocs = importToRows(data, cloneTable) const rowDocs = importToRows(data, cloneTable)
syncLastIds(cloneTable, rowDocs.length) syncLastIds(cloneTable, rowDocs.length)
@ -77,11 +78,11 @@ const AUTO_COLUMNS = {
}, },
} }
exports.DEFAULT_INVENTORY_TABLE_SCHEMA = { export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
_id: this.DEFAULT_INVENTORY_TABLE_ID, _id: DEFAULT_INVENTORY_TABLE_ID,
type: "internal", type: "internal",
views: {}, views: {},
sourceId: exports.DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Item Name", primaryDisplay: "Item Name",
name: "Inventory", name: "Inventory",
schema: { schema: {
@ -186,12 +187,12 @@ exports.DEFAULT_INVENTORY_TABLE_SCHEMA = {
}, },
} }
exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = { export const DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
_id: this.DEFAULT_EMPLOYEE_TABLE_ID, _id: DEFAULT_EMPLOYEE_TABLE_ID,
type: "internal", type: "internal",
views: {}, views: {},
name: "Employees", name: "Employees",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "First Name", primaryDisplay: "First Name",
schema: { schema: {
"First Name": { "First Name": {
@ -300,7 +301,7 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
fieldName: "Assigned", fieldName: "Assigned",
name: "Jobs", name: "Jobs",
relationshipType: RelationshipTypes.MANY_TO_MANY, relationshipType: RelationshipTypes.MANY_TO_MANY,
tableId: this.DEFAULT_JOBS_TABLE_ID, tableId: DEFAULT_JOBS_TABLE_ID,
}, },
"Start Date": { "Start Date": {
type: FieldTypes.DATETIME, type: FieldTypes.DATETIME,
@ -334,11 +335,11 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = {
}, },
} }
exports.DEFAULT_JOBS_TABLE_SCHEMA = { export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
_id: this.DEFAULT_JOBS_TABLE_ID, _id: DEFAULT_JOBS_TABLE_ID,
type: "internal", type: "internal",
name: "Jobs", name: "Jobs",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Job ID", primaryDisplay: "Job ID",
schema: { schema: {
"Job ID": { "Job ID": {
@ -456,7 +457,7 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = {
Assigned: { Assigned: {
name: "Assigned", name: "Assigned",
type: FieldTypes.LINK, type: FieldTypes.LINK,
tableId: this.DEFAULT_EMPLOYEE_TABLE_ID, tableId: DEFAULT_EMPLOYEE_TABLE_ID,
fieldName: "Jobs", fieldName: "Jobs",
relationshipType: RelationshipTypes.MANY_TO_MANY, relationshipType: RelationshipTypes.MANY_TO_MANY,
// sortable: true, // sortable: true,
@ -491,12 +492,12 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = {
}, },
} }
exports.DEFAULT_EXPENSES_TABLE_SCHEMA = { export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
_id: this.DEFAULT_EXPENSES_TABLE_ID, _id: DEFAULT_EXPENSES_TABLE_ID,
type: "internal", type: "internal",
views: {}, views: {},
name: "Expenses", name: "Expenses",
sourceId: exports.DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
primaryDisplay: "Expense ID", primaryDisplay: "Expense ID",
schema: { schema: {
"Expense ID": { "Expense ID": {
@ -601,38 +602,40 @@ exports.DEFAULT_EXPENSES_TABLE_SCHEMA = {
}, },
} }
exports.buildDefaultDocs = () => { export function buildDefaultDocs() {
const inventoryData = tableImport( const inventoryData = tableImport(
this.DEFAULT_INVENTORY_TABLE_SCHEMA, DEFAULT_INVENTORY_TABLE_SCHEMA,
inventoryImport inventoryImport
) )
const employeeData = tableImport( const employeeData = tableImport(
this.DEFAULT_EMPLOYEE_TABLE_SCHEMA, DEFAULT_EMPLOYEE_TABLE_SCHEMA,
employeeImport employeeImport
) )
const jobData = tableImport(this.DEFAULT_JOBS_TABLE_SCHEMA, jobsImport) const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
const expensesData = tableImport( const expensesData = tableImport(
this.DEFAULT_EXPENSES_TABLE_SCHEMA, DEFAULT_EXPENSES_TABLE_SCHEMA,
expensesImport expensesImport
) )
// Build one link doc for each employee/job // Build one link doc for each employee/job
const jobEmployeeLinks = employeeData.rows.map((employee, index) => { const jobEmployeeLinks = employeeData.rows.map(
(employee: any, index: any) => {
return new LinkDocument( return new LinkDocument(
employeeData.table._id, employeeData.table._id!,
"Jobs", "Jobs",
employeeData.rows[index]._id, employeeData.rows[index]._id,
jobData.table._id, jobData.table._id!,
"Assigned", "Assigned",
jobData.rows[index]._id jobData.rows[index]._id
) )
}) }
)
return [ return [
this.DEFAULT_BB_DATASOURCE, DEFAULT_BB_DATASOURCE,
inventoryData.table, inventoryData.table,
employeeData.table, employeeData.table,
jobData.table, jobData.table,

View File

@ -1,4 +1,4 @@
exports.employeeImport = [ export const employeeImport = [
{ {
"First Name": "Julie", "First Name": "Julie",
"Last Name": "Jimenez", "Last Name": "Jimenez",

View File

@ -1,4 +1,4 @@
exports.expensesImport = [ export const expensesImport = [
{ {
"Date Paid": "2022-11-12T12:00:00.000", "Date Paid": "2022-11-12T12:00:00.000",
"Payment Due": "2022-11-01T12:00:00.000", "Payment Due": "2022-11-01T12:00:00.000",

View File

@ -1,4 +1,4 @@
exports.inventoryImport = [ export const inventoryImport = [
{ {
Status: ["Available"], Status: ["Available"],
"Item Name": "Little Blue Van", "Item Name": "Little Blue Van",

View File

@ -1,4 +1,4 @@
exports.jobsImport = [ export const jobsImport = [
{ {
"Works End": "2023-01-28T12:00:00.000", "Works End": "2023-01-28T12:00:00.000",
"Customer Email": "susie.peterson@example.com", "Customer Email": "susie.peterson@example.com",

View File

@ -1,8 +1,7 @@
let { merge } = require("lodash") import { merge } from "lodash"
let env = require("../environment") import env from "../environment"
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1" export const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
exports.AWS_REGION = AWS_REGION
const TableInfo = { const TableInfo = {
API_KEYS: { API_KEYS: {
@ -16,10 +15,36 @@ const TableInfo = {
}, },
} }
let docClient = null let docClient: any = null
type GetOpts = {
primary: string
sort?: string
otherProps?: any
}
type UpdateOpts = {
primary: string
sort?: string
expression?: string
condition?: string
names?: string[]
values?: any[]
exists?: boolean
otherProps?: any
}
type PutOpts = {
item: any
otherProps?: any
}
class Table { class Table {
constructor(tableInfo) { _name: string
_primary: string
_sort?: string
constructor(tableInfo: { name: string; primary: string; sort?: string }) {
if (!tableInfo.name || !tableInfo.primary) { if (!tableInfo.name || !tableInfo.primary) {
throw "Table info must specify a name and a primary key" throw "Table info must specify a name and a primary key"
} }
@ -28,7 +53,7 @@ class Table {
this._sort = tableInfo.sort this._sort = tableInfo.sort
} }
async get({ primary, sort, otherProps }) { async get({ primary, sort, otherProps }: GetOpts) {
let params = { let params = {
TableName: this._name, TableName: this._name,
Key: { Key: {
@ -54,8 +79,8 @@ class Table {
values, values,
exists, exists,
otherProps, otherProps,
}) { }: UpdateOpts) {
let params = { let params: any = {
TableName: this._name, TableName: this._name,
Key: { Key: {
[this._primary]: primary, [this._primary]: primary,
@ -83,7 +108,7 @@ class Table {
return docClient.update(params).promise() return docClient.update(params).promise()
} }
async put({ item, otherProps }) { async put({ item, otherProps }: PutOpts) {
if ( if (
item[this._primary] == null || item[this._primary] == null ||
(this._sort && item[this._sort] == null) (this._sort && item[this._sort] == null)
@ -101,9 +126,9 @@ class Table {
} }
} }
exports.init = endpoint => { export function init(endpoint: string) {
let AWS = require("aws-sdk") let AWS = require("aws-sdk")
let docClientParams = { let docClientParams: any = {
correctClockSkew: true, correctClockSkew: true,
region: AWS_REGION, region: AWS_REGION,
} }
@ -115,13 +140,8 @@ exports.init = endpoint => {
docClient = new AWS.DynamoDB.DocumentClient(docClientParams) docClient = new AWS.DynamoDB.DocumentClient(docClientParams)
} }
exports.apiKeyTable = new Table(TableInfo.API_KEYS) if (!env.isProd()) {
exports.userTable = new Table(TableInfo.USERS)
if (env.isProd()) {
exports.init(`https://dynamodb.${AWS_REGION}.amazonaws.com`)
} else {
env._set("AWS_ACCESS_KEY_ID", "KEY_ID") env._set("AWS_ACCESS_KEY_ID", "KEY_ID")
env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY") env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY")
exports.init("http://localhost:8333") init("http://localhost:8333")
} }

View File

@ -1,11 +1,17 @@
const newid = require("./newid") import newid from "./newid"
import { Row, View, Document } from "@budibase/types"
// bypass the main application db config // bypass the main application db config
// use in memory pouchdb directly // use in memory pouchdb directly
const { db: dbCore } = require("@budibase/backend-core") import { db as dbCore } from "@budibase/backend-core"
const Pouch = dbCore.getPouch({ inMemory: true }) const Pouch = dbCore.getPouch({ inMemory: true })
exports.runView = async (view, calculation, group, data) => { export async function runView(
view: View,
calculation: boolean,
group: string,
data: Row[]
) {
// use a different ID each time for the DB, make sure they // use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap // are always unique for each query, don't want overlap
// which could cause 409s // which could cause 409s
@ -18,16 +24,16 @@ exports.runView = async (view, calculation, group, data) => {
_rev: undefined, _rev: undefined,
})) }))
) )
let fn = (doc, emit) => emit(doc._id) let fn = (doc: Document, emit: any) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)")) eval("fn = " + view?.map?.replace("function (doc)", "function (doc, emit)"))
const queryFns = { const queryFns: any = {
meta: view.meta, meta: view.meta,
map: fn, map: fn,
} }
if (view.reduce) { if (view.reduce) {
queryFns.reduce = view.reduce queryFns.reduce = view.reduce
} }
const response = await db.query(queryFns, { const response: { rows: Row[] } = await db.query(queryFns, {
include_docs: !calculation, include_docs: !calculation,
group: !!group, group: !!group,
}) })

View File

@ -1,16 +0,0 @@
const core = require("@budibase/backend-core")
const env = require("../environment")
exports.init = () => {
const dbConfig = {
replication: true,
find: true,
}
if (env.isTest() && !env.COUCH_DB_URL) {
dbConfig.inMemory = true
dbConfig.allDbs = true
}
core.init({ db: dbConfig })
}

View File

@ -0,0 +1,16 @@
import { init as coreInit } from "@budibase/backend-core"
import env = require("../environment")
// Initialise the core database layer with replication and mango-find
// enabled, switching to an in-memory database during tests when no real
// CouchDB URL has been configured.
export function init() {
  const dbConfig: any = {
    replication: true,
    find: true,
  }
  // Test runs without COUCH_DB_URL avoid touching a real CouchDB instance.
  if (env.isTest() && !env.COUCH_DB_URL) {
    dbConfig.inMemory = true
    dbConfig.allDbs = true
  }
  coreInit({ db: dbConfig })
}

View File

@ -1,5 +1,5 @@
const { v4 } = require("uuid") const { v4 } = require("uuid")
module.exports = function () { export = function (): string {
return v4().replace(/-/g, "") return v4().replace(/-/g, "")
} }

View File

@ -1,5 +1,6 @@
const { context } = require("@budibase/backend-core") import { context } from "@budibase/backend-core"
const { DocumentType, SEPARATOR, ViewName, SearchIndexes } = require("../utils") import { DocumentType, SEPARATOR, ViewName, SearchIndexes } from "../utils"
import { LinkDocument, Row } from "@budibase/types"
const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
/************************************************** /**************************************************
@ -19,16 +20,17 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
* @returns {Promise<void>} The view now exists, please note that the next view of this query will actually build it, * @returns {Promise<void>} The view now exists, please note that the next view of this query will actually build it,
* so it may be slow. * so it may be slow.
*/ */
exports.createLinkView = async () => { export async function createLinkView() {
const db = context.getAppDB() const db = context.getAppDB()
const designDoc = await db.get("_design/database") const designDoc = await db.get("_design/database")
const view = { const view = {
map: function (doc) { map: function (doc: LinkDocument) {
// everything in this must remain constant as its going to Pouch, no external variables // everything in this must remain constant as its going to Pouch, no external variables
if (doc.type === "link") { if (doc.type === "link") {
let doc1 = doc.doc1 let doc1 = doc.doc1
let doc2 = doc.doc2 let doc2 = doc.doc2
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
emit([doc1.tableId, doc1.rowId], { emit([doc1.tableId, doc1.rowId], {
id: doc2.rowId, id: doc2.rowId,
thisId: doc1.rowId, thisId: doc1.rowId,
@ -37,6 +39,7 @@ exports.createLinkView = async () => {
// if linking to same table can't emit twice // if linking to same table can't emit twice
if (doc1.tableId !== doc2.tableId) { if (doc1.tableId !== doc2.tableId) {
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
emit([doc2.tableId, doc2.rowId], { emit([doc2.tableId, doc2.rowId], {
id: doc1.rowId, id: doc1.rowId,
thisId: doc2.rowId, thisId: doc2.rowId,
@ -53,7 +56,7 @@ exports.createLinkView = async () => {
await db.put(designDoc) await db.put(designDoc)
} }
exports.createRoutingView = async () => { export async function createRoutingView() {
const db = context.getAppDB() const db = context.getAppDB()
const designDoc = await db.get("_design/database") const designDoc = await db.get("_design/database")
const view = { const view = {
@ -74,7 +77,7 @@ exports.createRoutingView = async () => {
await db.put(designDoc) await db.put(designDoc)
} }
async function searchIndex(indexName, fnString) { async function searchIndex(indexName: string, fnString: string) {
const db = context.getAppDB() const db = context.getAppDB()
const designDoc = await db.get("_design/database") const designDoc = await db.get("_design/database")
designDoc.indexes = { designDoc.indexes = {
@ -86,11 +89,11 @@ async function searchIndex(indexName, fnString) {
await db.put(designDoc) await db.put(designDoc)
} }
exports.createAllSearchIndex = async () => { export async function createAllSearchIndex() {
await searchIndex( await searchIndex(
SearchIndexes.ROWS, SearchIndexes.ROWS,
function (doc) { function (doc: Row) {
function idx(input, prev) { function idx(input: Row, prev?: string) {
for (let key of Object.keys(input)) { for (let key of Object.keys(input)) {
let idxKey = prev != null ? `${prev}.${key}` : key let idxKey = prev != null ? `${prev}.${key}` : key
idxKey = idxKey.replace(/ /g, "_") idxKey = idxKey.replace(/ /g, "_")
@ -98,6 +101,7 @@ exports.createAllSearchIndex = async () => {
for (let val of input[key]) { for (let val of input[key]) {
if (typeof val !== "object") { if (typeof val !== "object") {
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, val, { store: true }) index(idxKey, val, { store: true })
} }
} }
@ -106,17 +110,20 @@ exports.createAllSearchIndex = async () => {
} }
if (typeof input[key] === "string") { if (typeof input[key] === "string") {
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key].toLowerCase(), { store: true }) index(idxKey, input[key].toLowerCase(), { store: true })
} else if (typeof input[key] !== "object") { } else if (typeof input[key] !== "object") {
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key], { store: true }) index(idxKey, input[key], { store: true })
} else { } else {
idx(input[key], idxKey) idx(input[key], idxKey)
} }
} }
} }
if (doc._id.startsWith("ro_")) { if (doc._id!.startsWith("ro_")) {
// eslint-disable-next-line no-undef // eslint-disable-next-line no-undef
// @ts-ignore
index("default", doc._id) index("default", doc._id)
idx(doc) idx(doc)
} }

View File

@ -1,6 +1,7 @@
const { rowEmission, tableEmission } = require("./utils") import { rowEmission, tableEmission } from "./utils"
const mainEmitter = require("./index") import mainEmitter from "./index"
const env = require("../environment") import env from "../environment"
import { Table, Row } from "@budibase/types"
// max number of automations that can chain on top of each other // max number of automations that can chain on top of each other
// TODO: in future make this configurable at the automation level // TODO: in future make this configurable at the automation level
@ -13,14 +14,17 @@ const MAX_AUTOMATION_CHAIN = env.SELF_HOSTED ? 5 : 0
* from getting stuck endlessly chaining. * from getting stuck endlessly chaining.
*/ */
class AutomationEmitter { class AutomationEmitter {
constructor(chainCount) { chainCount: number
metadata: { automationChainCount: number }
constructor(chainCount: number) {
this.chainCount = chainCount this.chainCount = chainCount
this.metadata = { this.metadata = {
automationChainCount: chainCount, automationChainCount: chainCount,
} }
} }
emitRow(eventName, appId, row, table = null) { emitRow(eventName: string, appId: string, row: Row, table?: Table) {
// don't emit even if we've reached max automation chain // don't emit even if we've reached max automation chain
if (this.chainCount >= MAX_AUTOMATION_CHAIN) { if (this.chainCount >= MAX_AUTOMATION_CHAIN) {
return return
@ -35,11 +39,12 @@ class AutomationEmitter {
}) })
} }
emitTable(eventName, appId, table = null) { emitTable(eventName: string, appId: string, table?: Table) {
// don't emit even if we've reached max automation chain // don't emit even if we've reached max automation chain
if (this.chainCount > MAX_AUTOMATION_CHAIN) { if (this.chainCount > MAX_AUTOMATION_CHAIN) {
return return
} }
tableEmission({ tableEmission({
emitter: mainEmitter, emitter: mainEmitter,
eventName, eventName,
@ -50,4 +55,4 @@ class AutomationEmitter {
} }
} }
module.exports = AutomationEmitter export = AutomationEmitter

View File

@ -1,5 +1,6 @@
const EventEmitter = require("events").EventEmitter import { EventEmitter } from "events"
const { rowEmission, tableEmission } = require("./utils") import { rowEmission, tableEmission } from "./utils"
import { Table, Row } from "@budibase/types"
/** /**
* keeping event emitter in one central location as it might be used for things other than * keeping event emitter in one central location as it might be used for things other than
@ -12,19 +13,17 @@ const { rowEmission, tableEmission } = require("./utils")
* This is specifically quite important for template strings used in automations. * This is specifically quite important for template strings used in automations.
*/ */
class BudibaseEmitter extends EventEmitter { class BudibaseEmitter extends EventEmitter {
emitRow(eventName, appId, row, table = null) { emitRow(eventName: string, appId: string, row: Row, table?: Table) {
rowEmission({ emitter: this, eventName, appId, row, table }) rowEmission({ emitter: this, eventName, appId, row, table })
} }
emitTable(eventName, appId, table = null) { emitTable(eventName: string, appId: string, table?: Table) {
tableEmission({ emitter: this, eventName, appId, table }) tableEmission({ emitter: this, eventName, appId, table })
} }
emitPort(portNumber) { emitPort(portNumber: number) {
this.emit("internal:port", portNumber) this.emit("internal:port", portNumber)
} }
} }
const emitter = new BudibaseEmitter() export = BudibaseEmitter
module.exports = emitter

View File

@ -0,0 +1,5 @@
import BudibaseEmitter from "./BudibaseEmitter"

// Shared singleton emitter instance. `export =` keeps the CommonJS
// `module.exports = emitter` shape so existing require() callers keep working.
const emitter = new BudibaseEmitter()

export = emitter

View File

@ -1,38 +0,0 @@
exports.rowEmission = ({ emitter, eventName, appId, row, table, metadata }) => {
let event = {
row,
appId,
tableId: row.tableId,
}
if (table) {
event.table = table
}
event.id = row._id
if (row._rev) {
event.revision = row._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}
exports.tableEmission = ({ emitter, eventName, appId, table, metadata }) => {
const tableId = table._id
let event = {
table: {
...table,
tableId: tableId,
},
appId,
tableId: tableId,
}
event.id = tableId
if (table._rev) {
event.revision = table._rev
}
if (metadata) {
event.metadata = metadata
}
emitter.emit(eventName, event)
}

View File

@ -0,0 +1,78 @@
import { Table, Row } from "@budibase/types"
import BudibaseEmitter from "./BudibaseEmitter"
// Arguments shared by the two emission helpers below (rowEmission / tableEmission).
type BBEventOpts = {
  emitter: BudibaseEmitter
  eventName: string
  appId: string
  table?: Table
  row?: Row
  metadata?: any
}

// Table shape as carried on emitted events — adds a tableId alias
// alongside the table's usual _id.
interface BBEventTable extends Table {
  tableId?: string
}

// Payload handed to emitter.emit() for both row and table events.
type BBEvent = {
  appId: string
  tableId?: string
  row?: Row
  table?: BBEventTable
  id?: string
  revision?: string
  metadata?: any
}
/**
 * Builds and fires the event payload for a row change.
 * The payload carries the row itself, its table ID, the row's _id/_rev
 * (as id/revision) and any optional table/metadata supplied by the caller.
 */
export function rowEmission({
  emitter,
  eventName,
  appId,
  row,
  table,
  metadata,
}: BBEventOpts) {
  const event: BBEvent = {
    row,
    appId,
    tableId: row?.tableId,
    id: row?._id,
  }
  // only attach the optional pieces when they were actually provided
  if (table) {
    event.table = table
  }
  if (row?._rev) {
    event.revision = row._rev
  }
  if (metadata) {
    event.metadata = metadata
  }
  emitter.emit(eventName, event)
}
/**
 * Builds and fires the event payload for a table change.
 * The payload carries the table (with a tableId alias added), the table's
 * _id/_rev (as id/revision) and any optional metadata supplied by the caller.
 */
export function tableEmission({
  emitter,
  eventName,
  appId,
  table,
  metadata,
}: BBEventOpts) {
  const tableId = table?._id
  // Copy the table when attaching the tableId alias instead of mutating the
  // caller's object — the previous in-place assignment leaked the extra
  // property back to the caller (the pre-TS version also used a copy).
  const eventTable: BBEventTable | undefined = table
    ? { ...table, tableId }
    : undefined
  let event: BBEvent = {
    table: eventTable,
    appId,
    tableId: tableId,
  }
  event.id = tableId
  if (table?._rev) {
    event.revision = table._rev
  }
  if (metadata) {
    event.metadata = metadata
  }
  emitter.emit(eventName, event)
}

View File

@ -1,4 +1,4 @@
const path = require("path") import path from "path"
// this simply runs all of our path join and resolve functions through // this simply runs all of our path join and resolve functions through
// a central location incase we need to add some protection to file paths // a central location incase we need to add some protection to file paths
@ -8,7 +8,7 @@ const path = require("path")
* @param args Any number of string arguments to add to a path * @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use * @returns {string} The final path ready to use
*/ */
exports.join = function (...args) { export function join(...args: any) {
return path.join(...args) return path.join(...args)
} }
@ -17,6 +17,6 @@ exports.join = function (...args) {
* @param args Any number of string arguments to add to a path * @param args Any number of string arguments to add to a path
* @returns {string} The final path ready to use * @returns {string} The final path ready to use
*/ */
exports.resolve = function (...args) { export function resolve(...args: any) {
return path.resolve(...args) return path.resolve(...args)
} }

View File

@ -1,18 +1,25 @@
const csv = require("csvtojson") import { FieldSchema, Table } from "@budibase/types"
const { FieldTypes } = require("../constants") import csv from "csvtojson"
import { FieldTypes } from "../constants"
type CsvParseOpts = {
schema?: { [key: string]: any }
existingTable: Table
csvString?: string
}
const VALIDATORS = { const VALIDATORS = {
[FieldTypes.STRING]: () => true, [FieldTypes.STRING]: () => true,
[FieldTypes.OPTIONS]: () => true, [FieldTypes.OPTIONS]: () => true,
[FieldTypes.BARCODEQR]: () => true, [FieldTypes.BARCODEQR]: () => true,
[FieldTypes.NUMBER]: attribute => { [FieldTypes.NUMBER]: (attribute?: string) => {
// allow not to be present // allow not to be present
if (!attribute) { if (!attribute) {
return true return true
} }
return !isNaN(Number(attribute)) return !isNaN(Number(attribute))
}, },
[FieldTypes.DATETIME]: attribute => { [FieldTypes.DATETIME]: (attribute?: string) => {
// allow not to be present // allow not to be present
if (!attribute) { if (!attribute) {
return true return true
@ -22,13 +29,13 @@ const VALIDATORS = {
} }
const PARSERS = { const PARSERS = {
[FieldTypes.NUMBER]: attribute => { [FieldTypes.NUMBER]: (attribute?: string) => {
if (!attribute) { if (!attribute) {
return attribute return attribute
} }
return Number(attribute) return Number(attribute)
}, },
[FieldTypes.DATETIME]: attribute => { [FieldTypes.DATETIME]: (attribute?: string) => {
if (!attribute) { if (!attribute) {
return attribute return attribute
} }
@ -36,10 +43,10 @@ const PARSERS = {
}, },
} }
function parse(csvString, parsers) { export function parse(csvString: string, parsers: any) {
const result = csv().fromString(csvString) const result = csv().fromString(csvString)
const schema = {} const schema: Record<string, any> = {}
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
result.on("header", headers => { result.on("header", headers => {
@ -77,16 +84,22 @@ function parse(csvString, parsers) {
}) })
} }
function updateSchema({ schema, existingTable }) { export function updateSchema({
schema,
existingTable,
}: {
schema?: Record<string, any>
existingTable: Table
}) {
if (!schema) { if (!schema) {
return schema return schema
} }
const finalSchema = {} const finalSchema: Record<string, FieldSchema> = {}
const schemaKeyMap = {} const schemaKeyMap: Record<string, any> = {}
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key)) Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
for (let [key, field] of Object.entries(existingTable.schema)) { for (let [key, field] of Object.entries(existingTable.schema)) {
const lcKey = key.toLowerCase() const lcKey = key.toLowerCase()
const foundKey = schemaKeyMap[lcKey] const foundKey: string = schemaKeyMap[lcKey]
if (foundKey) { if (foundKey) {
finalSchema[key] = schema[foundKey] finalSchema[key] = schema[foundKey]
finalSchema[key].type = field.type finalSchema[key].type = field.type
@ -95,15 +108,22 @@ function updateSchema({ schema, existingTable }) {
return finalSchema return finalSchema
} }
async function transform({ schema, csvString, existingTable }) { export async function transform({
const colParser = {} schema,
csvString,
existingTable,
}: CsvParseOpts) {
if (!schema || !csvString) {
throw new Error("Unable to transform CSV without schema")
}
const colParser: any = {}
// make sure the table has all the columns required for import // make sure the table has all the columns required for import
if (existingTable) { if (existingTable) {
schema = updateSchema({ schema, existingTable }) schema = updateSchema({ schema, existingTable })
} }
for (let [key, field] of Object.entries(schema)) { for (let [key, field] of Object.entries(schema || {})) {
// don't import data to auto columns // don't import data to auto columns
if (!field.autocolumn) { if (!field.autocolumn) {
colParser[key] = PARSERS[field.type] || field.type colParser[key] = PARSERS[field.type] || field.type
@ -112,8 +132,10 @@ async function transform({ schema, csvString, existingTable }) {
try { try {
const data = await csv({ colParser }).fromString(csvString) const data = await csv({ colParser }).fromString(csvString)
const schemaKeyMap = {} const schemaKeyMap: any = {}
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key)) Object.keys(schema || {}).forEach(
key => (schemaKeyMap[key.toLowerCase()] = key)
)
for (let element of data) { for (let element of data) {
if (!data) { if (!data) {
continue continue
@ -137,9 +159,3 @@ async function transform({ schema, csvString, existingTable }) {
throw err throw err
} }
} }
module.exports = {
parse,
transform,
updateSchema,
}

View File

@ -1,9 +1,9 @@
const { join } = require("path") import { join } from "path"
const { ObjectStoreBuckets } = require("../../constants") import { ObjectStoreBuckets } from "../../constants"
const fs = require("fs") import fs from "fs"
const { objectStore } = require("@budibase/backend-core") import { objectStore } from "@budibase/backend-core"
const { resolve } = require("../centralPath") import { resolve } from "../centralPath"
const env = require("../../environment") import env from "../../environment"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
/** /**
@ -33,7 +33,7 @@ const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
* @param appId The app ID to backup * @param appId The app ID to backup
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
exports.backupClientLibrary = async appId => { export async function backupClientLibrary(appId: string) {
// Copy existing manifest to tmp // Copy existing manifest to tmp
let tmpManifestPath let tmpManifestPath
try { try {
@ -85,7 +85,7 @@ exports.backupClientLibrary = async appId => {
* @param appId The app ID to update * @param appId The app ID to update
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
exports.updateClientLibrary = async appId => { export async function updateClientLibrary(appId: string) {
let manifest, client let manifest, client
if (env.isDev()) { if (env.isDev()) {
@ -124,7 +124,7 @@ exports.updateClientLibrary = async appId => {
* @param appId The app ID to revert * @param appId The app ID to revert
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
exports.revertClientLibrary = async appId => { export async function revertClientLibrary(appId: string) {
// Copy backups manifest to tmp directory // Copy backups manifest to tmp directory
const tmpManifestPath = await objectStore.retrieveToTmp( const tmpManifestPath = await objectStore.retrieveToTmp(
ObjectStoreBuckets.APPS, ObjectStoreBuckets.APPS,

View File

@ -1,26 +1,17 @@
const { budibaseTempDir } = require("../budibaseDir") import { budibaseTempDir } from "../budibaseDir"
const fs = require("fs") import fs from "fs"
const { join } = require("path") import { join } from "path"
import { context, objectStore } from "@budibase/backend-core"
import { ObjectStoreBuckets } from "../../constants"
import { updateClientLibrary } from "./clientLibrary"
import { checkSlashesInUrl } from "../"
import env from "../../environment"
import fetch from "node-fetch"
const uuid = require("uuid/v4") const uuid = require("uuid/v4")
const { context, objectStore } = require("@budibase/backend-core")
const { ObjectStoreBuckets } = require("../../constants")
const { updateClientLibrary } = require("./clientLibrary")
const { checkSlashesInUrl } = require("../")
const env = require("../../environment")
const tar = require("tar") const tar = require("tar")
const fetch = require("node-fetch")
const {
upload,
retrieve,
retrieveToTmp,
deleteFolder,
downloadTarball,
downloadTarballDirect,
deleteFiles,
} = objectStore
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource") const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/** /**
@ -34,14 +25,14 @@ const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/** /**
* Upon first startup of instance there may not be everything we need in tmp directory, set it up. * Upon first startup of instance there may not be everything we need in tmp directory, set it up.
*/ */
exports.init = () => { export function init() {
const tempDir = budibaseTempDir() const tempDir = budibaseTempDir()
if (!fs.existsSync(tempDir)) { if (!fs.existsSync(tempDir)) {
// some test cases fire this quickly enough that // some test cases fire this quickly enough that
// synchronous cases can end up here at the same time // synchronous cases can end up here at the same time
try { try {
fs.mkdirSync(tempDir) fs.mkdirSync(tempDir)
} catch (err) { } catch (err: any) {
if (!err || err.code !== "EEXIST") { if (!err || err.code !== "EEXIST") {
throw err throw err
} }
@ -81,7 +72,7 @@ exports.checkDevelopmentEnvironment = () => {
* @param {string} path The path to the handlebars file which is to be loaded. * @param {string} path The path to the handlebars file which is to be loaded.
* @returns {string} The loaded handlebars file as a string - loaded as utf8. * @returns {string} The loaded handlebars file as a string - loaded as utf8.
*/ */
exports.loadHandlebarsFile = path => { export function loadHandlebarsFile(path: string) {
return fs.readFileSync(path, "utf8") return fs.readFileSync(path, "utf8")
} }
@ -91,13 +82,13 @@ exports.loadHandlebarsFile = path => {
* @param {string} contents the contents of the file which is to be returned from the API. * @param {string} contents the contents of the file which is to be returned from the API.
* @return {Object} the read stream which can be put into the koa context body. * @return {Object} the read stream which can be put into the koa context body.
*/ */
exports.apiFileReturn = contents => { export function apiFileReturn(contents: string) {
const path = join(budibaseTempDir(), uuid()) const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, contents) fs.writeFileSync(path, contents)
return fs.createReadStream(path) return fs.createReadStream(path)
} }
exports.streamFile = path => { export function streamFile(path: string) {
return fs.createReadStream(path) return fs.createReadStream(path)
} }
@ -106,7 +97,7 @@ exports.streamFile = path => {
* @param {string} fileContents contents which will be written to a temp file. * @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file. * @return {string} the path to the temp file.
*/ */
exports.storeTempFile = fileContents => { export function storeTempFile(fileContents: string) {
const path = join(budibaseTempDir(), uuid()) const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents) fs.writeFileSync(path, fileContents)
return path return path
@ -116,7 +107,7 @@ exports.storeTempFile = fileContents => {
* Utility function for getting a file read stream - a simple in memory buffered read * Utility function for getting a file read stream - a simple in memory buffered read
* stream doesn't work for pouchdb. * stream doesn't work for pouchdb.
*/ */
exports.stringToFileStream = contents => { export function stringToFileStream(contents: string) {
const path = exports.storeTempFile(contents) const path = exports.storeTempFile(contents)
return fs.createReadStream(path) return fs.createReadStream(path)
} }
@ -125,7 +116,7 @@ exports.stringToFileStream = contents => {
* Creates a temp file and returns it from the API. * Creates a temp file and returns it from the API.
* @param {string} fileContents the contents to be returned in file. * @param {string} fileContents the contents to be returned in file.
*/ */
exports.sendTempFile = fileContents => { export function sendTempFile(fileContents: string) {
const path = exports.storeTempFile(fileContents) const path = exports.storeTempFile(fileContents)
return fs.createReadStream(path) return fs.createReadStream(path)
} }
@ -135,7 +126,7 @@ exports.sendTempFile = fileContents => {
* @param {string} appId The ID of the app which is being created. * @param {string} appId The ID of the app which is being created.
* @return {Promise<void>} once promise completes app resources should be ready in object store. * @return {Promise<void>} once promise completes app resources should be ready in object store.
*/ */
exports.createApp = async appId => { export async function createApp(appId: string) {
await updateClientLibrary(appId) await updateClientLibrary(appId)
} }
@ -144,8 +135,8 @@ exports.createApp = async appId => {
* @param {string} appId The ID of the app which is being deleted. * @param {string} appId The ID of the app which is being deleted.
* @return {Promise<void>} once promise completes the app resources will be removed from object store. * @return {Promise<void>} once promise completes the app resources will be removed from object store.
*/ */
exports.deleteApp = async appId => { export async function deleteApp(appId: string) {
await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`) await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
} }
/** /**
@ -154,17 +145,21 @@ exports.deleteApp = async appId => {
* @param name * @param name
* @return {Promise<*>} * @return {Promise<*>}
*/ */
exports.downloadTemplate = async (type, name) => { export async function downloadTemplate(type: string, name: string) {
const DEFAULT_TEMPLATES_BUCKET = const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com" "prod-budi-templates.s3-eu-west-1.amazonaws.com"
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz` const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type) return objectStore.downloadTarball(
templateUrl,
ObjectStoreBuckets.TEMPLATES,
type
)
} }
/** /**
* Retrieves component libraries from object store (or tmp symlink if in local) * Retrieves component libraries from object store (or tmp symlink if in local)
*/ */
exports.getComponentLibraryManifest = async library => { export async function getComponentLibraryManifest(library: string) {
const appId = context.getAppId() const appId = context.getAppId()
const filename = "manifest.json" const filename = "manifest.json"
/* istanbul ignore next */ /* istanbul ignore next */
@ -182,12 +177,16 @@ exports.getComponentLibraryManifest = async library => {
return require(path) return require(path)
} }
if (!appId) {
throw new Error("No app ID found - cannot get component libraries")
}
let resp let resp
let path let path
try { try {
// Try to load the manifest from the new file location // Try to load the manifest from the new file location
path = join(appId, filename) path = join(appId, filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path) resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
} catch (error) { } catch (error) {
console.error( console.error(
`component-manifest-objectstore=failed appId=${appId} path=${path}`, `component-manifest-objectstore=failed appId=${appId} path=${path}`,
@ -195,7 +194,7 @@ exports.getComponentLibraryManifest = async library => {
) )
// Fallback to loading it from the old location for old apps // Fallback to loading it from the old location for old apps
path = join(appId, "node_modules", library, "package", filename) path = join(appId, "node_modules", library, "package", filename)
resp = await retrieve(ObjectStoreBuckets.APPS, path) resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
} }
if (typeof resp !== "string") { if (typeof resp !== "string") {
resp = resp.toString("utf8") resp = resp.toString("utf8")
@ -207,14 +206,17 @@ exports.getComponentLibraryManifest = async library => {
* All file reads come through here just to make sure all of them make sense * All file reads come through here just to make sure all of them make sense
* allows a centralised location to check logic is all good. * allows a centralised location to check logic is all good.
*/ */
exports.readFileSync = (filepath, options = "utf8") => { export function readFileSync(
return fs.readFileSync(filepath, options) filepath: string,
options: BufferEncoding = "utf8"
) {
return fs.readFileSync(filepath, { encoding: options })
} }
/** /**
* Given a set of app IDs makes sure file system is cleared of any of their temp info. * Given a set of app IDs makes sure file system is cleared of any of their temp info.
*/ */
exports.cleanup = appIds => { export function cleanup(appIds: string[]) {
for (let appId of appIds) { for (let appId of appIds) {
const path = join(budibaseTempDir(), appId) const path = join(budibaseTempDir(), appId)
if (fs.existsSync(path)) { if (fs.existsSync(path)) {
@ -223,7 +225,7 @@ exports.cleanup = appIds => {
} }
} }
const createTempFolder = item => { export function createTempFolder(item: string) {
const path = join(budibaseTempDir(), item) const path = join(budibaseTempDir(), item)
try { try {
// remove old tmp directories automatically - don't combine // remove old tmp directories automatically - don't combine
@ -231,24 +233,22 @@ const createTempFolder = item => {
fs.rmSync(path, { recursive: true, force: true }) fs.rmSync(path, { recursive: true, force: true })
} }
fs.mkdirSync(path) fs.mkdirSync(path)
} catch (err) { } catch (err: any) {
throw new Error(`Path cannot be created: ${err.message}`) throw new Error(`Path cannot be created: ${err.message}`)
} }
return path return path
} }
exports.createTempFolder = createTempFolder
const extractTarball = async (fromFilePath, toPath) => { export async function extractTarball(fromFilePath: string, toPath: string) {
await tar.extract({ await tar.extract({
file: fromFilePath, file: fromFilePath,
C: toPath, C: toPath,
}) })
} }
exports.extractTarball = extractTarball
const getPluginMetadata = async path => { export async function getPluginMetadata(path: string) {
let metadata = {} let metadata: { schema?: any; package?: any } = {}
try { try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8") const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8") const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
@ -265,7 +265,7 @@ const getPluginMetadata = async path => {
"package.json is missing one of 'name', 'version' or 'description'." "package.json is missing one of 'name', 'version' or 'description'."
) )
} }
} catch (err) { } catch (err: any) {
throw new Error( throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}` `Unable to process schema.json/package.json in plugin. ${err.message}`
) )
@ -273,9 +273,12 @@ const getPluginMetadata = async path => {
return { metadata, directory: path } return { metadata, directory: path }
} }
exports.getPluginMetadata = getPluginMetadata
exports.getDatasourcePlugin = async (name, url, hash) => { export async function getDatasourcePlugin(
name: string,
url: string,
hash: string
) {
if (!fs.existsSync(DATASOURCE_PATH)) { if (!fs.existsSync(DATASOURCE_PATH)) {
fs.mkdirSync(DATASOURCE_PATH) fs.mkdirSync(DATASOURCE_PATH)
} }
@ -311,7 +314,7 @@ exports.getDatasourcePlugin = async (name, url, hash) => {
/** /**
* Find for a file recursively from start path applying filter, return first match * Find for a file recursively from start path applying filter, return first match
*/ */
exports.findFileRec = (startPath, filter) => { export function findFileRec(startPath: string, filter: any) {
if (!fs.existsSync(startPath)) { if (!fs.existsSync(startPath)) {
return return
} }
@ -332,21 +335,10 @@ exports.findFileRec = (startPath, filter) => {
/** /**
* Remove a folder which is not empty from the file system * Remove a folder which is not empty from the file system
*/ */
exports.deleteFolderFileSystem = path => { export function deleteFolderFileSystem(path: string) {
if (!fs.existsSync(path)) { if (!fs.existsSync(path)) {
return return
} }
fs.rmSync(path, { recursive: true, force: true }) fs.rmSync(path, { recursive: true, force: true })
} }
/**
* Full function definition for below can be found in the utilities.
*/
exports.upload = upload
exports.retrieve = retrieve
exports.retrieveToTmp = retrieveToTmp
exports.deleteFiles = deleteFiles
exports.downloadTarballDirect = downloadTarballDirect
exports.TOP_LEVEL_PATH = TOP_LEVEL_PATH
exports.NODE_MODULES_PATH = NODE_MODULES_PATH

View File

@ -1,21 +1,19 @@
const jimp = require("jimp") import jimp from "jimp"
const FORMATS = { const FORMATS = {
IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"], IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
} }
function processImage(file) { function processImage(file: { path: string }) {
// this will overwrite the temp file // this will overwrite the temp file
return jimp.read(file.path).then(img => { return jimp.read(file.path).then(img => {
return img.resize(300, jimp.AUTO).write(file.path) return img.resize(300, jimp.AUTO).write(file.path)
}) })
} }
async function process(file) { export async function process(file: { extension: string; path: string }) {
if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) { if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
await processImage(file) await processImage(file)
} }
return file return file
} }
exports.process = process

View File

@ -1,8 +1,9 @@
const env = require("../environment") import env from "../environment"
const { plugins: ProPlugins } = require("@budibase/pro") import { plugins as ProPlugins } from "@budibase/pro"
const { objectStore } = require("@budibase/backend-core") import { objectStore } from "@budibase/backend-core"
import { Plugin } from "@budibase/types"
exports.enrichPluginURLs = plugins => { export function enrichPluginURLs(plugins: Plugin[]) {
if (!plugins || !plugins.length) { if (!plugins || !plugins.length) {
return [] return []
} }

View File

@ -1,122 +0,0 @@
let events = require("events")
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about
* the state of the message, this object constructor implements the same schema of Bull jobs
* for the sake of maintaining API consistency.
* @param {string} queue The name of the queue which the message will be carried on.
* @param {object} message The JSON message which will be passed back to the consumer.
* @returns {Object} A new job which can now be put onto the queue, this is mostly an
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
*/
/**
 * Wraps a raw queue message in a Bull-compatible job object, so the in-memory
 * queue hands consumers the same shape (timestamp/queue/data) that real Bull
 * jobs expose.
 * @param {string} queue The name of the queue which the message will be carried on.
 * @param {object} message The JSON message which will be passed back to the consumer.
 * @returns {Object} A new job ready to be put onto the queue.
 */
function newJob(queue, message) {
  const job = {
    timestamp: Date.now(),
    queue,
    data: message,
  }
  return job
}
/**
 * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.
 * It is relatively simple, using an event emitter internally to register when messages are available
 * to the consumers - it can support many inputs and many consumers.
 */
class InMemoryQueue {
  /**
   * The constructor of the queue, exactly the same as that of Bull's.
   * @param {string} name The name of the queue which is being configured.
   * @param {object|null} opts This is not used by the in memory queue as there is no real use
   * case when in memory, but is the same API as Bull.
   */
  constructor(name, opts = null) {
    this._name = name
    this._opts = opts
    // FIFO backlog of jobs waiting for a consumer
    this._messages = []
    // internal notification bus - a "message" event means the backlog grew
    this._emitter = new events.EventEmitter()
  }
  /**
   * Same callback API as Bull, each callback passed to this will consume messages as they are
   * available. Please note this is a queue service, not a notification service, so each
   * consumer will receive different messages.
   * @param {function<object>} func The callback function which will return a "Job", the same
   * as the Bull API, within this job the property "data" contains the JSON message. Please
   * note this is incredibly limited compared to Bull as in reality the Job would contain
   * a lot more information about the queue and current status of Bull cluster.
   */
  process(func) {
    this._emitter.on("message", async () => {
      // each "message" event consumes at most one job from the backlog
      if (this._messages.length <= 0) {
        return
      }
      let msg = this._messages.shift()
      let resp = func(msg)
      // if the consumer returned a promise, wait for it to settle
      if (resp.then != null) {
        await resp
      }
    })
  }
  // simply puts a message to the queue and emits to the queue for processing
  /**
   * Simple function to replicate the add message functionality of Bull, putting
   * a new message on the queue. This then emits an event which will be used to
   * return the message to a consumer (if one is attached).
   * @param {object} msg A message to be transported over the queue, this should be
   * a JSON message as this is required by Bull.
   */
  // eslint-disable-next-line no-unused-vars
  add(msg, repeat) {
    // "repeat" exists only for Bull API parity and is ignored here
    if (typeof msg !== "object") {
      throw "Queue only supports carrying JSON."
    }
    this._messages.push(newJob(this._name, msg))
    this._emitter.emit("message")
  }
  /**
   * replicating the close function from bull, which waits for jobs to finish.
   */
  async close() {
    return []
  }
  /**
   * This removes a cron which has been implemented, this is part of Bull API.
   * @param {string} cronJobId The cron which is to be removed.
   */
  removeRepeatableByKey(cronJobId) {
    // TODO: implement for testing
    console.log(cronJobId)
  }
  /**
   * Implemented for tests
   */
  getRepeatableJobs() {
    return []
  }
  // eslint-disable-next-line no-unused-vars
  removeJobs(pattern) {
    // no-op
  }
  /**
   * Implemented for tests
   */
  async clean() {
    return []
  }
  async getJob() {
    return {}
  }
  on() {
    // do nothing
  }
}
module.exports = InMemoryQueue

View File

@ -1,4 +1,5 @@
const { FieldTypes } = require("../../constants") // @ts-nocheck
import { FieldTypes } from "../../constants"
/** /**
* A map of how we convert various properties in rows to each other based on the row type. * A map of how we convert various properties in rows to each other based on the row type.

View File

@ -1,9 +1,13 @@
const fetch = require("node-fetch") import fetch from "node-fetch"
const { VM, VMScript } = require("vm2") import { VM, VMScript } from "vm2"
const JS_TIMEOUT_MS = 1000 const JS_TIMEOUT_MS = 1000
class ScriptRunner { class ScriptRunner {
constructor(script, context) { vm: VM
results: { out: string }
script: VMScript
constructor(script: string, context: any) {
const code = `let fn = () => {\n${script}\n}; results.out = fn();` const code = `let fn = () => {\n${script}\n}; results.out = fn();`
this.vm = new VM({ this.vm = new VM({
timeout: JS_TIMEOUT_MS, timeout: JS_TIMEOUT_MS,
@ -21,4 +25,4 @@ class ScriptRunner {
} }
} }
module.exports = ScriptRunner export = ScriptRunner

View File

@ -1,4 +1,4 @@
module.exports = { export = {
OK: 200, OK: 200,
UNAUTHORIZED: 401, UNAUTHORIZED: 401,
FORBIDDEN: 403, FORBIDDEN: 403,

View File

@ -7,7 +7,7 @@ function getNewQuotaReset() {
return Date.now() + 2592000000 return Date.now() + 2592000000
} }
function resetQuotasIfRequired(quota) { function resetQuotasIfRequired(quota: { quotaReset: number; usageQuota: any }) {
// Check if the quota needs reset // Check if the quota needs reset
if (Date.now() >= quota.quotaReset) { if (Date.now() >= quota.quotaReset) {
quota.quotaReset = getNewQuotaReset() quota.quotaReset = getNewQuotaReset()

View File

@ -12,3 +12,4 @@ export * from "./row"
export * from "./user" export * from "./user"
export * from "./backup" export * from "./backup"
export * from "./webhook" export * from "./webhook"
export * from "./links"

View File

@ -0,0 +1,13 @@
export interface LinkDocument {
type: string
doc1: {
rowId: string
fieldName: string
tableId: string
}
doc2: {
rowId: string
fieldName: string
tableId: string
}
}

View File

@ -8,10 +8,12 @@ export interface FieldSchema {
externalType?: string externalType?: string
fieldName?: string fieldName?: string
name: string name: string
sortable?: boolean
tableId?: string tableId?: string
relationshipType?: string relationshipType?: string
through?: string through?: string
foreignKey?: string foreignKey?: string
icon?: string
autocolumn?: boolean autocolumn?: boolean
subtype?: string subtype?: string
throughFrom?: string throughFrom?: string
@ -22,6 +24,7 @@ export interface FieldSchema {
ignoreTimezones?: boolean ignoreTimezones?: boolean
timeOnly?: boolean timeOnly?: boolean
lastID?: number lastID?: number
useRichText?: boolean | null
meta?: { meta?: {
toTable: string toTable: string
toKey: string toKey: string
@ -31,10 +34,22 @@ export interface FieldSchema {
email?: boolean email?: boolean
inclusion?: string[] inclusion?: string[]
length?: { length?: {
minimum?: string | number minimum?: string | number | null
maximum?: string | number maximum?: string | number | null
}
numericality?: {
greaterThanOrEqualTo: string | null
lessThanOrEqualTo: string | null
}
presence?:
| boolean
| {
allowEmpty?: boolean
}
datetime?: {
latest: string
earliest: string
} }
presence?: boolean
} }
} }

View File

@ -5,6 +5,9 @@ export interface View {
filters: ViewFilter[] filters: ViewFilter[]
schema: ViewSchema schema: ViewSchema
calculation?: ViewCalculation calculation?: ViewCalculation
map?: string
reduce?: any
meta?: Record<string, any>
} }
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema