Merge branch 'master' into fix/selected-by-user-obscured-nav-item

commit 98cb8afdd4
Andrew Kingston, 2023-10-12 08:28:32 +01:00 (committed by GitHub)
14 changed files with 34 additions and 440 deletions

View File

@@ -1,5 +1,5 @@
{
"version": "2.11.24",
"version": "2.11.27",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@@ -74,7 +74,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run --stream build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",

View File

@@ -20,7 +20,6 @@
"test:watch": "jest --watch",
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docs": "node ./scripts/docs/generate.js open",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@@ -112,7 +111,7 @@
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",
"vm2": "3.9.17",
"vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0",
"yargs": "13.2.4"

View File

@@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc-based documentation.
You can find the documentation on comment structure at the [APIDocs page](https://apidocjs.com/).
In general, most API endpoint doc comments will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does, any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must include the
HTTP verb, the endpoint URL and the name.
3. There are three ways to specify parameters used as inputs for your endpoint:
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters (see the sketch after this list).
4. The `@apiGroup` should be the same for all API Doc comments in a route file.
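As a quick sketch of how these rules combine, a doc comment for a hypothetical endpoint might look like the following; the route, group, permission and field names are made up purely for illustration:
```js
// Hypothetical example - the route, group, permission and fields below
// only exist to show the comment structure described above.
/**
 * @api {post} /api/:tableId/rows/search Search rows
 * @apiName Search rows
 * @apiGroup rows
 * @apiPermission builder
 * @apiDescription Search the rows of a table, returning a paginated set of results.
 *
 * @apiParam {string} tableId the ID of the table to search (URL parameter).
 *
 * @apiParam (Body) query the filters to apply when searching the rows.
 *
 * @apiParam (Query) {boolean} paginate whether to paginate the results.
 *
 * @apiSuccess {object} rows the rows found by the search.
 */
```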

View File

@@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View File

@@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View File

@@ -859,7 +859,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1064,7 +1065,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1280,7 +1282,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},

View File

@@ -782,6 +782,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -946,6 +947,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -1117,6 +1119,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:

View File

@@ -1580,6 +1580,9 @@ describe.each([
(row: Row) => ({
_id: row._id,
primaryDisplay: row.email,
email: row.email,
firstName: row.firstName,
lastName: row.lastName,
}),
],
])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {

View File

@@ -279,7 +279,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -386,7 +387,8 @@
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -495,7 +497,8 @@
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */

View File

@@ -82,6 +82,9 @@ export async function processOutputBBReferences(
return users.map(u => ({
_id: u._id,
primaryDisplay: u.email,
email: u.email,
firstName: u.firstName,
lastName: u.lastName,
}))
default:

View File

@@ -180,6 +180,9 @@ describe("bbReferenceProcessor", () => {
{
_id: user._id,
primaryDisplay: user.email,
email: user.email,
firstName: user.firstName,
lastName: user.lastName,
},
])
expect(cacheGetUsersSpy).toBeCalledTimes(1)
@@ -204,6 +207,9 @@
[user1, user2].map(u => ({
_id: u._id,
primaryDisplay: u.email,
email: u.email,
firstName: u.firstName,
lastName: u.lastName,
}))
)
)

View File

@@ -29,7 +29,7 @@
"dayjs": "^1.10.8",
"handlebars": "^4.7.6",
"lodash": "^4.17.20",
"vm2": "^3.9.15"
"vm2": "^3.9.19"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^17.1.0",

View File

@@ -21750,10 +21750,10 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
vm2@3.9.17, vm2@^3.9.15, vm2@^3.9.8:
version "3.9.17"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.17.tgz#251b165ff8a0e034942b5181057305e39570aeab"
integrity sha512-AqwtCnZ/ERcX+AVj9vUsphY56YANXxRuqMb7GsDtAr0m0PcQX3u0Aj3KWiXM0YAHy7i6JEeHrwOnwXbGYgRpAw==
vm2@^3.9.19:
version "3.9.19"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
dependencies:
acorn "^8.7.0"
acorn-walk "^8.2.0"