Updating docs for tables, adding a swagger generation tool and a README describing how to use it.

This commit is contained in:
mike12345567 2021-11-26 14:13:53 +00:00
parent a99bf1bba3
commit 4697b21ef1
6 changed files with 540 additions and 31 deletions

View File

@@ -146,6 +146,7 @@
"eslint": "^6.8.0",
"jest": "^27.0.5",
"nodemon": "^2.0.4",
"path-to-regexp": "^6.2.0",
"prettier": "^2.3.1",
"rimraf": "^3.0.2",
"supertest": "^4.0.2",

View File

@@ -0,0 +1,31 @@
### Documentation
This directory contains the scripts required to generate the APIDoc-based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
In general, the doc comment for most API endpoints will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does and any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure there is always an `@api` definition at the start; it must include the
HTTP verb, the endpoint URL and the name.
3. There are three ways to specify parameters used as inputs for your endpoint:
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters - a combined example is sketched below this list.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.
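
As an illustrative sketch only (the endpoint, group and field names below are hypothetical, not taken from the codebase), a comment combining all three parameter styles could look like:
```js
/**
 * @api {post} /api/examples/:exampleId/search Search an example
 * @apiName Search an example
 * @apiGroup examples
 * @apiPermission builder
 * @apiDescription Hypothetical endpoint demonstrating a URL parameter, body fields
 * and a query string parameter in a single comment.
 *
 * @apiParam {string} exampleId The ID of the example, taken from the URL.
 *
 * @apiParam (Query) {string} [view] An optional query string parameter.
 *
 * @apiParam (Body) {object} query The filters to apply, supplied on the request body.
 * @apiParam (Body) {number} [limit] An optional limit on the number of results.
 *
 * @apiSuccess {object[]} body The results that were found.
 */
```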

View File

@@ -2,6 +2,7 @@ const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
@@ -37,7 +38,7 @@ async function generate() {
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const doc = createDoc({
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
@@ -46,17 +47,28 @@
},
},
config: configPath,
})
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
console.log("Docs generated successfully.")
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
console.error("Unable to generate docs.")
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate()
generate().catch(err => {
console.error(err)
})
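
With this change the generator writes three artifacts into the assets directory: `swagger.json` (produced by the new converter), plus the raw `apidoc.json` and `project.json`. A minimal sketch of driving the converter directly from those files - assuming they already exist and that `assets` sits next to this script, which may differ from the real `assetsPath` - could look like:
```js
const fs = require("fs")
const { join } = require("path")
const toSwagger = require("./toSwagger")

// Assumed location - the real assetsPath is resolved inside generate().
const assetsPath = join(__dirname, "assets")

const apidocJson = JSON.parse(fs.readFileSync(join(assetsPath, "apidoc.json"), "utf8"))
const projectJson = JSON.parse(fs.readFileSync(join(assetsPath, "project.json"), "utf8"))

// Same call generate() makes, just reading from disk instead of using createDoc() output,
// pretty-printed here for readability.
fs.writeFileSync(
  join(assetsPath, "swagger.json"),
  JSON.stringify(toSwagger(apidocJson, projectJson), null, 2)
)
```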

View File

@@ -0,0 +1,320 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].substr(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger
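
To make the conversion concrete, here is an illustrative input/output sketch; the values are made up to match the shapes the code above reads, not real apidoc output:
```js
const toSwagger = require("./toSwagger")

// One apidoc entry describing GET /api/tables/:id (illustrative values only).
const apidocJson = [
  {
    type: "get",
    url: "/api/tables/:id",
    name: "FetchSingleTable",
    group: "tables",
    description: "<p>Retrieves a single table.</p>",
    parameter: {
      fields: {
        Parameter: [
          { field: "id", type: "string", optional: false, description: "The table ID." },
        ],
      },
    },
  },
]
const projectJson = { name: "server", version: "1.0.0", description: "Budibase server" }

const spec = toSwagger(apidocJson, projectJson)
// URL parameters are rewritten to swagger style and emitted as path parameters:
// spec.paths["/api/tables/{id}"].get.parameters[0] ->
//   { name: "id", in: "path", required: true, type: "string", description: "The table ID." }
```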

View File

@@ -65,7 +65,7 @@ router
* @apiParam {string} tableId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve.
*
* @apiSuccess {object} row The response body will be the row that was found.
* @apiSuccess {object} body The response body will be the row that was found.
*/
.get(
"/api/:tableId/rows/:rowId",
@@ -84,21 +84,21 @@
*
* @apiParam {string} tableId The ID of the table to retrieve rows from.
*
* @apiBody {boolean} [paginate] If pagination is required then this should be set to true,
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false.
* @apiBody {object} [query] This contains a set of filters which should be applied, if none
* @apiParam (Body) {object} [query] This contains a set of filters which should be applied, if none
* specified then the request will be unfiltered. An example with all of the possible query
* options has been supplied below.
* @apiBody {number} [limit] This sets a limit for the number of rows that will be returned,
* @apiParam (Body) {number} [limit] This sets a limit for the number of rows that will be returned,
* this will be implemented at the database level if supported for performance reasons. This
* is useful when paginating to set exactly how many rows per page.
* @apiBody {string} [bookmark] If pagination is enabled then a bookmark will be returned
* @apiParam (Body) {string} [bookmark] If pagination is enabled then a bookmark will be returned
* with each successful search request, this should be supplied back to get the next page.
* @apiBody {object} [sort] If sort is desired this should contain the name of the column to
* @apiParam (Body) {object} [sort] If sort is desired this should contain the name of the column to
* sort on.
* @apiBody {string} [sortOrder] If sort is enabled then this can be either "descending" or
* @apiParam (Body) {string} [sortOrder] If sort is enabled then this can be either "descending" or
* "ascending" as required.
* @apiBody {string} [sortType] If sort is enabled then you must specify the type of search
* @apiParam (Body) {string} [sortType] If sort is enabled then you must specify the type of search
* being used, either "string" or "number". This is only used for internal tables.
*
* @apiParamExample {json} Example:
@@ -164,17 +164,17 @@ router
*
* @apiParam {string} tableId The ID of the table to save a row to.
*
* @apiBody {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiBody {string} [_rev] If working with an existing row for an internal table its revision
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiBody {string} tableId The ID of the table should also be specified in the row body itself.
* @apiBody {any} [any] Any field supplied in the body will be assessed to see if it matches
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved; if it was just created this
* is the row's new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {any} [any] The contents of the row that was saved will be returned as well.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/:tableId/rows",
@@ -208,7 +208,7 @@
*
* @apiParam {string} tableId The ID of the table the row is to be validated for.
*
* @apiBody {any} [any] Any fields provided in the request body will be tested
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints.
*
* @apiSuccess {boolean} valid If inputs provided are acceptable within the table schema this
@@ -231,13 +231,15 @@ router
* @apiDescription This endpoint can delete a single row, or delete rows in bulk.
*
* @apiBody {object[]} [rows] If bulk deletion is desired then provide the rows in this
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiBody {string} [_id] If deleting a single row then provide its ID in this field.
* @apiBody {string} [_rev] If deleting a single row from an internal table then provide its
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} rows If deleting bulk then the response body will be an array
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
* of the deleted rows; if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/

View File

@@ -27,13 +27,110 @@ function generateSaveValidator() {
}
router
/**
* @api {get} /api/tables Fetch all tables
* @apiName Fetch all tables
* @apiGroup tables
* @apiPermission table read access
* @apiDescription This endpoint retrieves all of the tables which have been created in
* an app. This includes both external and internal tables; to tell the difference
* between them, look at the "type" property on each table, which will be either "internal" or "external".
*
* @apiSuccess {object[]} body The response body will be the list of tables that were found - as
* this endpoint does not take any parameters, the only error scenario is a lack of access.
*/
.get("/api/tables", authorized(BUILDER), tableController.fetch)
/**
* @api {get} /api/tables/:id Fetch a single table
* @apiName Fetch a single table
* @apiGroup tables
* @apiPermission table read access
* @apiDescription Retrieves a single table, which could be internal or external, based on
* the provided table ID.
*
* @apiParam {string} id The ID of the table which is to be retrieved.
*
* @apiSuccess {object} body The response body will be the table that was found.
*/
.get(
"/api/tables/:id",
paramResource("id"),
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
tableController.find
)
/**
* @api {post} /api/tables Save a table
* @apiName Save a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription Create or update a table with this endpoint; it will function for both internal
* and external tables.
*
* @apiParam (Body) {string} [_id] If updating an existing table then the ID of the table must be specified.
* @apiParam (Body) {string} [_rev] If updating an existing internal table then the revision must also be specified.
* @apiParam (Body) {string} [type] This should be either "internal" or "external" depending on the table type -
* this will default to "internal".
* @apiParam (Body) {string} [sourceId] If creating an external table then this should be set to the data source ID. If
* building an internal table this does not need to be set, although it will be returned as "bb_internal".
* @apiParam (Body) {string} name The name of the table; this will be used in the UI. To rename the table, simply
* supply the table structure to this endpoint with the name changed.
* @apiParam (Body) {object} schema A key/value object which has all of the columns in the table as its keys.
* For each column a "type" and "constraints" must be specified, with some types requiring further information.
* More information about the schema structure can be found in the TypeScript definitions.
* @apiParam (Body) {string} [primaryDisplay] The name of the column which should be used when displaying rows
* from this table as relationships.
* @apiParam (Body) {object[]} [indexes] Specifies the search indexes - this is deprecated behaviour with the introduction
* of Lucene indexes. This functionality is only available for internal tables.
* @apiParam (Body) {object} [_rename] If a column is to be renamed then the "old" column name should be set in this
* structure, and the "updated" (new) column name should also be supplied. The schema should also be updated; this field
* lets the server know that the column hasn't simply been deleted, but that its data has moved to a new name, so the
* existing rows in the table can be fixed. This functionality is only available for internal tables.
* @apiParam (Body) {object} [dataImport] When creating an internal table it can be built from a CSV by using the
* CSV validation endpoint. Send the CSV data to the validation endpoint, then put the results of that call
* into this property along with the CSV, and a table and its rows will be built from it. This is not supported
* when updating a table, or for external tables.
*
* @apiParamExample {json} Example:
* {
* "_id": "ta_05541307fa0f4044abee071ca2a82119",
* "_rev": "10-0fbe4e78f69b255d79f1017e2eeef807",
* "type": "internal",
* "views": {},
* "name": "tableName",
* "schema": {
* "column": {
* "type": "string",
* "constraints": {
* "type": "string",
* "length": {
* "maximum": null
* },
* "presence": false
* },
* "name": "column"
* }
* },
* "primaryDisplay": "column",
* "indexes": [],
* "sourceId": "bb_internal",
* "_rename": {
* "old": "columnName",
* "updated": "newColumnName",
* },
* "dataImport": {
* "csvString": "column\nvalue",
* "primaryDisplay": "column",
* "schema": {
* "column": {
* "type": "string"
* }
* }
* }
* }
*
* @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
* saved to the database.
*/
.post(
"/api/tables",
// allows control over updating a table
@@ -42,22 +139,68 @@ router
generateSaveValidator(),
tableController.save
)
/**
* @api {post} /api/tables/csv/validate Validate a CSV for a table
* @apiName Validate a CSV for a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription When creating a new table, or importing a CSV to an existing table, the CSV must be validated and
* converted into a Budibase schema; this endpoint performs that validation and conversion.
*
* @apiParam (Body) {string} csvString The CSV which is to be validated as a string.
* @apiParam (Body) {object} [schema] When a CSV has been validated it is possible to re-validate after changing the
* type of a field; by default every field will be a string as there is no way to infer types. The returned schema can
* be updated and then sent back to the endpoint to re-validate and check whether the new type will work for the CSV,
* e.g. using a number instead of a string.
* @apiParam (Body) {string} [tableId] If importing data to an existing table this will pull the current table and
* remove any fields from the CSV schema which do not exist on the table or do not match the column's type. When
* importing a CSV to an existing table, only fields that are present on the table can be imported.
*
* @apiSuccess {object} schema The response body will contain a "schema" object that represents the schema found for
* the CSV - this will be in the same format used for table schemas.
*/
.post(
"/api/tables/csv/validate",
authorized(BUILDER),
tableController.validateCSVSchema
)
/**
* @api {delete} /api/tables/:tableId/:revId Delete a table
* @apiName Delete a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will delete a table and all of its associated data; for this reason it is
* quite dangerous. It will work for both internal and external tables.
*
* @apiParam {string} tableId The ID of the table which is to be deleted.
* @apiParam {string} [revId] If deleting an internal table then the revision (_rev) must also be supplied; for
* external tables this can simply be set to anything, e.g. "external".
*
* @apiSuccess {string} message A message stating that the table was deleted successfully.
*/
.delete(
"/api/tables/:tableId/:revId",
paramResource("tableId"),
authorized(BUILDER),
tableController.destroy
)
// this is currently builder only, but in the future
// it could be carried out by an end user in app,
// however some thought will need to be had about
// implications for automations (triggers)
// new trigger type, bulk rows created
/**
* @api {post} /api/tables/:tableId/import Import CSV to existing table
* @apiName Import CSV to existing table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will import data to existing tables, internal or external. It is used in combination
* with the CSV validation endpoint: take the output of the CSV validation endpoint and pass it to this endpoint to
* import the data. Please note this will only import fields that already exist on the table and match the expected type.
*
* @apiParam {string} tableId The ID of the table which the data should be imported to.
*
* @apiParam (Body) {object} dataImport This is the same structure used when creating an internal table from
* a CSV; it should contain the "schema" returned from the CSV validation endpoint and the "csvString" which is to be
* turned into rows.
*
* @apiSuccess {string} message A message stating that the data was imported successfully.
*/
.post(
"/api/tables/:tableId/import",
paramResource("tableId"),