Merge pull request #3554 from Budibase/labday/docs
API Docs, generation, tables and rows and Swagger definitions
Commit cba41d8e9d
@@ -47,6 +47,7 @@
    "build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
    "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
    "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
    "build:docs": "lerna run build:docs",
    "release:helm": "./scripts/release_helm_chart.sh",
    "env:multi:enable": "lerna run env:multi:enable",
    "env:multi:disable": "lerna run env:multi:disable",
@@ -102,6 +102,9 @@
    if (field.type === AUTO_TYPE) {
      field = buildAutoColumn($tables.draft.name, field.name, field.subtype)
    }
    if (field.type !== LINK_TYPE) {
      delete field.fieldName
    }
    try {
      await tables.saveField({
        originalName,
@@ -122,7 +122,7 @@ export function createTablesStore() {
      update(state => {
        // delete the original if renaming
        // need to handle if the column had no name, empty string
        if (originalName || originalName === "") {
        if (originalName != null && originalName !== field.name) {
          delete state.draft.schema[originalName]
          state.draft._rename = {
            old: originalName,
@@ -3,7 +3,7 @@
  "email": "hi@budibase.com",
  "version": "0.9.190-alpha.7",
  "description": "Budibase Web Server",
  "main": "src/index.js",
  "main": "src/index.ts",
  "repository": {
    "type": "git",
    "url": "https://github.com/Budibase/budibase.git"
@@ -15,6 +15,7 @@
    "test:watch": "jest --watch",
    "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
    "build:docker": "yarn run predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
    "build:docs": "node ./scripts/docs/generate.js open",
    "run:docker": "node dist/index.js",
    "dev:stack:up": "node scripts/dev/manage.js up",
    "dev:stack:down": "node scripts/dev/manage.js down",
@@ -106,7 +107,7 @@
    "mssql": "6.2.3",
    "mysql2": "^2.3.1",
    "node-fetch": "2.6.0",
    "open": "7.3.0",
    "open": "^8.4.0",
    "pg": "8.5.1",
    "pino-pretty": "4.0.0",
    "posthog-node": "^1.1.4",
@@ -130,6 +131,7 @@
    "@babel/preset-env": "^7.14.4",
    "@budibase/standard-components": "^0.9.139",
    "@jest/test-sequencer": "^24.8.0",
    "@types/apidoc": "^0.50.0",
    "@types/bull": "^3.15.1",
    "@types/jest": "^26.0.23",
    "@types/koa": "^2.13.3",
@@ -137,12 +139,14 @@
    "@types/node": "^15.12.4",
    "@types/oracledb": "^5.2.1",
    "@typescript-eslint/parser": "4.28.0",
    "apidoc": "^0.50.2",
    "babel-jest": "^27.0.2",
    "copyfiles": "^2.4.1",
    "docker-compose": "^0.23.6",
    "eslint": "^6.8.0",
    "jest": "^27.0.5",
    "nodemon": "^2.0.4",
    "path-to-regexp": "^6.2.0",
    "prettier": "^2.3.1",
    "rimraf": "^3.0.2",
    "supertest": "^4.0.2",
@@ -0,0 +1,31 @@
### Documentation

This directory contains the scripts required to generate the APIDoc-based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).

In general most API endpoints will look like:
```js
/**
 * @api {post} /api/:param/url Give it a name
 * @apiName Give it a name
 * @apiGroup group
 * @apiPermission permission
 * @apiDescription Describe what the endpoint does, any special cases the user
 * should be aware of.
 *
 * @apiParam {string} param describe a URL parameter.
 *
 * @apiParam (Body) input describe a field on the body.
 *
 * @apiSuccess {object} output describe the output.
 */
```

There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must always have the
   HTTP verb, the endpoint URL and the name.
3. There are three ways you can specify parameters used as inputs for your endpoint:
   `@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
   for query string parameters - see the example after this list.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.
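As an illustration of point 3, a hypothetical endpoint using all three parameter styles could be documented as below; the route and field names are invented for the example and are not real Budibase endpoints.

```js
/**
 * @api {post} /api/:tableId/rows/export Export rows from a table
 * @apiName Export rows from a table
 * @apiGroup rows
 * @apiPermission table read access
 * @apiDescription A made-up endpoint showing a URL parameter, a query string
 * parameter and a body field documented together.
 *
 * @apiParam {string} tableId The ID of the table to export from.
 *
 * @apiParam (Query) {string} [format] An optional query string parameter, e.g. "csv".
 *
 * @apiParam (Body) {object} [query] An optional set of filters to apply before exporting.
 *
 * @apiSuccess {object[]} rows The exported rows.
 */
```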
@@ -0,0 +1,74 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")

const config = {
  name: "Budibase API",
  version: packageJson.version,
  description: "Documenting the Budibase backend API",
  title: "Budibase app service API",
}

const shouldOpen = process.argv[2]
const disallowed = []

function filter(parsedRouteFiles) {
  const tagToSearch = "url"
  for (let routeFile of parsedRouteFiles) {
    for (let route of routeFile) {
      let routeInfo = route["local"]
      if (disallowed.includes(routeInfo[tagToSearch])) {
        const idx = routeFile.indexOf(route)
        routeFile.splice(idx, 1)
      }
    }
  }
}

async function generate() {
  // start by writing a config file
  const configPath = join(__dirname, "config.json")
  fs.writeFileSync(configPath, JSON.stringify(config))
  const mainPath = join(__dirname, "..", "..")
  const srcPath = join(mainPath, "src", "api", "routes")
  const assetsPath = join(mainPath, "builder", "assets", "docs")
  if (!fs.existsSync(assetsPath)) {
    fs.mkdirSync(assetsPath, { recursive: true })
  }
  const options = {
    src: [srcPath],
    dest: assetsPath,
    filters: {
      main: {
        postFilter: filter,
      },
    },
    config: configPath,
  }
  const doc = createDoc(options)
  if (typeof doc !== "boolean") {
    const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
    fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
    fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
    fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
    console.log(
      `Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
    )
  } else {
    throw "Unable to generate docs."
  }
  // delete the temporary config file
  fs.unlinkSync(configPath)
  setTimeout(async () => {
    if (shouldOpen === "open") {
      await open(join(assetsPath, "index.html"), { wait: false })
    }
  }, 2000)
}

generate().catch(err => {
  console.error(err)
})
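A quick way to sanity check the generator output is to read back the swagger.json it writes. This is a small sketch (not part of the PR) which assumes it is run from the same scripts/docs directory and that build:docs has already been run.

```js
const fs = require("fs")
const { join } = require("path")

// same output location that generate.js uses above
const assetsPath = join(__dirname, "..", "..", "builder", "assets", "docs")
const swagger = JSON.parse(
  fs.readFileSync(join(assetsPath, "swagger.json"), "utf8")
)

// print every documented path and the HTTP verbs it supports
for (let [url, verbs] of Object.entries(swagger.paths)) {
  console.log(`${Object.keys(verbs).join(", ").toUpperCase()} ${url}`)
}
```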
@@ -0,0 +1,320 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")

/********************************************************
 * Based on: https://github.com/fsbahman/apidoc-swagger *
 ********************************************************/

let swagger = {
  swagger: "2.0",
  info: {},
  paths: {},
  definitions: {},
}

function toSwagger(apidocJson, projectJson) {
  swagger.info = addInfo(projectJson)
  swagger.paths = extractPaths(apidocJson)
  return swagger
}

let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
  return text ? text.replace(tagsRegex, "") : text
}

function addInfo(projectJson) {
  let info = {}
  info["title"] = projectJson.title || projectJson.name
  info["version"] = projectJson.version
  info["description"] = projectJson.description
  return info
}

/**
 * Extracts paths provided in json format
 * post, patch, put request parameters are extracted in body
 * get and delete are extracted to path parameters
 * @param apidocJson
 * @returns {{}}
 */
function extractPaths(apidocJson) {
  let apiPaths = groupByUrl(apidocJson)
  let paths = {}
  for (let i = 0; i < apiPaths.length; i++) {
    let verbs = apiPaths[i].verbs
    let url = verbs[0].url
    let pattern = pathToRegexp(url, null)
    let matches = pattern.exec(url)

    // Surrounds URL parameters with curly brackets -> :email with {email}
    let pathKeys = []
    for (let j = 1; j < matches.length; j++) {
      let key = matches[j].substr(1)
      url = url.replace(matches[j], "{" + key + "}")
      pathKeys.push(key)
    }

    for (let j = 0; j < verbs.length; j++) {
      let verb = verbs[j]
      let type = verb.type

      let obj = (paths[url] = paths[url] || {})

      if (type === "post" || type === "patch" || type === "put") {
        _.extend(
          obj,
          createPostPushPutOutput(verb, swagger.definitions, pathKeys)
        )
      } else {
        _.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
      }
    }
  }
  return paths
}

function createPostPushPutOutput(verbs, definitions, pathKeys) {
  let pathItemObject = {}
  let verbDefinitionResult = createVerbDefinitions(verbs, definitions)

  let params = []
  let pathParams = createPathParameters(verbs, pathKeys)
  pathParams = _.filter(pathParams, function (param) {
    let hasKey = pathKeys.indexOf(param.name) !== -1
    return !(param.in === "path" && !hasKey)
  })

  params = params.concat(pathParams)
  let required =
    verbs.parameter &&
    verbs.parameter.fields &&
    verbs.parameter.fields.Parameter &&
    verbs.parameter.fields.Parameter.length > 0

  params.push({
    in: "body",
    name: "body",
    description: removeTags(verbs.description),
    required: required,
    schema: {
      $ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
    },
  })

  pathItemObject[verbs.type] = {
    tags: [verbs.group],
    summary: removeTags(verbs.description),
    consumes: ["application/json"],
    produces: ["application/json"],
    parameters: params,
  }

  if (verbDefinitionResult.topLevelSuccessRef) {
    pathItemObject[verbs.type].responses = {
      200: {
        description: "successful operation",
        schema: {
          type: verbDefinitionResult.topLevelSuccessRefType,
          items: {
            $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
          },
        },
      },
    }
  }

  return pathItemObject
}

function createVerbDefinitions(verbs, definitions) {
  let result = {
    topLevelParametersRef: null,
    topLevelSuccessRef: null,
    topLevelSuccessRefType: null,
  }
  let defaultObjectName = verbs.name

  let fieldArrayResult = {}
  if (verbs && verbs.parameter && verbs.parameter.fields) {
    fieldArrayResult = createFieldArrayDefinitions(
      verbs.parameter.fields.Parameter,
      definitions,
      verbs.name,
      defaultObjectName
    )
    result.topLevelParametersRef = fieldArrayResult.topLevelRef
  }

  if (verbs && verbs.success && verbs.success.fields) {
    fieldArrayResult = createFieldArrayDefinitions(
      verbs.success.fields["Success 200"],
      definitions,
      verbs.name,
      defaultObjectName
    )
    result.topLevelSuccessRef = fieldArrayResult.topLevelRef
    result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
  }

  return result
}

function createFieldArrayDefinitions(
  fieldArray,
  definitions,
  topLevelRef,
  defaultObjectName
) {
  let result = {
    topLevelRef: topLevelRef,
    topLevelRefType: null,
  }

  if (!fieldArray) {
    return result
  }

  for (let i = 0; i < fieldArray.length; i++) {
    let parameter = fieldArray[i]

    let nestedName = createNestedName(parameter.field)
    let objectName = nestedName.objectName
    if (!objectName) {
      objectName = defaultObjectName
    }
    let type = parameter.type
    if (i === 0) {
      result.topLevelRefType = type
      if (parameter.type === "Object") {
        objectName = nestedName.propertyName
        nestedName.propertyName = null
      } else if (parameter.type === "Array") {
        objectName = nestedName.propertyName
        nestedName.propertyName = null
        result.topLevelRefType = "array"
      }
      result.topLevelRef = objectName
    }

    definitions[objectName] = definitions[objectName] || {
      properties: {},
      required: [],
    }

    if (nestedName.propertyName) {
      let prop = {
        type: (parameter.type || "").toLowerCase(),
        description: removeTags(parameter.description),
      }
      if (parameter.type === "Object") {
        prop.$ref = "#/definitions/" + parameter.field
      }

      let typeIndex = type.indexOf("[]")
      if (typeIndex !== -1 && typeIndex === type.length - 2) {
        prop.type = "array"
        prop.items = {
          type: type.slice(0, type.length - 2),
        }
      }

      definitions[objectName]["properties"][nestedName.propertyName] = prop
      if (!parameter.optional) {
        let arr = definitions[objectName]["required"]
        if (arr.indexOf(nestedName.propertyName) === -1) {
          arr.push(nestedName.propertyName)
        }
      }
    }
  }

  return result
}

function createNestedName(field) {
  let propertyName = field
  let objectName
  let propertyNames = field.split(".")
  if (propertyNames && propertyNames.length > 1) {
    propertyName = propertyNames[propertyNames.length - 1]
    propertyNames.pop()
    objectName = propertyNames.join(".")
  }

  return {
    propertyName: propertyName,
    objectName: objectName,
  }
}

/**
 * Generate get, delete method output
 * @param verbs
 * @param definitions
 * @returns {{}}
 */
function createGetDeleteOutput(verbs, definitions) {
  let pathItemObject = {}
  verbs.type = verbs.type === "del" ? "delete" : verbs.type

  let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
  pathItemObject[verbs.type] = {
    tags: [verbs.group],
    summary: removeTags(verbs.description),
    consumes: ["application/json"],
    produces: ["application/json"],
    parameters: createPathParameters(verbs),
  }
  if (verbDefinitionResult.topLevelSuccessRef) {
    pathItemObject[verbs.type].responses = {
      200: {
        description: "successful operation",
        schema: {
          type: verbDefinitionResult.topLevelSuccessRefType,
          items: {
            $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
          },
        },
      },
    }
  }
  return pathItemObject
}

/**
 * Iterate through all method parameters and create array of parameter objects which are stored as path parameters
 * @param verbs
 * @returns {Array}
 */
function createPathParameters(verbs) {
  let pathItemObject = []
  if (verbs.parameter && verbs.parameter.fields.Parameter) {
    for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
      let param = verbs.parameter.fields.Parameter[i]
      let field = param.field
      let type = param.type
      pathItemObject.push({
        name: field,
        in: type === "file" ? "formData" : "path",
        required: !param.optional,
        type: param.type.toLowerCase(),
        description: removeTags(param.description),
      })
    }
  }
  return pathItemObject
}

function groupByUrl(apidocJson) {
  return _.chain(apidocJson)
    .groupBy("url")
    .toPairs()
    .map(function (element) {
      return _.zipObject(["url", "verbs"], element)
    })
    .value()
}

module.exports = toSwagger
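For reference, the converter can be exercised on its own with a hand-built APIDoc payload. The route below is invented purely to show the expected input shape (a parsed APIDoc data array plus project metadata); it is not part of the PR.

```js
const toSwagger = require("./toSwagger")

// a single made-up route entry in the shape APIDoc produces
const apidocJson = [
  {
    type: "get",
    url: "/api/examples/:exampleId",
    name: "Get an example",
    group: "examples",
    description: "<p>Fetches a single example record.</p>",
    parameter: {
      fields: {
        Parameter: [
          {
            field: "exampleId",
            type: "string",
            optional: false,
            description: "<p>The ID of the record.</p>",
          },
        ],
      },
    },
  },
]
const projectJson = { name: "Example API", version: "1.0.0", description: "demo" }

const swagger = toSwagger(apidocJson, projectJson)
console.log(JSON.stringify(swagger.paths, null, 2))
```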
@@ -44,14 +44,10 @@ exports.save = async function (ctx)
  // the column if you want to change the type
  if (oldTable && oldTable.schema) {
    for (let propKey of Object.keys(tableToSave.schema)) {
      let column = tableToSave.schema[propKey]
      let oldColumn = oldTable.schema[propKey]
      if (oldColumn && oldColumn.type === "internal") {
        oldColumn.type = "auto"
      }
      if (oldColumn && oldColumn.type !== column.type) {
        ctx.throw(400, "Cannot change the type of a column")
      }
    }
  }

@@ -14,24 +14,130 @@ const {
const router = Router()

router
  /**
   * @api {get} /api/:tableId/:rowId/enrich Get an enriched row
   * @apiName Get an enriched row
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This API is only useful when dealing with rows that have relationships.
   * Normally when a row is returned from the API relationships will only have the structure
   * `{ primaryDisplay: "name", _id: ... }` but this call will return the full related rows
   * for each relationship instead.
   *
   * @apiParam {string} rowId The ID of the row which is to be retrieved and enriched.
   *
   * @apiSuccess {object} row The response body will be the enriched row.
   */
  .get(
    "/api/:tableId/:rowId/enrich",
    paramSubResource("tableId", "rowId"),
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    rowController.fetchEnrichedRow
  )
  /**
   * @api {get} /api/:tableId/rows Get all rows in a table
   * @apiName Get all rows in a table
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This is a deprecated endpoint that should not be used anymore; use the search endpoint instead.
   * This endpoint gets all of the rows within the specified table - it is not heavily used
   * due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
   * will simply stop.
   *
   * @apiParam {string} tableId The ID of the table to retrieve all rows within.
   *
   * @apiSuccess {object[]} rows The response body will be an array of all rows found.
   */
  .get(
    "/api/:tableId/rows",
    paramResource("tableId"),
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    rowController.fetch
  )
  /**
   * @api {get} /api/:tableId/rows/:rowId Retrieve a single row
   * @apiName Retrieve a single row
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
   * a row by anything other than its _id field, use the search endpoint.
   *
   * @apiParam {string} tableId The ID of the table to retrieve a row from.
   * @apiParam {string} rowId The ID of the row to retrieve.
   *
   * @apiSuccess {object} body The response body will be the row that was found.
   */
  .get(
    "/api/:tableId/rows/:rowId",
    paramSubResource("tableId", "rowId"),
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    rowController.find
  )
  /**
   * @api {post} /api/:tableId/search Search for rows in a table
   * @apiName Search for rows in a table
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This is the primary method of accessing rows in Budibase, the data provider
   * and data UI in the builder are built atop this. All filtering, sorting and pagination is
   * handled through this, for internal and external (datasource plus, e.g. SQL) tables.
   *
   * @apiParam {string} tableId The ID of the table to retrieve rows from.
   *
   * @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
   * defaults to false.
   * @apiParam (Body) {object} [query] This contains a set of filters which should be applied, if none
   * specified then the request will be unfiltered. An example with all of the possible query
   * options has been supplied below.
   * @apiParam (Body) {number} [limit] This sets a limit for the number of rows that will be returned,
   * this will be implemented at the database level if supported for performance reasons. This
   * is useful when paginating to set exactly how many rows per page.
   * @apiParam (Body) {string} [bookmark] If pagination is enabled then a bookmark will be returned
   * with each successful search request, this should be supplied back to get the next page.
   * @apiParam (Body) {object} [sort] If sort is desired this should contain the name of the column to
   * sort on.
   * @apiParam (Body) {string} [sortOrder] If sort is enabled then this can be either "descending" or
   * "ascending" as required.
   * @apiParam (Body) {string} [sortType] If sort is enabled then you must specify the type of search
   * being used, either "string" or "number". This is only used for internal tables.
   *
   * @apiParamExample {json} Example:
   * {
   *   "tableId": "ta_70260ff0b85c467ca74364aefc46f26d",
   *   "query": {
   *     "string": {},
   *     "fuzzy": {},
   *     "range": {
   *       "columnName": {
   *         "high": 20,
   *         "low": 10
   *       }
   *     },
   *     "equal": {
   *       "columnName": "someValue"
   *     },
   *     "notEqual": {},
   *     "empty": {},
   *     "notEmpty": {},
   *     "contains": {},
   *     "notContains": {},
   *     "oneOf": {
   *       "columnName": ["value"]
   *     }
   *   },
   *   "limit": 10,
   *   "sort": "name",
   *   "sortOrder": "descending",
   *   "sortType": "string",
   *   "paginate": true
   * }
   *
   * @apiSuccess {object[]} rows An array of rows that was found based on the supplied parameters.
   * @apiSuccess {boolean} hasNextPage If pagination was enabled then this specifies whether or
   * not there is another page after this request.
   * @apiSuccess {string} bookmark The bookmark to be sent with the next request to get the next
   * page.
   */
  .post(
    "/api/:tableId/search",
    paramResource("tableId"),
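Put together, a client-side search call built from the body documented above might look like the following sketch; the table ID is the one from the example, and a fetch-capable environment with authentication already in place is assumed.

```js
// illustrative only - not part of the PR
async function searchRows() {
  const tableId = "ta_70260ff0b85c467ca74364aefc46f26d"
  const response = await fetch(`/api/${tableId}/search`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: { equal: { columnName: "someValue" } },
      limit: 10,
      sort: "name",
      sortOrder: "descending",
      sortType: "string",
      paginate: true,
    }),
  })
  // rows, hasNextPage and bookmark match the @apiSuccess fields above
  return response.json()
}

searchRows().catch(console.error)
```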
@@ -46,6 +152,30 @@ router
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    rowController.search
  )
  /**
   * @api {post} /api/:tableId/rows Creates a new row
   * @apiName Creates a new row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This API will create a new row based on the supplied body. If the
   * body includes an "_id" field then it will update an existing row if the field
   * links to one. Please note that "_id", "_rev" and "tableId" are fields that are
   * already used by Budibase tables and cannot be used for columns.
   *
   * @apiParam {string} tableId The ID of the table to save a row to.
   *
   * @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
   * @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
   * must also be provided.
   * @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
   * @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
   * a column in the specified table. All other fields will be dropped and not stored.
   *
   * @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
   * is the row's new ID.
   * @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
   * @apiSuccess {object} body The contents of the row that was saved will be returned as well.
   */
  .post(
    "/api/:tableId/rows",
    paramResource("tableId"),
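A matching creation request, again only a sketch with placeholder table and column names, sends the table ID in both the URL and the body as described above.

```js
// illustrative only - not part of the PR
async function createRow() {
  const tableId = "ta_70260ff0b85c467ca74364aefc46f26d"
  const response = await fetch(`/api/${tableId}/rows`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      tableId, // the table ID must also be present in the body
      columnName: "someValue",
    }),
  })
  // the saved row is returned, including _id (and _rev for internal tables)
  return response.json()
}

createRow().catch(console.error)
```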
@@ -53,18 +183,66 @@ router
    usage,
    rowController.save
  )
  /**
   * @api {patch} /api/:tableId/rows Updates a row
   * @apiName Update a row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This endpoint is identical to the row creation endpoint but instead it will
   * error if an _id isn't provided, it will only function for existing rows.
   */
  .patch(
    "/api/:tableId/rows",
    paramResource("tableId"),
    authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
    rowController.patch
  )
  /**
   * @api {post} /api/:tableId/rows/validate Validate inputs for a row
   * @apiName Validate inputs for a row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription When attempting to save a row you may want to check if the row is valid
   * given the table schema, this will iterate through all the constraints on the table and
   * check if the request body is valid.
   *
   * @apiParam {string} tableId The ID of the table the row is to be validated for.
   *
   * @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
   * against the table schema and constraints.
   *
   * @apiSuccess {boolean} valid If inputs provided are acceptable within the table schema this
   * will be true, if it is not then the errors property will be populated.
   * @apiSuccess {object} [errors] A key value map of information about fields on the input
   * which do not match the table schema. The keys will be the column names that have breached
   * the schema.
   */
  .post(
    "/api/:tableId/rows/validate",
    paramResource("tableId"),
    authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
    rowController.validate
  )
  /**
   * @api {delete} /api/:tableId/rows Delete rows
   * @apiName Delete rows
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This endpoint can delete a single row, or delete them in a bulk
   * fashion.
   *
   * @apiParam {string} tableId The ID of the table the row is to be deleted from.
   *
   * @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
   * key of the request body that are to be deleted.
   * @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
   * @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
   * revision here.
   *
   * @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
   * of the deleted rows, if deleting a single row then the body will contain a "row" property which
   * is the deleted row.
   */
  .delete(
    "/api/:tableId/rows",
    paramResource("tableId"),
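A bulk delete request using the body shape documented above could look like the following sketch; the table ID, row IDs and revisions are placeholders.

```js
// illustrative only - not part of the PR
async function deleteRows(rows) {
  const tableId = "ta_70260ff0b85c467ca74364aefc46f26d"
  const response = await fetch(`/api/${tableId}/rows`, {
    method: "DELETE",
    headers: { "Content-Type": "application/json" },
    // for a single row, { _id, _rev } can be sent instead of { rows }
    body: JSON.stringify({ rows }),
  })
  return response.json()
}

deleteRows([
  { _id: "ro_ta_example_one", _rev: "1-abc" },
  { _id: "ro_ta_example_two", _rev: "1-def" },
]).catch(console.error)
```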
@@ -27,13 +27,110 @@ function generateSaveValidator()
}

router
  /**
   * @api {get} /api/tables Fetch all tables
   * @apiName Fetch all tables
   * @apiGroup tables
   * @apiPermission table read access
   * @apiDescription This endpoint retrieves all of the tables which have been created in
   * an app. This includes all of the external and internal tables; to tell the difference
   * between these look for the "type" property on each table, either being "internal" or "external".
   *
   * @apiSuccess {object[]} body The response body will be the list of tables that was found - as
   * this does not take any parameters the only error scenario is no access.
   */
  .get("/api/tables", authorized(BUILDER), tableController.fetch)
  /**
   * @api {get} /api/tables/:id Fetch a single table
   * @apiName Fetch a single table
   * @apiGroup tables
   * @apiPermission table read access
   * @apiDescription Retrieves a single table, which could be internal or external, based on
   * the provided table ID.
   *
   * @apiParam {string} id The ID of the table which is to be retrieved.
   *
   * @apiSuccess {object} body The response body will be the table that was found.
   */
  .get(
    "/api/tables/:id",
    paramResource("id"),
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    tableController.find
  )
  /**
   * @api {post} /api/tables Save a table
   * @apiName Save a table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription Create or update a table with this endpoint, this will function for both internal
   * and external tables.
   *
   * @apiParam (Body) {string} [_id] If updating an existing table then the ID of the table must be specified.
   * @apiParam (Body) {string} [_rev] If updating an existing internal table then the revision must also be specified.
   * @apiParam (Body) {string} [type] This should either be "internal" or "external" depending on the table type -
   * this will default to internal.
   * @apiParam (Body) {string} [sourceId] If creating an external table then this should be set to the data source ID. If
   * building an internal table this does not need to be set, although it will be returned as "bb_internal".
   * @apiParam (Body) {string} name The name of the table, this will be used in the UI. To rename the table simply
   * supply the table structure to this endpoint with the name changed.
   * @apiParam (Body) {object} schema A key value object which has all of the columns in the table as the keys in this
   * object. For each column a "type" and "constraints" must be specified, with some types requiring further information.
   * More information about the schema structure can be found in the Typescript definitions.
   * @apiParam (Body) {string} [primaryDisplay] The name of the column which should be used when displaying rows
   * from this table as relationships.
   * @apiParam (Body) {object[]} [indexes] Specifies the search indexes - this is deprecated behaviour with the introduction
   * of lucene indexes. This functionality is only available for internal tables.
   * @apiParam (Body) {object} [_rename] If a column is to be renamed then the "old" column name should be set in this
   * structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field
   * lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix
   * the rows in the table. This functionality is only available for internal tables.
   * @apiParam (Body) {object} [dataImport] When creating an internal table it can be built from a CSV, by using the
   * CSV validation endpoint. Send the CSV data to the validation endpoint, then put the results of that call
   * into this property, along with the CSV and a table/rows will be built from it. This is not supported when updating
   * or for external tables.
   *
   * @apiParamExample {json} Example:
   * {
   *   "_id": "ta_05541307fa0f4044abee071ca2a82119",
   *   "_rev": "10-0fbe4e78f69b255d79f1017e2eeef807",
   *   "type": "internal",
   *   "views": {},
   *   "name": "tableName",
   *   "schema": {
   *     "column": {
   *       "type": "string",
   *       "constraints": {
   *         "type": "string",
   *         "length": {
   *           "maximum": null
   *         },
   *         "presence": false
   *       },
   *       "name": "column"
   *     }
   *   },
   *   "primaryDisplay": "column",
   *   "indexes": [],
   *   "sourceId": "bb_internal",
   *   "_rename": {
   *     "old": "columnName",
   *     "updated": "newColumnName"
   *   },
   *   "dataImport": {
   *     "csvString": "column\nvalue",
   *     "primaryDisplay": "column",
   *     "schema": {
   *       "column": {
   *         "type": "string"
   *       }
   *     }
   *   }
   * }
   *
   * @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
   * saved to the database.
   */
  .post(
    "/api/tables",
    // allows control over updating a table
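As a usage sketch, creating a new internal table only needs the name, type and schema from the example body above (no _id or _rev since nothing exists yet); the values here are placeholders.

```js
// illustrative only - not part of the PR
async function createTable() {
  const response = await fetch("/api/tables", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      name: "tableName",
      type: "internal",
      schema: {
        column: {
          type: "string",
          constraints: { type: "string", presence: false },
          name: "column",
        },
      },
      primaryDisplay: "column",
    }),
  })
  // returns the cleaned up table structure, including its new _id
  return response.json()
}

createTable().catch(console.error)
```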
@@ -42,22 +139,68 @@ router
    generateSaveValidator(),
    tableController.save
  )
  /**
   * @api {post} /api/tables/csv/validate Validate a CSV for a table
   * @apiName Validate a CSV for a table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription When creating a new table, or importing a CSV to an existing table the CSV must be validated and
   * converted into a Budibase schema; this endpoint does this.
   *
   * @apiParam (Body) {string} csvString The CSV which is to be validated as a string.
   * @apiParam (Body) {object} [schema] When a CSV has been validated it is possible to re-validate after changing the
   * type of a field, by default everything will be strings as there is no way to infer types. The returned schema can
   * be updated and then returned to the endpoint to re-validate and check if the type will work for the CSV, e.g.
   * using a number instead of strings.
   * @apiParam (Body) {string} [tableId] If importing data to an existing table this will pull the current table and
   * remove any fields from the CSV schema which do not exist on the table/don't match the type of the table. When
   * importing a CSV to an existing table only fields that are present on the table can be imported.
   *
   * @apiSuccess {object} schema The response body will contain a "schema" object that represents the schema found for
   * the CSV - this will be in the same format used for table schemas.
   */
  .post(
    "/api/tables/csv/validate",
    authorized(BUILDER),
    tableController.validateCSVSchema
  )
  /**
   * @api {delete} /api/tables/:tableId/:revId Delete a table
   * @apiName Delete a table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription This endpoint will delete a table and all of its associated data, for this reason it is
   * quite dangerous - it will work for internal and external tables.
   *
   * @apiParam {string} tableId The ID of the table which is to be deleted.
   * @apiParam {string} [revId] If deleting an internal table then the revision must also be supplied (_rev), for
   * external tables this can simply be set to anything, e.g. "external".
   *
   * @apiSuccess {string} message A message stating that the table was deleted successfully.
   */
  .delete(
    "/api/tables/:tableId/:revId",
    paramResource("tableId"),
    authorized(BUILDER),
    tableController.destroy
  )
  // this is currently builder only, but in the future
  // it could be carried out by an end user in app,
  // however some thought will need to be had about
  // implications for automations (triggers)
  // new trigger type, bulk rows created
  /**
   * @api {post} /api/tables/:tableId/import Import CSV to existing table
   * @apiName Import CSV to existing table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription This endpoint will import data to existing tables, internal or external. It is used in combination
   * with the CSV validation endpoint. Take the output of the CSV validation endpoint and pass it to this endpoint to
   * import the data; please note this will only import fields that already exist on the table/match the type.
   *
   * @apiParam {string} tableId The ID of the table which the data should be imported to.
   *
   * @apiParam (Body) {object} dataImport This is the same as the structure used when creating an internal table with
   * a CSV, it will have the "schema" returned from the CSV validation endpoint and the "csvString" which is to be
   * turned into rows.
   *
   * @apiSuccess {string} message A message stating that the data was imported successfully.
   */
  .post(
    "/api/tables/:tableId/import",
    paramResource("tableId"),
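The two CSV endpoints above are designed to be used together; a sketch of the validate-then-import flow follows, with the table ID and CSV contents as placeholders and authentication assumed to be handled already.

```js
// illustrative only - not part of the PR
async function importCsv() {
  const tableId = "ta_05541307fa0f4044abee071ca2a82119"
  const csvString = "column\nvalue"

  // 1. validate the CSV and get a schema back
  const validateRes = await fetch("/api/tables/csv/validate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ csvString, tableId }),
  })
  const { schema } = await validateRes.json()

  // 2. send the schema and CSV back as a dataImport to the import endpoint
  const importRes = await fetch(`/api/tables/${tableId}/import`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ dataImport: { csvString, schema } }),
  })
  return importRes.json()
}

importCsv().catch(console.error)
```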
File diff suppressed because it is too large