Merge branch 'master' into fix/internal-db-enrich-perf
commit a23f918880
@@ -18,7 +18,7 @@ jobs:
       - name: Maximize build space
         uses: easimon/maximize-build-space@master
         with:
-          root-reserve-mb: 35000
+          root-reserve-mb: 30000
           swap-size-mb: 1024
           remove-android: 'true'
           remove-dotnet: 'true'

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.22",
+  "version": "2.11.30",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -74,7 +74,6 @@
     "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
     "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
     "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
-    "build:docs": "lerna run --stream build:docs",
     "release:helm": "node scripts/releaseHelmChart",
     "env:multi:enable": "lerna run --stream env:multi:enable",
     "env:multi:disable": "lerna run --stream env:multi:disable",

@@ -102,7 +102,7 @@
       </div>
     {/if}
     <div class="text" title={showTooltip ? text : null}>
-      {text}
+      <span title={text}>{text}</span>
       {#if selectedBy}
        <UserAvatars size="XS" users={selectedBy} />
       {/if}

@@ -227,9 +227,6 @@
   .text {
     font-weight: 600;
     font-size: 12px;
-    white-space: nowrap;
-    overflow: hidden;
-    text-overflow: ellipsis;
     flex: 1 1 auto;
     color: var(--spectrum-global-color-gray-900);
     order: 2;

@@ -238,6 +235,11 @@
     align-items: center;
     gap: 8px;
   }
+  .text span {
+    white-space: nowrap;
+    overflow: hidden;
+    text-overflow: ellipsis;
+  }
   .scrollable .text {
     flex: 0 0 auto;
     max-width: 160px;

@@ -3,16 +3,17 @@
  * e.g.
  *  name          all names               result
  *  ------        -----------             --------
- *  ("foo")       ["foo"]                 "foo (1)"
- *  ("foo")       ["foo", "foo (1)"]      "foo (2)"
- *  ("foo (1)")   ["foo", "foo (1)"]      "foo (2)"
- *  ("foo")       ["foo", "foo (2)"]      "foo (1)"
+ *  ("foo")       ["foo"]                 "foo 1"
+ *  ("foo")       ["foo", "foo 1"]        "foo 2"
+ *  ("foo 1")     ["foo", "foo 1"]        "foo 2"
+ *  ("foo")       ["foo", "foo 2"]        "foo 1"
  *
  * Repl
  */
 export const duplicateName = (name, allNames) => {
-  const baseName = name.split(" (")[0]
-  const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`)
+  const duplicatePattern = new RegExp(`\\s(\\d+)$`)
+  const baseName = name.split(duplicatePattern)[0]
+  const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)

   // get the sequence from matched names
   const sequence = []

@@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
     return false
   })
   sequence.sort((a, b) => a - b)
-
   // get the next number in the sequence
   let number
   if (sequence.length === 0) {

@@ -46,5 +46,5 @@ export const duplicateName = (name, allNames) => {
     }
   }

-  return `${baseName} (${number})`
+  return `${baseName} ${number}`
 }

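Note: taken together, the three hunks above switch the duplicate-naming scheme from `foo (1)` to `foo 1`. A sketch of the resulting function, assuming a matching loop over `allNames` that this diff only shows fragments of (the loop body here is an illustration, not the exact source):

```ts
// Sketch of the post-change duplicateName. Only the regex setup, the sort,
// and the return statement are visible in the diff; the rest is assumed.
export const duplicateName = (name: string, allNames: string[]): string => {
  const duplicatePattern = new RegExp(`\\s(\\d+)$`)
  const baseName = name.split(duplicatePattern)[0]
  const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)

  // collect the numeric suffixes of existing duplicates (assumed loop)
  const sequence: number[] = []
  for (const existing of allNames) {
    const match = existing.match(isDuplicate)
    if (match) {
      sequence.push(parseInt(match[1]))
    }
  }
  sequence.sort((a, b) => a - b)

  // take the lowest free number, starting from 1
  let number = 1
  for (const n of sequence) {
    if (n === number) {
      number++
    }
  }
  return `${baseName} ${number}`
}

// Matches the doc-comment examples above, e.g.:
// duplicateName("foo", ["foo", "foo 2"]) === "foo 1"
```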
@@ -9,34 +9,34 @@ describe("duplicate", () => {

       const duplicate = duplicateName(name, names)

-      expect(duplicate).toBe("foo (1)")
+      expect(duplicate).toBe("foo 1")
     })

     it("with multiple existing", async () => {
-      const names = ["foo", "foo (1)", "foo (2)"]
+      const names = ["foo", "foo 1", "foo 2"]
       const name = "foo"

       const duplicate = duplicateName(name, names)

-      expect(duplicate).toBe("foo (3)")
+      expect(duplicate).toBe("foo 3")
     })

     it("with mixed multiple existing", async () => {
-      const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"]
+      const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
       const name = "foo"

       const duplicate = duplicateName(name, names)

-      expect(duplicate).toBe("foo (3)")
+      expect(duplicate).toBe("foo 3")
     })

     it("with incomplete sequence", async () => {
-      const names = ["foo", "foo (2)", "foo (3)"]
+      const names = ["foo", "foo 2", "foo 3"]
       const name = "foo"

       const duplicate = duplicateName(name, names)

-      expect(duplicate).toBe("foo (1)")
+      expect(duplicate).toBe("foo 1")
     })
   })
 })

@@ -3419,6 +3419,17 @@
           "value": "custom"
         }
       },
+      {
+        "type": "event",
+        "label": "On change",
+        "key": "onChange",
+        "context": [
+          {
+            "label": "Field Value",
+            "key": "value"
+          }
+        ]
+      },
       {
         "type": "validation/string",
         "label": "Validation",

@@ -128,6 +128,7 @@
   <div class="manual-input">
     <Input
       bind:value
+      updateOnChange={false}
       on:change={() => {
         dispatch("change", value)
       }}

@@ -20,7 +20,6 @@
     "test:watch": "jest --watch",
     "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
     "build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
-    "build:docs": "node ./scripts/docs/generate.js open",
     "run:docker": "node dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:up": "node scripts/dev/manage.js up",

@@ -112,7 +111,7 @@
     "to-json-schema": "0.2.5",
     "uuid": "3.3.2",
     "validate.js": "0.13.1",
-    "vm2": "3.9.17",
+    "vm2": "^3.9.19",
     "worker-farm": "1.7.0",
     "xml2js": "0.5.0",
     "yargs": "13.2.4"

@@ -1,31 +0,0 @@
-### Documentation
-
-This directory contains the scripts required to generate the APIDoc based documentation.
-You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
-
-In general most API endpoints will look like:
-```js
- /**
-  * @api {post} /api/:param/url Give it a name
-  * @apiName Give it a name
-  * @apiGroup group
-  * @apiPermission permission
-  * @apiDescription Describe what the endpoint does, any special cases the user
-  * should be aware of.
-  *
-  * @apiParam {string} param describe a URL parameter.
-  *
-  * @apiParam (Body) input describe a field on the body.
-  *
-  * @apiSuccess {object} output describe the output.
-  */
-```
-
-There are a few key points to note when writing API docs:
-1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
-2. Make sure to always have an `@api` definition at the start, which must always have the
-HTTP verb, the endpoint URL and the name.
-3. There are three ways you can specify parameters used as inputs for your endpoint,
-`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
-for query string parameters.
-4. The `@apiGroup` should be the same for all API Doc comments in a route file.

@@ -1,74 +0,0 @@
-const fs = require("fs")
-const { join } = require("path")
-const { createDoc } = require("apidoc")
-const packageJson = require("../../package.json")
-const toSwagger = require("./toSwagger")
-const open = require("open")
-
-const config = {
-  name: "Budibase API",
-  version: packageJson.version,
-  description: "Documenting the Budibase backend API",
-  title: "Budibase app service API",
-}
-
-const shouldOpen = process.argv[2]
-const disallowed = []
-
-function filter(parsedRouteFiles) {
-  const tagToSearch = "url"
-  for (let routeFile of parsedRouteFiles) {
-    for (let route of routeFile) {
-      let routeInfo = route["local"]
-      if (disallowed.includes(routeInfo[tagToSearch])) {
-        const idx = routeFile.indexOf(route)
-        routeFile.splice(idx, 1)
-      }
-    }
-  }
-}
-
-async function generate() {
-  // start by writing a config file
-  const configPath = join(__dirname, "config.json")
-  fs.writeFileSync(configPath, JSON.stringify(config))
-  const mainPath = join(__dirname, "..", "..")
-  const srcPath = join(mainPath, "src", "api", "routes")
-  const assetsPath = join(mainPath, "builder", "assets", "docs")
-  if (!fs.existsSync(assetsPath)) {
-    fs.mkdirSync(assetsPath, { recursive: true })
-  }
-  const options = {
-    src: [srcPath],
-    dest: assetsPath,
-    filters: {
-      main: {
-        postFilter: filter,
-      },
-    },
-    config: configPath,
-  }
-  const doc = createDoc(options)
-  if (typeof doc !== "boolean") {
-    const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
-    fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
-    fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
-    fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
-    console.log(
-      `Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
-    )
-  } else {
-    throw "Unable to generate docs."
-  }
-  // delete the temporary config file
-  fs.unlinkSync(configPath)
-  setTimeout(async () => {
-    if (shouldOpen === "open") {
-      await open(join(assetsPath, "index.html"), { wait: false })
-    }
-  }, 2000)
-}
-
-generate().catch(err => {
-  console.error(err)
-})

@@ -1,320 +0,0 @@
-let _ = require("lodash")
-let { pathToRegexp } = require("path-to-regexp")
-
-/********************************************************
- * Based on: https://github.com/fsbahman/apidoc-swagger *
- ********************************************************/
-
-let swagger = {
-  swagger: "2.0",
-  info: {},
-  paths: {},
-  definitions: {},
-}
-
-function toSwagger(apidocJson, projectJson) {
-  swagger.info = addInfo(projectJson)
-  swagger.paths = extractPaths(apidocJson)
-  return swagger
-}
-
-let tagsRegex = /(<([^>]+)>)/gi
-// Removes <p> </p> tags from text
-function removeTags(text) {
-  return text ? text.replace(tagsRegex, "") : text
-}
-
-function addInfo(projectJson) {
-  let info = {}
-  info["title"] = projectJson.title || projectJson.name
-  info["version"] = projectJson.version
-  info["description"] = projectJson.description
-  return info
-}
-
-/**
- * Extracts paths provided in json format
- * post, patch, put request parameters are extracted in body
- * get and delete are extracted to path parameters
- * @param apidocJson
- * @returns {{}}
- */
-function extractPaths(apidocJson) {
-  let apiPaths = groupByUrl(apidocJson)
-  let paths = {}
-  for (let i = 0; i < apiPaths.length; i++) {
-    let verbs = apiPaths[i].verbs
-    let url = verbs[0].url
-    let pattern = pathToRegexp(url, null)
-    let matches = pattern.exec(url)
-
-    // Surrounds URL parameters with curly brackets -> :email with {email}
-    let pathKeys = []
-    for (let j = 1; j < matches.length; j++) {
-      let key = matches[j].slice(1)
-      url = url.replace(matches[j], "{" + key + "}")
-      pathKeys.push(key)
-    }
-
-    for (let j = 0; j < verbs.length; j++) {
-      let verb = verbs[j]
-      let type = verb.type
-
-      let obj = (paths[url] = paths[url] || {})
-
-      if (type === "post" || type === "patch" || type === "put") {
-        _.extend(
-          obj,
-          createPostPushPutOutput(verb, swagger.definitions, pathKeys)
-        )
-      } else {
-        _.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
-      }
-    }
-  }
-  return paths
-}
-
-function createPostPushPutOutput(verbs, definitions, pathKeys) {
-  let pathItemObject = {}
-  let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
-
-  let params = []
-  let pathParams = createPathParameters(verbs, pathKeys)
-  pathParams = _.filter(pathParams, function (param) {
-    let hasKey = pathKeys.indexOf(param.name) !== -1
-    return !(param.in === "path" && !hasKey)
-  })
-
-  params = params.concat(pathParams)
-  let required =
-    verbs.parameter &&
-    verbs.parameter.fields &&
-    verbs.parameter.fields.Parameter &&
-    verbs.parameter.fields.Parameter.length > 0
-
-  params.push({
-    in: "body",
-    name: "body",
-    description: removeTags(verbs.description),
-    required: required,
-    schema: {
-      $ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
-    },
-  })
-
-  pathItemObject[verbs.type] = {
-    tags: [verbs.group],
-    summary: removeTags(verbs.description),
-    consumes: ["application/json"],
-    produces: ["application/json"],
-    parameters: params,
-  }
-
-  if (verbDefinitionResult.topLevelSuccessRef) {
-    pathItemObject[verbs.type].responses = {
-      200: {
-        description: "successful operation",
-        schema: {
-          type: verbDefinitionResult.topLevelSuccessRefType,
-          items: {
-            $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
-          },
-        },
-      },
-    }
-  }
-
-  return pathItemObject
-}
-
-function createVerbDefinitions(verbs, definitions) {
-  let result = {
-    topLevelParametersRef: null,
-    topLevelSuccessRef: null,
-    topLevelSuccessRefType: null,
-  }
-  let defaultObjectName = verbs.name
-
-  let fieldArrayResult = {}
-  if (verbs && verbs.parameter && verbs.parameter.fields) {
-    fieldArrayResult = createFieldArrayDefinitions(
-      verbs.parameter.fields.Parameter,
-      definitions,
-      verbs.name,
-      defaultObjectName
-    )
-    result.topLevelParametersRef = fieldArrayResult.topLevelRef
-  }
-
-  if (verbs && verbs.success && verbs.success.fields) {
-    fieldArrayResult = createFieldArrayDefinitions(
-      verbs.success.fields["Success 200"],
-      definitions,
-      verbs.name,
-      defaultObjectName
-    )
-    result.topLevelSuccessRef = fieldArrayResult.topLevelRef
-    result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
-  }
-
-  return result
-}
-
-function createFieldArrayDefinitions(
-  fieldArray,
-  definitions,
-  topLevelRef,
-  defaultObjectName
-) {
-  let result = {
-    topLevelRef: topLevelRef,
-    topLevelRefType: null,
-  }
-
-  if (!fieldArray) {
-    return result
-  }
-
-  for (let i = 0; i < fieldArray.length; i++) {
-    let parameter = fieldArray[i]
-
-    let nestedName = createNestedName(parameter.field)
-    let objectName = nestedName.objectName
-    if (!objectName) {
-      objectName = defaultObjectName
-    }
-    let type = parameter.type
-    if (i === 0) {
-      result.topLevelRefType = type
-      if (parameter.type === "Object") {
-        objectName = nestedName.propertyName
-        nestedName.propertyName = null
-      } else if (parameter.type === "Array") {
-        objectName = nestedName.propertyName
-        nestedName.propertyName = null
-        result.topLevelRefType = "array"
-      }
-      result.topLevelRef = objectName
-    }
-
-    definitions[objectName] = definitions[objectName] || {
-      properties: {},
-      required: [],
-    }
-
-    if (nestedName.propertyName) {
-      let prop = {
-        type: (parameter.type || "").toLowerCase(),
-        description: removeTags(parameter.description),
-      }
-      if (parameter.type === "Object") {
-        prop.$ref = "#/definitions/" + parameter.field
-      }
-
-      let typeIndex = type.indexOf("[]")
-      if (typeIndex !== -1 && typeIndex === type.length - 2) {
-        prop.type = "array"
-        prop.items = {
-          type: type.slice(0, type.length - 2),
-        }
-      }
-
-      definitions[objectName]["properties"][nestedName.propertyName] = prop
-      if (!parameter.optional) {
-        let arr = definitions[objectName]["required"]
-        if (arr.indexOf(nestedName.propertyName) === -1) {
-          arr.push(nestedName.propertyName)
-        }
-      }
-    }
-  }
-
-  return result
-}
-
-function createNestedName(field) {
-  let propertyName = field
-  let objectName
-  let propertyNames = field.split(".")
-  if (propertyNames && propertyNames.length > 1) {
-    propertyName = propertyNames[propertyNames.length - 1]
-    propertyNames.pop()
-    objectName = propertyNames.join(".")
-  }
-
-  return {
-    propertyName: propertyName,
-    objectName: objectName,
-  }
-}
-
-/**
- * Generate get, delete method output
- * @param verbs
- * @param definitions
- * @returns {{}}
- */
-function createGetDeleteOutput(verbs, definitions) {
-  let pathItemObject = {}
-  verbs.type = verbs.type === "del" ? "delete" : verbs.type
-
-  let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
-  pathItemObject[verbs.type] = {
-    tags: [verbs.group],
-    summary: removeTags(verbs.description),
-    consumes: ["application/json"],
-    produces: ["application/json"],
-    parameters: createPathParameters(verbs),
-  }
-  if (verbDefinitionResult.topLevelSuccessRef) {
-    pathItemObject[verbs.type].responses = {
-      200: {
-        description: "successful operation",
-        schema: {
-          type: verbDefinitionResult.topLevelSuccessRefType,
-          items: {
-            $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
-          },
-        },
-      },
-    }
-  }
-  return pathItemObject
-}
-
-/**
- * Iterate through all method parameters and create array of parameter objects which are stored as path parameters
- * @param verbs
- * @returns {Array}
- */
-function createPathParameters(verbs) {
-  let pathItemObject = []
-  if (verbs.parameter && verbs.parameter.fields.Parameter) {
-    for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
-      let param = verbs.parameter.fields.Parameter[i]
-      let field = param.field
-      let type = param.type
-      pathItemObject.push({
-        name: field,
-        in: type === "file" ? "formData" : "path",
-        required: !param.optional,
-        type: param.type.toLowerCase(),
-        description: removeTags(param.description),
-      })
-    }
-  }
-  return pathItemObject
-}
-
-function groupByUrl(apidocJson) {
-  return _.chain(apidocJson)
-    .groupBy("url")
-    .toPairs()
-    .map(function (element) {
-      return _.zipObject(["url", "verbs"], element)
-    })
-    .value()
-}
-
-module.exports = toSwagger

@@ -859,7 +859,8 @@
           "json",
           "internal",
           "barcodeqr",
-          "bigint"
+          "bigint",
+          "bb_reference"
         ],
         "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
       },

@@ -1064,7 +1065,8 @@
           "json",
           "internal",
           "barcodeqr",
-          "bigint"
+          "bigint",
+          "bb_reference"
         ],
         "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
       },

@@ -1280,7 +1282,8 @@
           "json",
           "internal",
           "barcodeqr",
-          "bigint"
+          "bigint",
+          "bb_reference"
         ],
         "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
       },

@@ -782,6 +782,7 @@ components:
             - internal
             - barcodeqr
             - bigint
+            - bb_reference
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:

@@ -946,6 +947,7 @@ components:
             - internal
             - barcodeqr
             - bigint
+            - bb_reference
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:

@@ -1117,6 +1119,7 @@ components:
             - internal
             - barcodeqr
             - bigint
+            - bb_reference
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:

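Note: `bb_reference` is the user-reference column type this commit adds to the public API spec (the JSON and YAML hunks above, plus the generated TypeScript below). The `processOutputBBReferences` hunk near the end of the diff enriches stored references into user objects; a hedged sketch of that output shape, with a hypothetical type name:

```ts
// Hypothetical type name; the field list mirrors exactly the fields added
// in the processOutputBBReferences hunk further down this diff.
interface UserReferenceOutput {
  _id: string
  primaryDisplay: string // set to the user's email
  email: string
  firstName?: string
  lastName?: string
}
```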
@@ -16,7 +16,9 @@ import { context, events } from "@budibase/backend-core"
 import { isRows, isSchema, parse } from "../../../utilities/schema"
 import {
+  BulkImportRequest,
+  BulkImportResponse,
   Datasource,
   FieldSchema,
   ManyToManyRelationshipFieldMetadata,
   ManyToOneRelationshipFieldMetadata,
   OneToManyRelationshipFieldMetadata,

@@ -385,7 +387,9 @@ export async function destroy(ctx: UserCtx) {
   return tableToDelete
 }

-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows } = ctx.request.body
   const schema = table.schema

@@ -9,6 +9,7 @@ import { isExternalTable, isSQL } from "../../../integrations/utils"
 import { events } from "@budibase/backend-core"
 import {
   BulkImportRequest,
+  BulkImportResponse,
   FetchTablesResponse,
   SaveTableRequest,
   SaveTableResponse,

@@ -20,7 +21,7 @@ import {
 import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
-import { cloneDeep } from "lodash"
+import { cloneDeep, isEqual } from "lodash"

 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && !tableId) {

@@ -99,9 +100,17 @@ export async function destroy(ctx: UserCtx) {
   builderSocket?.emitTableDeletion(ctx, deletedTable)
 }

-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
   const tableId = ctx.params.tableId
-  await pickApi({ tableId }).bulkImport(ctx)
+  let tableBefore = await sdk.tables.getTable(tableId)
+  let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
+
+  if (!isEqual(tableBefore, tableAfter)) {
+    await sdk.tables.saveTable(tableAfter)
+  }
+
   // right now we don't trigger anything for bulk import because it
   // can only be done in the builder, but in the future we may need to
   // think about events for bulk items

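Note: the handler now re-reads the table around the import because a bulk import can mutate table metadata, e.g. advance an Auto ID counter, which the new `/tables` test further down exercises. Condensed, and assuming the `sdk`, `pickApi` and `isEqual` imports from the hunks above:

```ts
// Persist table metadata only when the import actually changed it;
// isEqual is lodash's deep equality, so unchanged tables skip the write.
const tableBefore = await sdk.tables.getTable(tableId)
const tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
  await sdk.tables.saveTable(tableAfter) // e.g. an advanced auto ID counter
}
```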
@@ -11,6 +11,7 @@ import {
 import { runStaticFormulaChecks } from "./bulkFormula"
 import {
   BulkImportRequest,
+  BulkImportResponse,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,

@@ -207,7 +208,9 @@ export async function destroy(ctx: any) {
   return tableToDelete
 }

-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
   await handleDataImport(ctx.user, table, rows, identifierFields)

@@ -1580,6 +1580,9 @@ describe.each([
       (row: Row) => ({
         _id: row._id,
         primaryDisplay: row.email,
+        email: row.email,
+        firstName: row.firstName,
+        lastName: row.lastName,
       }),
     ],
   ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {

@@ -1,4 +1,3 @@
-import { generator } from "@budibase/backend-core/tests"
 import { events, context } from "@budibase/backend-core"
 import {
   FieldType,

@@ -6,6 +5,7 @@ import {
   RelationshipType,
   Table,
   ViewCalculation,
+  AutoFieldSubTypes,
 } from "@budibase/types"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"

@@ -188,6 +188,36 @@ describe("/tables", () => {
         1
       )
     })
+
+    it("should update Auto ID field after bulk import", async () => {
+      const table = await config.createTable({
+        name: "TestTable",
+        type: "table",
+        schema: {
+          autoId: {
+            name: "id",
+            type: FieldType.NUMBER,
+            subtype: AutoFieldSubTypes.AUTO_ID,
+            autocolumn: true,
+            constraints: {
+              type: "number",
+              presence: false,
+            },
+          },
+        },
+      })
+
+      let row = await config.api.row.save(table._id!, {})
+      expect(row.autoId).toEqual(1)
+
+      await config.api.row.bulkImport(table._id!, {
+        rows: [{ autoId: 2 }],
+        identifierFields: [],
+      })
+
+      row = await config.api.row.save(table._id!, {})
+      expect(row.autoId).toEqual(3)
+    })
   })

   describe("fetch", () => {

@@ -279,7 +279,8 @@ export interface components {
         | "json"
         | "internal"
         | "barcodeqr"
-        | "bigint";
+        | "bigint"
+        | "bb_reference";
       /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
       constraints?: {
         /** @enum {string} */

@@ -386,7 +387,8 @@ export interface components {
         | "json"
         | "internal"
         | "barcodeqr"
-        | "bigint";
+        | "bigint"
+        | "bb_reference";
       /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
       constraints?: {
         /** @enum {string} */

@@ -495,7 +497,8 @@ export interface components {
         | "json"
         | "internal"
         | "barcodeqr"
-        | "bigint";
+        | "bigint"
+        | "bb_reference";
       /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
       constraints?: {
         /** @enum {string} */

@@ -4,6 +4,8 @@ import {
   Row,
   ValidateResponse,
   ExportRowsRequest,
+  BulkImportRequest,
+  BulkImportResponse,
 } from "@budibase/types"
 import TestConfiguration from "../TestConfiguration"
 import { TestAPI } from "./base"

@@ -123,6 +125,19 @@ export class RowAPI extends TestAPI {
     return request
   }

+  bulkImport = async (
+    tableId: string,
+    body: BulkImportRequest,
+    { expectStatus } = { expectStatus: 200 }
+  ): Promise<BulkImportResponse> => {
+    let request = this.request
+      .post(`/api/tables/${tableId}/import`)
+      .send(body)
+      .set(this.config.defaultHeaders())
+      .expect(expectStatus)
+    return (await request).body
+  }
+
   search = async (
     sourceId: string,
     { expectStatus } = { expectStatus: 200 }

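Note: a usage sketch for the new test helper, mirroring the Auto ID test added earlier in this diff (`config` and `table` come from the surrounding test setup):

```ts
// POSTs to /api/tables/:tableId/import and returns the typed response body.
const response = await config.api.row.bulkImport(table._id!, {
  rows: [{ autoId: 2 }],
  identifierFields: [],
})
// response: BulkImportResponse, e.g. { message: "..." }
```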
@@ -82,6 +82,9 @@ export async function processOutputBBReferences(
       return users.map(u => ({
         _id: u._id,
         primaryDisplay: u.email,
+        email: u.email,
+        firstName: u.firstName,
+        lastName: u.lastName,
       }))

     default:

@@ -180,6 +180,9 @@ describe("bbReferenceProcessor", () => {
         {
           _id: user._id,
           primaryDisplay: user.email,
+          email: user.email,
+          firstName: user.firstName,
+          lastName: user.lastName,
         },
       ])
       expect(cacheGetUsersSpy).toBeCalledTimes(1)

@@ -204,6 +207,9 @@ describe("bbReferenceProcessor", () => {
         [user1, user2].map(u => ({
           _id: u._id,
           primaryDisplay: u.email,
+          email: u.email,
+          firstName: u.firstName,
+          lastName: u.lastName,
         }))
       )
     )

@@ -29,7 +29,7 @@
     "dayjs": "^1.10.8",
     "handlebars": "^4.7.6",
     "lodash": "^4.17.20",
-    "vm2": "^3.9.15"
+    "vm2": "^3.9.19"
   },
   "devDependencies": {
     "@rollup/plugin-commonjs": "^17.1.0",

@@ -29,3 +29,7 @@ export interface BulkImportRequest {
   rows: Row[]
   identifierFields?: Array<string>
 }
+
+export interface BulkImportResponse {
+  message: string
+}

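Note: a hedged sketch of the wire contract these types describe; the endpoint path is the one used by the `RowAPI.bulkImport` helper above, while the wrapper function itself is hypothetical:

```ts
import { BulkImportRequest, BulkImportResponse, Row } from "@budibase/types"

// Hypothetical client-side wrapper around the bulk import endpoint.
async function bulkImportRows(
  tableId: string,
  rows: Row[]
): Promise<BulkImportResponse> {
  const body: BulkImportRequest = { rows, identifierFields: [] }
  const res = await fetch(`/api/tables/${tableId}/import`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  return res.json() // { message: string }
}
```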
@@ -21750,10 +21750,10 @@ vlq@^0.2.2:
   resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
   integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==

-vm2@3.9.17, vm2@^3.9.15, vm2@^3.9.8:
-  version "3.9.17"
-  resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.17.tgz#251b165ff8a0e034942b5181057305e39570aeab"
-  integrity sha512-AqwtCnZ/ERcX+AVj9vUsphY56YANXxRuqMb7GsDtAr0m0PcQX3u0Aj3KWiXM0YAHy7i6JEeHrwOnwXbGYgRpAw==
+vm2@^3.9.19:
+  version "3.9.19"
+  resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
+  integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
   dependencies:
     acorn "^8.7.0"
     acorn-walk "^8.2.0"