Merge remote-tracking branch 'origin/master' into feature/monolith-js-refactor
Commit: 07b0dd016e
@@ -165,7 +165,7 @@ Budibase is dedicated to providing everyone a welcoming, diverse, and harassment
## 🙌 Contributing to Budibase

From opening a bug report to creating a pull request: every contribution is appreciated and welcomed. If you're planning to implement a new feature or change the API, please create an issue first. This way, we can ensure your work is not in vain.

-Environment setup instructions are available for [Debian](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-DEBIAN.md) and [MacOSX](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-MACOSX.md)
+Environment setup instructions are available [here](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md).

### Not Sure Where to Start?

A good place to start contributing is the [First time issues project](https://github.com/Budibase/budibase/projects/22).
@@ -1,76 +0,0 @@
## Dev Environment on Debian 11

### Install NVM & Node 14

NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating

Install NVM:

```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```

Install Node 14:

```
nvm install 14
```

### Install npm requirements

```
npm install -g yarn jest lerna
```

### Install Docker and Docker Compose

```
apt install docker.io
pip3 install docker-compose
```
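Optionally (a convenience, not part of the original guide), you can confirm the installs and allow your user to run Docker without sudo:

```
docker --version
docker-compose --version
sudo usermod -aG docker $USER   # log out and back in for the group change to apply
```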
### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

### Check Versions

This setup process was tested on Debian 11 (bullseye) with the version numbers shown below. Your mileage may vary with anything else.

- Docker: 20.10.5
- Docker-Compose: 1.29.2
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4

### Build

```
cd budibase
yarn setup
```

The `yarn setup` command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev
```

So this command actually runs the application in dev mode. It creates `.env` files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.

http://127.0.0.1:10000/builder/admin
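As a quick sanity check (a suggestion, not part of the original guide), you can confirm that the containers are up and that the builder answers on port 10000:

```
# containers started by docker-compose should be listed here
docker ps

# the builder should respond on port 10000
curl -I http://127.0.0.1:10000/builder/admin
```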
### File descriptor issues with Vite and Chrome in Linux

If your dev environment stalls forever, with some network requests stuck in flight, it's likely that Chrome is trying to open more file descriptors than your system allows. To fix this, apply the following tweaks (you can check your current limit as shown below).

Debian-based distros:
Add `* - nofile 65536` to `/etc/security/limits.conf`.

Arch:
Add `DefaultLimitNOFILE=65536` to `/etc/systemd/system.conf`.
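To see whether the limit is actually the problem (a quick check, not part of the original guide), inspect the open-file limits before and after applying the tweak and logging back in:

```
# current soft limit on open file descriptors for this shell
ulimit -n

# current hard limit
ulimit -Hn
```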
@@ -1,84 +0,0 @@
## Dev Environment on MAC OSX 12 (Monterey)

### Install Homebrew

Install instructions [here](https://brew.sh/).

> **NOTE**: If you are working on an M1 Apple Silicon Mac running Z shell, you may need to add the line `eval $(/opt/homebrew/bin/brew shellenv)` to your `.zshrc`. This lets zsh find the apps you install through brew. One way to do that is shown below.
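A minimal way to apply the note above (assuming the default Homebrew location on Apple Silicon, `/opt/homebrew`):

```
echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> ~/.zshrc
source ~/.zshrc
brew --version   # brew should now be found by the shell
```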
### Install Node

Budibase requires a recent version of node 14:

```
brew install node npm
node -v
```

### Install npm requirements

```
npm install -g yarn jest lerna
```

### Install Docker and Docker Compose

```
brew install docker docker-compose
```

### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

### Check Versions

This setup process was tested on Mac OSX 12 (Monterey) with the version numbers shown below. Your mileage may vary with anything else.

- Docker: 20.10.14
- Docker-Compose: 2.6.0
- Node: 14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4

### Build

```
cd budibase
yarn setup
```

The `yarn setup` command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev
```

So this command actually runs the application in dev mode. It creates `.env` files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.

http://127.0.0.1:10000/builder/admin

> **NOTE**: If you are working on an M1 Apple Silicon Mac, you will need to uncomment the `# platform: linux/amd64` line in [hosting/docker-compose.dev.yaml](../hosting/docker-compose.dev.yaml).

### Troubleshooting

#### Yarn setup errors

If there are errors with the `yarn setup` command, you can try installing nvm and node 14 instead, the same as in the instructions for Debian 11 (see the commands below).
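For convenience, these are the same commands used in the Debian guide (the nvm install script URL is the one from that guide):

```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
nvm install 14
nvm use 14
```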
#### Node 14.20.1 not supported for arm64

If you are working on an M1 or M2 Mac and trying the Node installation via `nvm`, you will probably hit the error `curl: (22) The requested URL returned error: 404`.

Version `v14.20.1` is not available for arm64; in order to use it, you can switch the CPU architecture of your shell with the following command:

```shell
arch -x86_64 zsh # Run this before nvm install
```
@@ -1,92 +0,0 @@
## Dev Environment on Windows 10/11 (WSL2)

### Install WSL with Ubuntu LTS

Enable WSL 2 on Windows 10/11 for docker support.

```
wsl --set-default-version 2
```

Install Ubuntu LTS.

```
wsl --install Ubuntu
```

Or follow the instructions here:
https://learn.microsoft.com/en-us/windows/wsl/install
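To confirm the distro was registered and is running under WSL 2 (a quick check, not part of the original guide):

```
# lists installed distros and the WSL version each one uses
wsl -l -v
```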
### Install Docker in Windows

Download the installer from Docker and install it.

Check this URL for more detailed instructions:
https://docs.docker.com/desktop/install/windows-install/

You should follow the remaining steps from within the Ubuntu terminal.

### Install NVM & Node 14

NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating

Install NVM:

```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```

Install Node 14:

```
nvm install 14
```

### Install npm requirements

```
npm install -g yarn jest lerna
```

### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

### Check Versions

This setup process was tested on Windows 11 with the version numbers shown below. Your mileage may vary with anything else.

- Docker: 20.10.7
- Docker-Compose: 2.10.2
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.5.4

### Build

```
cd budibase
yarn setup
```

The `yarn setup` command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev
```

So this command actually runs the application in dev mode. It creates `.env` files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.

http://127.0.0.1:10000/builder/admin

### Working with the code

Here are the instructions for working on the application from within Visual Studio Code (in Windows) through WSL. All the commands and files live inside the Ubuntu system, and everything should run as if you were working on a Linux machine.

https://code.visualstudio.com/docs/remote/wsl

Note that you will be able to run the application from within the WSL terminal and access it from a browser in Windows. One way to open the repo in VS Code from WSL is shown below.
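Assuming VS Code and its WSL extension are installed on the Windows side, you can launch it straight from the repo inside Ubuntu (a convenience, not part of the original guide):

```
cd ~/budibase
code .   # opens the folder in VS Code connected to WSL
```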
@@ -207,8 +207,7 @@ From reporting a bug to fixing an error in the code, every contribution is a
implementing a new feature or making a change to the API, please create a [new issue here](https://github.com/Budibase/budibase/issues);
that way we can make sure your work is not in vain.

-Here you have instructions on how to set up your Budibase environment for [Debian](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-DEBIAN.md)
-and [MacOSX](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-MACOSX.md)
+Instructions on how to set up your Budibase environment are available [here](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md).

### Not sure where to start?

A good place to start contributing with us is [here](https://github.com/Budibase/budibase/projects/22).
@@ -1,5 +1,5 @@
{
-  "version": "2.15.7",
+  "version": "2.16.0",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@@ -46,7 +46,7 @@
    "kill-accountportal": "kill-port 3001 4003",
    "kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
    "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
-    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
+    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
    "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
    "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
    "dev:all": "yarn run kill-all && lerna run --stream dev",
@@ -1 +1 @@
-Subproject commit 64290ce8957d093bc997190402922df10d092953
+Subproject commit 485ec16a9eed48c548a5f1239772139f3319f028
@@ -5,10 +5,10 @@ if [[ -n $CI ]]
then
  # Running in ci, where resources are limited
  export NODE_OPTIONS="--max-old-space-size=4096"
-  echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail"
-  jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail
+  echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
+  jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
  # --maxWorkers performs better in development
-  echo "jest --coverage --maxWorkers=2 --forceExit"
-  jest --coverage --maxWorkers=2 --forceExit
+  echo "jest --coverage --maxWorkers=2 --forceExit $@"
+  jest --coverage --maxWorkers=2 --forceExit $@
fi
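The effect of this change is that any extra arguments passed to the script are now forwarded to jest, so a single spec can be targeted. A hypothetical invocation (the script path and spec name are illustrative, not taken from the diff):

```
# jest treats positional arguments as test path patterns
./scripts/test.sh queries.spec.ts
```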
@ -42,7 +42,7 @@ const datasets = {
|
|||
}
|
||||
|
||||
describe("Rest Importer", () => {
|
||||
const config = new TestConfig(false)
|
||||
const config = new TestConfig()
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
|
|
|
@ -12,7 +12,7 @@ let apiKey: string, table: Table, app: App, makeRequest: any
|
|||
|
||||
beforeAll(async () => {
|
||||
app = await config.init()
|
||||
table = await config.updateTable()
|
||||
table = await config.upsertTable()
|
||||
apiKey = await config.generateApiKey()
|
||||
makeRequest = generateMakeRequest(apiKey)
|
||||
})
|
||||
|
@ -69,7 +69,7 @@ describe("check the applications endpoints", () => {
|
|||
describe("check the tables endpoints", () => {
|
||||
it("should allow retrieving tables through search", async () => {
|
||||
await config.createApp("new app 1")
|
||||
table = await config.updateTable()
|
||||
table = await config.upsertTable()
|
||||
const res = await makeRequest("post", "/tables/search")
|
||||
expect(res).toSatisfyApiSpec()
|
||||
})
|
||||
|
@ -108,7 +108,7 @@ describe("check the tables endpoints", () => {
|
|||
describe("check the rows endpoints", () => {
|
||||
let row: Row
|
||||
it("should allow retrieving rows through search", async () => {
|
||||
table = await config.updateTable()
|
||||
table = await config.upsertTable()
|
||||
const res = await makeRequest("post", `/tables/${table._id}/rows/search`, {
|
||||
query: {},
|
||||
})
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
const tk = require("timekeeper")
|
||||
tk.freeze(Date.now())
|
||||
import tk from "timekeeper"
|
||||
|
||||
// Mock out postgres for this
|
||||
jest.mock("pg")
|
||||
|
@ -17,16 +16,24 @@ jest.mock("@budibase/backend-core", () => {
|
|||
},
|
||||
}
|
||||
})
|
||||
const setup = require("./utilities")
|
||||
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||
const { checkCacheForDynamicVariable } = require("../../../threads/utils")
|
||||
import * as setup from "./utilities"
|
||||
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
|
||||
import { checkCacheForDynamicVariable } from "../../../threads/utils"
|
||||
|
||||
const { basicQuery, basicDatasource } = setup.structures
|
||||
const { events, db: dbCore } = require("@budibase/backend-core")
|
||||
import { events, db as dbCore } from "@budibase/backend-core"
|
||||
import { Datasource, Query, SourceName } from "@budibase/types"
|
||||
|
||||
tk.freeze(Date.now())
|
||||
|
||||
const mockIsProdAppID = dbCore.isProdAppID as jest.MockedFunction<
|
||||
typeof dbCore.isProdAppID
|
||||
>
|
||||
|
||||
describe("/queries", () => {
|
||||
let request = setup.getRequest()
|
||||
let config = setup.getConfig()
|
||||
let datasource, query
|
||||
let datasource: Datasource & Required<Pick<Datasource, "_id">>, query: Query
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
|
@ -40,18 +47,7 @@ describe("/queries", () => {
|
|||
await setupTest()
|
||||
})
|
||||
|
||||
async function createInvalidIntegration() {
|
||||
const datasource = await config.createDatasource({
|
||||
datasource: {
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION",
|
||||
},
|
||||
})
|
||||
const query = await config.createQuery()
|
||||
return { datasource, query }
|
||||
}
|
||||
|
||||
const createQuery = async query => {
|
||||
const createQuery = async (query: Query) => {
|
||||
return request
|
||||
.post(`/api/queries`)
|
||||
.send(query)
|
||||
|
@ -67,7 +63,7 @@ describe("/queries", () => {
|
|||
jest.clearAllMocks()
|
||||
const res = await createQuery(query)
|
||||
|
||||
expect(res.res.statusMessage).toEqual(
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
|
@ -92,7 +88,7 @@ describe("/queries", () => {
|
|||
query._rev = res.body._rev
|
||||
await createQuery(query)
|
||||
|
||||
expect(res.res.statusMessage).toEqual(
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
|
@ -168,8 +164,8 @@ describe("/queries", () => {
|
|||
|
||||
it("should remove sensitive info for prod apps", async () => {
|
||||
// Mock isProdAppID to pretend we are using a prod app
|
||||
dbCore.isProdAppID.mockClear()
|
||||
dbCore.isProdAppID.mockImplementation(() => true)
|
||||
mockIsProdAppID.mockClear()
|
||||
mockIsProdAppID.mockImplementation(() => true)
|
||||
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
|
@ -184,7 +180,7 @@ describe("/queries", () => {
|
|||
|
||||
// Reset isProdAppID mock
|
||||
expect(dbCore.isProdAppID).toHaveBeenCalledTimes(1)
|
||||
dbCore.isProdAppID.mockImplementation(() => false)
|
||||
mockIsProdAppID.mockImplementation(() => false)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -211,10 +207,11 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
const query = await config.createQuery()
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "DELETE",
|
||||
url: `/api/queries/${config._id}/${config._rev}`,
|
||||
url: `/api/queries/${query._id}/${query._rev}`,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
@ -272,20 +269,21 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
it("should fail with invalid integration type", async () => {
|
||||
let error
|
||||
try {
|
||||
await createInvalidIntegration()
|
||||
} catch (err) {
|
||||
error = err
|
||||
}
|
||||
expect(error).toBeDefined()
|
||||
expect(error.message).toBe("No datasource implementation found.")
|
||||
const response = await config.api.datasource.create(
|
||||
{
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION" as SourceName,
|
||||
},
|
||||
{ expectStatus: 500, rawResponse: true }
|
||||
)
|
||||
|
||||
expect(response.body.message).toBe("No datasource implementation found.")
|
||||
})
|
||||
})
|
||||
|
||||
describe("variables", () => {
|
||||
async function preview(datasource, fields) {
|
||||
return config.previewQuery(request, config, datasource, fields)
|
||||
async function preview(datasource: Datasource, fields: any) {
|
||||
return config.previewQuery(request, config, datasource, fields, undefined)
|
||||
}
|
||||
|
||||
it("should work with static variables", async () => {
|
||||
|
@ -370,11 +368,19 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
describe("Current User Request Mapping", () => {
|
||||
async function previewGet(datasource, fields, params) {
|
||||
async function previewGet(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: any
|
||||
) {
|
||||
return config.previewQuery(request, config, datasource, fields, params)
|
||||
}
|
||||
|
||||
async function previewPost(datasource, fields, params) {
|
||||
async function previewPost(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: any
|
||||
) {
|
||||
return config.previewQuery(
|
||||
request,
|
||||
config,
|
||||
|
@ -394,14 +400,18 @@ describe("/queries", () => {
|
|||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
})
|
||||
const res = await previewGet(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "email={{[user].[email]}}",
|
||||
headers: {
|
||||
queryHdr: "{{[user].[firstName]}}",
|
||||
secondHdr: "1234",
|
||||
const res = await previewGet(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "email={{[user].[email]}}",
|
||||
headers: {
|
||||
queryHdr: "{{[user].[firstName]}}",
|
||||
secondHdr: "1234",
|
||||
},
|
||||
},
|
||||
})
|
||||
undefined
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(res.body.extra.raw)
|
||||
expect(parsedRequest.opts.headers).toEqual({
|
|
@ -581,7 +581,7 @@ describe.each([
|
|||
tableId: InternalTable.USER_METADATA,
|
||||
}
|
||||
|
||||
let table = await config.api.table.create({
|
||||
let table = await config.api.table.save({
|
||||
name: "TestTable",
|
||||
type: "table",
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
|
@ -1690,7 +1690,7 @@ describe.each([
|
|||
tableConfig.sourceType = TableSourceType.EXTERNAL
|
||||
}
|
||||
}
|
||||
const table = await config.api.table.create({
|
||||
const table = await config.api.table.save({
|
||||
...tableConfig,
|
||||
schema: {
|
||||
...tableConfig.schema,
|
||||
|
|
|
@ -438,7 +438,7 @@ describe("/tables", () => {
|
|||
})
|
||||
|
||||
it("should successfully migrate a one-to-many user relationship to a user column", async () => {
|
||||
const table = await config.api.table.create({
|
||||
const table = await config.api.table.save({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
|
@ -496,7 +496,7 @@ describe("/tables", () => {
|
|||
// We found a bug just after releasing this feature where if the row was created from the
|
||||
// users table, not the table linking to it, the migration would succeed but lose the data.
|
||||
// This happened because the order of the documents in the link was reversed.
|
||||
const table = await config.api.table.create({
|
||||
const table = await config.api.table.save({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
|
@ -554,7 +554,7 @@ describe("/tables", () => {
|
|||
})
|
||||
|
||||
it("should successfully migrate a many-to-many user relationship to a users column", async () => {
|
||||
const table = await config.api.table.create({
|
||||
const table = await config.api.table.save({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
|
@ -611,7 +611,7 @@ describe("/tables", () => {
|
|||
})
|
||||
|
||||
it("should successfully migrate a many-to-one user relationship to a users column", async () => {
|
||||
const table = await config.api.table.create({
|
||||
const table = await config.api.table.save({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
|
@ -670,7 +670,7 @@ describe("/tables", () => {
|
|||
describe("unhappy paths", () => {
|
||||
let table: Table
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.create({
|
||||
table = await config.api.table.save({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
|
|
|
@ -5,7 +5,7 @@ import {
|
|||
} from "@budibase/string-templates"
|
||||
import sdk from "../sdk"
|
||||
import { Row } from "@budibase/types"
|
||||
import { LoopStep, LoopStepType, LoopInput } from "../definitions/automations"
|
||||
import { LoopInput, LoopStep, LoopStepType } from "../definitions/automations"
|
||||
|
||||
/**
|
||||
* When values are input to the system generally they will be of type string as this is required for template strings.
|
||||
|
@ -128,23 +128,28 @@ export function substituteLoopStep(hbsString: string, substitute: string) {
|
|||
}
|
||||
|
||||
export function stringSplit(value: string | string[]) {
|
||||
if (value == null || Array.isArray(value)) {
|
||||
return value || []
|
||||
if (value == null) {
|
||||
return []
|
||||
}
|
||||
if (value.split("\n").length > 1) {
|
||||
value = value.split("\n")
|
||||
} else {
|
||||
value = value.split(",")
|
||||
if (Array.isArray(value)) {
|
||||
return value
|
||||
}
|
||||
return value
|
||||
if (typeof value !== "string") {
|
||||
throw new Error(`Unable to split value of type ${typeof value}: ${value}`)
|
||||
}
|
||||
const splitOnNewLine = value.split("\n")
|
||||
if (splitOnNewLine.length > 1) {
|
||||
return splitOnNewLine
|
||||
}
|
||||
return value.split(",")
|
||||
}
|
||||
|
||||
export function typecastForLooping(loopStep: LoopStep, input: LoopInput) {
|
||||
export function typecastForLooping(input: LoopInput) {
|
||||
if (!input || !input.binding) {
|
||||
return null
|
||||
}
|
||||
try {
|
||||
switch (loopStep.inputs.option) {
|
||||
switch (input.option) {
|
||||
case LoopStepType.ARRAY:
|
||||
if (typeof input.binding === "string") {
|
||||
return JSON.parse(input.binding)
|
||||
|
|
|
@ -3,11 +3,13 @@ import * as triggers from "../triggers"
|
|||
import { loopAutomation } from "../../tests/utilities/structures"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import * as setup from "./utilities"
|
||||
import { Row, Table } from "@budibase/types"
|
||||
import { LoopInput, LoopStepType } from "../../definitions/automations"
|
||||
|
||||
describe("Attempt to run a basic loop automation", () => {
|
||||
let config = setup.getConfig(),
|
||||
table: any,
|
||||
row: any
|
||||
table: Table,
|
||||
row: Row
|
||||
|
||||
beforeEach(async () => {
|
||||
await automation.init()
|
||||
|
@ -18,12 +20,12 @@ describe("Attempt to run a basic loop automation", () => {
|
|||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
async function runLoop(loopOpts?: any) {
|
||||
async function runLoop(loopOpts?: LoopInput) {
|
||||
const appId = config.getAppId()
|
||||
return await context.doInAppContext(appId, async () => {
|
||||
const params = { fields: { appId } }
|
||||
return await triggers.externalTrigger(
|
||||
loopAutomation(table._id, loopOpts),
|
||||
loopAutomation(table._id!, loopOpts),
|
||||
params,
|
||||
{ getResponses: true }
|
||||
)
|
||||
|
@ -37,9 +39,17 @@ describe("Attempt to run a basic loop automation", () => {
|
|||
|
||||
it("test a loop with a string", async () => {
|
||||
const resp = await runLoop({
|
||||
type: "String",
|
||||
option: LoopStepType.STRING,
|
||||
binding: "a,b,c",
|
||||
})
|
||||
expect(resp.steps[2].outputs.iterations).toBe(3)
|
||||
})
|
||||
|
||||
it("test a loop with a binding that returns an integer", async () => {
|
||||
const resp = await runLoop({
|
||||
option: LoopStepType.ARRAY,
|
||||
binding: "{{ 1 }}",
|
||||
})
|
||||
expect(resp.steps[2].outputs.iterations).toBe(1)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -67,7 +67,7 @@ describe("test the update row action", () => {
|
|||
tableId: InternalTable.USER_METADATA,
|
||||
}
|
||||
|
||||
let table = await config.api.table.create({
|
||||
let table = await config.api.table.save({
|
||||
name: uuid.v4(),
|
||||
type: "table",
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
|
@ -120,7 +120,7 @@ describe("test the update row action", () => {
|
|||
tableId: InternalTable.USER_METADATA,
|
||||
}
|
||||
|
||||
let table = await config.api.table.create({
|
||||
let table = await config.api.table.save({
|
||||
name: uuid.v4(),
|
||||
type: "table",
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
|
|
|
@ -9,7 +9,7 @@ import * as utils from "./utils"
|
|||
import env from "../environment"
|
||||
import { context, db as dbCore } from "@budibase/backend-core"
|
||||
import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
|
||||
import { executeSynchronously } from "../threads/automation"
|
||||
import { executeInThread } from "../threads/automation"
|
||||
|
||||
export const TRIGGER_DEFINITIONS = definitions
|
||||
const JOB_OPTS = {
|
||||
|
@ -117,8 +117,7 @@ export async function externalTrigger(
|
|||
appId: context.getAppId(),
|
||||
automation,
|
||||
}
|
||||
const job = { data } as AutomationJob
|
||||
return executeSynchronously(job)
|
||||
return executeInThread({ data } as AutomationJob)
|
||||
} else {
|
||||
return automationQueue.add(data, JOB_OPTS)
|
||||
}
|
||||
|
|
|
@ -1,10 +1,15 @@
|
|||
const automationUtils = require("../automationUtils")
|
||||
import { LoopStep, LoopStepType } from "../../definitions/automations"
|
||||
import {
|
||||
typecastForLooping,
|
||||
cleanInputValues,
|
||||
substituteLoopStep,
|
||||
} from "../automationUtils"
|
||||
|
||||
describe("automationUtils", () => {
|
||||
describe("substituteLoopStep", () => {
|
||||
it("should allow multiple loop binding substitutes", () => {
|
||||
expect(
|
||||
automationUtils.substituteLoopStep(
|
||||
substituteLoopStep(
|
||||
`{{ loop.currentItem._id }} {{ loop.currentItem._id }} {{ loop.currentItem._id }}`,
|
||||
"step.2"
|
||||
)
|
||||
|
@ -15,7 +20,7 @@ describe("automationUtils", () => {
|
|||
|
||||
it("should handle not subsituting outside of curly braces", () => {
|
||||
expect(
|
||||
automationUtils.substituteLoopStep(
|
||||
substituteLoopStep(
|
||||
`loop {{ loop.currentItem._id }}loop loop{{ loop.currentItem._id }}loop`,
|
||||
"step.2"
|
||||
)
|
||||
|
@ -28,37 +33,20 @@ describe("automationUtils", () => {
|
|||
describe("typeCastForLooping", () => {
|
||||
it("should parse to correct type", () => {
|
||||
expect(
|
||||
automationUtils.typecastForLooping(
|
||||
{ inputs: { option: "Array" } },
|
||||
{ binding: [1, 2, 3] }
|
||||
)
|
||||
typecastForLooping({ option: LoopStepType.ARRAY, binding: [1, 2, 3] })
|
||||
).toEqual([1, 2, 3])
|
||||
expect(
|
||||
automationUtils.typecastForLooping(
|
||||
{ inputs: { option: "Array" } },
|
||||
{ binding: "[1, 2, 3]" }
|
||||
)
|
||||
typecastForLooping({ option: LoopStepType.ARRAY, binding: "[1,2,3]" })
|
||||
).toEqual([1, 2, 3])
|
||||
expect(
|
||||
automationUtils.typecastForLooping(
|
||||
{ inputs: { option: "String" } },
|
||||
{ binding: [1, 2, 3] }
|
||||
)
|
||||
typecastForLooping({ option: LoopStepType.STRING, binding: [1, 2, 3] })
|
||||
).toEqual("1,2,3")
|
||||
})
|
||||
it("should handle null values", () => {
|
||||
// expect it to handle where the binding is null
|
||||
expect(
|
||||
automationUtils.typecastForLooping(
|
||||
{ inputs: { option: "Array" } },
|
||||
{ binding: null }
|
||||
)
|
||||
).toEqual(null)
|
||||
expect(typecastForLooping({ option: LoopStepType.ARRAY })).toEqual(null)
|
||||
expect(() =>
|
||||
automationUtils.typecastForLooping(
|
||||
{ inputs: { option: "Array" } },
|
||||
{ binding: "test" }
|
||||
)
|
||||
typecastForLooping({ option: LoopStepType.ARRAY, binding: "test" })
|
||||
).toThrow()
|
||||
})
|
||||
})
|
||||
|
@ -80,7 +68,7 @@ describe("automationUtils", () => {
|
|||
},
|
||||
}
|
||||
expect(
|
||||
automationUtils.cleanInputValues(
|
||||
cleanInputValues(
|
||||
{
|
||||
row: {
|
||||
relationship: `[{"_id": "ro_ta_users_us_3"}]`,
|
||||
|
@ -113,7 +101,7 @@ describe("automationUtils", () => {
|
|||
},
|
||||
}
|
||||
expect(
|
||||
automationUtils.cleanInputValues(
|
||||
cleanInputValues(
|
||||
{
|
||||
row: {
|
||||
relationship: `ro_ta_users_us_3`,
|
||||
|
|
|
@ -324,7 +324,7 @@ describe("test the link controller", () => {
|
|||
name: "link",
|
||||
autocolumn: true,
|
||||
}
|
||||
await config.updateTable(table)
|
||||
await config.upsertTable(table)
|
||||
})
|
||||
|
||||
it("should be able to remove a linked field from a table, even if the linked table does not exist", async () => {
|
||||
|
|
|
@ -6,14 +6,14 @@ export enum LoopStepType {
|
|||
}
|
||||
|
||||
export interface LoopStep extends AutomationStep {
|
||||
inputs: {
|
||||
option: LoopStepType
|
||||
[key: string]: any
|
||||
}
|
||||
inputs: LoopInput
|
||||
}
|
||||
|
||||
export interface LoopInput {
|
||||
binding: string[] | string
|
||||
option: LoopStepType
|
||||
binding?: string[] | string | number[]
|
||||
iterations?: string
|
||||
failure?: any
|
||||
}
|
||||
|
||||
export interface TriggerOutput {
|
||||
|
|
|
@ -5,7 +5,7 @@ import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
|
|||
import { db as dbCore, context } from "@budibase/backend-core"
|
||||
|
||||
describe("syncRows", () => {
|
||||
let config = new TestConfig(false)
|
||||
const config = new TestConfig()
|
||||
|
||||
beforeEach(async () => {
|
||||
await config.init()
|
||||
|
|
|
@ -8,10 +8,10 @@ import {
|
|||
FieldType,
|
||||
Table,
|
||||
AutoFieldSubType,
|
||||
AutoColumnFieldMetadata,
|
||||
} from "@budibase/types"
|
||||
|
||||
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
|
||||
import { cache } from "@budibase/backend-core"
|
||||
|
||||
tk.freeze(Date.now())
|
||||
|
||||
|
@ -213,8 +213,10 @@ describe("sdk >> rows >> internal", () => {
|
|||
)
|
||||
|
||||
const persistedTable = await config.getTable(table._id)
|
||||
expect((table as any).schema.id.lastID).toBe(0)
|
||||
expect(persistedTable.schema.id.lastID).toBe(20)
|
||||
expect((table.schema.id as AutoColumnFieldMetadata).lastID).toBe(0)
|
||||
expect((persistedTable.schema.id as AutoColumnFieldMetadata).lastID).toBe(
|
||||
20
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -9,7 +9,7 @@ describe("tables", () => {
|
|||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
table = await config.api.table.create(basicTable())
|
||||
table = await config.api.table.save(basicTable())
|
||||
})
|
||||
|
||||
describe("getTables", () => {
|
||||
|
|
|
@ -27,7 +27,18 @@ import {
|
|||
sessions,
|
||||
tenancy,
|
||||
} from "@budibase/backend-core"
|
||||
import * as controllers from "./controllers"
|
||||
import {
|
||||
app as appController,
|
||||
deploy as deployController,
|
||||
role as roleController,
|
||||
automation as automationController,
|
||||
webhook as webhookController,
|
||||
query as queryController,
|
||||
screen as screenController,
|
||||
layout as layoutController,
|
||||
view as viewController,
|
||||
} from "./controllers"
|
||||
|
||||
import { cleanup } from "../../utilities/fileSystem"
|
||||
import newid from "../../db/newid"
|
||||
import { generateUserMetadataID } from "../../db/utils"
|
||||
|
@ -44,13 +55,14 @@ import {
|
|||
RelationshipFieldMetadata,
|
||||
RelationshipType,
|
||||
Row,
|
||||
SearchFilters,
|
||||
SearchParams,
|
||||
SourceName,
|
||||
Table,
|
||||
TableSourceType,
|
||||
User,
|
||||
UserRoles,
|
||||
View,
|
||||
WithRequired,
|
||||
} from "@budibase/types"
|
||||
|
||||
import API from "./api"
|
||||
|
@ -543,11 +555,7 @@ class TestConfiguration {
|
|||
// clear any old app
|
||||
this.appId = null
|
||||
this.app = await context.doInTenant(this.tenantId!, async () => {
|
||||
const app = await this._req(
|
||||
{ name: appName },
|
||||
null,
|
||||
controllers.app.create
|
||||
)
|
||||
const app = await this._req({ name: appName }, null, appController.create)
|
||||
this.appId = app.appId!
|
||||
return app
|
||||
})
|
||||
|
@ -563,7 +571,7 @@ class TestConfiguration {
|
|||
}
|
||||
|
||||
async publish() {
|
||||
await this._req(null, null, controllers.deploy.publishApp)
|
||||
await this._req(null, null, deployController.publishApp)
|
||||
// @ts-ignore
|
||||
const prodAppId = this.getAppId().replace("_dev", "")
|
||||
this.prodAppId = prodAppId
|
||||
|
@ -578,7 +586,7 @@ class TestConfiguration {
|
|||
const response = await this._req(
|
||||
null,
|
||||
{ appId: this.appId },
|
||||
controllers.app.unpublish
|
||||
appController.unpublish
|
||||
)
|
||||
this.prodAppId = null
|
||||
this.prodApp = null
|
||||
|
@ -587,14 +595,16 @@ class TestConfiguration {
|
|||
|
||||
// TABLE
|
||||
|
||||
async updateTable(
|
||||
async upsertTable(
|
||||
config?: TableToBuild,
|
||||
{ skipReassigning } = { skipReassigning: false }
|
||||
): Promise<Table> {
|
||||
config = config || basicTable()
|
||||
config.sourceType = config.sourceType || TableSourceType.INTERNAL
|
||||
config.sourceId = config.sourceId || INTERNAL_TABLE_SOURCE_ID
|
||||
const response = await this._req(config, null, controllers.table.save)
|
||||
const response = await this.api.table.save({
|
||||
...config,
|
||||
sourceType: config.sourceType || TableSourceType.INTERNAL,
|
||||
sourceId: config.sourceId || INTERNAL_TABLE_SOURCE_ID,
|
||||
})
|
||||
if (!skipReassigning) {
|
||||
this.table = response
|
||||
}
|
||||
|
@ -612,7 +622,7 @@ class TestConfiguration {
|
|||
if (!config.sourceId) {
|
||||
config.sourceId = INTERNAL_TABLE_SOURCE_ID
|
||||
}
|
||||
return this.updateTable(config, options)
|
||||
return this.upsertTable(config, options)
|
||||
}
|
||||
|
||||
async createExternalTable(
|
||||
|
@ -627,12 +637,12 @@ class TestConfiguration {
|
|||
config.sourceId = this.datasource._id
|
||||
config.sourceType = TableSourceType.EXTERNAL
|
||||
}
|
||||
return this.updateTable(config, options)
|
||||
return this.upsertTable(config, options)
|
||||
}
|
||||
|
||||
async getTable(tableId?: string) {
|
||||
tableId = tableId || this.table!._id!
|
||||
return this._req(null, { tableId }, controllers.table.find)
|
||||
return this.api.table.get(tableId)
|
||||
}
|
||||
|
||||
async createLinkedTable(
|
||||
|
@ -680,37 +690,35 @@ class TestConfiguration {
|
|||
if (!this.table) {
|
||||
throw "Test requires table to be configured."
|
||||
}
|
||||
const tableId = (config && config.tableId) || this.table._id
|
||||
const tableId = (config && config.tableId) || this.table._id!
|
||||
config = config || basicRow(tableId!)
|
||||
return this._req(config, { tableId }, controllers.row.save)
|
||||
return this.api.row.save(tableId, config)
|
||||
}
|
||||
|
||||
async getRow(tableId: string, rowId: string): Promise<Row> {
|
||||
return this._req(null, { tableId, rowId }, controllers.row.find)
|
||||
const res = await this.api.row.get(tableId, rowId)
|
||||
return res.body
|
||||
}
|
||||
|
||||
async getRows(tableId: string) {
|
||||
if (!tableId && this.table) {
|
||||
tableId = this.table._id!
|
||||
}
|
||||
return this._req(null, { tableId }, controllers.row.fetch)
|
||||
return this.api.row.fetch(tableId)
|
||||
}
|
||||
|
||||
async searchRows(tableId: string, searchParams: SearchFilters = {}) {
|
||||
async searchRows(tableId: string, searchParams?: SearchParams) {
|
||||
if (!tableId && this.table) {
|
||||
tableId = this.table._id!
|
||||
}
|
||||
const body = {
|
||||
query: searchParams,
|
||||
}
|
||||
return this._req(body, { tableId }, controllers.row.search)
|
||||
return this.api.row.search(tableId, searchParams)
|
||||
}
|
||||
|
||||
// ROLE
|
||||
|
||||
async createRole(config?: any) {
|
||||
config = config || basicRole()
|
||||
return this._req(config, null, controllers.role.save)
|
||||
return this._req(config, null, roleController.save)
|
||||
}
|
||||
|
||||
// VIEW
|
||||
|
@ -723,7 +731,7 @@ class TestConfiguration {
|
|||
tableId: this.table!._id,
|
||||
name: generator.guid(),
|
||||
}
|
||||
return this._req(view, null, controllers.view.v1.save)
|
||||
return this._req(view, null, viewController.v1.save)
|
||||
}
|
||||
|
||||
async createView(
|
||||
|
@ -753,13 +761,13 @@ class TestConfiguration {
|
|||
delete config._rev
|
||||
}
|
||||
this.automation = (
|
||||
await this._req(config, null, controllers.automation.create)
|
||||
await this._req(config, null, automationController.create)
|
||||
).automation
|
||||
return this.automation
|
||||
}
|
||||
|
||||
async getAllAutomations() {
|
||||
return this._req(null, null, controllers.automation.fetch)
|
||||
return this._req(null, null, automationController.fetch)
|
||||
}
|
||||
|
||||
async deleteAutomation(automation?: any) {
|
||||
|
@ -770,7 +778,7 @@ class TestConfiguration {
|
|||
return this._req(
|
||||
null,
|
||||
{ id: automation._id, rev: automation._rev },
|
||||
controllers.automation.destroy
|
||||
automationController.destroy
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -779,28 +787,27 @@ class TestConfiguration {
|
|||
throw "Must create an automation before creating webhook."
|
||||
}
|
||||
config = config || basicWebhook(this.automation._id)
|
||||
return (await this._req(config, null, controllers.webhook.save)).webhook
|
||||
|
||||
return (await this._req(config, null, webhookController.save)).webhook
|
||||
}
|
||||
|
||||
// DATASOURCE
|
||||
|
||||
async createDatasource(config?: {
|
||||
datasource: Datasource
|
||||
}): Promise<Datasource> {
|
||||
}): Promise<WithRequired<Datasource, "_id">> {
|
||||
config = config || basicDatasource()
|
||||
const response = await this._req(config, null, controllers.datasource.save)
|
||||
this.datasource = response.datasource
|
||||
return this.datasource!
|
||||
const response = await this.api.datasource.create(config.datasource)
|
||||
this.datasource = response
|
||||
return { ...this.datasource, _id: this.datasource!._id! }
|
||||
}
|
||||
|
||||
async updateDatasource(datasource: Datasource): Promise<Datasource> {
|
||||
const response = await this._req(
|
||||
datasource,
|
||||
{ datasourceId: datasource._id },
|
||||
controllers.datasource.update
|
||||
)
|
||||
this.datasource = response.datasource
|
||||
return this.datasource!
|
||||
async updateDatasource(
|
||||
datasource: Datasource
|
||||
): Promise<WithRequired<Datasource, "_id">> {
|
||||
const response = await this.api.datasource.update(datasource)
|
||||
this.datasource = response
|
||||
return { ...this.datasource, _id: this.datasource!._id! }
|
||||
}
|
||||
|
||||
async restDatasource(cfg?: any) {
|
||||
|
@ -815,6 +822,7 @@ class TestConfiguration {
|
|||
|
||||
async dynamicVariableDatasource() {
|
||||
let datasource = await this.restDatasource()
|
||||
|
||||
const basedOnQuery = await this.createQuery({
|
||||
...basicQuery(datasource._id!),
|
||||
fields: {
|
||||
|
@ -886,21 +894,21 @@ class TestConfiguration {
|
|||
throw "No datasource created for query."
|
||||
}
|
||||
config = config || basicQuery(this.datasource!._id!)
|
||||
return this._req(config, null, controllers.query.save)
|
||||
return this._req(config, null, queryController.save)
|
||||
}
|
||||
|
||||
// SCREEN
|
||||
|
||||
async createScreen(config?: any) {
|
||||
config = config || basicScreen()
|
||||
return this._req(config, null, controllers.screen.save)
|
||||
return this._req(config, null, screenController.save)
|
||||
}
|
||||
|
||||
// LAYOUT
|
||||
|
||||
async createLayout(config?: any) {
|
||||
config = config || basicLayout()
|
||||
return await this._req(config, null, controllers.layout.save)
|
||||
return await this._req(config, null, layoutController.save)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2,20 +2,23 @@ import {
|
|||
CreateDatasourceRequest,
|
||||
Datasource,
|
||||
VerifyDatasourceRequest,
|
||||
VerifyDatasourceResponse,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import supertest from "supertest"
|
||||
|
||||
export class DatasourceAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
create = async (
|
||||
create = async <B extends boolean = false>(
|
||||
config: Datasource,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<Datasource> => {
|
||||
{
|
||||
expectStatus,
|
||||
rawResponse,
|
||||
}: { expectStatus?: number; rawResponse?: B } = {}
|
||||
): Promise<B extends false ? Datasource : supertest.Response> => {
|
||||
const body: CreateDatasourceRequest = {
|
||||
datasource: config,
|
||||
tablesFilter: [],
|
||||
|
@ -25,8 +28,11 @@ export class DatasourceAPI extends TestAPI {
|
|||
.send(body)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return result.body.datasource as Datasource
|
||||
.expect(expectStatus || 200)
|
||||
if (rawResponse) {
|
||||
return result as any
|
||||
}
|
||||
return result.body.datasource
|
||||
}
|
||||
|
||||
update = async (
|
||||
|
|
|
@ -7,6 +7,7 @@ import {
|
|||
BulkImportRequest,
|
||||
BulkImportResponse,
|
||||
SearchRowResponse,
|
||||
SearchParams,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
|
@ -154,10 +155,12 @@ export class RowAPI extends TestAPI {
|
|||
|
||||
search = async (
|
||||
sourceId: string,
|
||||
params?: SearchParams,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<SearchRowResponse> => {
|
||||
const request = this.request
|
||||
.post(`/api/${sourceId}/search`)
|
||||
.send(params)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ export class TableAPI extends TestAPI {
|
|||
super(config)
|
||||
}
|
||||
|
||||
create = async (
|
||||
save = async (
|
||||
data: SaveTableRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<SaveTableResponse> => {
|
||||
|
|
|
@ -21,8 +21,9 @@ import {
|
|||
Table,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
TableSourceType,
|
||||
AutomationIOType,
|
||||
Query,
|
||||
} from "@budibase/types"
|
||||
import { LoopInput, LoopStepType } from "../../definitions/automations"
|
||||
|
||||
const { BUILTIN_ROLE_IDS } = roles
|
||||
|
||||
|
@ -204,10 +205,13 @@ export function serverLogAutomation(appId?: string): Automation {
|
|||
}
|
||||
}
|
||||
|
||||
export function loopAutomation(tableId: string, loopOpts?: any): Automation {
|
||||
export function loopAutomation(
|
||||
tableId: string,
|
||||
loopOpts?: LoopInput
|
||||
): Automation {
|
||||
if (!loopOpts) {
|
||||
loopOpts = {
|
||||
option: "Array",
|
||||
option: LoopStepType.ARRAY,
|
||||
binding: "{{ steps.1.rows }}",
|
||||
}
|
||||
}
|
||||
|
@ -360,7 +364,7 @@ export function basicDatasource(): { datasource: Datasource } {
|
|||
}
|
||||
}
|
||||
|
||||
export function basicQuery(datasourceId: string) {
|
||||
export function basicQuery(datasourceId: string): Query {
|
||||
return {
|
||||
datasourceId: datasourceId,
|
||||
name: "New Query",
|
||||
|
@ -368,6 +372,8 @@ export function basicQuery(datasourceId: string) {
|
|||
fields: {},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: null,
|
||||
readable: true,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -43,22 +43,19 @@ const CRON_STEP_ID = triggerDefs.CRON.stepId
|
|||
const STOPPED_STATUS = { success: true, status: AutomationStatus.STOPPED }
|
||||
|
||||
function getLoopIterations(loopStep: LoopStep) {
|
||||
let binding = loopStep.inputs.binding
|
||||
const binding = loopStep.inputs.binding
|
||||
if (!binding) {
|
||||
return 0
|
||||
}
|
||||
const isString = typeof binding === "string"
|
||||
try {
|
||||
if (isString) {
|
||||
binding = JSON.parse(binding)
|
||||
const json = typeof binding === "string" ? JSON.parse(binding) : binding
|
||||
if (Array.isArray(json)) {
|
||||
return json.length
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore error - wasn't able to parse
|
||||
}
|
||||
if (Array.isArray(binding)) {
|
||||
return binding.length
|
||||
}
|
||||
if (isString) {
|
||||
if (typeof binding === "string") {
|
||||
return automationUtils.stringSplit(binding).length
|
||||
}
|
||||
return 0
|
||||
|
@ -256,7 +253,7 @@ class Orchestrator {
|
|||
this._context.env = await sdkUtils.getEnvironmentVariables()
|
||||
let automation = this._automation
|
||||
let stopped = false
|
||||
let loopStep: AutomationStep | undefined = undefined
|
||||
let loopStep: LoopStep | undefined = undefined
|
||||
|
||||
let stepCount = 0
|
||||
let loopStepNumber: any = undefined
|
||||
|
@ -311,7 +308,7 @@ class Orchestrator {
|
|||
|
||||
stepCount++
|
||||
if (step.stepId === LOOP_STEP_ID) {
|
||||
loopStep = step
|
||||
loopStep = step as LoopStep
|
||||
loopStepNumber = stepCount
|
||||
continue
|
||||
}
|
||||
|
@ -331,7 +328,6 @@ class Orchestrator {
|
|||
}
|
||||
try {
|
||||
loopStep.inputs.binding = automationUtils.typecastForLooping(
|
||||
loopStep as LoopStep,
|
||||
loopStep.inputs as LoopInput
|
||||
)
|
||||
} catch (err) {
|
||||
|
@ -348,7 +344,7 @@ class Orchestrator {
|
|||
loopStep = undefined
|
||||
break
|
||||
}
|
||||
let item = []
|
||||
let item: any[] = []
|
||||
if (
|
||||
typeof loopStep.inputs.binding === "string" &&
|
||||
loopStep.inputs.option === "String"
|
||||
|
@ -399,7 +395,8 @@ class Orchestrator {
|
|||
|
||||
if (
|
||||
index === env.AUTOMATION_MAX_ITERATIONS ||
|
||||
index === parseInt(loopStep.inputs.iterations)
|
||||
(loopStep.inputs.iterations &&
|
||||
index === parseInt(loopStep.inputs.iterations))
|
||||
) {
|
||||
this.updateContextAndOutput(
|
||||
loopStepNumber,
|
||||
|
@ -615,7 +612,7 @@ export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
|
|||
})
|
||||
}
|
||||
|
||||
export function executeSynchronously(job: Job) {
|
||||
export async function executeInThread(job: Job<AutomationData>) {
|
||||
const appId = job.data.event.appId
|
||||
if (!appId) {
|
||||
throw new Error("Unable to execute, event doesn't contain app ID.")
|
||||
|
@ -627,10 +624,10 @@ export function executeSynchronously(job: Job) {
|
|||
}, job.data.event.timeout || 12000)
|
||||
})
|
||||
|
||||
return context.doInAppContext(appId, async () => {
|
||||
return await context.doInAppContext(appId, async () => {
|
||||
const envVars = await sdkUtils.getEnvironmentVariables()
|
||||
// put into automation thread for whole context
|
||||
return context.doInEnvironmentContext(envVars, async () => {
|
||||
return await context.doInEnvironmentContext(envVars, async () => {
|
||||
const automationOrchestrator = new Orchestrator(job)
|
||||
return await Promise.race([
|
||||
automationOrchestrator.execute(),
|
||||
|
|
|
@@ -7,3 +7,5 @@ export type ISO8601 = string
export type RequiredKeys<T> = {
  [K in keyof Required<T>]: T[K]
}
+
+export type WithRequired<T, K extends keyof T> = T & Required<Pick<T, K>>