Merge branch 'develop' into feature/clickable-container

commit d222dba681
@@ -7,3 +7,4 @@ packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/reports
@@ -1,76 +0,0 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at community@budibase.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
@@ -0,0 +1 @@
../docs/CODE_OF_CONDUCT.md
@@ -1,208 +0,0 @@
# Contributing

From opening a bug report to creating a pull request: every contribution is appreciated and welcome. If you're planning to implement a new feature or change the api please create an issue first. This way we can ensure that your precious work is not in vain.

## Not Sure Where to Start?

Budibase is a low-code web application builder that creates svelte based web applications.

Budibase is a monorepo managed by [lerna](https://github.com/lerna/lerna). Lerna manages the building and publishing of the budibase packages. At a high level, here are the packages that make up budibase.

- **packages/builder** - contains code for the budibase builder client side svelte application.

- **packages/client** - A module that runs in the browser responsible for reading JSON definition and creating living, breathing web apps from it.

- **packages/server** - The budibase server. This [Koa](https://koajs.com/) app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system.

- **packages/worker** - This [Koa](https://koajs.com/) app is responsible for providing global apis for managing your budibase installation. Authentication, Users, Email, Org and Auth configs are all provided by the worker.

## Contributor License Agreement (CLA)

In order to accept your pull request, we need you to submit a CLA. You only need to do this once. If you are submitting a pull request for the first time, just submit a Pull Request and our CLA Bot will give you instructions on how to sign the CLA before merging your Pull Request.

All contributors must sign an [Individual Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/individual-cla.md).

If contributing on behalf of your company, your company must sign a [Corporate Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/corporate-cla.md). If so, please contact us via community@budibase.com.

## Glossary of Terms

To understand the budibase API, it can be helpful to understand the top level entities that make up Budibase.

### Client

A client represents a single budibase customer. Each budibase client will have 1 or more budibase servers. Every client is assigned a unique ID.

### App

A client can have one or more budibase applications. Budibase applications would be things like "Developer Inventory Management" or "Goat Herder CRM". Think of a budibase application as a tree.

### Database

An App can have one or more databases. Keeping with our [dendrology](https://en.wikipedia.org/wiki/Dendrology) analogy - think of an database as a branch on the tree. Databases are used to keep data separate for different instances of your app. For example, if you had a CRM app, you may create a database for your US office, and a database for your Australian office. Databases allow us to support [multitenancy](https://www.gartner.com/en/information-technology/glossary/multitenancy) in budibase applications.

### Table

Tables in budibase are almost akin to tables in relational databases. A table may be a "Car" or an "Employee". They are the main building blocks for the creation and management of backend data in budibase.

### View

A View is an advanced feature in budibase that allows you to write a custom query using [MapReduce](https://pouchdb.com/guides/queries.html) queries. Views enable powerful query functionality and calculations, allowing you to do more with your data.

### Page

A page in budibase is actually a single, self contained svelte web app. There are only 2 pages in budibase. The **login** page and the **main** page.

### Screen

A screen is a component within a single page. Generally, screens represent client side routes, and can be switched without refreshing the page.

### Component

A component is the basic frontend building block of a budibase app.

### Component Library

Component libraries are collections of components as well as the definition of their props contained in a file called `components.json`.

## Contributing to Budibase

* Please maintain the existing code style.

* Please try to keep your commits small and focused.

* Please write tests.

* If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.

* Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.

### Getting Started For Contributors
#### 1. Prerequisites

NodeJS Version `14.x.x`

*yarn -* `npm install -g yarn`

*jest* - `npm install -g jest`

#### 2. Clone this repository

`git clone https://github.com/Budibase/budibase.git`

then `cd ` into your local copy.

#### 3. Install and Build

| **NOTE**: On Windows, all yarn commands must be executed on a bash shell (e.g. git bash)

To develop the Budibase platform you'll need [Docker](https://www.docker.com/) and [Docker Compose](https://docs.docker.com/compose/) installed.

##### Quick method

`yarn setup` will check that all necessary components are installed and setup the repo for usage.

##### Manual method

The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).

`yarn` to install project dependencies

`yarn bootstrap` will install all budibase modules and symlink them together using lerna.

`yarn build` will build all budibase packages.

#### 4. Running

To run the budibase server and builder in dev mode (i.e. with live reloading):

1. Open a new console
2. `yarn dev` (from root)
3. Access the builder on http://localhost:10000/builder

This will enable watch mode for both the builder app, server, client library and any component libraries.

#### 5. Debugging using VS Code

To debug the budibase server and worker a VS Code launch configuration has been provided.

Visit the debug window and select `Budibase Server` or `Budibase Worker` to debug the respective component.
Alternatively to start both components simultaneously select `Start Budibase`.

In addition to the above, the remaining budibase components may be ran in dev mode using: `yarn dev:noserver`.

#### 6. Cleanup

If you wish to delete all the apps created in development and reset the environment then run the following:

1. `yarn nuke:docker` will wipe all the Budibase services
2. `yarn dev` will restart all the services

### Backend

For the backend we run [Redis](https://redis.io/), [CouchDB](https://couchdb.apache.org/), [MinIO](https://min.io/) and [NGINX](https://www.nginx.com/) in Docker compose. This means that to develop Budibase you will need Docker and Docker compose installed. The backend services are then ran separately as Node services with nodemon so that they can be debugged outside of Docker.

### Data Storage

When you are running locally, budibase stores data on disk using docker volumes. The volumes and the types of data associated with each are:

- `redis_data`
  - Sessions, email tokens
- `couchdb3_data`
  - Global and app databases
- `minio_data`
  - App manifest, budibase client, static assets

### Devlopment Modes

A combination of environment variables controls the mode that budibase runs in.
Yarn commands can be used to mimic the different modes that budibase can be ran in

#### Self Hosted
The default mode. A single tenant installation with no usage restrictions.

To enable this mode, use:
```
yarn mode:self
```

#### Cloud
The cloud mode, with account portal turned off.

To enable this mode, use:
```
yarn mode:cloud
```
#### Cloud & Account
The cloud mode, with account portal turned on. This is a replica of the mode that runs at https://budibase.app

To enable this mode, use:
```
yarn mode:account
```
### CI
An overview of the CI pipelines can be found [here](./workflows/README.md)
### Troubleshooting

Sometimes, things go wrong. This can be due to incompatible updates on the budibase platform. To clear down your development environment and start again follow **Step 6. Cleanup**, then proceed from **Step 3. Install and Build** in the setup guide above. You should have a fresh Budibase installation.
### Running tests

#### End-to-end Tests

Budibase uses Cypress to run a number of E2E tests. To run the tests execute the following command in the root folder:

```
yarn test:e2e
```

Or if you are in the builder you can run `yarn cy:test`.

### Other Useful Information

* The contributors are listed in [AUTHORS.md](https://github.com/Budibase/budibase/blob/master/.github/AUTHORS.md) (add yourself).

* This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/server/blob/master/LICENSE).

* We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
Please read this if you are unfamiliar with it.
@@ -0,0 +1 @@
../docs/CONTRIBUTING.md
@@ -6,7 +6,7 @@ Welcome to the budibase CI pipelines directory. This document details what each
## All CI Pipelines

### Note
-- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs.
+- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs. The exception to this case is the `deploy-release` job which requires the develop branch.

### Standard CI Build Job (budibase_ci.yml)
Triggers:
@@ -24,14 +24,14 @@ The standard CI Build job is what runs when you raise a PR to develop or master.
Triggers:
- Push to develop

-The job responsible for building, tagging and pushing docker images out to the test and staging environments.
+The job responsible for building, tagging and pushing docker images out to the test and release environments.
- Installs all dependencies
- builds the project
- run the unit tests
- publish the budibase JS packages under a prerelease tag to NPM
- build, tag and push docker images under the `develop` tag to docker hub

-These images will then be pulled by the test and staging environments, updating the latest automatically. Discord notifications are sent to the #infra channel when this occurs.
+These images will then be pulled by the test and release environments, updating the latest automatically. Discord notifications are sent to the #infra channel when this occurs.

### Release Job (release.yml)
Triggers:
@@ -57,8 +57,33 @@ This job relies on the release job to have run first, so the latest image is pus
- Build and release the budibase helm chart for kubernetes users
- Perform a github release with the latest version. You can see previous releases here (https://github.com/Budibase/budibase/releases)

### Deploy Release (deploy-release.yml)
Triggers:
- Manual Workflow Dispatch Trigger

### Cloud Deploy (deploy-cloud.yml)
This job is responsible for deploying to our release, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:

- Checks out the release branch
- Pulls the latest `values.yaml` from budibase infra, a private repo containing budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preproduction EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a discord webhook in the #infra channel to show that the deployment completed successfully.

### Deploy Preprod (deploy-preprod.yml)
Triggers:
- Manual Workflow Dispatch Trigger

This job is responsible for deploying to our preprod, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur (a CLI sketch of the deploy follows the list):

- Checks out the master branch
- Pulls the latest `values.yaml` from budibase infra, a private repo containing budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preprod EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a discord webhook in the #infra channel to show that the deployment completed successfully.
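
Both deploy jobs boil down to a templated helm upgrade against the target EKS cluster. As a rough, hypothetical CLI equivalent (the release name and values file name here are assumptions; the real jobs use a helm GitHub action):

```
# sketch only: roughly what the deploy steps above amount to
RELEASE_VERSION=$(jq -r '.version' lerna.json)   # version from lerna.json, as the job does
helm upgrade --install budibase charts/budibase \
  --namespace budibase \
  -f values.yaml \
  --set globals.appVersion="$RELEASE_VERSION"
```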

### Deploy Production (deploy-cloud.yml)
Triggers:
- Manual Workflow Dispatch Trigger
@@ -91,3 +116,74 @@ This job is responsible for deploying to our production, cloud kubernetes enviro
- Kick off cloud deploy job
- Ensure you are running off master
- Enter the version number of the last known good version of budibase. For example `1.0.0`

## Pro

### Installing Pro

The pro package is always installed from source in our CI jobs.

This is done to prevent pro needing to be published prior to CI runs in budibase. This is required for two reasons:
- To reduce the need for developers to manually bump versions, i.e.:
  - release pro, bump pro dep in budibase, now ci can run successfully
- The cyclic dependency on backend-core, i.e.:
  - pro depends on backend-core
  - server depends on pro
  - backend-core lives in the monorepo, so it can't be released independently to be used in pro
  - therefore the only option is to pull pro from source and release it as a part of the monorepo release, as if it were a mono package

The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../CONTRIBUTING.md#pro)

The branch to install pro from can vary depending on the ref of the commit that triggered the budibase CI job. This is done to enable branches which have changes in both the monorepo and the pro repo to have their CI pass successfully.

This is done using the [pro/install.sh](../../scripts/pro/install.sh) script; a sketch of its fallback logic follows the list. The script will:
- Clone pro to its default branch (`develop`)
- Check if the clone worked; on forked versions of budibase this will fail due to no access
  - This is fine as the `yarn` command will install the version from NPM
  - Community PRs should never touch pro so this will always work
- Checkout the `BRANCH` argument, if this fails fall back to `BASE_BRANCH`
  - This enables the more complex case of a feature branch being merged to another feature branch, e.g.
    - I am working on a branch `epic/stonks` which exists on budibase and pro.
    - I want to merge a change to this branch in budibase from `feature/stonks-ui`, which only exists in budibase
    - The base branch ensures that `epic/stonks` in pro will still be checked out for the CI run, rather than falling back to `develop`
- Run `yarn setup` to build and install dependencies
  - `yarn`
  - `yarn bootstrap`
  - `yarn build`
    - This will build .ts files, and also update the `main` and `types` of `package.json` to point to `dist` rather than src
    - The build command will only ever work in CI, it is prevented in local dev
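
A minimal sketch of that clone-and-fallback logic (the pro repo URL and argument plumbing here are assumptions; the authoritative version is [pro/install.sh](../../scripts/pro/install.sh)):

```
#!/bin/bash
# sketch: clone pro on its default branch, prefer BRANCH, fall back to BASE_BRANCH
BRANCH="$1"
BASE_BRANCH="$2"

if ! git clone https://github.com/Budibase/budibase-pro.git; then
  # forks have no access to pro - yarn will install the NPM version instead
  echo "could not clone pro, falling back to the NPM package"
  exit 0
fi

cd budibase-pro
git checkout "$BRANCH" || git checkout "$BASE_BRANCH" || true
```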

#### `BRANCH` and `BASE_BRANCH` arguments
These arguments are supplied by the various budibase build and release pipelines:
- `budibase_ci`
  - `BRANCH: ${{ github.event.pull_request.head.ref }}` -> The branch being merged
  - `BASE_BRANCH: ${{ github.event.pull_request.base.ref}}` -> The base branch
- `release-develop`
  - `BRANCH: develop` -> always use the `develop` branch in pro
- `release`
  - `BRANCH: master` -> always use the `master` branch in pro

### Releasing Pro
After budibase dependencies have been released we will release the new version of pro to match the release version of budibase dependencies. This is to ensure that we are always keeping the version of `backend-core` in sync in the pro package and in budibase packages. Without this we could run into scenarios where different versions are being used when installed via `yarn` inside the docker images, creating very difficult-to-debug cases.

Pro is released using the [pro/release.sh](../../scripts/pro/release.sh) script; a condensed sketch follows the list. The script will:
- Inspect the `VERSION` from the `lerna.json` file in budibase
- Determine whether to use the `latest` or `develop` tag based on the command argument
- Go to the pro directory
  - install npm creds
  - update the version of `backend-core` to be `VERSION`, the version just released by lerna
  - publish to npm. Uses a `lerna publish` command, as pro itself is a monorepo.
    - force the version to be the same as `VERSION` to keep pro and budibase in sync
  - reverts the changes to `main` and `types` in `package.json` that were made by the build step, to point back to source
  - commit & push: `Prep next development iteration`
- Go to budibase
  - Update to the new version of pro in `server` and `worker` so the latest pro version is used in the docker builds
  - commit & push: `Update pro version to $VERSION`

#### `COMMAND` argument
This argument is supplied by the existing `release` and `release:develop` budibase commands, which invoke the pro release.
- `release` will supply no command and default to use `latest`
- `release:develop` will supply `develop`
@@ -7,10 +7,12 @@ on:
    branches:
      - master
      - develop
+     - new-design-ui
  pull_request:
    branches:
      - master
      - develop
+     - release
  workflow_dispatch:

env:
@@ -58,3 +60,19 @@ jobs:
        with:
          install: false
          command: yarn test:e2e:ci

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Upload to S3
        if: github.ref == 'refs/heads/new-design-ui'
        run: |
          tar -czvf new_ui.tar.gz packages/server/assets packages/server/index.html
          aws s3 cp new_ui.tar.gz s3://prod-budi-app-assets/beta:design_ui/
          aws s3 cp packages/client/dist/budibase-client.js s3://prod-budi-app-assets/beta:design_ui/budibase-client.js
          aws cloudfront create-invalidation --distribution-id E3ELKP4RCEHVLW --paths "/beta:design_ui/*"
@@ -1,4 +1,4 @@
-name: Budibase Cloud Deploy
+name: Budibase Deploy Production

on:
  workflow_dispatch:
@@ -1,12 +1,10 @@
-name: Budibase Release Preprod
+name: Budibase Deploy Preprod

on:
  workflow_dispatch:

env:
  POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

jobs:
@@ -0,0 +1,77 @@
name: Budibase Deploy Release

on:
  workflow_dispatch:

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Fail if branch is not develop
        if: github.ref != 'refs/heads/develop'
        run: |
          echo "Ref is not develop, you must run this job from develop."
          exit 1

      - name: Get the latest budibase release version
        id: version
        run: |
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Tag and release Proxy service docker image
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build:docker:proxy:release
          docker tag proxy-service budibase/proxy:$RELEASE_TAG
          docker push budibase/proxy:$RELEASE_TAG
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
          RELEASE_TAG: k8s-release

      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o values.release.yaml \
            -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-release/values.yaml
          wc -l values.release.yaml

      - name: Deploy to Release Environment
        uses: glopezep/helm@v1.7.1
        with:
          release: budibase-release
          namespace: budibase
          chart: charts/budibase
          token: ${{ github.token }}
          helm: helm3
          values: |
            globals:
              appVersion: develop
            ingress:
              enabled: true
              nginx: true
          value-files: >-
            [
              "values.release.yaml"
            ]
        env:
          KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
          embed-title: ${{ env.RELEASE_VERSION }}
@@ -1,4 +1,5 @@
-name: Budibase Release Staging
+name: Budibase Prerelease
+concurrency: release-prerelease

on:
  push:
@@ -17,10 +18,11 @@ on:
  workflow_dispatch:

env:
- POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+ # Posthog token used by ui at build time
+ POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
- PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+ PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+ FEATURE_PREVIEW_URL: https://budirelease.live

jobs:
  release:
@@ -71,3 +73,56 @@ jobs:
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}

      - name: Get the latest budibase release version
        id: version
        run: |
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Tag and release Proxy service docker image
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build:docker:proxy:release
          docker tag proxy-service budibase/proxy:$RELEASE_TAG
          docker push budibase/proxy:$RELEASE_TAG
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
          RELEASE_TAG: k8s-release

      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o values.release.yaml \
            -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-release/values.yaml
          wc -l values.release.yaml

      - name: Deploy to Release Environment
        uses: glopezep/helm@v1.7.1
        with:
          release: budibase-release
          namespace: budibase
          chart: charts/budibase
          token: ${{ github.token }}
          helm: helm3
          values: |
            globals:
              appVersion: develop
            ingress:
              enabled: true
              nginx: true
          value-files: >-
            [
              "values.release.yaml"
            ]
        env:
          KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
          embed-title: ${{ env.RELEASE_VERSION }}
@@ -87,3 +87,10 @@ jobs:
            packages/cli/build/cli-macos
            packages/server/specs/openapi.yaml
            packages/server/specs/openapi.json

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
          embed-title: ${{ env.RELEASE_VERSION }}
@@ -1,4 +1,5 @@
name: Budibase Release
+concurrency: release

on:
  push:
@@ -17,9 +18,9 @@ on:
  workflow_dispatch:

env:
- POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+ # Posthog token used by ui at build time
+ POSTHOG_TOKEN: phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
@@ -33,23 +33,20 @@ jobs:
        with:
          record: true
          install: false
          tag: nightly
          command: yarn test:e2e:ci:record
        env:
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

      # TODO: upload recordings to s3
      # - name: Configure AWS Credentials
      #   uses: aws-actions/configure-aws-credentials@v1
      #   with:
      #     aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
      #     aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      #     aws-region: eu-west-1

-     - name: Discord Webhook Action
-       uses: tsickert/discord-webhook@v4.0.0
-       with:
-         webhook-url: ${{ secrets.BUDI_QA_WEBHOOK }}
-         content: "Smoke test run completed with ${{ steps.cypress.outcome }}. See results at ${{ steps.cypress.outputs.dashboardUrl }}"
-         embed-title: ${{ steps.cypress.outcome }}
-         embed-color: ${{ steps.cypress.outcome == 'success' && '3066993' || '15548997' }}
+     - uses: actions/upload-artifact@v3
+       with:
+         name: Test Reports
+         path: packages/builder/cypress/reports/testReport.html

+     - name: Cypress Discord Notify
+       run: yarn test:e2e:ci:notify
+       env:
+         CYPRESS_WEBHOOK_URL: ${{ secrets.BUDI_QA_WEBHOOK }}
+         CYPRESS_OUTCOME: ${{ steps.cypress.outcome }}
+         CYPRESS_DASHBOARD_URL: ${{ steps.cypress.outputs.dashboardUrl }}
+         GITHUB_RUN_URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
@@ -98,4 +98,8 @@ hosting/proxy/.generated-nginx.prod.conf
bin/
hosting/.generated*
packages/builder/cypress.env.json
+packages/builder/cypress/reports
stats.html

+# TypeScript cache
+*.tsbuildinfo
@@ -174,6 +174,7 @@ Budibase is dedicated to providing a welcoming, diverse, and harrassment-free ex
## 🙌 Contributing to Budibase

From opening a bug report to creating a pull request: every contribution is appreciated and welcomed. If you're planning to implement a new feature or change the API please create an issue first. This way we can ensure your work is not in vain.
+Environment setup instructions are available for [Debian](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-DEBIAN.md) and [MacOSX](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-MACOSX.md)

### Not Sure Where to Start?
A good place to start contributing, is the [First time issues project](https://github.com/Budibase/budibase/projects/22).
@@ -187,7 +188,7 @@ Budibase is a monorepo managed by lerna. Lerna manages the building and publishi

- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - The budibase server. This Koa app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system.

-For more information, see [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md)
+For more information, see [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md)

<br /><br />
@@ -202,7 +203,7 @@ Budibase is open-source, licensed as [GPL v3](https://www.gnu.org/licenses/gpl-3

[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase)

-If you are having issues between updates of the builder, please use the guide [here](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md#troubleshooting) to clear down your environment.
+If you are having issues between updates of the builder, please use the guide [here](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) to clear down your environment.

<br /><br />
@@ -11,7 +11,7 @@ sources:
  - https://github.com/Budibase/budibase
  - https://budibase.com
type: application
-version: 0.2.9
+version: 0.2.10
appVersion: 1.0.48
dependencies:
  - name: couchdb
@@ -28,6 +28,8 @@ spec:
      - env:
        - name: BUDIBASE_ENVIRONMENT
          value: {{ .Values.globals.budibaseEnv }}
+       - name: DEPLOYMENT_ENVIRONMENT
+         value: "kubernetes"
        - name: COUCH_DB_URL
        {{ if .Values.services.couchdb.url }}
          value: {{ .Values.services.couchdb.url }}
@@ -78,6 +80,10 @@ spec:
          value: {{ .Values.services.objectStore.url }}
        - name: PORT
          value: {{ .Values.services.apps.port | quote }}
+       {{ if .Values.services.worker.publicApiRateLimitPerSecond }}
+       - name: API_REQ_LIMIT_PER_SEC
+         value: {{ .Values.globals.apps.publicApiRateLimitPerSecond | quote }}
+       {{ end }}
        - name: MULTI_TENANCY
          value: {{ .Values.globals.multiTenancy | quote }}
        - name: LOG_LEVEL
@@ -119,6 +125,12 @@ spec:

        image: budibase/apps:{{ .Values.globals.appVersion }}
        imagePullPolicy: Always
+       livenessProbe:
+         httpGet:
+           path: /health
+           port: {{ .Values.services.apps.port }}
+         initialDelaySeconds: 5
+         periodSeconds: 5
        name: bbapps
        ports:
        - containerPort: {{ .Values.services.apps.port }}
@@ -27,6 +27,8 @@ spec:
    spec:
      containers:
      - env:
+       - name: DEPLOYMENT_ENVIRONMENT
+         value: "kubernetes"
        - name: CLUSTER_PORT
          value: {{ .Values.services.worker.port | quote }}
        {{ if .Values.services.couchdb.enabled }}
@@ -91,6 +93,10 @@ spec:
          value: {{ .Values.globals.selfHosted | quote }}
        - name: SENTRY_DSN
          value: {{ .Values.globals.sentryDSN }}
+       - name: ENABLE_ANALYTICS
+         value: {{ .Values.globals.enableAnalytics | quote }}
+       - name: POSTHOG_TOKEN
+         value: {{ .Values.globals.posthogToken }}
        - name: ACCOUNT_PORTAL_URL
          value: {{ .Values.globals.accountPortalUrl | quote }}
        - name: ACCOUNT_PORTAL_API_KEY
@@ -117,8 +123,16 @@ spec:
          value: {{ .Values.globals.google.clientId | quote }}
        - name: GOOGLE_CLIENT_SECRET
          value: {{ .Values.globals.google.secret | quote }}
+       - name: TENANT_FEATURE_FLAGS
+         value: {{ .Values.globals.tenantFeatureFlags | quote }}
        image: budibase/worker:{{ .Values.globals.appVersion }}
        imagePullPolicy: Always
+       livenessProbe:
+         httpGet:
+           path: /health
+           port: {{ .Values.services.worker.port }}
+         initialDelaySeconds: 5
+         periodSeconds: 5
        name: bbworker
        ports:
        - containerPort: {{ .Values.services.worker.port }}
@@ -89,9 +89,9 @@ affinity: {}
globals:
  appVersion: "latest"
  budibaseEnv: PRODUCTION
- enableAnalytics: true
+ enableAnalytics: "1"
  sentryDSN: ""
- posthogToken: ""
+ posthogToken: "phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS"
  logLevel: info
  selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
  multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
@@ -103,7 +103,7 @@ globals:
  google:
    clientId: ""
    secret: ""
- automationMaxIterations: "500"
+ automationMaxIterations: "200"

  createSecrets: true # creates an internal API key, JWT secrets and redis password for you
@@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at community@budibase.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
@@ -0,0 +1,231 @@
# Contributing

From opening a bug report to creating a pull request: every contribution is appreciated and welcome. If you're planning to implement a new feature or change the api please [create an issue](https://github.com/Budibase/budibase/issues/new/choose) first. This way we can ensure that your precious work is not in vain.

## Table of contents

- [Quick start](#quick-start)
- [Status](#status)
- [What's included](#whats-included)
- [Bugs and feature requests](#bugs-and-feature-requests)

## Not Sure Where to Start?

Budibase is a low-code web application builder that creates svelte-based web applications.

Budibase is a monorepo managed by [lerna](https://github.com/lerna/lerna). Lerna manages the building and publishing of the budibase packages. At a high level, here are the packages that make up budibase.

- **packages/builder** - contains code for the budibase builder client side svelte application.

- **packages/client** - A module that runs in the browser responsible for reading JSON definition and creating living, breathing web apps from it.

- **packages/server** - The budibase server. This [Koa](https://koajs.com/) app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system.

- **packages/worker** - This [Koa](https://koajs.com/) app is responsible for providing global apis for managing your budibase installation. Authentication, Users, Email, Org and Auth configs are all provided by the worker.

## Contributor License Agreement (CLA)

In order to accept your pull request, we need you to submit a CLA. You only need to do this once. If you are submitting a pull request for the first time, just submit a Pull Request and our CLA Bot will give you instructions on how to sign the CLA before merging your Pull Request.

All contributors must sign an [Individual Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/individual-cla.md).

If contributing on behalf of your company, your company must sign a [Corporate Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/corporate-cla.md). If so, please contact us via community@budibase.com.

## Glossary of Terms

To understand the budibase API, it can be helpful to understand the top level entities that make up Budibase.

### Client

A client represents a single budibase customer. Each budibase client will have 1 or more budibase servers. Every client is assigned a unique ID.

### App

A client can have one or more budibase applications. Budibase applications would be things like "Developer Inventory Management" or "Goat Herder CRM". Think of a budibase application as a tree.

### Database

An App can have one or more databases. Keeping with our [dendrology](https://en.wikipedia.org/wiki/Dendrology) analogy - think of a database as a branch on the tree. Databases are used to keep data separate for different instances of your app. For example, if you had a CRM app, you may create a database for your US office, and a database for your Australian office. Databases allow us to support [multitenancy](https://www.gartner.com/en/information-technology/glossary/multitenancy) in budibase applications.

### Table

Tables in budibase are almost akin to tables in relational databases. A table may be a "Car" or an "Employee". They are the main building blocks for the creation and management of backend data in budibase.

### View

A View is an advanced feature in budibase that allows you to write a custom query using [MapReduce](https://pouchdb.com/guides/queries.html) queries. Views enable powerful query functionality and calculations, allowing you to do more with your data.

### Page

A page in budibase is actually a single, self contained svelte web app. There are only 2 pages in budibase. The **login** page and the **main** page.

### Screen

A screen is a component within a single page. Generally, screens represent client side routes, and can be switched without refreshing the page.

### Component

A component is the basic frontend building block of a budibase app.

### Component Library

Component libraries are collections of components as well as the definition of their props contained in a file called `components.json`.

## Contributing to Budibase

* Please maintain the existing code style.

* Please try to keep your commits small and focused.

* Please write tests.

* If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.

* Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.

### Getting Started For Contributors
#### 1. Prerequisites

NodeJS Version `14.x.x`

*yarn -* `npm install -g yarn`

*jest* - `npm install -g jest`

#### 2. Clone this repository

`git clone https://github.com/Budibase/budibase.git`

then `cd ` into your local copy.

#### 3. Install and Build

| **NOTE**: On Windows, all yarn commands must be executed on a bash shell (e.g. git bash)

To develop the Budibase platform you'll need [Docker](https://www.docker.com/) and [Docker Compose](https://docs.docker.com/compose/) installed.

##### Quick method

`yarn setup` will check that all necessary components are installed and setup the repo for usage.

##### Manual method

The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).

`yarn` to install project dependencies

`yarn bootstrap` will install all budibase modules and symlink them together using lerna.

`yarn build` will build all budibase packages.
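
For convenience, the same manual sequence as one copy-pasteable block (the commands are taken verbatim from the steps above):

```
# manual bootstrap, mirroring the steps above
yarn            # install project dependencies
yarn bootstrap  # install all budibase modules and symlink them together (lerna)
yarn build      # build all budibase packages
```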

#### 4. Running

To run the budibase server and builder in dev mode (i.e. with live reloading):

1. Open a new console
2. `yarn dev` (from root)
3. Access the builder on http://localhost:10000/builder

This will enable watch mode for both the builder app, server, client library and any component libraries.

#### 5. Debugging using VS Code

To debug the budibase server and worker a VS Code launch configuration has been provided.

Visit the debug window and select `Budibase Server` or `Budibase Worker` to debug the respective component.
Alternatively to start both components simultaneously select `Start Budibase`.

In addition to the above, the remaining budibase components may be run in dev mode using: `yarn dev:noserver`.

#### 6. Cleanup

If you wish to delete all the apps created in development and reset the environment then run the following:

1. `yarn nuke:docker` will wipe all the Budibase services
2. `yarn dev` will restart all the services

### Backend

For the backend we run [Redis](https://redis.io/), [CouchDB](https://couchdb.apache.org/), [MinIO](https://min.io/) and [NGINX](https://www.nginx.com/) in Docker compose. This means that to develop Budibase you will need Docker and Docker compose installed. The backend services are then run separately as Node services with nodemon so that they can be debugged outside of Docker.

### Data Storage

When you are running locally, budibase stores data on disk using docker volumes. The volumes and the types of data associated with each are listed below; a quick way to inspect them follows the list:

- `redis_data`
  - Sessions, email tokens
- `couchdb3_data`
  - Global and app databases
- `minio_data`
  - App manifest, budibase client, static assets
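
To see these volumes on your machine (plain Docker commands; note the names may carry a docker-compose project prefix locally):

```
# list the budibase dev volumes and where each lives on disk
docker volume ls --format '{{.Name}}' | grep -E 'redis_data|couchdb3_data|minio_data'
docker volume inspect --format '{{.Name}} -> {{.Mountpoint}}' redis_data couchdb3_data minio_data
```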

### Development Modes

A combination of environment variables controls the mode budibase runs in.
Yarn commands can be used to mimic the different modes as described in the sections below:

#### Self Hosted
The default mode. A single tenant installation with no usage restrictions.

To enable this mode, use:
```
yarn mode:self
```

#### Cloud
The cloud mode, with account portal turned off.

To enable this mode, use:
```
yarn mode:cloud
```
#### Cloud & Account
The cloud mode, with account portal turned on. This is a replica of the mode that runs at https://budibase.app

To enable this mode, use:
```
yarn mode:account
```
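
Under the hood these modes come down to a few environment variables in the service `.env` files. A hedged sketch of what `yarn mode:self` implies (variable names inferred from the helm chart globals such as `selfHosted` and `multiTenancy`; the scripts may set more):

```
# sketch: self hosted mode in packages/server/.env and packages/worker/.env
SELF_HOSTED=1      # 1 = self hosted (the default), 0 = cloud
MULTI_TENANCY=0    # 0 = single org, 1 = multiple orgs
```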
### CI
An overview of the CI pipelines can be found [here](./workflows/README.md)

### Pro

@budibase/pro is the closed source package that supports licensed features in budibase. By default the package will be pulled from NPM and will not normally need to be touched in local development. If you need to update code inside the pro package it can be cloned to the same root level as budibase, e.g.

```
.
|_ budibase
|_ budibase-pro
```

Note that only budibase maintainers will be able to access the pro repo.

The `yarn bootstrap` command can be used to replace the NPM supplied dependency with the local source aware version. This is achieved using the `yarn link` command. To see specifically how dependencies are linked see [scripts/link-dependencies.sh](../scripts/link-dependencies.sh). The same link script is used to link dependencies to account-portal in local dev.
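
As a rough illustration of what that linking amounts to (package paths assumed; the authoritative steps are in [scripts/link-dependencies.sh](../scripts/link-dependencies.sh)):

```
# sketch: register the local pro package, then point budibase at it
cd ../budibase-pro/packages/pro
yarn link                    # registers @budibase/pro locally

cd ../../../budibase
yarn link "@budibase/pro"    # swap the NPM dependency for the local clone
```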

### Troubleshooting

Sometimes, things go wrong. This can be due to incompatible updates on the budibase platform. To clear down your development environment and start again follow **Step 6. Cleanup**, then proceed from **Step 3. Install and Build** in the setup guide above to create a fresh Budibase installation.
### Running tests

#### End-to-end Tests

Budibase uses Cypress to run a number of E2E tests. To run the tests execute the following command in the root folder:

```
yarn test:e2e
```

Or if you are in the builder you can run `yarn cy:test`.

### Other Useful Information

* The contributors are listed in [AUTHORS.md](https://github.com/Budibase/budibase/blob/master/.github/AUTHORS.md) (add yourself).

* This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/server/blob/master/LICENSE).

* We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
Please read this if you are unfamiliar with it.
@@ -0,0 +1,52 @@
## Dev Environment on Debian 11

### Install Node

Budibase requires a recent version of node (14+):
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt -y install nodejs
node -v
```

### Install npm requirements

```
npm install -g yarn jest lerna
```
### Install Docker and Docker Compose

```
apt install docker.io
pip3 install docker-compose
```
### Clone the repo
```
git clone https://github.com/Budibase/budibase.git
```

### Check Versions

This setup process was tested on Debian 11 (bullseye) with version numbers shown below. Your mileage may vary using anything else.

- Docker: 20.10.5
- Docker-Compose: 1.29.2
- Node: v16.15.1
- Yarn: 1.22.19
- Lerna: 5.1.4

### Build

```
cd budibase
yarn setup
```
The yarn setup command runs several build steps i.e.
```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```
So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000 i.e.

http://127.0.0.1:10000/builder/admin
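
Once `yarn setup` is running you can sanity-check the stack with plain Docker/curl (nothing budibase-specific assumed here):

```
# confirm the supporting containers are up and the builder responds
docker ps --format '{{.Names}}\t{{.Status}}'
curl -I http://127.0.0.1:10000/builder/admin
```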
|
|
@ -0,0 +1,54 @@
|
|||
## Dev Environment on MAC OSX 12 (Monterey)
|
||||
|
||||
### Install Homebrew
|
||||
|
||||
Install instructions [here](https://brew.sh/)
|
||||
|
||||
### Install Node
|
||||
|
||||
Budibase requires a recent version of node (14+):
|
||||
```
|
||||
brew install node npm
|
||||
node -v
|
||||
```
|
||||
|
||||
### Install npm requirements
|
||||
|
||||
```
|
||||
npm install -g yarn jest lerna
|
||||
```
|
||||
### Install Docker and Docker Compose
|
||||
|
||||
```
|
||||
brew install docker docker-compose
|
||||
```
|
||||
### Clone the repo
|
||||
```
|
||||
git clone https://github.com/Budibase/budibase.git
|
||||
```
|
||||
|
||||
### Check Versions
|
||||
|
||||
This setup process was tested on Mac OSX 12 (Monterey) with version numbers shown below. Your mileage may vary using anything else.
|
||||
|
||||
- Docker: 20.10.14
|
||||
- Docker-Compose: 2.6.0
|
||||
- Node: 18.3.0
|
||||
- Yarn: 1.22.19
|
||||
- Lerna: 5.1.4
|
||||
|
||||
### Build
|
||||
|
||||
```
|
||||
cd budibase
|
||||
yarn setup
|
||||
```
|
||||
The `yarn setup` command runs several build steps, i.e.
|
||||
```
|
||||
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
|
||||
```
|
||||
This command will therefore run the application in dev mode. It creates `.env` files under `./packages/server` and `./packages/worker` and runs a docker container for each service via docker-compose.
|
||||
|
||||
The dev version will be available on port 10000, e.g.
|
||||
|
||||
http://127.0.0.1:10000/builder/admin
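If the builder does not appear on port 10000, something else may already be bound to it; on macOS you can check with:

```
lsof -nP -iTCP:10000 -sTCP:LISTEN
```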
|
|
@ -0,0 +1,13 @@
|
|||
#!/bin/bash
|
||||
CUSTOM_DOMAIN="$1"
|
||||
|
||||
if [[ -n "${CUSTOM_DOMAIN}" ]]; then
|
||||
certbot certonly --webroot --webroot-path="/var/www/html" \
|
||||
--register-unsafely-without-email \
|
||||
--domains "$CUSTOM_DOMAIN" \
|
||||
--rsa-key-size 4096 \
|
||||
--agree-tos \
|
||||
--force-renewal
|
||||
|
||||
nginx -s reload
|
||||
fi
|
|
@ -0,0 +1,23 @@
|
|||
#!/bin/bash
|
||||
CUSTOM_DOMAIN="$1"
|
||||
# Request from Lets Encrypt
|
||||
certbot certonly --webroot --webroot-path="/var/www/html" \
|
||||
--register-unsafely-without-email \
|
||||
--domains "$CUSTOM_DOMAIN" \
|
||||
--rsa-key-size 4096 \
|
||||
--agree-tos \
|
||||
--force-renewal
|
||||
|
||||
if (($? != 0)); then
|
||||
echo "ERROR: certbot request failed for $CUSTOM_DOMAIN use http on port 80 - exiting"
|
||||
exit 1
|
||||
else
|
||||
cp /app/letsencrypt/options-ssl-nginx.conf /etc/letsencrypt/options-ssl-nginx.conf
|
||||
cp /app/letsencrypt/ssl-dhparams.pem /etc/letsencrypt/ssl-dhparams.pem
|
||||
cp /app/letsencrypt/nginx-ssl.conf /etc/nginx/sites-available/nginx-ssl.conf
|
||||
sed -i "s/CUSTOM_DOMAIN/$CUSTOM_DOMAIN/g" /etc/nginx/sites-available/nginx-ssl.conf
|
||||
ln -s /etc/nginx/sites-available/nginx-ssl.conf /etc/nginx/sites-enabled/nginx-ssl.conf
|
||||
|
||||
echo "INFO: restart nginx after certbot request"
|
||||
/etc/init.d/nginx restart
|
||||
fi
|
|
@ -0,0 +1,96 @@
|
|||
server {
|
||||
listen 443 ssl default_server;
|
||||
listen [::]:443 ssl default_server;
|
||||
server_name _;
|
||||
ssl_certificate /etc/letsencrypt/live/CUSTOM_DOMAIN/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/CUSTOM_DOMAIN/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
client_max_body_size 1000m;
|
||||
ignore_invalid_headers off;
|
||||
proxy_buffering off;
|
||||
# port_in_redirect off;
|
||||
|
||||
location ^~ /.well-known/acme-challenge/ {
|
||||
default_type "text/plain";
|
||||
root /var/www/html;
|
||||
break;
|
||||
}
|
||||
location = /.well-known/acme-challenge/ {
|
||||
return 404;
|
||||
}
|
||||
|
||||
location /app {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location = / {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/(builder|app_) {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/api/(system|admin|global)/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
}
|
||||
|
||||
location /worker/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
rewrite ^/worker/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
# calls to the API are rate limited with bursting
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
||||
# 120s timeout on API requests
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location /db/ {
|
||||
proxy_pass http://127.0.0.1:5984;
|
||||
rewrite ^/db/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 300;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
chunked_transfer_encoding off;
|
||||
proxy_pass http://127.0.0.1:9000;
|
||||
}
|
||||
|
||||
client_header_timeout 60;
|
||||
client_body_timeout 60;
|
||||
keepalive_timeout 60;
|
||||
|
||||
# gzip
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_proxied any;
|
||||
gzip_comp_level 6;
|
||||
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
|
||||
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
# This file contains important security parameters. If you modify this file
|
||||
# manually, Certbot will be unable to automatically provide future security
|
||||
# updates. Instead, Certbot will print and log an error message with a path to
|
||||
# the up-to-date file that you will need to refer to when manually updating
|
||||
# this file.
|
||||
|
||||
ssl_session_cache shared:le_nginx_SSL:10m;
|
||||
ssl_session_timeout 1440m;
|
||||
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
|
|
@ -0,0 +1,8 @@
|
|||
-----BEGIN DH PARAMETERS-----
|
||||
MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz
|
||||
+8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a
|
||||
87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7
|
||||
YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi
|
||||
7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD
|
||||
ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg==
|
||||
-----END DH PARAMETERS-----
|
|
@ -48,7 +48,7 @@ http {
|
|||
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
|
||||
set $csp_object "object-src 'none'";
|
||||
set $csp_base_uri "base-uri 'self'";
|
||||
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
|
||||
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
|
||||
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
|
||||
set $csp_frame "frame-src 'self' https:";
|
||||
set $csp_img "img-src http: https: data: blob:";
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/bash
|
||||
|
||||
echo ${TARGETBUILD} > /buildtarget.txt
|
||||
if [[ "${TARGETBUILD}" = "aas" ]]; then
|
||||
# Azure AppService uses /home for persistent data & SSH on port 2222
|
||||
mkdir -p /home/budibase/{minio,couchdb}
|
||||
mkdir -p /home/budibase/couchdb/data
|
||||
chown -R couchdb:couchdb /home/budibase/couchdb/
|
||||
apt update
|
||||
apt-get install -y openssh-server
|
||||
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/budibase/couchdb/data/search#' /opt/clouseau/clouseau.ini
|
||||
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/budibase/minio &#' /runner.sh
|
||||
sed -i 's#database_dir = ./data#database_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
|
||||
sed -i 's#view_index_dir = ./data#view_index_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
|
||||
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
|
||||
/etc/init.d/ssh restart
|
||||
fi
|
|
@ -0,0 +1,40 @@
|
|||
#!/usr/bin/env bash
|
||||
healthy=true
|
||||
|
||||
if [[ $(curl -Lfk -s -w "%{http_code}\n" http://localhost/ -o /dev/null) -ne 200 ]]; then
|
||||
echo 'ERROR: Budibase is not running';
|
||||
healthy=false
|
||||
fi
|
||||
|
||||
if [[ $(curl -s -w "%{http_code}\n" http://localhost:4001/health -o /dev/null) -ne 200 ]]; then
|
||||
echo 'ERROR: Budibase backend is not running';
|
||||
healthy=false
|
||||
fi
|
||||
|
||||
if [[ $(curl -s -w "%{http_code}\n" http://localhost:4002/health -o /dev/null) -ne 200 ]]; then
|
||||
echo 'ERROR: Budibase worker is not running';
|
||||
healthy=false
|
||||
fi
|
||||
|
||||
if [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/ -o /dev/null) -ne 200 ]]; then
|
||||
echo 'ERROR: CouchDB is not running';
|
||||
healthy=false
|
||||
fi
|
||||
if [[ $(redis-cli -a "$REDIS_PASSWORD" --no-auth-warning ping) != 'PONG' ]]; then
|
||||
echo 'ERROR: Redis is down';
|
||||
healthy=false
|
||||
fi
|
||||
# minio and clouseau are not health checked here
|
||||
nginx -t -q
|
||||
NGINX_STATUS=$?
|
||||
|
||||
if [[ $NGINX_STATUS -gt 0 ]]; then
|
||||
echo 'ERROR: Nginx config problem';
|
||||
healthy=false
|
||||
fi
|
||||
|
||||
if [[ $healthy == true ]]; then
|
||||
exit 0
|
||||
else
|
||||
exit 1
|
||||
fi
|
|
@ -1,77 +1,93 @@
|
|||
FROM couchdb
|
||||
FROM node:14-slim as build
|
||||
|
||||
ENV COUCHDB_PASSWORD=budibase
|
||||
ENV COUCHDB_USER=budibase
|
||||
ENV COUCH_DB_URL=http://budibase:budibase@localhost:5984
|
||||
ENV BUDIBASE_ENVIRONMENT=PRODUCTION
|
||||
ENV MINIO_URL=http://localhost:9000
|
||||
ENV REDIS_URL=localhost:6379
|
||||
ENV WORKER_URL=http://localhost:4002
|
||||
ENV INTERNAL_API_KEY=budibase
|
||||
ENV JWT_SECRET=testsecret
|
||||
ENV MINIO_ACCESS_KEY=budibase
|
||||
ENV MINIO_SECRET_KEY=budibase
|
||||
ENV SELF_HOSTED=1
|
||||
ENV CLUSTER_PORT=10000
|
||||
ENV REDIS_PASSWORD=budibase
|
||||
ENV ARCHITECTURE=amd
|
||||
ENV APP_PORT=4001
|
||||
ENV WORKER_PORT=4002
|
||||
# install node-gyp dependencies
|
||||
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install software-properties-common wget nginx -y
|
||||
RUN apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main'
|
||||
RUN apt-get update
|
||||
# add pin script
|
||||
WORKDIR /
|
||||
ADD scripts/pinVersions.js scripts/cleanup.sh ./
|
||||
RUN chmod +x /cleanup.sh
|
||||
|
||||
# build server
|
||||
WORKDIR /app
|
||||
ADD packages/server .
|
||||
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
|
||||
|
||||
# build worker
|
||||
WORKDIR /worker
|
||||
ADD packages/worker .
|
||||
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
|
||||
|
||||
FROM couchdb:3.2.1
|
||||
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
|
||||
ARG TARGETARCH=amd64
|
||||
# TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
|
||||
# e.g. docker build --build-arg TARGETBUILD=aas ....
|
||||
ARG TARGETBUILD=single
|
||||
ENV TARGETBUILD $TARGETBUILD
|
||||
|
||||
COPY --from=build /app /app
|
||||
COPY --from=build /worker /worker
|
||||
|
||||
ENV \
|
||||
APP_PORT=4001 \
|
||||
ARCHITECTURE=amd \
|
||||
BUDIBASE_ENVIRONMENT=PRODUCTION \
|
||||
CLUSTER_PORT=80 \
|
||||
COUCHDB_PASSWORD=budibase \
|
||||
COUCHDB_USER=budibase \
|
||||
COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
|
||||
# CUSTOM_DOMAIN=budi001.custom.com \
|
||||
DEPLOYMENT_ENVIRONMENT=docker \
|
||||
INTERNAL_API_KEY=budibase \
|
||||
JWT_SECRET=testsecret \
|
||||
MINIO_ACCESS_KEY=budibase \
|
||||
MINIO_SECRET_KEY=budibase \
|
||||
MINIO_URL=http://localhost:9000 \
|
||||
POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \
|
||||
REDIS_PASSWORD=budibase \
|
||||
REDIS_URL=localhost:6379 \
|
||||
SELF_HOSTED=1 \
|
||||
TARGETBUILD=$TARGETBUILD \
|
||||
WORKER_PORT=4002 \
|
||||
WORKER_URL=http://localhost:4002
|
||||
|
||||
# install base dependencies
|
||||
RUN apt-get update && \
|
||||
apt-get install -y software-properties-common wget nginx && \
|
||||
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
|
||||
apt-get update
|
||||
|
||||
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
|
||||
WORKDIR /nodejs
|
||||
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \
|
||||
bash /tmp/nodesource_setup.sh && \
|
||||
apt-get install -y libaio1 nodejs nginx openjdk-8-jdk redis-server unzip && \
|
||||
npm install --global yarn pm2
|
||||
|
||||
# setup nginx
|
||||
ADD hosting/single/nginx.conf /etc/nginx
|
||||
RUN mkdir /etc/nginx/logs
|
||||
RUN useradd www
|
||||
RUN touch /etc/nginx/logs/error.log
|
||||
RUN touch /etc/nginx/logs/nginx.pid
|
||||
ADD hosting/single/nginx-default-site.conf /etc/nginx/sites-enabled/default
|
||||
RUN mkdir -p /var/log/nginx && \
|
||||
touch /var/log/nginx/error.log && \
|
||||
touch /var/run/nginx.pid
|
||||
|
||||
# install java
|
||||
RUN apt-get install openjdk-8-jdk -y
|
||||
|
||||
# setup nodejs
|
||||
WORKDIR /nodejs
|
||||
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh
|
||||
RUN bash /tmp/nodesource_setup.sh
|
||||
RUN apt-get install nodejs
|
||||
RUN npm install --global yarn
|
||||
RUN npm install --global pm2
|
||||
|
||||
# setup redis
|
||||
RUN apt install redis-server -y
|
||||
|
||||
# setup server
|
||||
WORKDIR /app
|
||||
ADD packages/server .
|
||||
RUN ls -al
|
||||
RUN yarn
|
||||
RUN yarn build
|
||||
# Install client for oracle datasource
|
||||
RUN apt-get install unzip libaio1
|
||||
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh
|
||||
|
||||
# setup worker
|
||||
WORKDIR /worker
|
||||
ADD packages/worker .
|
||||
RUN yarn
|
||||
RUN yarn build
|
||||
WORKDIR /
|
||||
RUN mkdir -p scripts/integrations/oracle
|
||||
ADD packages/server/scripts/integrations/oracle scripts/integrations/oracle
|
||||
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
|
||||
|
||||
# setup clouseau
|
||||
WORKDIR /
|
||||
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip
|
||||
RUN unzip clouseau-2.21.0-dist.zip
|
||||
RUN mv clouseau-2.21.0 /opt/clouseau
|
||||
RUN rm clouseau-2.21.0-dist.zip
|
||||
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
|
||||
unzip clouseau-2.21.0-dist.zip && \
|
||||
mv clouseau-2.21.0 /opt/clouseau && \
|
||||
rm clouseau-2.21.0-dist.zip
|
||||
|
||||
WORKDIR /opt/clouseau
|
||||
RUN mkdir ./bin
|
||||
ADD hosting/single/clouseau ./bin/
|
||||
ADD hosting/single/log4j.properties .
|
||||
ADD hosting/single/clouseau.ini .
|
||||
ADD hosting/single/log4j.properties hosting/single/clouseau.ini ./
|
||||
RUN chmod +x ./bin/clouseau
|
||||
|
||||
# setup CouchDB
|
||||
|
@ -80,18 +96,49 @@ ADD hosting/single/vm.args ./etc/
|
|||
|
||||
# setup minio
|
||||
WORKDIR /minio
|
||||
RUN wget https://dl.min.io/server/minio/release/linux-${ARCHITECTURE}64/minio
|
||||
RUN chmod +x minio
|
||||
ADD scripts/install-minio.sh ./install.sh
|
||||
RUN chmod +x install.sh && ./install.sh
|
||||
|
||||
# setup runner file
|
||||
WORKDIR /
|
||||
ADD hosting/single/runner.sh .
|
||||
RUN chmod +x ./runner.sh
|
||||
ADD hosting/scripts/healthcheck.sh .
|
||||
RUN chmod +x ./healthcheck.sh
|
||||
|
||||
EXPOSE 10000
|
||||
ADD hosting/scripts/build-target-paths.sh .
|
||||
RUN chmod +x ./build-target-paths.sh
|
||||
|
||||
# For Azure App Service install SSH & point data locations to /home
|
||||
RUN /build-target-paths.sh
|
||||
|
||||
# cleanup cache
|
||||
RUN yarn cache clean -f
|
||||
|
||||
EXPOSE 80
|
||||
EXPOSE 443
|
||||
VOLUME /opt/couchdb/data
|
||||
VOLUME /minio
|
||||
|
||||
# setup letsencrypt certificate
|
||||
RUN apt-get install -y certbot python3-certbot-nginx
|
||||
ADD hosting/letsencrypt /app/letsencrypt
|
||||
RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh
|
||||
# Remove cached files
|
||||
RUN rm -rf \
|
||||
/root/.cache \
|
||||
/root/.npm \
|
||||
/root/.pip \
|
||||
/usr/local/share/doc \
|
||||
/usr/share/doc \
|
||||
/usr/share/man \
|
||||
/var/lib/apt/lists/* \
|
||||
/tmp/*
|
||||
|
||||
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
|
||||
|
||||
# must set this just before running
|
||||
ENV NODE_ENV=production
|
||||
WORKDIR /
|
||||
|
||||
CMD ["./runner.sh"]
|
||||
|
|
|
@ -0,0 +1,112 @@
|
|||
# Docker Single Image for Budibase
|
||||
|
||||
## Overview
|
||||
As an alternative to running several docker containers via docker-compose, the files under `./hosting/single` can be used to build a docker image containing all of the Budibase components (minio, couchdb, clouseau, etc.).
|
||||
We call this the 'single image' container as the Dockerfile adds all the components to a single docker image.
|
||||
|
||||
## Usage
|
||||
|
||||
- Amend Environment Variables
|
||||
- Build Requirements
|
||||
- Build the Image
|
||||
- Run the Container
|
||||
|
||||
### Amend Environment Variables
|
||||
|
||||
Edit the Dockerfile in this directory, amending the environment variables to suit your usage. Pay particular attention to changing passwords.
|
||||
The CUSTOM_DOMAIN variable will be used to request a certificate from LetsEncrypt; if successful, you can point traffic to port 443. If you choose to use the CUSTOM_DOMAIN variable, ensure that the DNS for your custom domain points to the public IP address where you are running Budibase - otherwise the certificate issuance will fail.
|
||||
If you have other arrangements for a proxy in front of the single image container, you can omit the CUSTOM_DOMAIN environment variable and the request to LetsEncrypt will be skipped. You can then point traffic to port 80. A runtime alternative is sketched below.
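Because runner.sh only triggers the LetsEncrypt request when CUSTOM_DOMAIN is set, you can also supply it at run time rather than baking it into the Dockerfile; a sketch, with an illustrative domain:

```
docker run -d -p 80:80 -p 443:443 \
  -e CUSTOM_DOMAIN=budibase.example.com \
  --name budibase budibase:latest
```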
|
||||
|
||||
### Build Requirements
|
||||
We suggest building the image with 6GB of RAM and 20GB of free disk space for build artifacts. The resulting image will use approximately 2GB of disk space.
|
||||
|
||||
### Build the Image
|
||||
The guidance below is based on building the Budibase single image on Debian 11 and AlmaLinux 8. If you use another distro or OS, you will need to amend the commands to suit.
|
||||
#### Install Node
|
||||
Budibase requires a more recent version of node (14+) than is available in the base Debian repos, so:
|
||||
|
||||
```
|
||||
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
|
||||
apt install -y nodejs
|
||||
node -v
|
||||
```
|
||||
Install yarn, jest and lerna:
|
||||
```
|
||||
npm install -g yarn jest lerna
|
||||
```
|
||||
#### Install Docker
|
||||
|
||||
```
|
||||
apt install -y docker.io
|
||||
```
|
||||
|
||||
Check the version of each installed tool. This process was tested with the version numbers below, so YMMV using anything else:
|
||||
|
||||
- Docker: 20.10.5
|
||||
- node: 16.15.1
|
||||
- yarn: 1.22.19
|
||||
- lerna: 5.1.4
|
||||
|
||||
#### Get the Code
|
||||
Clone the Budibase repo
|
||||
```
|
||||
git clone https://github.com/Budibase/budibase.git
|
||||
cd budibase
|
||||
```
|
||||
#### Setup Node
|
||||
Run the node install and build steps:
|
||||
```
|
||||
node ./hosting/scripts/setup.js
|
||||
yarn
|
||||
yarn bootstrap
|
||||
yarn build
|
||||
```
|
||||
#### Build Image
|
||||
The following yarn command does some prep and then runs the docker build command:
|
||||
```
|
||||
yarn build:docker:single
|
||||
```
|
||||
If the docker build step fails, try running that step again manually with:
|
||||
```
|
||||
docker build --build-arg TARGETARCH=amd --no-cache -t budibase:latest -f ./hosting/single/Dockerfile .
|
||||
```
|
||||
|
||||
#### Azure App Services
|
||||
Azure has some specific requirements for running a container in its App Service: namely, installation of SSH on port 2222 and data storage under /home. If you would like to build a Budibase container for Azure App Service, add the build argument shown below, setting it to 'aas'. You can also remove the CUSTOM_DOMAIN env variable from the Dockerfile, as Azure terminates SSL before requests reach the container.
|
||||
```
|
||||
docker build --build-arg TARGETARCH=amd --build-arg TARGETBUILD=aas -t budibase:latest -f ./hosting/single/Dockerfile .
|
||||
```
|
||||
|
||||
### Run the Container
|
||||
```
|
||||
docker run -d -p 80:80 -p 443:443 --name budibase budibase:latest
|
||||
```
|
||||
Where:
|
||||
- -d runs the container in detached mode
|
||||
- -p forwards ports from your host to the ports inside the container. If you are already using port 80 on your host for something else, you can try running with an alternative port, e.g. `-p 8080:80`
|
||||
- --name is the name for the container as shown in `docker ps`, and can be used with other docker commands, e.g. `docker restart budibase`
|
||||
|
||||
When the container runs, you should be able to access it over http at your host address, e.g. http://1.2.3.4/, or using your custom domain, e.g. https://my.custom.domain/
|
||||
|
||||
When the Budibase UI appears, you will be prompted to create an account to get started.
|
||||
|
||||
### Podman
|
||||
The single image container builds fine when using podman in place of docker. You may be prompted for the registry to use for the CouchDB image, and the HEALTHCHECK parameter is not OCI compliant so it is ignored; the equivalent commands are sketched below.
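A sketch of the equivalent podman commands, mirroring the docker ones above (answer the registry prompt with docker.io so the couchdb base image resolves):

```
podman build --build-arg TARGETARCH=amd -t budibase:latest -f ./hosting/single/Dockerfile .
podman run -d -p 80:80 -p 443:443 --name budibase budibase:latest
```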
|
||||
|
||||
### Check
|
||||
There are many things that could go wrong, so if your container is not building or running as expected, please check the following before opening a support issue.
|
||||
Verify the healthcheck status of the container:
|
||||
```
|
||||
docker ps
|
||||
```
|
||||
Check the container logs:
|
||||
```
|
||||
docker logs budibase
|
||||
```
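The HEALTHCHECK defined in the Dockerfile feeds the STATUS column of `docker ps`; to read the health state on its own:

```
docker inspect --format '{{.State.Health.Status}}' budibase
```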
|
||||
|
||||
### Support
|
||||
This single image build is still a work in progress, so if you open an issue please provide the following information:
|
||||
- The OS and OS version you are building on
|
||||
- The versions you are using of docker, docker-compose, yarn, node, lerna
|
||||
- For build errors please provide zipped output
|
||||
- For container errors please provide zipped container logs
|
|
@ -0,0 +1,94 @@
|
|||
server {
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
server_name _;
|
||||
|
||||
client_max_body_size 1000m;
|
||||
ignore_invalid_headers off;
|
||||
proxy_buffering off;
|
||||
# port_in_redirect off;
|
||||
|
||||
location ^~ /.well-known/acme-challenge/ {
|
||||
default_type "text/plain";
|
||||
root /var/www/html;
|
||||
break;
|
||||
}
|
||||
location = /.well-known/acme-challenge/ {
|
||||
return 404;
|
||||
}
|
||||
|
||||
location /app {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location = / {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/(builder|app_) {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/api/(system|admin|global)/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
}
|
||||
|
||||
location /worker/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
rewrite ^/worker/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
# calls to the API are rate limited with bursting
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
||||
# 120s timeout on API requests
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location /db/ {
|
||||
proxy_pass http://127.0.0.1:5984;
|
||||
rewrite ^/db/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 300;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
chunked_transfer_encoding off;
|
||||
proxy_pass http://127.0.0.1:9000;
|
||||
}
|
||||
|
||||
client_header_timeout 60;
|
||||
client_body_timeout 60;
|
||||
keepalive_timeout 60;
|
||||
|
||||
# gzip
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_proxied any;
|
||||
gzip_comp_level 6;
|
||||
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
user www www;
|
||||
error_log /etc/nginx/logs/error.log;
|
||||
pid /etc/nginx/logs/nginx.pid;
|
||||
user www-data www-data;
|
||||
error_log /var/log/nginx/error.log;
|
||||
pid /var/run/nginx.pid;
|
||||
worker_processes auto;
|
||||
worker_rlimit_nofile 8192;
|
||||
|
||||
|
@ -32,85 +32,6 @@ http {
|
|||
default "upgrade";
|
||||
}
|
||||
|
||||
server {
|
||||
listen 10000 default_server;
|
||||
listen [::]:10000 default_server;
|
||||
server_name _;
|
||||
client_max_body_size 1000m;
|
||||
ignore_invalid_headers off;
|
||||
proxy_buffering off;
|
||||
# port_in_redirect off;
|
||||
include /etc/nginx/sites-enabled/*;
|
||||
|
||||
location /app {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location = / {
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/(builder|app_) {
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location ~ ^/api/(system|admin|global)/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
}
|
||||
|
||||
location /worker/ {
|
||||
proxy_pass http://127.0.0.1:4002;
|
||||
rewrite ^/worker/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
# calls to the API are rate limited with bursting
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
||||
# 120s timeout on API requests
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location /db/ {
|
||||
proxy_pass http://127.0.0.1:5984;
|
||||
rewrite ^/db/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 300;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
chunked_transfer_encoding off;
|
||||
proxy_pass http://127.0.0.1:9000;
|
||||
}
|
||||
|
||||
client_header_timeout 60;
|
||||
client_body_timeout 60;
|
||||
keepalive_timeout 60;
|
||||
|
||||
# gzip
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_proxied any;
|
||||
gzip_comp_level 6;
|
||||
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,15 @@ redis-server --requirepass $REDIS_PASSWORD &
|
|||
/opt/clouseau/bin/clouseau &
|
||||
/minio/minio server /minio &
|
||||
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
|
||||
/etc/init.d/nginx restart
|
||||
if [[ -n "${CUSTOM_DOMAIN}" ]]; then
|
||||
# Add monthly cron job to renew certbot certificate
|
||||
echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >> /etc/cron.d/certificate-renew
|
||||
chmod +x /etc/cron.d/certificate-renew
|
||||
# Request the certbot certificate
|
||||
/app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}
|
||||
fi
|
||||
|
||||
/etc/init.d/nginx restart
|
||||
pushd app
|
||||
pm2 start --name app "yarn run:docker"
|
||||
|
@ -10,7 +19,6 @@ pushd worker
|
|||
pm2 start --name worker "yarn run:docker"
|
||||
popd
|
||||
sleep 10
|
||||
URL=http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984
|
||||
curl -X PUT ${URL}/_users
|
||||
curl -X PUT ${URL}/_replicator
|
||||
curl -X PUT ${COUCH_DB_URL}/_users
|
||||
curl -X PUT ${COUCH_DB_URL}/_replicator
|
||||
sleep infinity
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/bash
|
||||
id=$(docker run -t -d -p 80:80 budibase:latest)
|
||||
docker exec -it $id bash
|
||||
docker kill $id
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"version": "1.0.151-alpha.1",
|
||||
"version": "1.0.212-alpha.6",
|
||||
"npmClient": "yarn",
|
||||
"packages": [
|
||||
"packages/*"
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
"private": true,
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-json": "^4.0.2",
|
||||
"@types/mongodb": "3.6.3",
|
||||
"@typescript-eslint/parser": "4.28.0",
|
||||
"babel-eslint": "^10.0.3",
|
||||
"eslint": "^7.28.0",
|
||||
"eslint-plugin-cypress": "^2.11.3",
|
||||
|
@ -16,13 +18,13 @@
|
|||
"rimraf": "^3.0.2",
|
||||
"rollup-plugin-replace": "^2.2.0",
|
||||
"svelte": "^3.38.2",
|
||||
"@typescript-eslint/parser": "4.28.0",
|
||||
"typescript": "4.5.5"
|
||||
},
|
||||
"scripts": {
|
||||
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
|
||||
"bootstrap": "lerna link && lerna bootstrap && ./scripts/link-dependencies.sh",
|
||||
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
|
||||
"build": "lerna run build",
|
||||
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
|
||||
"release": "lerna publish patch --yes --force-publish && yarn release:pro",
|
||||
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop && yarn release:pro:develop",
|
||||
"release:pro": "bash scripts/pro/release.sh",
|
||||
|
@ -36,8 +38,8 @@
|
|||
"kill-server": "kill-port 4001 4002",
|
||||
"kill-all": "yarn run kill-builder && yarn run kill-server",
|
||||
"dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
|
||||
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/server --ignore @budibase/worker",
|
||||
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/worker --scope @budibase/server",
|
||||
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
|
||||
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
|
||||
"test": "lerna run test",
|
||||
"lint:eslint": "eslint packages",
|
||||
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
|
||||
|
@ -48,16 +50,19 @@
|
|||
"test:e2e": "lerna run cy:test --stream",
|
||||
"test:e2e:ci": "lerna run cy:ci --stream",
|
||||
"test:e2e:ci:record": "lerna run cy:ci:record --stream",
|
||||
"test:e2e:ci:notify": "lerna run cy:ci:notify",
|
||||
"build:specs": "lerna run specs",
|
||||
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
|
||||
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
|
||||
"build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
|
||||
"build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
|
||||
"build:docker:proxy:release": "node scripts/proxy/generateProxyConfig release && npm run build:docker:proxy",
|
||||
"build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
|
||||
"build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
|
||||
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
|
||||
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
|
||||
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
|
||||
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
|
||||
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
|
||||
"build:docker:single": "lerna run build && lerna run predocker && npm run build:docker:single:image",
|
||||
"build:docs": "lerna run build:docs",
|
||||
|
|
|
@ -44,9 +44,6 @@ jspm_packages/
|
|||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
const generic = require("./src/cache/generic")
|
||||
|
||||
module.exports = {
|
||||
user: require("./src/cache/user"),
|
||||
app: require("./src/cache/appMetadata"),
|
||||
writethrough: require("./src/cache/writethrough"),
|
||||
...generic,
|
||||
}
|
||||
|
|
|
@ -5,8 +5,11 @@ const {
|
|||
getAppId,
|
||||
updateAppId,
|
||||
doInAppContext,
|
||||
doInTenant,
|
||||
} = require("./src/context")
|
||||
|
||||
const identity = require("./src/context/identity")
|
||||
|
||||
module.exports = {
|
||||
getAppDB,
|
||||
getDevAppDB,
|
||||
|
@ -14,4 +17,6 @@ module.exports = {
|
|||
getAppId,
|
||||
updateAppId,
|
||||
doInAppContext,
|
||||
doInTenant,
|
||||
identity,
|
||||
}
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
module.exports = require("./src/logging")
|
|
@ -1,48 +1,81 @@
|
|||
{
|
||||
"name": "@budibase/backend-core",
|
||||
"version": "1.0.151-alpha.1",
|
||||
"version": "1.0.212-alpha.6",
|
||||
"description": "Budibase backend core libraries used in server and worker",
|
||||
"main": "src/index.js",
|
||||
"main": "dist/src/index.js",
|
||||
"types": "dist/src/index.d.ts",
|
||||
"exports": {
|
||||
".": "./dist/src/index.js",
|
||||
"./tests": "./dist/tests/index.js",
|
||||
"./*": "./dist/*.js"
|
||||
},
|
||||
"author": "Budibase",
|
||||
"license": "GPL-3.0",
|
||||
"scripts": {
|
||||
"prebuild": "rimraf dist/",
|
||||
"prepack": "cp package.json dist",
|
||||
"build": "tsc -p tsconfig.build.json",
|
||||
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watchAll"
|
||||
},
|
||||
"dependencies": {
|
||||
"@techpass/passport-openidconnect": "^0.3.0",
|
||||
"aws-sdk": "^2.901.0",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"cls-hooked": "^4.2.2",
|
||||
"ioredis": "^4.27.1",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"koa-passport": "^4.1.4",
|
||||
"lodash": "^4.17.21",
|
||||
"lodash.isarguments": "^3.1.0",
|
||||
"node-fetch": "^2.6.1",
|
||||
"passport-google-auth": "^1.0.2",
|
||||
"passport-google-oauth": "^2.0.0",
|
||||
"passport-jwt": "^4.0.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"posthog-node": "^1.3.0",
|
||||
"@budibase/types": "^1.0.212-alpha.6",
|
||||
"@techpass/passport-openidconnect": "0.3.2",
|
||||
"aws-sdk": "2.1030.0",
|
||||
"bcrypt": "5.0.1",
|
||||
"dotenv": "16.0.1",
|
||||
"emitter-listener": "1.1.2",
|
||||
"ioredis": "4.28.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"koa-passport": "4.1.4",
|
||||
"lodash": "4.17.21",
|
||||
"lodash.isarguments": "3.1.0",
|
||||
"node-fetch": "2.6.7",
|
||||
"passport-google-auth": "1.0.2",
|
||||
"passport-google-oauth": "2.0.0",
|
||||
"passport-jwt": "4.0.0",
|
||||
"passport-local": "1.0.0",
|
||||
"posthog-node": "1.3.0",
|
||||
"pouchdb": "7.3.0",
|
||||
"pouchdb-find": "^7.2.2",
|
||||
"pouchdb-replication-stream": "^1.2.9",
|
||||
"sanitize-s3-objectkey": "^0.0.1",
|
||||
"tar-fs": "^2.1.1",
|
||||
"uuid": "^8.3.2",
|
||||
"zlib": "^1.0.5"
|
||||
"pouchdb-find": "7.2.2",
|
||||
"pouchdb-replication-stream": "1.2.9",
|
||||
"redlock": "4.2.0",
|
||||
"sanitize-s3-objectkey": "0.0.1",
|
||||
"semver": "7.3.7",
|
||||
"tar-fs": "2.1.1",
|
||||
"uuid": "8.3.2",
|
||||
"zlib": "1.0.5"
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
"testEnvironment": "node",
|
||||
"moduleNameMapper": {
|
||||
"@budibase/types": "<rootDir>/../types/src"
|
||||
},
|
||||
"setupFiles": [
|
||||
"./scripts/jestSetup.js"
|
||||
"./scripts/jestSetup.ts"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"ioredis-mock": "^5.5.5",
|
||||
"jest": "^26.6.3",
|
||||
"pouchdb-adapter-memory": "^7.2.2",
|
||||
"pouchdb-all-dbs": "^1.0.2"
|
||||
"@shopify/jest-koa-mocks": "3.1.5",
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/koa": "2.0.52",
|
||||
"@types/node": "14.18.20",
|
||||
"@types/node-fetch": "2.6.1",
|
||||
"@types/pouchdb": "6.4.0",
|
||||
"@types/redlock": "4.0.3",
|
||||
"@types/semver": "7.3.7",
|
||||
"@types/tar-fs": "2.0.1",
|
||||
"@types/uuid": "8.3.4",
|
||||
"ioredis-mock": "5.8.0",
|
||||
"jest": "27.5.1",
|
||||
"koa": "2.7.0",
|
||||
"nodemon": "2.0.16",
|
||||
"pouchdb-adapter-memory": "7.2.2",
|
||||
"timekeeper": "2.2.0",
|
||||
"ts-jest": "27.1.5",
|
||||
"typescript": "4.7.3"
|
||||
},
|
||||
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
module.exports = {
|
||||
Client: require("./src/redis"),
|
||||
utils: require("./src/redis/utils"),
|
||||
clients: require("./src/redis/init"),
|
||||
}
|
||||
|
|
|
@ -1,6 +0,0 @@
|
|||
const env = require("../src/environment")
|
||||
|
||||
env._set("SELF_HOSTED", "1")
|
||||
env._set("NODE_ENV", "jest")
|
||||
env._set("JWT_SECRET", "test-jwtsecret")
|
||||
env._set("LOG_LEVEL", "silent")
|
|
@ -0,0 +1,12 @@
|
|||
import env from "../src/environment"
|
||||
import { mocks } from "../tests/utilities"
|
||||
|
||||
// mock all dates to 2020-01-01T00:00:00.000Z
|
||||
// use tk.reset() to use real dates in individual tests
|
||||
import tk from "timekeeper"
|
||||
tk.freeze(mocks.date.MOCK_DATE)
|
||||
|
||||
env._set("SELF_HOSTED", "1")
|
||||
env._set("NODE_ENV", "jest")
|
||||
env._set("JWT_SECRET", "test-jwtsecret")
|
||||
env._set("LOG_LEVEL", "silent")
|
|
@ -29,7 +29,7 @@ passport.deserializeUser(async (user, done) => {
|
|||
const user = await db.get(user._id)
|
||||
return done(null, user)
|
||||
} catch (err) {
|
||||
console.error("User not found", err)
|
||||
console.error(`User not found`, err)
|
||||
return done(null, false, { message: "User not found" })
|
||||
}
|
||||
})
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const redis = require("../redis/authRedis")
|
||||
const redis = require("../redis/init")
|
||||
const { doWithDB } = require("../db")
|
||||
const { DocumentTypes } = require("../db/constants")
|
||||
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
import { getTenantId } from "../../context"
|
||||
import redis from "../../redis/init"
|
||||
import RedisWrapper from "../../redis"
|
||||
|
||||
function generateTenantKey(key: string) {
|
||||
const tenantId = getTenantId()
|
||||
return `${key}:${tenantId}`
|
||||
}
|
||||
|
||||
export = class BaseCache {
|
||||
client: RedisWrapper | undefined
|
||||
|
||||
constructor(client: RedisWrapper | undefined = undefined) {
|
||||
this.client = client
|
||||
}
|
||||
|
||||
async getClient() {
|
||||
return !this.client ? await redis.getCacheClient() : this.client
|
||||
}
|
||||
|
||||
async keys(pattern: string) {
|
||||
const client = await this.getClient()
|
||||
return client.keys(pattern)
|
||||
}
|
||||
|
||||
/**
|
||||
* Read only from the cache.
|
||||
*/
|
||||
async get(key: string, opts = { useTenancy: true }) {
|
||||
key = opts.useTenancy ? generateTenantKey(key) : key
|
||||
const client = await this.getClient()
|
||||
return client.get(key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Write to the cache.
|
||||
*/
|
||||
async store(
|
||||
key: string,
|
||||
value: any,
|
||||
ttl: number | null = null,
|
||||
opts = { useTenancy: true }
|
||||
) {
|
||||
key = opts.useTenancy ? generateTenantKey(key) : key
|
||||
const client = await this.getClient()
|
||||
await client.store(key, value, ttl)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove from cache.
|
||||
*/
|
||||
async delete(key: string, opts = { useTenancy: true }) {
|
||||
key = opts.useTenancy ? generateTenantKey(key) : key
|
||||
const client = await this.getClient()
|
||||
return client.delete(key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from the cache. Write to the cache if not exists.
|
||||
*/
|
||||
async withCache(
|
||||
key: string,
|
||||
ttl: number,
|
||||
fetchFn: any,
|
||||
opts = { useTenancy: true }
|
||||
) {
|
||||
const cachedValue = await this.get(key, opts)
|
||||
if (cachedValue) {
|
||||
return cachedValue
|
||||
}
|
||||
|
||||
try {
|
||||
const fetchedValue = await fetchFn()
|
||||
|
||||
await this.store(key, fetchedValue, ttl, opts)
|
||||
return fetchedValue
|
||||
} catch (err) {
|
||||
console.error("Error fetching before cache - ", err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
async bustCache(key: string, opts = { client: null }) {
|
||||
const client = await this.getClient()
|
||||
try {
|
||||
await client.delete(generateTenantKey(key))
|
||||
} catch (err) {
|
||||
console.error("Error busting cache - ", err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
const BaseCache = require("./base")
|
||||
|
||||
const GENERIC = new BaseCache()
|
||||
|
||||
exports.CacheKeys = {
|
||||
CHECKLIST: "checklist",
|
||||
INSTALLATION: "installation",
|
||||
ANALYTICS_ENABLED: "analyticsEnabled",
|
||||
UNIQUE_TENANT_ID: "uniqueTenantId",
|
||||
EVENTS: "events",
|
||||
BACKFILL_METADATA: "backfillMetadata",
|
||||
}
|
||||
|
||||
exports.TTL = {
|
||||
ONE_MINUTE: 60,
|
||||
ONE_HOUR: 3600,
|
||||
ONE_DAY: 86400,
|
||||
}
|
||||
|
||||
function performExport(funcName) {
|
||||
return (...args) => GENERIC[funcName](...args)
|
||||
}
|
||||
|
||||
exports.keys = performExport("keys")
|
||||
exports.get = performExport("get")
|
||||
exports.store = performExport("store")
|
||||
exports.delete = performExport("delete")
|
||||
exports.withCache = performExport("withCache")
|
||||
exports.bustCache = performExport("bustCache")
|
|
@ -0,0 +1,59 @@
|
|||
require("../../../tests/utilities/TestConfiguration")
|
||||
const { Writethrough } = require("../writethrough")
|
||||
const { dangerousGetDB } = require("../../db")
|
||||
const tk = require("timekeeper")
|
||||
|
||||
const START_DATE = Date.now()
|
||||
tk.freeze(START_DATE)
|
||||
|
||||
const DELAY = 5000
|
||||
|
||||
const db = dangerousGetDB("test")
|
||||
const db2 = dangerousGetDB("test2")
|
||||
const writethrough = new Writethrough(db, DELAY)
const writethrough2 = new Writethrough(db2, DELAY)
|
||||
|
||||
describe("writethrough", () => {
|
||||
describe("put", () => {
|
||||
let first
|
||||
it("should be able to store, will go to DB", async () => {
|
||||
const response = await writethrough.put({ _id: "test", value: 1 })
|
||||
const output = await db.get(response.id)
|
||||
first = output
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
|
||||
it("second put shouldn't update DB", async () => {
|
||||
const response = await writethrough.put({ ...first, value: 2 })
|
||||
const output = await db.get(response.id)
|
||||
expect(first._rev).toBe(output._rev)
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
|
||||
it("should put it again after delay period", async () => {
|
||||
tk.freeze(START_DATE + DELAY + 1)
|
||||
const response = await writethrough.put({ ...first, value: 3 })
|
||||
const output = await db.get(response.id)
|
||||
expect(response.rev).not.toBe(first._rev)
|
||||
expect(output.value).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("get", () => {
|
||||
it("should be able to retrieve", async () => {
|
||||
const response = await writethrough.get("test")
|
||||
expect(response.value).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("same doc, different databases (tenancy)", () => {
|
||||
it("should be able to two different databases", async () => {
|
||||
const resp1 = await writethrough.put({ _id: "db1", value: "first" })
|
||||
const resp2 = await writethrough2.put({ _id: "db1", value: "second" })
|
||||
expect(resp1.rev).toBeDefined()
|
||||
expect(resp2.rev).toBeDefined()
|
||||
expect((await db.get("db1")).value).toBe("first")
|
||||
expect((await db2.get("db1")).value).toBe("second")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
const redis = require("../redis/authRedis")
|
||||
const redis = require("../redis/init")
|
||||
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
|
||||
const env = require("../environment")
|
||||
const accounts = require("../cloud/accounts")
|
||||
|
|
|
@ -0,0 +1,120 @@
|
|||
import BaseCache from "./base"
|
||||
import { getWritethroughClient } from "../redis/init"
|
||||
|
||||
const DEFAULT_WRITE_RATE_MS = 10000
|
||||
let CACHE: BaseCache | null = null
|
||||
|
||||
interface CacheItem {
|
||||
doc: any
|
||||
lastWrite: number
|
||||
}
|
||||
|
||||
async function getCache() {
|
||||
if (!CACHE) {
|
||||
const client = await getWritethroughClient()
|
||||
CACHE = new BaseCache(client)
|
||||
}
|
||||
return CACHE
|
||||
}
|
||||
|
||||
function makeCacheKey(db: PouchDB.Database, key: string) {
|
||||
return db.name + key
|
||||
}
|
||||
|
||||
function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
|
||||
return { doc, lastWrite: lastWrite || Date.now() }
|
||||
}
|
||||
|
||||
export async function put(
|
||||
db: PouchDB.Database,
|
||||
doc: any,
|
||||
writeRateMs: number = DEFAULT_WRITE_RATE_MS
|
||||
) {
|
||||
const cache = await getCache()
|
||||
const key = doc._id
|
||||
let cacheItem: CacheItem | undefined = await cache.get(makeCacheKey(db, key))
|
||||
const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs
|
||||
let output = doc
|
||||
if (updateDb) {
|
||||
const writeDb = async (toWrite: any) => {
|
||||
// doc should contain the _id and _rev
|
||||
const response = await db.put(toWrite)
|
||||
output = {
|
||||
...doc,
|
||||
_id: response.id,
|
||||
_rev: response.rev,
|
||||
}
|
||||
}
|
||||
try {
|
||||
await writeDb(doc)
|
||||
} catch (err: any) {
|
||||
if (err.status !== 409) {
|
||||
throw err
|
||||
} else {
|
||||
// get the rev, update over it - this is risky, may change in future
|
||||
const readDoc = await db.get(doc._id)
|
||||
doc._rev = readDoc._rev
|
||||
await writeDb(doc)
|
||||
}
|
||||
}
|
||||
}
|
||||
// if we are updating the DB then need to set the lastWrite to now
|
||||
cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)
|
||||
await cache.store(makeCacheKey(db, key), cacheItem)
|
||||
return { ok: true, id: output._id, rev: output._rev }
|
||||
}
|
||||
|
||||
export async function get(db: PouchDB.Database, id: string): Promise<any> {
|
||||
const cache = await getCache()
|
||||
const cacheKey = makeCacheKey(db, id)
|
||||
let cacheItem: CacheItem = await cache.get(cacheKey)
|
||||
if (!cacheItem) {
|
||||
const doc = await db.get(id)
|
||||
cacheItem = makeCacheItem(doc)
|
||||
await cache.store(cacheKey, cacheItem)
|
||||
}
|
||||
return cacheItem.doc
|
||||
}
|
||||
|
||||
export async function remove(
|
||||
db: PouchDB.Database,
|
||||
docOrId: any,
|
||||
rev?: any
|
||||
): Promise<void> {
|
||||
const cache = await getCache()
|
||||
if (!docOrId) {
|
||||
throw new Error("No ID/Rev provided.")
|
||||
}
|
||||
const id = typeof docOrId === "string" ? docOrId : docOrId._id
|
||||
rev = typeof docOrId === "string" ? rev : docOrId._rev
|
||||
try {
|
||||
await cache.delete(makeCacheKey(db, id))
|
||||
} finally {
|
||||
await db.remove(id, rev)
|
||||
}
|
||||
}
|
||||
|
||||
export class Writethrough {
|
||||
db: PouchDB.Database
|
||||
writeRateMs: number
|
||||
|
||||
constructor(
|
||||
db: PouchDB.Database,
|
||||
writeRateMs: number = DEFAULT_WRITE_RATE_MS
|
||||
) {
|
||||
this.db = db
|
||||
this.writeRateMs = writeRateMs
|
||||
}
|
||||
|
||||
async put(doc: any) {
|
||||
return put(this.db, doc, this.writeRateMs)
|
||||
}
|
||||
|
||||
async get(id: string) {
|
||||
return get(this.db, id)
|
||||
}
|
||||
|
||||
async remove(docOrId: any, rev?: any) {
|
||||
return remove(this.db, docOrId, rev)
|
||||
}
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
const API = require("./api")
|
||||
const env = require("../environment")
|
||||
const { Headers } = require("../constants")
|
||||
|
||||
const api = new API(env.ACCOUNT_PORTAL_URL)
|
||||
|
||||
exports.getAccount = async email => {
|
||||
const payload = {
|
||||
email,
|
||||
}
|
||||
const response = await api.post(`/api/accounts/search`, {
|
||||
body: payload,
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
const json = await response.json()
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Error getting account by email ${email}`, json)
|
||||
}
|
||||
|
||||
return json[0]
|
||||
}
|
||||
|
||||
exports.getStatus = async () => {
|
||||
const response = await api.get(`/api/status`, {
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
const json = await response.json()
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Error getting status`)
|
||||
}
|
||||
|
||||
return json
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
import API from "./api"
|
||||
import env from "../environment"
|
||||
import { Headers } from "../constants"
|
||||
import { CloudAccount } from "@budibase/types"
|
||||
|
||||
const api = new API(env.ACCOUNT_PORTAL_URL)
|
||||
|
||||
export const getAccount = async (
|
||||
email: string
|
||||
): Promise<CloudAccount | undefined> => {
|
||||
const payload = {
|
||||
email,
|
||||
}
|
||||
const response = await api.post(`/api/accounts/search`, {
|
||||
body: payload,
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Error getting account by email ${email}`)
|
||||
}
|
||||
|
||||
const json: CloudAccount[] = await response.json()
|
||||
return json[0]
|
||||
}
|
||||
|
||||
export const getAccountByTenantId = async (
|
||||
tenantId: string
|
||||
): Promise<CloudAccount | undefined> => {
|
||||
const payload = {
|
||||
tenantId,
|
||||
}
|
||||
const response = await api.post(`/api/accounts/search`, {
|
||||
body: payload,
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Error getting account by tenantId ${tenantId}`)
|
||||
}
|
||||
|
||||
const json: CloudAccount[] = await response.json()
|
||||
return json[0]
|
||||
}
|
||||
|
||||
export const getStatus = async () => {
|
||||
const response = await api.get(`/api/status`, {
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
const json = await response.json()
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Error getting status`)
|
||||
}
|
||||
|
||||
return json
|
||||
}
|
|
@@ -0,0 +1,650 @@
const util = require("util")
const assert = require("assert")
const wrapEmitter = require("emitter-listener")
const async_hooks = require("async_hooks")

const CONTEXTS_SYMBOL = "cls@contexts"
const ERROR_SYMBOL = "error@context"

const DEBUG_CLS_HOOKED = process.env.DEBUG_CLS_HOOKED

let currentUid = -1

module.exports = {
  getNamespace: getNamespace,
  createNamespace: createNamespace,
  destroyNamespace: destroyNamespace,
  reset: reset,
  ERROR_SYMBOL: ERROR_SYMBOL,
}

function Namespace(name) {
  this.name = name
  // changed in 2.7: no default context
  this.active = null
  this._set = []
  this.id = null
  this._contexts = new Map()
  this._indent = 0
  this._hook = null
}

Namespace.prototype.set = function set(key, value) {
  if (!this.active) {
    throw new Error(
      "No context available. ns.run() or ns.bind() must be called first."
    )
  }

  this.active[key] = value

  if (DEBUG_CLS_HOOKED) {
    const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
    debug2(
      indentStr +
        "CONTEXT-SET KEY:" +
        key +
        "=" +
        value +
        " in ns:" +
        this.name +
        " currentUid:" +
        currentUid +
        " active:" +
        util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
    )
  }

  return value
}

Namespace.prototype.get = function get(key) {
  if (!this.active) {
    if (DEBUG_CLS_HOOKED) {
      const asyncHooksCurrentId = async_hooks.currentId()
      const triggerId = async_hooks.triggerAsyncId()
      const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
      debug2(
        `${indentStr}CONTEXT-GETTING KEY NO ACTIVE NS: (${this.name}) ${key}=undefined currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${this._set.length}`
      )
    }
    return undefined
  }
  if (DEBUG_CLS_HOOKED) {
    const asyncHooksCurrentId = async_hooks.executionAsyncId()
    const triggerId = async_hooks.triggerAsyncId()
    const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
    debug2(
      indentStr +
        "CONTEXT-GETTING KEY:" +
        key +
        "=" +
        this.active[key] +
        " (" +
        this.name +
        ") currentUid:" +
        currentUid +
        " active:" +
        util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
    )
    debug2(
      `${indentStr}CONTEXT-GETTING KEY: (${this.name}) ${key}=${
        this.active[key]
      } currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
        this._set.length
      } active:${util.inspect(this.active)}`
    )
  }
  return this.active[key]
}

Namespace.prototype.createContext = function createContext() {
  // Prototype inherit existing context if created a new child context within existing context.
  let context = Object.create(this.active ? this.active : Object.prototype)
  context._ns_name = this.name
  context.id = currentUid

  if (DEBUG_CLS_HOOKED) {
    const asyncHooksCurrentId = async_hooks.executionAsyncId()
    const triggerId = async_hooks.triggerAsyncId()
    const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
    debug2(
      `${indentStr}CONTEXT-CREATED Context: (${
        this.name
      }) currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
        this._set.length
      } context:${util.inspect(context, {
        showHidden: true,
        depth: 2,
        colors: true,
      })}`
    )
  }

  return context
}

Namespace.prototype.run = function run(fn) {
  let context = this.createContext()
  this.enter(context)

  try {
    if (DEBUG_CLS_HOOKED) {
      const triggerId = async_hooks.triggerAsyncId()
      const asyncHooksCurrentId = async_hooks.executionAsyncId()
      const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
      debug2(
        `${indentStr}CONTEXT-RUN BEGIN: (${
          this.name
        }) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
          this._set.length
        } context:${util.inspect(context)}`
      )
    }
    fn(context)
    return context
  } catch (exception) {
    if (exception) {
      exception[ERROR_SYMBOL] = context
    }
    throw exception
  } finally {
    if (DEBUG_CLS_HOOKED) {
      const triggerId = async_hooks.triggerAsyncId()
      const asyncHooksCurrentId = async_hooks.executionAsyncId()
      const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
      debug2(
        `${indentStr}CONTEXT-RUN END: (${
          this.name
        }) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
          this._set.length
        } ${util.inspect(context)}`
      )
    }
    this.exit(context)
  }
}

Namespace.prototype.runAndReturn = function runAndReturn(fn) {
  let value
  this.run(function (context) {
    value = fn(context)
  })
  return value
}

/**
 * Uses global Promise and assumes Promise is cls friendly or wrapped already.
 * @param {function} fn
 * @returns {*}
 */
Namespace.prototype.runPromise = function runPromise(fn) {
  let context = this.createContext()
  this.enter(context)

  let promise = fn(context)
  if (!promise || !promise.then || !promise.catch) {
    throw new Error("fn must return a promise.")
  }

  if (DEBUG_CLS_HOOKED) {
    debug2(
      "CONTEXT-runPromise BEFORE: (" +
        this.name +
        ") currentUid:" +
        currentUid +
        " len:" +
        this._set.length +
        " " +
        util.inspect(context)
    )
  }

  return promise
    .then(result => {
      if (DEBUG_CLS_HOOKED) {
        debug2(
          "CONTEXT-runPromise AFTER then: (" +
            this.name +
            ") currentUid:" +
            currentUid +
            " len:" +
            this._set.length +
            " " +
            util.inspect(context)
        )
      }
      this.exit(context)
      return result
    })
    .catch(err => {
      err[ERROR_SYMBOL] = context
      if (DEBUG_CLS_HOOKED) {
        debug2(
          "CONTEXT-runPromise AFTER catch: (" +
            this.name +
            ") currentUid:" +
            currentUid +
            " len:" +
            this._set.length +
            " " +
            util.inspect(context)
        )
      }
      this.exit(context)
      throw err
    })
}

Namespace.prototype.bind = function bindFactory(fn, context) {
  if (!context) {
    if (!this.active) {
      context = this.createContext()
    } else {
      context = this.active
    }
  }

  let self = this
  return function clsBind() {
    self.enter(context)
    try {
      return fn.apply(this, arguments)
    } catch (exception) {
      if (exception) {
        exception[ERROR_SYMBOL] = context
      }
      throw exception
    } finally {
      self.exit(context)
    }
  }
}

Namespace.prototype.enter = function enter(context) {
  assert.ok(context, "context must be provided for entering")
  if (DEBUG_CLS_HOOKED) {
    const asyncHooksCurrentId = async_hooks.executionAsyncId()
    const triggerId = async_hooks.triggerAsyncId()
    const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
    debug2(
      `${indentStr}CONTEXT-ENTER: (${
        this.name
      }) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
        this._set.length
      } ${util.inspect(context)}`
    )
  }

  this._set.push(this.active)
  this.active = context
}

Namespace.prototype.exit = function exit(context) {
  assert.ok(context, "context must be provided for exiting")
  if (DEBUG_CLS_HOOKED) {
    const asyncHooksCurrentId = async_hooks.executionAsyncId()
    const triggerId = async_hooks.triggerAsyncId()
    const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
    debug2(
      `${indentStr}CONTEXT-EXIT: (${
        this.name
      }) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
        this._set.length
      } ${util.inspect(context)}`
    )
  }

  // Fast path for most exits that are at the top of the stack
  if (this.active === context) {
    assert.ok(this._set.length, "can't remove top context")
    this.active = this._set.pop()
    return
  }

  // Fast search in the stack using lastIndexOf
  let index = this._set.lastIndexOf(context)

  if (index < 0) {
    if (DEBUG_CLS_HOOKED) {
      debug2(
        "??ERROR?? context exiting but not entered - ignoring: " +
          util.inspect(context)
      )
    }
    assert.ok(
      index >= 0,
      "context not currently entered; can't exit. \n" +
        util.inspect(this) +
        "\n" +
        util.inspect(context)
    )
  } else {
    assert.ok(index, "can't remove top context")
    this._set.splice(index, 1)
  }
}

Namespace.prototype.bindEmitter = function bindEmitter(emitter) {
  assert.ok(
    emitter.on && emitter.addListener && emitter.emit,
    "can only bind real EEs"
  )

  let namespace = this
  let thisSymbol = "context@" + this.name

  // Capture the context active at the time the emitter is bound.
  function attach(listener) {
    if (!listener) {
      return
    }
    if (!listener[CONTEXTS_SYMBOL]) {
      listener[CONTEXTS_SYMBOL] = Object.create(null)
    }

    listener[CONTEXTS_SYMBOL][thisSymbol] = {
      namespace: namespace,
      context: namespace.active,
    }
  }

  // At emit time, bind the listener within the correct context.
  function bind(unwrapped) {
    if (!(unwrapped && unwrapped[CONTEXTS_SYMBOL])) {
      return unwrapped
    }

    let wrapped = unwrapped
    let unwrappedContexts = unwrapped[CONTEXTS_SYMBOL]
    Object.keys(unwrappedContexts).forEach(function (name) {
      let thunk = unwrappedContexts[name]
      wrapped = thunk.namespace.bind(wrapped, thunk.context)
    })
    return wrapped
  }

  wrapEmitter(emitter, attach, bind)
}

/**
 * If an error comes out of a namespace, it will have a context attached to it.
 * This function knows how to find it.
 *
 * @param {Error} exception Possibly annotated error.
 */
Namespace.prototype.fromException = function fromException(exception) {
  return exception[ERROR_SYMBOL]
}

function getNamespace(name) {
  return process.namespaces[name]
}

function createNamespace(name) {
  assert.ok(name, "namespace must be given a name.")

  if (DEBUG_CLS_HOOKED) {
    debug2(`NS-CREATING NAMESPACE (${name})`)
  }
  let namespace = new Namespace(name)
  namespace.id = currentUid

  const hook = async_hooks.createHook({
    init(asyncId, type, triggerId, resource) {
      currentUid = async_hooks.executionAsyncId()

      //CHAIN Parent's Context onto child if none exists. This is needed to pass net-events.spec
      // let initContext = namespace.active;
      // if(!initContext && triggerId) {
      //   let parentContext = namespace._contexts.get(triggerId);
      //   if (parentContext) {
      //     namespace.active = parentContext;
      //     namespace._contexts.set(currentUid, parentContext);
      //     if (DEBUG_CLS_HOOKED) {
      //       const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
      //       debug2(`${indentStr}INIT [${type}] (${name}) WITH PARENT CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
      //     }
      //   } else if (DEBUG_CLS_HOOKED) {
      //     const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
      //     debug2(`${indentStr}INIT [${type}] (${name}) MISSING CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
      //   }
      // }else {
      //   namespace._contexts.set(currentUid, namespace.active);
      //   if (DEBUG_CLS_HOOKED) {
      //     const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
      //     debug2(`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
      //   }
      // }
      if (namespace.active) {
        namespace._contexts.set(asyncId, namespace.active)

        if (DEBUG_CLS_HOOKED) {
          const indentStr = " ".repeat(
            namespace._indent < 0 ? 0 : namespace._indent
          )
          debug2(
            `${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
              namespace.active,
              { showHidden: true, depth: 2, colors: true }
            )} resource:${resource}`
          )
        }
      } else if (currentUid === 0) {
        // CurrentId will be 0 when triggered from C++. Promise events
        // https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
        const triggerId = async_hooks.triggerAsyncId()
        const triggerIdContext = namespace._contexts.get(triggerId)
        if (triggerIdContext) {
          namespace._contexts.set(asyncId, triggerIdContext)
          if (DEBUG_CLS_HOOKED) {
            const indentStr = " ".repeat(
              namespace._indent < 0 ? 0 : namespace._indent
            )
            debug2(
              `${indentStr}INIT USING CONTEXT FROM TRIGGERID [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
                namespace.active,
                { showHidden: true, depth: 2, colors: true }
              )} resource:${resource}`
            )
          }
        } else if (DEBUG_CLS_HOOKED) {
          const indentStr = " ".repeat(
            namespace._indent < 0 ? 0 : namespace._indent
          )
          debug2(
            `${indentStr}INIT MISSING CONTEXT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
              namespace.active,
              { showHidden: true, depth: 2, colors: true }
            )} resource:${resource}`
          )
        }
      }

      if (DEBUG_CLS_HOOKED && type === "PROMISE") {
        debug2(util.inspect(resource, { showHidden: true }))
        const parentId = resource.parentId
        const indentStr = " ".repeat(
          namespace._indent < 0 ? 0 : namespace._indent
        )
        debug2(
          `${indentStr}INIT RESOURCE-PROMISE [${type}] (${name}) parentId:${parentId} asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
            namespace.active,
            { showHidden: true, depth: 2, colors: true }
          )} resource:${resource}`
        )
      }
    },
    before(asyncId) {
      currentUid = async_hooks.executionAsyncId()
      let context

      /*
      if(currentUid === 0){
        // CurrentId will be 0 when triggered from C++. Promise events
        // https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
        //const triggerId = async_hooks.triggerAsyncId();
        context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
      }else{
        context = namespace._contexts.get(currentUid);
      }
      */

      //HACK to work with promises until they are fixed in node > 8.1.1
      context =
        namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)

      if (context) {
        if (DEBUG_CLS_HOOKED) {
          const triggerId = async_hooks.triggerAsyncId()
          const indentStr = " ".repeat(
            namespace._indent < 0 ? 0 : namespace._indent
          )
          debug2(
            `${indentStr}BEFORE (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
              namespace.active,
              { showHidden: true, depth: 2, colors: true }
            )} context:${util.inspect(context)}`
          )
          namespace._indent += 2
        }

        namespace.enter(context)
      } else if (DEBUG_CLS_HOOKED) {
        const triggerId = async_hooks.triggerAsyncId()
        const indentStr = " ".repeat(
          namespace._indent < 0 ? 0 : namespace._indent
        )
        debug2(
          `${indentStr}BEFORE MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
            namespace.active,
            { showHidden: true, depth: 2, colors: true }
          )} namespace._contexts:${util.inspect(namespace._contexts, {
            showHidden: true,
            depth: 2,
            colors: true,
          })}`
        )
        namespace._indent += 2
      }
    },
    after(asyncId) {
      currentUid = async_hooks.executionAsyncId()
      let context // = namespace._contexts.get(currentUid);
      /*
      if(currentUid === 0){
        // CurrentId will be 0 when triggered from C++. Promise events
        // https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
        //const triggerId = async_hooks.triggerAsyncId();
        context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
      }else{
        context = namespace._contexts.get(currentUid);
      }
      */
      //HACK to work with promises until they are fixed in node > 8.1.1
      context =
        namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)

      if (context) {
        if (DEBUG_CLS_HOOKED) {
          const triggerId = async_hooks.triggerAsyncId()
          namespace._indent -= 2
          const indentStr = " ".repeat(
            namespace._indent < 0 ? 0 : namespace._indent
          )
          debug2(
            `${indentStr}AFTER (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
              namespace.active,
              { showHidden: true, depth: 2, colors: true }
            )} context:${util.inspect(context)}`
          )
        }

        namespace.exit(context)
      } else if (DEBUG_CLS_HOOKED) {
        const triggerId = async_hooks.triggerAsyncId()
        namespace._indent -= 2
        const indentStr = " ".repeat(
          namespace._indent < 0 ? 0 : namespace._indent
        )
        debug2(
          `${indentStr}AFTER MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
            namespace.active,
            { showHidden: true, depth: 2, colors: true }
          )} context:${util.inspect(context)}`
        )
      }
    },
    destroy(asyncId) {
      currentUid = async_hooks.executionAsyncId()
      if (DEBUG_CLS_HOOKED) {
        const triggerId = async_hooks.triggerAsyncId()
        const indentStr = " ".repeat(
          namespace._indent < 0 ? 0 : namespace._indent
        )
        debug2(
          `${indentStr}DESTROY (${name}) currentUid:${currentUid} asyncId:${asyncId} triggerId:${triggerId} active:${util.inspect(
            namespace.active,
            { showHidden: true, depth: 2, colors: true }
          )} context:${util.inspect(namespace._contexts.get(currentUid))}`
        )
      }

      namespace._contexts.delete(asyncId)
    },
  })

  hook.enable()
  namespace._hook = hook

  process.namespaces[name] = namespace
  return namespace
}

function destroyNamespace(name) {
  let namespace = getNamespace(name)

  assert.ok(namespace, "can't delete nonexistent namespace! \"" + name + '"')
  assert.ok(
    namespace.id,
    "don't assign to process.namespaces directly! " + util.inspect(namespace)
  )

  namespace._hook.disable()
  namespace._contexts = null
  process.namespaces[name] = null
}

function reset() {
  // must unregister async listeners
  if (process.namespaces) {
    Object.keys(process.namespaces).forEach(function (name) {
      destroyNamespace(name)
    })
  }
  process.namespaces = Object.create(null)
}

process.namespaces = process.namespaces || {}

//const fs = require('fs');
function debug2(...args) {
  if (DEBUG_CLS_HOOKED) {
    //fs.writeSync(1, `${util.format(...args)}\n`);
    process._rawDebug(`${util.format(...args)}`)
  }
}

/*function getFunctionName(fn) {
  if (!fn) {
    return fn;
  }
  if (typeof fn === 'function') {
    if (fn.name) {
      return fn.name;
    }
    return (fn.toString().trim().match(/^function\s*([^\s(]+)/) || [])[1];
  } else if (fn.constructor && fn.constructor.name) {
    return fn.constructor.name;
  }
}*/
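As a rough usage sketch of the vendored module above (namespace name and key are illustrative), a value set inside run() survives async hops because the async_hooks-based hook re-enters the captured context:

// Illustrative only: driving the vendored cls module above.
const cls = require("./clshooked") // assumed relative path
const ns = cls.createNamespace("demo")

ns.run(() => {
  ns.set("requestId", "abc123")
  setTimeout(() => {
    // the init/before/after hooks restore this context across the timer
    console.log(ns.get("requestId")) // => "abc123"
  }, 10)
})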
@@ -1,84 +1,47 @@
const cls = require("cls-hooked")
const cls = require("../clshooked")
const { newid } = require("../hashing")

const REQUEST_ID_KEY = "requestId"
const MAIN_CTX = cls.createNamespace("main")

class FunctionContext {
  static getMiddleware(
    updateCtxFn = null,
    destroyFn = null,
    contextName = "session"
  ) {
    const namespace = this.createNamespace(contextName)

    return async function (ctx, next) {
      await new Promise(
        namespace.bind(function (resolve, reject) {
          // store a contextual request ID that can be used anywhere (audit logs)
          namespace.set(REQUEST_ID_KEY, newid())
          namespace.bindEmitter(ctx.req)
          namespace.bindEmitter(ctx.res)

          if (updateCtxFn) {
            updateCtxFn(ctx)
          }
          next()
            .then(resolve)
            .catch(reject)
            .finally(() => {
              if (destroyFn) {
                return destroyFn(ctx)
              }
            })
        })
      )
    }
  }

  static run(callback, contextName = "session") {
    const namespace = this.createNamespace(contextName)

    return namespace.runAndReturn(callback)
  }

  static setOnContext(key, value, contextName = "session") {
    const namespace = this.createNamespace(contextName)
    namespace.set(key, value)
  }

  static getContextStorage() {
    if (this._namespace && this._namespace.active) {
      let contextData = this._namespace.active
function getContextStorage(namespace) {
  if (namespace && namespace.active) {
    let contextData = namespace.active
    delete contextData.id
    delete contextData._ns_name
    return contextData
  }

  return {}
}

class FunctionContext {
  static run(callback) {
    return MAIN_CTX.runAndReturn(async () => {
      const namespaceId = newid()
      MAIN_CTX.set(REQUEST_ID_KEY, namespaceId)
      const namespace = cls.createNamespace(namespaceId)
      let response = await namespace.runAndReturn(callback)
      cls.destroyNamespace(namespaceId)
      return response
    })
  }

  static setOnContext(key, value) {
    const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
    const namespace = cls.getNamespace(namespaceId)
    namespace.set(key, value)
  }

  static getFromContext(key) {
    const context = this.getContextStorage()
    const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
    const namespace = cls.getNamespace(namespaceId)
    const context = getContextStorage(namespace)
    if (context) {
      return context[key]
    } else {
      return null
    }
  }

  static destroyNamespace(name = "session") {
    if (this._namespace) {
      cls.destroyNamespace(name)
      this._namespace = null
    }
  }

  static createNamespace(name = "session") {
    if (!this._namespace) {
      this._namespace = cls.createNamespace(name)
    }
    return this._namespace
  }
}

module.exports = FunctionContext
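A hedged sketch of the reworked FunctionContext call pattern: each run() now creates a namespace keyed by a fresh ID stored on the main context, so nested helpers resolve the right namespace. doWork() below is hypothetical:

// Illustrative only: per-run namespaces in the new FunctionContext.
const FunctionContext = require("./FunctionContext") // assumed path

async function handle() {
  return FunctionContext.run(async () => {
    FunctionContext.setOnContext("tenantId", "tenant-a") // illustrative key/value
    await doWork() // hypothetical async work that reads the context
    return FunctionContext.getFromContext("tenantId") // => "tenant-a"
  })
}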
@@ -0,0 +1,50 @@
import {
  IdentityContext,
  IdentityType,
  User,
  UserContext,
  isCloudAccount,
  Account,
  AccountUserContext,
} from "@budibase/types"
import * as context from "."

export const getIdentity = (): IdentityContext | undefined => {
  return context.getIdentity()
}

export const doInIdentityContext = (identity: IdentityContext, task: any) => {
  return context.doInIdentityContext(identity, task)
}

export const doInUserContext = (user: User, task: any) => {
  const userContext: UserContext = {
    ...user,
    _id: user._id as string,
    type: IdentityType.USER,
  }
  return doInIdentityContext(userContext, task)
}

export const doInAccountContext = (account: Account, task: any) => {
  const _id = getAccountUserId(account)
  const tenantId = account.tenantId
  const accountContext: AccountUserContext = {
    _id,
    type: IdentityType.USER,
    tenantId,
    account,
  }
  return doInIdentityContext(accountContext, task)
}

export const getAccountUserId = (account: Account) => {
  let userId: string
  if (isCloudAccount(account)) {
    userId = account.budibaseUserId
  } else {
    // use account id as user id for self hosting
    userId = account.accountId
  }
  return userId
}
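A small sketch of the intended use of these identity helpers; the import path and user object are illustrative:

// Illustrative only: running a task under a user identity.
const identity = require("@budibase/backend-core/context/identity") // assumed path

async function runAsUser(user) {
  return identity.doInUserContext(user, async () => {
    // inside the task, getIdentity() resolves to this user's context
    const current = identity.getIdentity()
    return current && current._id
  })
}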
@@ -1,5 +1,4 @@
const env = require("../environment")
const { Headers } = require("../../constants")
const { SEPARATOR, DocumentTypes } = require("../db/constants")
const { DEFAULT_TENANT_ID } = require("../constants")
const cls = require("./FunctionContext")
@@ -16,6 +15,7 @@ const ContextKeys = {
  TENANT_ID: "tenantId",
  GLOBAL_DB: "globalDb",
  APP_ID: "appId",
  IDENTITY: "identity",
  // whatever the request app DB was
  CURRENT_DB: "currentDb",
  // get the prod app DB from the request
@@ -55,6 +55,15 @@ async function closeAppDBs() {
  }
}

exports.closeTenancy = async () => {
  if (env.USE_COUCH) {
    await closeDB(exports.getGlobalDB())
  }
  // clear from context now that database is closed/task is finished
  cls.setOnContext(ContextKeys.TENANT_ID, null)
  cls.setOnContext(ContextKeys.GLOBAL_DB, null)
}

exports.isDefaultTenant = () => {
  return exports.getTenantId() === exports.DEFAULT_TENANT_ID
}
@@ -64,16 +73,13 @@ exports.isMultiTenant = () => {
}

// used for automations, API endpoints should always be in context already
exports.doInTenant = (tenantId, task) => {
exports.doInTenant = (tenantId, task, { forceNew } = {}) => {
  // the internal function is so that we can re-use an existing
  // context - don't want to close DB on a parent context
  async function internal(opts = { existing: false }) {
    // set the tenant id
    if (!opts.existing) {
      cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
      if (env.USE_COUCH) {
        exports.setGlobalDB(tenantId)
      }
      exports.updateTenantId(tenantId)
    }

    try {
@@ -82,19 +88,19 @@ exports.doInTenant = (tenantId, task) => {
    } finally {
      const using = cls.getFromContext(ContextKeys.IN_USE)
      if (!using || using <= 1) {
        if (env.USE_COUCH) {
          await closeDB(exports.getGlobalDB())
        }
        // clear from context now that database is closed/task is finished
        cls.setOnContext(ContextKeys.TENANT_ID, null)
        cls.setOnContext(ContextKeys.GLOBAL_DB, null)
        await exports.closeTenancy()
      } else {
        cls.setOnContext(ContextKeys.IN_USE, using - 1)
      }
    }
  }

  const using = cls.getFromContext(ContextKeys.IN_USE)
  if (using && cls.getFromContext(ContextKeys.TENANT_ID) === tenantId) {
  if (
    !forceNew &&
    using &&
    cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
  ) {
    cls.setOnContext(ContextKeys.IN_USE, using + 1)
    return internal({ existing: true })
  } else {
@@ -131,11 +137,13 @@ const setAppTenantId = appId => {
  exports.updateTenantId(appTenantId)
}

exports.doInAppContext = (appId, task) => {
exports.doInAppContext = (appId, task, { forceNew } = {}) => {
  if (!appId) {
    throw new Error("appId is required")
  }

  const identity = exports.getIdentity()

  // the internal function is so that we can re-use an existing
  // context - don't want to close DB on a parent context
  async function internal(opts = { existing: false }) {
@@ -145,6 +153,8 @@ exports.doInAppContext = (appId, task) => {
    }
    // set the app ID
    cls.setOnContext(ContextKeys.APP_ID, appId)
    // preserve the identity
    exports.setIdentity(identity)
    try {
      // invoke the task
      return await task()
@@ -158,7 +168,7 @@
    }
  }
  const using = cls.getFromContext(ContextKeys.IN_USE)
  if (using && cls.getFromContext(ContextKeys.APP_ID) === appId) {
  if (!forceNew && using && cls.getFromContext(ContextKeys.APP_ID) === appId) {
    cls.setOnContext(ContextKeys.IN_USE, using + 1)
    return internal({ existing: true })
  } else {
@@ -169,9 +179,63 @@
  }
}

exports.doInIdentityContext = (identity, task) => {
  if (!identity) {
    throw new Error("identity is required")
  }

  async function internal(opts = { existing: false }) {
    if (!opts.existing) {
      cls.setOnContext(ContextKeys.IDENTITY, identity)
      // set the tenant so that doInTenant will preserve identity
      if (identity.tenantId) {
        exports.updateTenantId(identity.tenantId)
      }
    }

    try {
      // invoke the task
      return await task()
    } finally {
      const using = cls.getFromContext(ContextKeys.IN_USE)
      if (!using || using <= 1) {
        exports.setIdentity(null)
      } else {
        cls.setOnContext(ContextKeys.IN_USE, using - 1)
      }
    }
  }

  const existing = cls.getFromContext(ContextKeys.IDENTITY)
  const using = cls.getFromContext(ContextKeys.IN_USE)
  if (using && existing && existing._id === identity._id) {
    cls.setOnContext(ContextKeys.IN_USE, using + 1)
    return internal({ existing: true })
  } else {
    return cls.run(async () => {
      cls.setOnContext(ContextKeys.IN_USE, 1)
      return internal({ existing: false })
    })
  }
}

exports.setIdentity = identity => {
  cls.setOnContext(ContextKeys.IDENTITY, identity)
}

exports.getIdentity = () => {
  try {
    return cls.getFromContext(ContextKeys.IDENTITY)
  } catch (e) {
    // do nothing - identity is not in context
  }
}

exports.updateTenantId = tenantId => {
  cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
  if (env.USE_COUCH) {
    exports.setGlobalDB(tenantId)
  }
}

exports.updateAppId = async appId => {
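The new { forceNew } option bypasses the context re-use path shown above; a hedged sketch of the difference (import path and tenant IDs illustrative):

// Illustrative only: context re-use vs forceNew in doInTenant.
const context = require("@budibase/backend-core/tenancy") // assumed path

async function demo() {
  await context.doInTenant("tenant-a", async () => {
    // same tenant: re-uses the current context, bumping the IN_USE count
    await context.doInTenant("tenant-a", async () => {})
    // forceNew: starts a fresh context even though the tenant matches
    await context.doInTenant("tenant-a", async () => {}, { forceNew: true })
  })
}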
@@ -188,45 +252,6 @@ exports.updateAppId = async appId => {
  }
}

exports.setTenantId = (
  ctx,
  opts = { allowQs: false, allowNoTenant: false }
) => {
  let tenantId
  // exit early if not multi-tenant
  if (!exports.isMultiTenant()) {
    cls.setOnContext(ContextKeys.TENANT_ID, exports.DEFAULT_TENANT_ID)
    return exports.DEFAULT_TENANT_ID
  }

  const allowQs = opts && opts.allowQs
  const allowNoTenant = opts && opts.allowNoTenant
  const header = ctx.request.headers[Headers.TENANT_ID]
  const user = ctx.user || {}
  if (allowQs) {
    const query = ctx.request.query || {}
    tenantId = query.tenantId
  }
  // override query string (if allowed) by user, or header
  // URL params cannot be used in a middleware, as they are
  // processed later in the chain
  tenantId = user.tenantId || header || tenantId

  // Set the tenantId from the subdomain
  if (!tenantId) {
    tenantId = ctx.subdomains && ctx.subdomains[0]
  }

  if (!tenantId && !allowNoTenant) {
    ctx.throw(403, "Tenant id not set")
  }
  // check tenant ID just in case no tenant was allowed
  if (tenantId) {
    cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
  }
  return tenantId
}

exports.setGlobalDB = tenantId => {
  const dbName = baseGlobalDBName(tenantId)
  const db = dangerousGetDB(dbName)
@@ -307,7 +332,7 @@ function getContextDB(key, opts) {
 * Opens the app database based on whatever the request
 * contained, dev or prod.
 */
exports.getAppDB = opts => {
exports.getAppDB = (opts = null) => {
  return getContextDB(ContextKeys.CURRENT_DB, opts)
}

@@ -315,7 +340,7 @@ exports.getAppDB = opts => {
 * This specifically gets the prod app ID, if the request
 * contained a development app ID, this will open the prod one.
 */
exports.getProdAppDB = opts => {
exports.getProdAppDB = (opts = null) => {
  return getContextDB(ContextKeys.PROD_DB, opts)
}

@@ -323,6 +348,6 @@ exports.getProdAppDB = opts => {
 * This specifically gets the dev app ID, if the request
 * contained a prod app ID, this will open the dev one.
 */
exports.getDevAppDB = opts => {
exports.getDevAppDB = (opts = null) => {
  return getContextDB(ContextKeys.DEV_DB, opts)
}

@@ -1,12 +1,16 @@
const { dangerousGetDB, closeDB } = require(".")
import { dangerousGetDB, closeDB } from "."

class Replication {
  source: any
  target: any
  replication: any

  /**
   *
   * @param {String} source - the DB you want to replicate or rollback to
   * @param {String} target - the DB you want to replicate to, or rollback from
   */
  constructor({ source, target }) {
  constructor({ source, target }: any) {
    this.source = dangerousGetDB(source)
    this.target = dangerousGetDB(target)
  }

@@ -15,17 +19,17 @@ class Replication {
    return Promise.all([closeDB(this.source), closeDB(this.target)])
  }

  promisify(operation, opts = {}) {
  promisify(operation: any, opts = {}) {
    return new Promise(resolve => {
      operation(this.target, opts)
        .on("denied", function (err) {
        .on("denied", function (err: any) {
          // a document failed to replicate (e.g. due to permissions)
          throw new Error(`Denied: Document failed to replicate ${err}`)
        })
        .on("complete", function (info) {
        .on("complete", function (info: any) {
          return resolve(info)
        })
        .on("error", function (err) {
        .on("error", function (err: any) {
          throw new Error(`Replication Error: ${err}`)
        })
    })

@@ -64,4 +68,4 @@ class Replication {
  }
}

module.exports = Replication
export default Replication
@@ -31,6 +31,7 @@ exports.StaticDatabases = {
    name: "global-info",
    docs: {
      tenants: "tenants",
      install: "install",
    },
  },
}

@@ -3,13 +3,16 @@ const env = require("../environment")

let PouchDB
let initialised = false
const dbList = new Set()

const put =
  dbPut =>
  async (doc, options = {}) => {
    const response = await dbPut(doc, options)
    // TODO: add created / updated
    return response
    if (!doc.createdAt) {
      doc.createdAt = new Date().toISOString()
    }
    doc.updatedAt = new Date().toISOString()
    return dbPut(doc, options)
  }

const checkInitialised = () => {
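The rewritten put wrapper now stamps documents in place of the old TODO; a sketch of the observable behaviour (import path and database name illustrative):

// Illustrative only: createdAt/updatedAt stamping by the wrapped put().
const { dangerousGetDB } = require("@budibase/backend-core/db") // assumed path

async function demo() {
  const db = dangerousGetDB("demo-db")
  await db.put({ _id: "doc1" }) // both createdAt and updatedAt set here
  const doc = await db.get("doc1")
  await db.put(doc) // createdAt preserved, updatedAt refreshed
}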
@@ -28,6 +31,9 @@ exports.init = opts => {
// in situations that using the function doWithDB does not work
exports.dangerousGetDB = (dbName, opts) => {
  checkInitialised()
  if (env.isTest()) {
    dbList.add(dbName)
  }
  const db = new PouchDB(dbName, opts)
  const dbPut = db.put
  db.put = put(dbPut)
@@ -51,7 +57,7 @@ exports.closeDB = async db => {
// we have to use a callback for this so that we can close
// the DB when we're done, without this manual requests would
// need to close the database when done with it to avoid memory leaks
exports.doWithDB = async (dbName, cb, opts) => {
exports.doWithDB = async (dbName, cb, opts = {}) => {
  const db = exports.dangerousGetDB(dbName, opts)
  // need this to be async so that we can correctly close DB after all
  // async operations have been completed
@@ -63,6 +69,9 @@ exports.doWithDB = async (dbName, cb, opts) => {
}

exports.allDbs = () => {
  if (!env.isTest()) {
    throw new Error("Cannot be used outside test environment.")
  }
  checkInitialised()
  return PouchDB.allDbs()
  return [...dbList]
}

@@ -1,21 +1,42 @@
const PouchDB = require("pouchdb")
const env = require("../environment")

function getUrlInfo() {
  let url = env.COUCH_DB_URL
  let username, password, host
  const [protocol, rest] = url.split("://")
exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
  let cleanUrl, username, password, host
  if (url) {
    // Ensure the URL starts with a protocol
    const protoRegex = /^https?:\/\//i
    if (!protoRegex.test(url)) {
      url = `http://${url}`
    }

    // Split into protocol and remainder
    const split = url.split("://")
    const protocol = split[0]
    const rest = split.slice(1).join("://")

    // Extract auth if specified
    if (url.includes("@")) {
      const hostParts = rest.split("@")
      host = hostParts[1]
      const authParts = hostParts[0].split(":")
      // Split into host and remainder
      let parts = rest.split("@")
      host = parts[parts.length - 1]
      let auth = parts.slice(0, -1).join("@")

      // Split auth into username and password
      if (auth.includes(":")) {
        const authParts = auth.split(":")
        username = authParts[0]
        password = authParts[1]
        password = authParts.slice(1).join(":")
      } else {
        username = auth
      }
    } else {
      host = rest
    }
    cleanUrl = `${protocol}://${host}`
  }
  return {
    url: `${protocol}://${host}`,
    url: cleanUrl,
    auth: {
      username,
      password,
@@ -24,7 +45,7 @@ function getUrlInfo() {
}

exports.getCouchInfo = () => {
  const urlInfo = getUrlInfo()
  const urlInfo = exports.getUrlInfo()
  let username
  let password
  if (env.COUCH_DB_USERNAME) {
@@ -92,11 +113,5 @@ exports.getPouch = (opts = {}) => {
    PouchDB.plugin(find)
  }

  const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
  if (opts.allDbs) {
    const allDbs = require("pouchdb-all-dbs")
    allDbs(Pouch)
  }

  return Pouch
  return PouchDB.defaults(POUCH_DB_DEFAULTS)
}

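The reworked getUrlInfo now tolerates missing protocols and '@' or ':' characters inside passwords, as the new tests below exercise; a sketch of the shapes it returns (import path illustrative):

// Illustrative only: expected return shapes from getUrlInfo().
const { getUrlInfo } = require("@budibase/backend-core/db/pouch") // assumed path

console.log(getUrlInfo("https://user:pass@host.com:5984"))
// => { url: "https://host.com:5984", auth: { username: "user", password: "pass" } }
console.log(getUrlInfo("host.com:5984"))
// => { url: "http://host.com:5984", auth: { username: undefined, password: undefined } }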
@@ -0,0 +1,26 @@
require("../../../tests/utilities/TestConfiguration")
const { dangerousGetDB } = require("../")

describe("db", () => {

  describe("getDB", () => {
    it("returns a db", async () => {
      const db = dangerousGetDB("test")
      expect(db).toBeDefined()
      expect(db._adapter).toBe("memory")
      expect(db.prefix).toBe("_pouch_")
      expect(db.name).toBe("test")
    })

    it("uses the custom put function", async () => {
      const db = dangerousGetDB("test")
      let doc = { _id: "test" }
      await db.put(doc)
      doc = await db.get(doc._id)
      expect(doc.createdAt).toBe(new Date().toISOString())
      expect(doc.updatedAt).toBe(new Date().toISOString())
      await db.destroy()
    })
  })
})

@@ -0,0 +1,62 @@
require("../../../tests/utilities/TestConfiguration")
const getUrlInfo = require("../pouch").getUrlInfo

describe("pouch", () => {
  describe("Couch DB URL parsing", () => {
    it("should handle a null Couch DB URL", () => {
      const info = getUrlInfo(null)
      expect(info.url).toBeUndefined()
      expect(info.auth.username).toBeUndefined()
    })
    it("should be able to parse a basic Couch DB URL", () => {
      const info = getUrlInfo("http://host.com")
      expect(info.url).toBe("http://host.com")
      expect(info.auth.username).toBeUndefined()
    })
    it("should be able to parse a Couch DB basic URL with HTTPS", () => {
      const info = getUrlInfo("https://host.com")
      expect(info.url).toBe("https://host.com")
      expect(info.auth.username).toBeUndefined()
    })
    it("should be able to parse a basic Couch DB URL with a custom port", () => {
      const info = getUrlInfo("https://host.com:1234")
      expect(info.url).toBe("https://host.com:1234")
      expect(info.auth.username).toBeUndefined()
    })
    it("should be able to parse a Couch DB URL with auth", () => {
      const info = getUrlInfo("https://user:pass@host.com:1234")
      expect(info.url).toBe("https://host.com:1234")
      expect(info.auth.username).toBe("user")
      expect(info.auth.password).toBe("pass")
    })
    it("should be able to parse a Couch DB URL with auth and special chars", () => {
      const info = getUrlInfo("https://user:s:p@s://@://:d@;][~s@host.com:1234")
      expect(info.url).toBe("https://host.com:1234")
      expect(info.auth.username).toBe("user")
      expect(info.auth.password).toBe("s:p@s://@://:d@;][~s")
    })
    it("should be able to parse a Couch DB URL without a protocol", () => {
      const info = getUrlInfo("host.com:1234")
      expect(info.url).toBe("http://host.com:1234")
      expect(info.auth.username).toBeUndefined()
    })
    it("should be able to parse a Couch DB URL with auth and without a protocol", () => {
      const info = getUrlInfo("user:s:p@s://@://:d@;][~s@host.com:1234")
      expect(info.url).toBe("http://host.com:1234")
      expect(info.auth.username).toBe("user")
      expect(info.auth.password).toBe("s:p@s://@://:d@;][~s")
    })
    it("should be able to parse a Couch DB URL with only username auth", () => {
      const info = getUrlInfo("https://user@host.com:1234")
      expect(info.url).toBe("https://host.com:1234")
      expect(info.auth.username).toBe("user")
      expect(info.auth.password).toBeUndefined()
    })
    it("should be able to parse a Couch DB URL with only username auth and without a protocol", () => {
      const info = getUrlInfo("user@host.com:1234")
      expect(info.url).toBe("http://host.com:1234")
      expect(info.auth.username).toBe("user")
      expect(info.auth.password).toBeUndefined()
    })
  })
})
@@ -1,20 +1,28 @@
require("../../../tests/utilities/TestConfiguration")
const {
  generateAppID,
  getDevelopmentAppID,
  getProdAppID,
  isDevAppID,
  isProdAppID,
  getPlatformUrl,
  getScopedConfig,
} = require("../utils")
const tenancy = require("../../tenancy")
const { Configs, DEFAULT_TENANT_ID } = require("../../constants")
const env = require("../../environment")

function getID() {
describe("utils", () => {
  describe("app ID manipulation", () => {

    function getID() {
      const appId = generateAppID()
      const split = appId.split("_")
      const uuid = split[split.length - 1]
      const devAppId = `app_dev_${uuid}`
      return { appId, devAppId, split, uuid }
    }
}

describe("app ID manipulation", () => {
    it("should be able to generate a new app ID", () => {
      expect(generateAppID().startsWith("app_")).toEqual(true)
    })

@@ -58,4 +66,129 @@ describe("app ID manipulation", () => {
      const { devAppId } = getID()
      expect(isProdAppID(devAppId)).toEqual(false)
    })
  })
})

const DB_URL = "http://dburl.com"
const DEFAULT_URL = "http://localhost:10000"
const ENV_URL = "http://env.com"

const setDbPlatformUrl = async () => {
  const db = tenancy.getGlobalDB()
  await db.put({
    _id: "config_settings",
    type: Configs.SETTINGS,
    config: {
      platformUrl: DB_URL,
    },
  })
}

const clearSettingsConfig = async () => {
  await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
    const db = tenancy.getGlobalDB()
    try {
      const config = await db.get("config_settings")
      await db.remove("config_settings", config._rev)
    } catch (e) {
      if (e.status !== 404) {
        throw e
      }
    }
  })
}

describe("getPlatformUrl", () => {
  describe("self host", () => {

    beforeEach(async () => {
      env._set("SELF_HOSTED", 1)
      await clearSettingsConfig()
    })

    it("gets the default url", async () => {
      await tenancy.doInTenant(null, async () => {
        const url = await getPlatformUrl()
        expect(url).toBe(DEFAULT_URL)
      })
    })

    it("gets the platform url from the environment", async () => {
      await tenancy.doInTenant(null, async () => {
        env._set("PLATFORM_URL", ENV_URL)
        const url = await getPlatformUrl()
        expect(url).toBe(ENV_URL)
      })
    })

    it("gets the platform url from the database", async () => {
      await tenancy.doInTenant(null, async () => {
        await setDbPlatformUrl()
        const url = await getPlatformUrl()
        expect(url).toBe(DB_URL)
      })
    })
  })

  describe("cloud", () => {
    const TENANT_AWARE_URL = "http://default.env.com"

    beforeEach(async () => {
      env._set("SELF_HOSTED", 0)
      env._set("MULTI_TENANCY", 1)
      env._set("PLATFORM_URL", ENV_URL)
      await clearSettingsConfig()
    })

    it("gets the platform url from the environment without tenancy", async () => {
      await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
        const url = await getPlatformUrl({ tenantAware: false })
        expect(url).toBe(ENV_URL)
      })
    })

    it("gets the platform url from the environment with tenancy", async () => {
      await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
        const url = await getPlatformUrl()
        expect(url).toBe(TENANT_AWARE_URL)
      })
    })

    it("never gets the platform url from the database", async () => {
      await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
        await setDbPlatformUrl()
        const url = await getPlatformUrl()
        expect(url).toBe(TENANT_AWARE_URL)
      })
    })
  })
})

describe("getScopedConfig", () => {
  describe("settings config", () => {

    beforeEach(async () => {
      env._set("SELF_HOSTED", 1)
      env._set("PLATFORM_URL", "")
      await clearSettingsConfig()
    })

    it("returns the platform url with an existing config", async () => {
      await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
        await setDbPlatformUrl()
        const db = tenancy.getGlobalDB()
        const config = await getScopedConfig(db, { type: Configs.SETTINGS })
        expect(config.platformUrl).toBe(DB_URL)
      })
    })

    it("returns the platform url without an existing config", async () => {
      await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
        const db = tenancy.getGlobalDB()
        const config = await getScopedConfig(db, { type: Configs.SETTINGS })
        expect(config.platformUrl).toBe(DEFAULT_URL)
      })
    })
  })
})
@@ -1,53 +1,34 @@
const { newid } = require("../hashing")
const Replication = require("./Replication")
const { DEFAULT_TENANT_ID, Configs } = require("../constants")
const env = require("../environment")
const {
  StaticDatabases,
  SEPARATOR,
  DocumentTypes,
  APP_PREFIX,
  APP_DEV,
} = require("./constants")
const { getTenantId, getGlobalDBName } = require("../tenancy")
const fetch = require("node-fetch")
const { doWithDB, allDbs } = require("./index")
const { getCouchInfo } = require("./pouch")
const { getAppMetadata } = require("../cache/appMetadata")
const { checkSlashesInUrl } = require("../helpers")
const {
  isDevApp,
  isProdAppID,
  isDevAppID,
  getDevelopmentAppID,
  getProdAppID,
} = require("./conversions")
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
import { getAppMetadata } from "../cache/appMetadata"
import { checkSlashesInUrl } from "../helpers"
import { isDevApp, isDevAppID } from "./conversions"
import { APP_PREFIX } from "./constants"
import * as events from "../events"

const UNICODE_MAX = "\ufff0"

exports.ViewNames = {
export const ViewNames = {
  USER_BY_EMAIL: "by_email",
  BY_API_KEY: "by_api_key",
  USER_BY_BUILDERS: "by_builders",
}

exports.StaticDatabases = StaticDatabases

exports.DocumentTypes = DocumentTypes
exports.APP_PREFIX = APP_PREFIX
exports.APP_DEV = exports.APP_DEV_PREFIX = APP_DEV
exports.SEPARATOR = SEPARATOR
exports.isDevApp = isDevApp
exports.isProdAppID = isProdAppID
exports.isDevAppID = isDevAppID
exports.getDevelopmentAppID = getDevelopmentAppID
exports.getProdAppID = getProdAppID
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"

/**
 * Generates a new app ID.
 * @returns {string} The new app ID which the app doc can be stored under.
 */
exports.generateAppID = (tenantId = null) => {
export const generateAppID = (tenantId = null) => {
  let id = APP_PREFIX
  if (tenantId) {
    id += `${tenantId}${SEPARATOR}`
@@ -67,7 +48,11 @@ exports.generateAppID = (tenantId = null) => {
 * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
 * @returns {object} Parameters which can then be used with an allDocs request.
 */
function getDocParams(docType, docId = null, otherProps = {}) {
export function getDocParams(
  docType: any,
  docId: any = null,
  otherProps: any = {}
) {
  if (docId == null) {
    docId = ""
  }
@@ -77,20 +62,19 @@ function getDocParams(docType, docId = null, otherProps = {}) {
    endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,
  }
}
exports.getDocParams = getDocParams

/**
 * Generates a new workspace ID.
 * @returns {string} The new workspace ID which the workspace doc can be stored under.
 */
exports.generateWorkspaceID = () => {
export function generateWorkspaceID() {
  return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}`
}

/**
 * Gets parameters for retrieving workspaces.
 */
exports.getWorkspaceParams = (id = "", otherProps = {}) => {
export function getWorkspaceParams(id = "", otherProps = {}) {
  return {
    ...otherProps,
    startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`,
@@ -102,14 +86,14 @@ exports.getWorkspaceParams = (id = "", otherProps = {}) => {
 * Generates a new global user ID.
 * @returns {string} The new user ID which the user doc can be stored under.
 */
exports.generateGlobalUserID = id => {
export function generateGlobalUserID(id?: any) {
  return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}`
}

/**
 * Gets parameters for retrieving users.
 */
exports.getGlobalUserParams = (globalId, otherProps = {}) => {
export function getGlobalUserParams(globalId: any, otherProps = {}) {
  if (!globalId) {
    globalId = ""
  }
@@ -124,14 +108,18 @@ exports.getGlobalUserParams = (globalId, otherProps = {}) => {
 * Generates a template ID.
 * @param ownerId The owner/user of the template, this could be global or a workspace level.
 */
exports.generateTemplateID = ownerId => {
export function generateTemplateID(ownerId: any) {
  return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}

/**
 * Gets parameters for retrieving templates. Owner ID must be specified, either global or a workspace level.
 */
exports.getTemplateParams = (ownerId, templateId, otherProps = {}) => {
export function getTemplateParams(
  ownerId: any,
  templateId: any,
  otherProps = {}
) {
  if (!templateId) {
    templateId = ""
  }
@@ -152,18 +140,18 @@ exports.getTemplateParams = (ownerId, templateId, otherProps = {}) => {
 * Generates a new role ID.
 * @returns {string} The new role ID which the role doc can be stored under.
 */
exports.generateRoleID = id => {
export function generateRoleID(id: any) {
  return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}`
}

/**
 * Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
 */
exports.getRoleParams = (roleId = null, otherProps = {}) => {
export function getRoleParams(roleId = null, otherProps = {}) {
  return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
}

exports.getStartEndKeyURL = (base, baseKey, tenantId = null) => {
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
  const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
  return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
}
@@ -174,15 +162,15 @@ exports.getStartEndKeyURL = (base, baseKey, tenantId = null) => {
 * opts.efficient can be provided to make sure this call is always quick in a multi-tenant environment,
 * but it may not be 100% accurate in full efficiency mode (some tenantless apps may be missed).
 */
exports.getAllDbs = async (opts = { efficient: false }) => {
export async function getAllDbs(opts = { efficient: false }) {
  const efficient = opts && opts.efficient
  // specifically for testing we use the pouch package for this
  if (env.isTest()) {
    return allDbs()
  }
  let dbs = []
  let dbs: any[] = []
  let { url, cookie } = getCouchInfo()
  async function addDbs(couchUrl) {
  async function addDbs(couchUrl: string) {
    const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), {
      method: "GET",
      headers: {
@@ -207,13 +195,9 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
    await addDbs(couchUrl)
  } else {
    // get prod apps
    await addDbs(
      exports.getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId)
    )
    await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId))
    // get dev apps
    await addDbs(
      exports.getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId)
    )
    await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId))
    // add global db name
    dbs.push(getGlobalDBName(tenantId))
  }
@@ -226,13 +210,13 @@
 *
 * @return {Promise<object[]>} returns the app information document stored in each app database.
 */
exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
  let tenantId = getTenantId()
  if (!env.MULTI_TENANCY && !tenantId) {
    tenantId = DEFAULT_TENANT_ID
  }
  let dbs = await exports.getAllDbs({ efficient })
  const appDbNames = dbs.filter(dbName => {
  let dbs = await getAllDbs({ efficient })
  const appDbNames = dbs.filter((dbName: any) => {
    const split = dbName.split(SEPARATOR)
    // it is an app, check the tenantId
    if (split[0] === DocumentTypes.APP) {
@@ -252,7 +236,7 @@
  if (idsOnly) {
    return appDbNames
  }
  const appPromises = appDbNames.map(app =>
  const appPromises = appDbNames.map((app: any) =>
    // skip setup otherwise databases could be re-created
    getAppMetadata(app)
  )
@@ -261,17 +245,19 @@
  } else {
    const response = await Promise.allSettled(appPromises)
    const apps = response
      .filter(result => result.status === "fulfilled" && result.value != null)
      .map(({ value }) => value)
      .filter(
        (result: any) => result.status === "fulfilled" && result.value != null
      )
      .map(({ value }: any) => value)
    if (!all) {
      return apps.filter(app => {
      return apps.filter((app: any) => {
        if (dev) {
          return isDevApp(app)
        }
        return !isDevApp(app)
      })
    } else {
      return apps.map(app => ({
      return apps.map((app: any) => ({
        ...app,
        status: isDevApp(app) ? "development" : "published",
      }))
@ -282,26 +268,26 @@ exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
|
|||
/**
|
||||
* Utility function for getAllApps but filters to production apps only.
|
||||
*/
|
||||
exports.getProdAppIDs = async () => {
|
||||
return (await exports.getAllApps({ idsOnly: true })).filter(
|
||||
id => !exports.isDevAppID(id)
|
||||
export async function getProdAppIDs() {
|
||||
return (await getAllApps({ idsOnly: true })).filter(
|
||||
(id: any) => !isDevAppID(id)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for the inverse of above.
|
||||
*/
|
||||
exports.getDevAppIDs = async () => {
|
||||
return (await exports.getAllApps({ idsOnly: true })).filter(id =>
|
||||
exports.isDevAppID(id)
|
||||
export async function getDevAppIDs() {
|
||||
return (await getAllApps({ idsOnly: true })).filter((id: any) =>
|
||||
isDevAppID(id)
|
||||
)
|
||||
}
|
||||
|
||||
exports.dbExists = async dbName => {
|
||||
export async function dbExists(dbName: any) {
|
||||
let exists = false
|
||||
return doWithDB(
|
||||
dbName,
|
||||
async db => {
|
||||
async (db: any) => {
|
||||
try {
|
||||
// check if database exists
|
||||
const info = await db.info()
|
||||
|
@ -321,7 +307,7 @@ exports.dbExists = async dbName => {
|
|||
* Generates a new configuration ID.
|
||||
* @returns {string} The new configuration ID which the config doc can be stored under.
|
||||
*/
|
||||
const generateConfigID = ({ type, workspace, user }) => {
|
||||
export const generateConfigID = ({ type, workspace, user }: any) => {
|
||||
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
|
||||
|
||||
return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`
|
||||
|
@ -330,7 +316,10 @@ const generateConfigID = ({ type, workspace, user }) => {
|
|||
/**
|
||||
* Gets parameters for retrieving configurations.
|
||||
*/
|
||||
const getConfigParams = ({ type, workspace, user }, otherProps = {}) => {
|
||||
export const getConfigParams = (
|
||||
{ type, workspace, user }: any,
|
||||
otherProps = {}
|
||||
) => {
|
||||
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
|
||||
|
||||
return {
|
||||
|
@ -344,7 +333,7 @@ const getConfigParams = ({ type, workspace, user }, otherProps = {}) => {
|
|||
* Generates a new dev info document ID - this is scoped to a user.
|
||||
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
|
||||
*/
|
||||
const generateDevInfoID = userId => {
|
||||
export const generateDevInfoID = (userId: any) => {
|
||||
return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}`
|
||||
}
|
||||
|
||||
|
@ -354,7 +343,10 @@ const generateDevInfoID = userId => {
|
|||
* @param {Object} scopes - the type, workspace and userID scopes of the configuration.
|
||||
* @returns The most granular configuration document based on the scope.
|
||||
*/
|
||||
const getScopedFullConfig = async function (db, { type, user, workspace }) {
|
||||
export const getScopedFullConfig = async function (
|
||||
db: any,
|
||||
{ type, user, workspace }: any
|
||||
) {
|
||||
const response = await db.allDocs(
|
||||
getConfigParams(
|
||||
{ type, user, workspace },
|
||||
|
@ -364,7 +356,7 @@ const getScopedFullConfig = async function (db, { type, user, workspace }) {
|
|||
)
|
||||
)
|
||||
|
||||
function determineScore(row) {
|
||||
function determineScore(row: any) {
|
||||
const config = row.doc
|
||||
|
||||
// Config is specific to a user and a workspace
|
||||
|
@ -385,21 +377,24 @@ const getScopedFullConfig = async function (db, { type, user, workspace }) {
|
|||
|
||||
// Find the config with the most granular scope based on context
|
||||
let scopedConfig = response.rows.sort(
|
||||
(a, b) => determineScore(a) - determineScore(b)
|
||||
(a: any, b: any) => determineScore(a) - determineScore(b)
|
||||
)[0]
|
||||
|
||||
// custom logic for settings doc
|
||||
// always provide the platform URL
|
||||
if (type === Configs.SETTINGS) {
|
||||
if (scopedConfig && scopedConfig.doc) {
|
||||
scopedConfig.doc.config.platformUrl = await getPlatformUrl(
|
||||
scopedConfig.doc.config
|
||||
)
|
||||
// overrides affected by environment variables
|
||||
scopedConfig.doc.config.platformUrl = await getPlatformUrl()
|
||||
scopedConfig.doc.config.analyticsEnabled =
|
||||
await events.analytics.enabled()
|
||||
} else {
|
||||
// defaults
|
||||
scopedConfig = {
|
||||
doc: {
|
||||
_id: generateConfigID({ type, user, workspace }),
|
||||
config: {
|
||||
platformUrl: await getPlatformUrl(),
|
||||
analyticsEnabled: await events.analytics.enabled(),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -409,34 +404,37 @@ const getScopedFullConfig = async function (db, { type, user, workspace }) {
|
|||
return scopedConfig && scopedConfig.doc
|
||||
}
|
||||
|
||||
const getPlatformUrl = async settings => {
|
||||
export const getPlatformUrl = async (opts = { tenantAware: true }) => {
|
||||
let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
|
||||
|
||||
if (!env.SELF_HOSTED && env.MULTI_TENANCY) {
|
||||
if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
|
||||
// cloud and multi tenant - add the tenant to the default platform url
|
||||
const tenantId = getTenantId()
|
||||
if (!platformUrl.includes("localhost:")) {
|
||||
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
|
||||
}
|
||||
} else {
|
||||
} else if (env.SELF_HOSTED) {
|
||||
const db = getGlobalDB()
|
||||
// get the doc directly instead of with getScopedConfig to prevent loop
|
||||
let settings
|
||||
try {
|
||||
settings = await db.get(generateConfigID({ type: Configs.SETTINGS }))
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// self hosted - check for platform url override
|
||||
if (settings && settings.platformUrl) {
|
||||
platformUrl = settings.platformUrl
|
||||
if (settings && settings.config && settings.config.platformUrl) {
|
||||
platformUrl = settings.config.platformUrl
|
||||
}
|
||||
}
|
||||
|
||||
return platformUrl
|
||||
}
|
||||
|
||||
async function getScopedConfig(db, params) {
|
||||
export async function getScopedConfig(db: any, params: any) {
|
||||
const configDoc = await getScopedFullConfig(db, params)
|
||||
return configDoc && configDoc.config ? configDoc.config : configDoc
|
||||
}
|
||||
|
||||
exports.Replication = Replication
|
||||
exports.getScopedConfig = getScopedConfig
|
||||
exports.generateConfigID = generateConfigID
|
||||
exports.getConfigParams = getConfigParams
|
||||
exports.getScopedFullConfig = getScopedFullConfig
|
||||
exports.generateDevInfoID = generateDevInfoID
|
||||
exports.getPlatformUrl = getPlatformUrl
|
|
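Note: the rewritten getPlatformUrl no longer takes a settings argument; it resolves the URL from hosting mode instead. A simplified sketch of the resolution order (env, tenant and settings doc are stubbed here - this is not the real module wiring):

type Env = { PLATFORM_URL?: string; SELF_HOSTED: boolean; MULTI_TENANCY: boolean }

async function resolvePlatformUrl(
  env: Env,
  tenantId: string,
  settingsDoc?: { config?: { platformUrl?: string } },
  opts = { tenantAware: true }
) {
  let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
  if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
    // cloud: prefix the tenant onto the default URL, e.g. https://acme.budibase.app
    if (!platformUrl.includes("localhost:")) {
      platformUrl = platformUrl.replace("://", `://${tenantId}.`)
    }
  } else if (env.SELF_HOSTED && settingsDoc?.config?.platformUrl) {
    // self host: the settings doc overrides the environment variable
    platformUrl = settingsDoc.config.platformUrl
  }
  return platformUrl
}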
@@ -10,7 +10,15 @@ function isDev() {
  return process.env.NODE_ENV !== "production"
}

module.exports = {
let LOADED = false
if (!LOADED && isDev() && !isTest()) {
  require("dotenv").config()
  LOADED = true
}

const env = {
  isTest,
  isDev,
  JWT_SECRET: process.env.JWT_SECRET,
  COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
  COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@@ -30,9 +38,11 @@ module.exports = {
    process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
  ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
  DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
  COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
  PLATFORM_URL: process.env.PLATFORM_URL,
  POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
  TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
  BACKUPS_BUCKET_NAME: process.env.BACKUPS_BUCKET_NAME || "backups",
  APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || "prod-budi-app-assets",
@@ -41,18 +51,24 @@ module.exports = {
  GLOBAL_CLOUD_BUCKET_NAME:
    process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
  USE_COUCH: process.env.USE_COUCH || true,
  isTest,
  isDev,
  _set(key, value) {
  DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
  DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
  SERVICE: process.env.SERVICE || "budibase",
  DEPLOYMENT_ENVIRONMENT:
    process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
  _set(key: any, value: any) {
    process.env[key] = value
    module.exports[key] = value
  },
}

// clean up any environment variable edge cases
for (let [key, value] of Object.entries(module.exports)) {
for (let [key, value] of Object.entries(env)) {
  // handle the edge case of "0" to disable an environment variable
  if (value === "0") {
    module.exports[key] = 0
    // @ts-ignore
    env[key] = 0
  }
}

export = env
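Note: the environment module now assembles a plain env object and exposes it with `export =`, which keeps CommonJS consumers working after the TypeScript conversion. The "0" normalisation at the end is the subtle part - a minimal sketch of that edge case, with the object trimmed to one variable:

// A variable explicitly set to the string "0" becomes the number 0,
// which is falsy in checks like `if (env.ENABLE_ANALYTICS)`.
const env: Record<string, any> = {
  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, // possibly the string "0"
}

for (const [key, value] of Object.entries(env)) {
  if (value === "0") {
    env[key] = 0
  }
}

export = env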
@@ -1,8 +1,8 @@
class BudibaseError extends Error {
  constructor(message, type, code) {
  constructor(message, code, type) {
    super(message)
    this.type = type
    this.code = code
    this.type = type
  }
}

@@ -0,0 +1,11 @@
const { BudibaseError } = require("./base")

class GenericError extends BudibaseError {
  constructor(message, code, type) {
    super(message, code, type ? type : "generic")
  }
}

module.exports = {
  GenericError,
}

@@ -0,0 +1,12 @@
const { GenericError } = require("./generic")

class HTTPError extends GenericError {
  constructor(message, httpStatus, code = "http", type = "generic") {
    super(message, code, type)
    this.status = httpStatus
  }
}

module.exports = {
  HTTPError,
}

@@ -1,12 +1,11 @@
const http = require("./http")
const licensing = require("./licensing")

const codes = {
  ...licensing.codes,
}

const types = {
  ...licensing.types,
}
const types = [licensing.type]

const context = {
  ...licensing.context,
@@ -36,6 +35,9 @@ const getPublicError = err => {
module.exports = {
  codes,
  types,
  errors: {
    UsageLimitError: licensing.UsageLimitError,
    HTTPError: http.HTTPError,
  },
  getPublicError,
}

@@ -1,8 +1,6 @@
const { BudibaseError } = require("./base")
const { HTTPError } = require("./http")

const types = {
  LICENSE_ERROR: "license_error",
}
const type = "license_error"

const codes = {
  USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
@@ -16,16 +14,15 @@ const context = {
  },
}

class UsageLimitError extends BudibaseError {
class UsageLimitError extends HTTPError {
  constructor(message, limitName) {
    super(message, types.LICENSE_ERROR, codes.USAGE_LIMIT_EXCEEDED)
    super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
    this.limitName = limitName
    this.status = 400
  }
}

module.exports = {
  types,
  type,
  codes,
  context,
  UsageLimitError,
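Note: these error changes rework the constructor argument order to (message, code, type) and make UsageLimitError inherit its HTTP status through HTTPError instead of setting it by hand. A minimal sketch of how the reworked constructors chain, re-declared inline for illustration (the intermediate GenericError from errors/generic is collapsed into the base class here):

class BudibaseError extends Error {
  code: string
  type: string
  constructor(message: string, code: string, type: string) {
    super(message)
    this.code = code
    this.type = type
  }
}

class HTTPError extends BudibaseError {
  status: number
  constructor(message: string, httpStatus: number, code = "http", type = "generic") {
    super(message, code, type)
    this.status = httpStatus
  }
}

// UsageLimitError no longer sets status itself - it flows through HTTPError
class UsageLimitError extends HTTPError {
  limitName: string
  constructor(message: string, limitName: string) {
    super(message, 400, "usage_limit_exceeded", "license_error")
    this.limitName = limitName
  }
}

const err = new UsageLimitError("Row limit exceeded", "rows")
console.log(err.status, err.code, err.type) // 400 usage_limit_exceeded license_error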
@@ -0,0 +1,57 @@
import env from "../environment"
import tenancy from "../tenancy"
import * as dbUtils from "../db/utils"
import { Configs } from "../constants"
import { withCache, TTL, CacheKeys } from "../cache/generic"

export const enabled = async () => {
  // cloud - always use the environment variable
  if (!env.SELF_HOSTED) {
    return !!env.ENABLE_ANALYTICS
  }

  // self host - prefer the settings doc
  // use cache as events have high throughput
  const enabledInDB = await withCache(
    CacheKeys.ANALYTICS_ENABLED,
    TTL.ONE_DAY,
    async () => {
      const settings = await getSettingsDoc()

      // need to do explicit checks in case the field is not set
      if (settings?.config?.analyticsEnabled === false) {
        return false
      } else if (settings?.config?.analyticsEnabled === true) {
        return true
      }
    }
  )

  if (enabledInDB !== undefined) {
    return enabledInDB
  }

  // fallback to the environment variable
  // explicitly check for 0 or false here, undefined or otherwise is treated as true
  const envEnabled: any = env.ENABLE_ANALYTICS
  if (envEnabled === 0 || envEnabled === false) {
    return false
  } else {
    return true
  }
}

const getSettingsDoc = async () => {
  const db = tenancy.getGlobalDB()
  let settings
  try {
    settings = await db.get(
      dbUtils.generateConfigID({ type: Configs.SETTINGS })
    )
  } catch (e: any) {
    if (e.status !== 404) {
      throw e
    }
  }
  return settings
}
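Note: the resolution order implemented above is cloud → environment variable only; self host → cached settings doc, falling back to the environment variable where only an explicit 0/false disables analytics. A sketch of the same decision with the cache and database stubbed out:

async function analyticsEnabled(
  selfHosted: boolean,
  envFlag: any,
  settingsValue?: boolean // settings.config.analyticsEnabled, if set
): Promise<boolean> {
  if (!selfHosted) {
    return !!envFlag // cloud: environment variable only
  }
  if (settingsValue !== undefined) {
    return settingsValue // self host: settings doc wins
  }
  // fallback: only an explicit 0/false disables analytics
  return !(envFlag === 0 || envFlag === false)
}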
@@ -0,0 +1,183 @@
import {
  Event,
  BackfillMetadata,
  CachedEvent,
  SSOCreatedEvent,
  AutomationCreatedEvent,
  AutomationStepCreatedEvent,
  DatasourceCreatedEvent,
  LayoutCreatedEvent,
  QueryCreatedEvent,
  RoleCreatedEvent,
  ScreenCreatedEvent,
  TableCreatedEvent,
  ViewCreatedEvent,
  ViewCalculationCreatedEvent,
  ViewFilterCreatedEvent,
  AppPublishedEvent,
  UserCreatedEvent,
  RoleAssignedEvent,
  UserPermissionAssignedEvent,
  AppCreatedEvent,
} from "@budibase/types"
import * as context from "../context"
import { CacheKeys } from "../cache/generic"
import * as cache from "../cache/generic"

// LIFECYCLE

export const start = async (events: Event[]) => {
  const metadata: BackfillMetadata = {
    eventWhitelist: events,
  }
  return saveBackfillMetadata(metadata)
}

export const recordEvent = async (event: Event, properties: any) => {
  const eventKey = getEventKey(event, properties)
  // don't use a ttl - cleaned up by migration
  // don't use tenancy - already in the key
  await cache.store(eventKey, properties, undefined, { useTenancy: false })
}

export const end = async () => {
  await deleteBackfillMetadata()
  await clearEvents()
}

// CRUD

const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
  return cache.get(CacheKeys.BACKFILL_METADATA)
}

const saveBackfillMetadata = async (
  backfill: BackfillMetadata
): Promise<void> => {
  // no TTL - deleted by backfill
  return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
}

const deleteBackfillMetadata = async (): Promise<void> => {
  await cache.delete(CacheKeys.BACKFILL_METADATA)
}

const clearEvents = async () => {
  // wildcard
  const pattern = getEventKey()
  const keys = await cache.keys(pattern)

  for (const key of keys) {
    // delete each key
    // don't use tenancy, already in the key
    await cache.delete(key, { useTenancy: false })
  }
}

// HELPERS

export const isBackfillingEvent = async (event: Event) => {
  const backfill = await getBackfillMetadata()
  const events = backfill?.eventWhitelist
  if (events && events.includes(event)) {
    return true
  } else {
    return false
  }
}

export const isAlreadySent = async (event: Event, properties: any) => {
  const eventKey = getEventKey(event, properties)
  const cachedEvent: CachedEvent = await cache.get(eventKey, {
    useTenancy: false,
  })
  return !!cachedEvent
}

const CUSTOM_PROPERTY_SUFFIX: any = {
  // APP EVENTS
  [Event.AUTOMATION_CREATED]: (properties: AutomationCreatedEvent) => {
    return properties.automationId
  },
  [Event.AUTOMATION_STEP_CREATED]: (properties: AutomationStepCreatedEvent) => {
    return properties.stepId
  },
  [Event.DATASOURCE_CREATED]: (properties: DatasourceCreatedEvent) => {
    return properties.datasourceId
  },
  [Event.LAYOUT_CREATED]: (properties: LayoutCreatedEvent) => {
    return properties.layoutId
  },
  [Event.QUERY_CREATED]: (properties: QueryCreatedEvent) => {
    return properties.queryId
  },
  [Event.ROLE_CREATED]: (properties: RoleCreatedEvent) => {
    return properties.roleId
  },
  [Event.SCREEN_CREATED]: (properties: ScreenCreatedEvent) => {
    return properties.screenId
  },
  [Event.TABLE_CREATED]: (properties: TableCreatedEvent) => {
    return properties.tableId
  },
  [Event.VIEW_CREATED]: (properties: ViewCreatedEvent) => {
    return properties.tableId // best uniqueness
  },
  [Event.VIEW_CALCULATION_CREATED]: (
    properties: ViewCalculationCreatedEvent
  ) => {
    return properties.tableId // best uniqueness
  },
  [Event.VIEW_FILTER_CREATED]: (properties: ViewFilterCreatedEvent) => {
    return properties.tableId // best uniqueness
  },
  [Event.APP_CREATED]: (properties: AppCreatedEvent) => {
    return properties.appId // best uniqueness
  },
  [Event.APP_PUBLISHED]: (properties: AppPublishedEvent) => {
    return properties.appId // best uniqueness
  },
  // GLOBAL EVENTS
  [Event.AUTH_SSO_CREATED]: (properties: SSOCreatedEvent) => {
    return properties.type
  },
  [Event.AUTH_SSO_ACTIVATED]: (properties: SSOCreatedEvent) => {
    return properties.type
  },
  [Event.USER_CREATED]: (properties: UserCreatedEvent) => {
    return properties.userId
  },
  [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (
    properties: UserPermissionAssignedEvent
  ) => {
    return properties.userId
  },
  [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (
    properties: UserPermissionAssignedEvent
  ) => {
    return properties.userId
  },
  [Event.ROLE_ASSIGNED]: (properties: RoleAssignedEvent) => {
    return `${properties.roleId}-${properties.userId}`
  },
}

const getEventKey = (event?: Event, properties?: any) => {
  let eventKey: string

  const tenantId = context.getTenantId()
  if (event) {
    eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`

    // use some properties to make the key more unique
    const custom = CUSTOM_PROPERTY_SUFFIX[event]
    const suffix = custom ? custom(properties) : undefined
    if (suffix) {
      eventKey = `${eventKey}:${suffix}`
    }
  } else {
    eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
  }

  return eventKey
}
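Note: getEventKey namespaces every backfill record by cache prefix, tenant and event, with an optional per-record suffix. A minimal sketch of the key scheme, with the cache prefix and event value inlined as assumptions (neither literal is shown in this diff):

const EVENTS_PREFIX = "events" // assumed value of CacheKeys.EVENTS

function eventKey(tenantId: string, event?: string, suffix?: string) {
  if (!event) {
    return `${EVENTS_PREFIX}:${tenantId}:*` // wildcard used by clearEvents()
  }
  let key = `${EVENTS_PREFIX}:${tenantId}:${event}`
  if (suffix) {
    key = `${key}:${suffix}` // e.g. an appId or userId for uniqueness
  }
  return key
}

console.log(eventKey("default", "app:created", "app_abc123"))
// events:default:app:created:app_abc123 (assuming Event.APP_CREATED serialises to "app:created")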
@@ -0,0 +1,31 @@
import { Event } from "@budibase/types"
import { processors } from "./processors"
import * as identification from "./identification"
import * as backfill from "./backfill"

export const publishEvent = async (
  event: Event,
  properties: any,
  timestamp?: string | number
) => {
  // in future this should use async events via a distributed queue.
  const identity = await identification.getCurrentIdentity()

  const backfilling = await backfill.isBackfillingEvent(event)
  // no backfill - send the event and exit
  if (!backfilling) {
    await processors.processEvent(event, identity, properties, timestamp)
    return
  }

  // backfill active - check if the event has been sent already
  const alreadySent = await backfill.isAlreadySent(event, properties)
  if (alreadySent) {
    // do nothing
    return
  } else {
    // send and record the event
    await processors.processEvent(event, identity, properties, timestamp)
    await backfill.recordEvent(event, properties)
  }
}
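Note: callers never talk to the processors directly - they go through publishEvent, which dedupes while a backfill is active. A hypothetical call site showing the shape a caller would use (not code from this commit; the import path is illustrative):

import { Event } from "@budibase/types"
import { publishEvent } from "./events"

async function onAppCreated(appId: string, version: string) {
  // the first publish sends and (if a backfill is active) records the event;
  // a second identical publish during the same backfill is a no-op
  await publishEvent(Event.APP_CREATED, { appId, version })
}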
@@ -0,0 +1,302 @@
import * as context from "../context"
import * as identityCtx from "../context/identity"
import env from "../environment"
import {
  Hosting,
  User,
  Identity,
  IdentityType,
  Account,
  isCloudAccount,
  isSSOAccount,
  TenantGroup,
  SettingsConfig,
  CloudAccount,
  UserIdentity,
  InstallationGroup,
  UserContext,
  Group,
} from "@budibase/types"
import { processors } from "./processors"
import * as dbUtils from "../db/utils"
import { Configs } from "../constants"
import * as hashing from "../hashing"
import * as installation from "../installation"
import { withCache, TTL, CacheKeys } from "../cache/generic"

const pkg = require("../../package.json")

/**
 * An identity can be:
 * - account user (Self host)
 * - budibase user
 * - tenant
 * - installation
 */
export const getCurrentIdentity = async (): Promise<Identity> => {
  let identityContext = identityCtx.getIdentity()
  const environment = getDeploymentEnvironment()

  let identityType

  if (!identityContext) {
    identityType = IdentityType.TENANT
  } else {
    identityType = identityContext.type
  }

  if (identityType === IdentityType.INSTALLATION) {
    const installationId = await getInstallationId()
    const hosting = getHostingFromEnv()
    return {
      id: formatDistinctId(installationId, identityType),
      hosting,
      type: identityType,
      installationId,
      environment,
    }
  } else if (identityType === IdentityType.TENANT) {
    const installationId = await getInstallationId()
    const tenantId = await getEventTenantId(context.getTenantId())
    const hosting = getHostingFromEnv()

    return {
      id: formatDistinctId(tenantId, identityType),
      type: identityType,
      hosting,
      installationId,
      tenantId,
      environment,
    }
  } else if (identityType === IdentityType.USER) {
    const userContext = identityContext as UserContext
    const tenantId = await getEventTenantId(context.getTenantId())
    const installationId = await getInstallationId()

    const account = userContext.account
    let hosting
    if (account) {
      hosting = account.hosting
    } else {
      hosting = getHostingFromEnv()
    }

    return {
      id: userContext._id,
      type: identityType,
      hosting,
      installationId,
      tenantId,
      environment,
    }
  } else {
    throw new Error("Unknown identity type")
  }
}

export const identifyInstallationGroup = async (
  installId: string,
  timestamp?: string | number
): Promise<void> => {
  const id = installId
  const type = IdentityType.INSTALLATION
  const hosting = getHostingFromEnv()
  const version = pkg.version
  const environment = getDeploymentEnvironment()

  const group: InstallationGroup = {
    id,
    type,
    hosting,
    version,
    environment,
  }

  await identifyGroup(group, timestamp)
  // need to create a normal identity for the group to be able to query it globally
  // match the posthog syntax to link this identity to the empty auto generated one
  await identify({ ...group, id: `$${type}_${id}` }, timestamp)
}

export const identifyTenantGroup = async (
  tenantId: string,
  account: Account | undefined,
  timestamp?: string | number
): Promise<void> => {
  const id = await getEventTenantId(tenantId)
  const type = IdentityType.TENANT
  const installationId = await getInstallationId()
  const environment = getDeploymentEnvironment()

  let hosting: Hosting
  let profession: string | undefined
  let companySize: string | undefined

  if (account) {
    profession = account.profession
    companySize = account.size
    hosting = account.hosting
  } else {
    hosting = getHostingFromEnv()
  }

  const group: TenantGroup = {
    id,
    type,
    hosting,
    environment,
    installationId,
    profession,
    companySize,
  }

  await identifyGroup(group, timestamp)
  // need to create a normal identity for the group to be able to query it globally
  // match the posthog syntax to link this identity to the auto generated one
  await identify({ ...group, id: `$${type}_${id}` }, timestamp)
}

export const identifyUser = async (
  user: User,
  account: CloudAccount | undefined,
  timestamp?: string | number
) => {
  const id = user._id as string
  const tenantId = await getEventTenantId(user.tenantId)
  const type = IdentityType.USER
  let builder = user.builder?.global || false
  let admin = user.admin?.global || false
  let providerType = user.providerType
  const accountHolder = account?.budibaseUserId === user._id || false
  const verified =
    account && account?.budibaseUserId === user._id ? account.verified : false
  const installationId = await getInstallationId()
  const hosting = account ? account.hosting : getHostingFromEnv()
  const environment = getDeploymentEnvironment()

  const identity: UserIdentity = {
    id,
    type,
    hosting,
    installationId,
    tenantId,
    verified,
    accountHolder,
    providerType,
    builder,
    admin,
    environment,
  }

  await identify(identity, timestamp)
}

export const identifyAccount = async (account: Account) => {
  let id = account.accountId
  const tenantId = account.tenantId
  let type = IdentityType.USER
  let providerType = isSSOAccount(account) ? account.providerType : undefined
  const verified = account.verified
  const accountHolder = true
  const hosting = account.hosting
  const installationId = await getInstallationId()
  const environment = getDeploymentEnvironment()

  if (isCloudAccount(account)) {
    if (account.budibaseUserId) {
      // use the budibase user as the id if set
      id = account.budibaseUserId
    }
  }

  const identity: UserIdentity = {
    id,
    type,
    hosting,
    installationId,
    tenantId,
    providerType,
    verified,
    accountHolder,
    environment,
  }

  await identify(identity)
}

export const identify = async (
  identity: Identity,
  timestamp?: string | number
) => {
  await processors.identify(identity, timestamp)
}

export const identifyGroup = async (
  group: Group,
  timestamp?: string | number
) => {
  await processors.identifyGroup(group, timestamp)
}

const getDeploymentEnvironment = () => {
  if (env.isDev()) {
    return "development"
  } else {
    return env.DEPLOYMENT_ENVIRONMENT
  }
}

const getHostingFromEnv = () => {
  return env.SELF_HOSTED ? Hosting.SELF : Hosting.CLOUD
}

export const getInstallationId = async () => {
  if (isAccountPortal()) {
    return "account-portal"
  }
  const install = await installation.getInstall()
  return install.installId
}

const getEventTenantId = async (tenantId: string): Promise<string> => {
  if (env.SELF_HOSTED) {
    return getUniqueTenantId(tenantId)
  } else {
    // tenant id's in the cloud are already unique
    return tenantId
  }
}

const getUniqueTenantId = async (tenantId: string): Promise<string> => {
  // make sure this tenantId always matches the tenantId in context
  return context.doInTenant(tenantId, () => {
    return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
      const db = context.getGlobalDB()
      const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
        type: Configs.SETTINGS,
      })

      let uniqueTenantId: string
      if (config.config.uniqueTenantId) {
        return config.config.uniqueTenantId
      } else {
        uniqueTenantId = `${hashing.newid()}_${tenantId}`
        config.config.uniqueTenantId = uniqueTenantId
        await db.put(config)
        return uniqueTenantId
      }
    })
  })
}

const isAccountPortal = () => {
  return env.SERVICE === "account-portal"
}

const formatDistinctId = (id: string, type: IdentityType) => {
  if (type === IdentityType.INSTALLATION || type === IdentityType.TENANT) {
    return `$${type}_${id}`
  } else {
    return id
  }
}
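Note: tenant and installation identities get a `$type_` prefix on their PostHog distinct IDs (matching the group-identity linking above), while user IDs pass through unchanged. Worked examples, with the IdentityType values inlined as an assumption ("user"/"tenant"/"installation" - the enum literals are not shown in this diff):

enum IdentityType {
  USER = "user",
  TENANT = "tenant",
  INSTALLATION = "installation",
}

function formatDistinctId(id: string, type: IdentityType) {
  if (type === IdentityType.INSTALLATION || type === IdentityType.TENANT) {
    return `$${type}_${id}`
  }
  return id
}

console.log(formatDistinctId("abc123", IdentityType.TENANT)) // $tenant_abc123
console.log(formatDistinctId("us_42", IdentityType.USER)) // us_42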
@@ -0,0 +1,11 @@
export * from "./publishers"
export * as processors from "./processors"
export * as analytics from "./analytics"
export * as identification from "./identification"
export * as backfillCache from "./backfill"

import { processors } from "./processors"

export const shutdown = () => {
  processors.shutdown()
}
@@ -0,0 +1,64 @@
import { Event, Identity, Group, IdentityType } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import * as analytics from "../analytics"
import PosthogProcessor from "./PosthogProcessor"

/**
 * Events that are always captured.
 */
const EVENT_WHITELIST = [
  Event.INSTALLATION_VERSION_UPGRADED,
  Event.INSTALLATION_VERSION_DOWNGRADED,
]
const IDENTITY_WHITELIST = [IdentityType.INSTALLATION, IdentityType.TENANT]

export default class AnalyticsProcessor implements EventProcessor {
  posthog: PosthogProcessor | undefined

  constructor() {
    if (env.POSTHOG_TOKEN && !env.isTest()) {
      this.posthog = new PosthogProcessor(env.POSTHOG_TOKEN)
    }
  }

  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string | number
  ): Promise<void> {
    if (!EVENT_WHITELIST.includes(event) && !(await analytics.enabled())) {
      return
    }
    if (this.posthog) {
      this.posthog.processEvent(event, identity, properties, timestamp)
    }
  }

  async identify(identity: Identity, timestamp?: string | number) {
    // Group identifications (tenant and installation) always on
    if (
      !IDENTITY_WHITELIST.includes(identity.type) &&
      !(await analytics.enabled())
    ) {
      return
    }
    if (this.posthog) {
      this.posthog.identify(identity, timestamp)
    }
  }

  async identifyGroup(group: Group, timestamp?: string | number) {
    // Group identifications (tenant and installation) always on
    if (this.posthog) {
      this.posthog.identifyGroup(group, timestamp)
    }
  }

  shutdown() {
    if (this.posthog) {
      this.posthog.shutdown()
    }
  }
}
@@ -0,0 +1,54 @@
import { Event, Identity, Group } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"

const getTimestampString = (timestamp?: string | number) => {
  let timestampString = ""
  if (timestamp) {
    timestampString = `[timestamp=${new Date(timestamp).toISOString()}]`
  }
  return timestampString
}

const skipLogging = env.SELF_HOSTED && !env.isDev()

export default class LoggingProcessor implements EventProcessor {
  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string
  ): Promise<void> {
    if (skipLogging) {
      return
    }
    let timestampString = getTimestampString(timestamp)
    console.log(
      `[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event}`
    )
  }

  async identify(identity: Identity, timestamp?: string | number) {
    if (skipLogging) {
      return
    }
    let timestampString = getTimestampString(timestamp)
    console.log(
      `[audit] [${JSON.stringify(identity)}] ${timestampString} identified`
    )
  }

  async identifyGroup(group: Group, timestamp?: string | number) {
    if (skipLogging) {
      return
    }
    let timestampString = getTimestampString(timestamp)
    console.log(
      `[audit] [${JSON.stringify(group)}] ${timestampString} group identified`
    )
  }

  shutdown(): void {
    // no-op
  }
}
@@ -0,0 +1,81 @@
import PostHog from "posthog-node"
import { Event, Identity, Group, BaseEvent } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import context from "../../context"
const pkg = require("../../../package.json")

export default class PosthogProcessor implements EventProcessor {
  posthog: PostHog

  constructor(token: string | undefined) {
    if (!token) {
      throw new Error("Posthog token is not defined")
    }
    this.posthog = new PostHog(token)
  }

  async processEvent(
    event: Event,
    identity: Identity,
    properties: BaseEvent,
    timestamp?: string | number
  ): Promise<void> {
    properties.version = pkg.version
    properties.service = env.SERVICE
    properties.environment = identity.environment
    properties.hosting = identity.hosting

    const appId = context.getAppId()
    if (appId) {
      properties.appId = appId
    }

    const payload: any = { distinctId: identity.id, event, properties }

    if (timestamp) {
      payload.timestamp = new Date(timestamp)
    }

    // add groups to the event
    if (identity.installationId || identity.tenantId) {
      payload.groups = {}
      if (identity.installationId) {
        payload.groups.installation = identity.installationId
        payload.properties.installationId = identity.installationId
      }
      if (identity.tenantId) {
        payload.groups.tenant = identity.tenantId
        payload.properties.tenantId = identity.tenantId
      }
    }

    this.posthog.capture(payload)
  }

  async identify(identity: Identity, timestamp?: string | number) {
    const payload: any = { distinctId: identity.id, properties: identity }
    if (timestamp) {
      payload.timestamp = new Date(timestamp)
    }
    this.posthog.identify(payload)
  }

  async identifyGroup(group: Group, timestamp?: string | number) {
    const payload: any = {
      distinctId: group.id,
      groupType: group.type,
      groupKey: group.id,
      properties: group,
    }

    if (timestamp) {
      payload.timestamp = new Date(timestamp)
    }
    this.posthog.groupIdentify(payload)
  }

  shutdown() {
    this.posthog.shutdown()
  }
}
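Note: for a user identity in a self-hosted tenant, the payload processEvent hands to posthog-node ends up shaped roughly like the sketch below. All literals are made up for illustration, and the event/hosting strings assume Event.APP_CREATED serialises to "app:created" and Hosting.SELF to "self" (neither is shown in this diff):

const payload = {
  distinctId: "us_42",
  event: "app:created",
  properties: {
    appId: "app_dev_abc123",
    version: "1.0.48", // pkg.version
    service: "budibase", // env.SERVICE
    environment: "docker-compose",
    hosting: "self",
    tenantId: "default",
    installationId: "inst_1",
  },
  groups: { tenant: "default", installation: "inst_1" },
  timestamp: new Date(1650000000000),
}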
@@ -0,0 +1,46 @@
import { Event, Identity, Group } from "@budibase/types"
import { EventProcessor } from "./types"

export default class Processor implements EventProcessor {
  initialised: boolean = false
  processors: EventProcessor[] = []

  constructor(processors: EventProcessor[]) {
    this.processors = processors
  }

  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string | number
  ): Promise<void> {
    for (const eventProcessor of this.processors) {
      await eventProcessor.processEvent(event, identity, properties, timestamp)
    }
  }

  async identify(
    identity: Identity,
    timestamp?: string | number
  ): Promise<void> {
    for (const eventProcessor of this.processors) {
      await eventProcessor.identify(identity, timestamp)
    }
  }

  async identifyGroup(
    identity: Group,
    timestamp?: string | number
  ): Promise<void> {
    for (const eventProcessor of this.processors) {
      await eventProcessor.identifyGroup(identity, timestamp)
    }
  }

  shutdown() {
    for (const eventProcessor of this.processors) {
      eventProcessor.shutdown()
    }
  }
}
@@ -0,0 +1,8 @@
import AnalyticsProcessor from "./AnalyticsProcessor"
import LoggingProcessor from "./LoggingProcessor"
import Processors from "./Processors"

export const analyticsProcessor = new AnalyticsProcessor()
const loggingProcessor = new LoggingProcessor()

export const processors = new Processors([analyticsProcessor, loggingProcessor])
@@ -0,0 +1,18 @@
import { Event, Identity, Group } from "@budibase/types"

export enum EventProcessorType {
  POSTHOG = "posthog",
  LOGGING = "logging",
}

export interface EventProcessor {
  processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string | number
  ): Promise<void>
  identify(identity: Identity, timestamp?: string | number): Promise<void>
  identifyGroup(group: Group, timestamp?: string | number): Promise<void>
  shutdown(): void
}
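Note: any object satisfying this interface can be composed through the Processors fan-out above. A hypothetical minimal processor showing the contract (not part of this commit; import paths as in the files above):

import { Event, Identity, Group } from "@budibase/types"
import { EventProcessor } from "./types"

class ConsoleProcessor implements EventProcessor {
  async processEvent(event: Event, identity: Identity, properties: any) {
    console.log(`event=${event} identity=${identity.id}`, properties)
  }
  async identify(identity: Identity) {
    console.log(`identify ${identity.id}`)
  }
  async identifyGroup(group: Group) {
    console.log(`identify group ${group.id}`)
  }
  shutdown() {
    // nothing to flush
  }
}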
@@ -0,0 +1,29 @@
import { publishEvent } from "../events"
import {
  Event,
  Account,
  AccountCreatedEvent,
  AccountDeletedEvent,
  AccountVerifiedEvent,
} from "@budibase/types"

export async function created(account: Account) {
  const properties: AccountCreatedEvent = {
    tenantId: account.tenantId,
  }
  await publishEvent(Event.ACCOUNT_CREATED, properties)
}

export async function deleted(account: Account) {
  const properties: AccountDeletedEvent = {
    tenantId: account.tenantId,
  }
  await publishEvent(Event.ACCOUNT_DELETED, properties)
}

export async function verified(account: Account) {
  const properties: AccountVerifiedEvent = {
    tenantId: account.tenantId,
  }
  await publishEvent(Event.ACCOUNT_VERIFIED, properties)
}
@@ -0,0 +1,108 @@
import { publishEvent } from "../events"
import {
  Event,
  App,
  AppCreatedEvent,
  AppUpdatedEvent,
  AppDeletedEvent,
  AppPublishedEvent,
  AppUnpublishedEvent,
  AppFileImportedEvent,
  AppTemplateImportedEvent,
  AppVersionUpdatedEvent,
  AppVersionRevertedEvent,
  AppRevertedEvent,
  AppExportedEvent,
} from "@budibase/types"

export const created = async (app: App, timestamp?: string | number) => {
  const properties: AppCreatedEvent = {
    appId: app.appId,
    version: app.version,
  }
  await publishEvent(Event.APP_CREATED, properties, timestamp)
}

export async function updated(app: App) {
  const properties: AppUpdatedEvent = {
    appId: app.appId,
    version: app.version,
  }
  await publishEvent(Event.APP_UPDATED, properties)
}

export async function deleted(app: App) {
  const properties: AppDeletedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_DELETED, properties)
}

export async function published(app: App, timestamp?: string | number) {
  const properties: AppPublishedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_PUBLISHED, properties, timestamp)
}

export async function unpublished(app: App) {
  const properties: AppUnpublishedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_UNPUBLISHED, properties)
}

export async function fileImported(app: App) {
  const properties: AppFileImportedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_FILE_IMPORTED, properties)
}

export async function templateImported(app: App, templateKey: string) {
  const properties: AppTemplateImportedEvent = {
    appId: app.appId,
    templateKey,
  }
  await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties)
}

export async function versionUpdated(
  app: App,
  currentVersion: string,
  updatedToVersion: string
) {
  const properties: AppVersionUpdatedEvent = {
    appId: app.appId,
    currentVersion,
    updatedToVersion,
  }
  await publishEvent(Event.APP_VERSION_UPDATED, properties)
}

export async function versionReverted(
  app: App,
  currentVersion: string,
  revertedToVersion: string
) {
  const properties: AppVersionRevertedEvent = {
    appId: app.appId,
    currentVersion,
    revertedToVersion,
  }
  await publishEvent(Event.APP_VERSION_REVERTED, properties)
}

export async function reverted(app: App) {
  const properties: AppRevertedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_REVERTED, properties)
}

export async function exported(app: App) {
  const properties: AppExportedEvent = {
    appId: app.appId,
  }
  await publishEvent(Event.APP_EXPORTED, properties)
}
@@ -0,0 +1,58 @@
import { publishEvent } from "../events"
import {
  Event,
  LoginEvent,
  LoginSource,
  LogoutEvent,
  SSOActivatedEvent,
  SSOCreatedEvent,
  SSODeactivatedEvent,
  SSOType,
  SSOUpdatedEvent,
} from "@budibase/types"
import { identification } from ".."

export async function login(source: LoginSource) {
  const identity = await identification.getCurrentIdentity()
  const properties: LoginEvent = {
    userId: identity.id,
    source,
  }
  await publishEvent(Event.AUTH_LOGIN, properties)
}

export async function logout() {
  const identity = await identification.getCurrentIdentity()
  const properties: LogoutEvent = {
    userId: identity.id,
  }
  await publishEvent(Event.AUTH_LOGOUT, properties)
}

export async function SSOCreated(type: SSOType, timestamp?: string | number) {
  const properties: SSOCreatedEvent = {
    type,
  }
  await publishEvent(Event.AUTH_SSO_CREATED, properties, timestamp)
}

export async function SSOUpdated(type: SSOType) {
  const properties: SSOUpdatedEvent = {
    type,
  }
  await publishEvent(Event.AUTH_SSO_UPDATED, properties)
}

export async function SSOActivated(type: SSOType, timestamp?: string | number) {
  const properties: SSOActivatedEvent = {
    type,
  }
  await publishEvent(Event.AUTH_SSO_ACTIVATED, properties, timestamp)
}

export async function SSODeactivated(type: SSOType) {
  const properties: SSODeactivatedEvent = {
    type,
  }
  await publishEvent(Event.AUTH_SSO_DEACTIVATED, properties)
}
@@ -0,0 +1,94 @@
import { publishEvent } from "../events"
import {
  Automation,
  Event,
  AutomationStep,
  AutomationCreatedEvent,
  AutomationDeletedEvent,
  AutomationTestedEvent,
  AutomationStepCreatedEvent,
  AutomationStepDeletedEvent,
  AutomationTriggerUpdatedEvent,
  AutomationsRunEvent,
} from "@budibase/types"

export async function created(
  automation: Automation,
  timestamp?: string | number
) {
  const properties: AutomationCreatedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
  }
  await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp)
}

export async function triggerUpdated(automation: Automation) {
  const properties: AutomationTriggerUpdatedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
  }
  await publishEvent(Event.AUTOMATION_TRIGGER_UPDATED, properties)
}

export async function deleted(automation: Automation) {
  const properties: AutomationDeletedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
  }
  await publishEvent(Event.AUTOMATION_DELETED, properties)
}

export async function tested(automation: Automation) {
  const properties: AutomationTestedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
  }
  await publishEvent(Event.AUTOMATION_TESTED, properties)
}

export const run = async (count: number, timestamp?: string | number) => {
  const properties: AutomationsRunEvent = {
    count,
  }
  await publishEvent(Event.AUTOMATIONS_RUN, properties, timestamp)
}

export async function stepCreated(
  automation: Automation,
  step: AutomationStep,
  timestamp?: string | number
) {
  const properties: AutomationStepCreatedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
    stepId: step.id,
    stepType: step.stepId,
  }
  await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
}

export async function stepDeleted(
  automation: Automation,
  step: AutomationStep
) {
  const properties: AutomationStepDeletedEvent = {
    appId: automation.appId,
    automationId: automation._id as string,
    triggerId: automation.definition?.trigger?.id,
    triggerType: automation.definition?.trigger?.stepId,
    stepId: step.id,
    stepType: step.stepId,
  }
  await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
}
|
|||
import { publishEvent } from "../events"
|
||||
import {
|
||||
Event,
|
||||
AppBackfillSucceededEvent,
|
||||
AppBackfillFailedEvent,
|
||||
TenantBackfillSucceededEvent,
|
||||
TenantBackfillFailedEvent,
|
||||
InstallationBackfillSucceededEvent,
|
||||
InstallationBackfillFailedEvent,
|
||||
} from "@budibase/types"
|
||||
const env = require("../../environment")
|
||||
|
||||
const shouldSkip = !env.SELF_HOSTED && !env.isDev()
|
||||
|
||||
export async function appSucceeded(properties: AppBackfillSucceededEvent) {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
await publishEvent(Event.APP_BACKFILL_SUCCEEDED, properties)
|
||||
}
|
||||
|
||||
export async function appFailed(error: any) {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
const properties: AppBackfillFailedEvent = {
|
||||
error: JSON.stringify(error, Object.getOwnPropertyNames(error)),
|
||||
}
|
||||
await publishEvent(Event.APP_BACKFILL_FAILED, properties)
|
||||
}
|
||||
|
||||
export async function tenantSucceeded(
|
||||
properties: TenantBackfillSucceededEvent
|
||||
) {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
await publishEvent(Event.TENANT_BACKFILL_SUCCEEDED, properties)
|
||||
}
|
||||
|
||||
export async function tenantFailed(error: any) {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
const properties: TenantBackfillFailedEvent = {
|
||||
error: JSON.stringify(error, Object.getOwnPropertyNames(error)),
|
||||
}
|
||||
await publishEvent(Event.TENANT_BACKFILL_FAILED, properties)
|
||||
}
|
||||
|
||||
export async function installationSucceeded() {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
const properties: InstallationBackfillSucceededEvent = {}
|
||||
await publishEvent(Event.INSTALLATION_BACKFILL_SUCCEEDED, properties)
|
||||
}
|
||||
|
||||
export async function installationFailed(error: any) {
|
||||
if (shouldSkip) {
|
||||
return
|
||||
}
|
||||
const properties: InstallationBackfillFailedEvent = {
|
||||
error: JSON.stringify(error, Object.getOwnPropertyNames(error)),
|
||||
}
|
||||
await publishEvent(Event.INSTALLATION_BACKFILL_FAILED, properties)
|
||||
}
|
|
@@ -0,0 +1,35 @@
import { publishEvent } from "../events"
import {
  Event,
  Datasource,
  DatasourceCreatedEvent,
  DatasourceUpdatedEvent,
  DatasourceDeletedEvent,
} from "@budibase/types"

export async function created(
  datasource: Datasource,
  timestamp?: string | number
) {
  const properties: DatasourceCreatedEvent = {
    datasourceId: datasource._id as string,
    source: datasource.source,
  }
  await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}

export async function updated(datasource: Datasource) {
  const properties: DatasourceUpdatedEvent = {
    datasourceId: datasource._id as string,
    source: datasource.source,
  }
  await publishEvent(Event.DATASOURCE_UPDATED, properties)
}

export async function deleted(datasource: Datasource) {
  const properties: DatasourceDeletedEvent = {
    datasourceId: datasource._id as string,
    source: datasource.source,
  }
  await publishEvent(Event.DATASOURCE_DELETED, properties)
}
@@ -0,0 +1,12 @@
import { publishEvent } from "../events"
import { Event, SMTPCreatedEvent, SMTPUpdatedEvent } from "@budibase/types"

export async function SMTPCreated(timestamp?: string | number) {
  const properties: SMTPCreatedEvent = {}
  await publishEvent(Event.EMAIL_SMTP_CREATED, properties, timestamp)
}

export async function SMTPUpdated() {
  const properties: SMTPUpdatedEvent = {}
  await publishEvent(Event.EMAIL_SMTP_UPDATED, properties)
}
@@ -0,0 +1,19 @@
export * as account from "./account"
export * as app from "./app"
export * as auth from "./auth"
export * as automation from "./automation"
export * as datasource from "./datasource"
export * as email from "./email"
export * as license from "./license"
export * as layout from "./layout"
export * as org from "./org"
export * as query from "./query"
export * as role from "./role"
export * as screen from "./screen"
export * as rows from "./rows"
export * as table from "./table"
export * as serve from "./serve"
export * as user from "./user"
export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
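Note: this barrel file is how the rest of the codebase reaches the publishers. A hypothetical call site (the import path and argument values are illustrative only):

import * as events from "./index"

async function example(app: any, templateKey: string) {
  await events.app.templateImported(app, templateKey)
  await events.installation.versionChecked("1.0.48")
}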
@@ -0,0 +1,31 @@
import { publishEvent } from "../events"
import { Event, VersionCheckedEvent, VersionChangeEvent } from "@budibase/types"

export async function versionChecked(version: string) {
  const properties: VersionCheckedEvent = {
    currentVersion: version,
  }
  await publishEvent(Event.INSTALLATION_VERSION_CHECKED, properties)
}

export async function upgraded(from: string, to: string) {
  const properties: VersionChangeEvent = {
    from,
    to,
  }

  await publishEvent(Event.INSTALLATION_VERSION_UPGRADED, properties)
}

export async function downgraded(from: string, to: string) {
  const properties: VersionChangeEvent = {
    from,
    to,
  }
  await publishEvent(Event.INSTALLATION_VERSION_DOWNGRADED, properties)
}

export async function firstStartup() {
  const properties = {}
  await publishEvent(Event.INSTALLATION_FIRST_STARTUP, properties)
}
@@ -0,0 +1,21 @@
import { publishEvent } from "../events"
import {
  Event,
  Layout,
  LayoutCreatedEvent,
  LayoutDeletedEvent,
} from "@budibase/types"

export async function created(layout: Layout, timestamp?: string | number) {
  const properties: LayoutCreatedEvent = {
    layoutId: layout._id as string,
  }
  await publishEvent(Event.LAYOUT_CREATED, properties, timestamp)
}

export async function deleted(layoutId: string) {
  const properties: LayoutDeletedEvent = {
    layoutId,
  }
  await publishEvent(Event.LAYOUT_DELETED, properties)
}
@@ -0,0 +1,33 @@
import { publishEvent } from "../events"
import {
  Event,
  License,
  LicenseActivatedEvent,
  LicenseDowngradedEvent,
  LicenseUpdatedEvent,
  LicenseUpgradedEvent,
} from "@budibase/types"

// TODO
export async function updgraded(license: License) {
  const properties: LicenseUpgradedEvent = {}
  await publishEvent(Event.LICENSE_UPGRADED, properties)
}

// TODO
export async function downgraded(license: License) {
  const properties: LicenseDowngradedEvent = {}
  await publishEvent(Event.LICENSE_DOWNGRADED, properties)
}

// TODO
export async function updated(license: License) {
  const properties: LicenseUpdatedEvent = {}
  await publishEvent(Event.LICENSE_UPDATED, properties)
}

// TODO
export async function activated(license: License) {
  const properties: LicenseActivatedEvent = {}
  await publishEvent(Event.LICENSE_ACTIVATED, properties)
}
@@ -0,0 +1,29 @@
import { publishEvent } from "../events"
import { Event } from "@budibase/types"

export async function nameUpdated(timestamp?: string | number) {
  const properties = {}
  await publishEvent(Event.ORG_NAME_UPDATED, properties, timestamp)
}

export async function logoUpdated(timestamp?: string | number) {
  const properties = {}
  await publishEvent(Event.ORG_LOGO_UPDATED, properties, timestamp)
}

export async function platformURLUpdated(timestamp?: string | number) {
  const properties = {}
  await publishEvent(Event.ORG_PLATFORM_URL_UPDATED, properties, timestamp)
}

// TODO

export async function analyticsOptOut() {
  const properties = {}
  await publishEvent(Event.ANALYTICS_OPT_OUT, properties)
}

export async function analyticsOptIn() {
  const properties = {}
  await publishEvent(Event.ANALYTICS_OPT_IN, properties)
}
@@ -0,0 +1,79 @@
import { publishEvent } from "../events"
import {
  Event,
  Datasource,
  Query,
  QueryCreatedEvent,
  QueryUpdatedEvent,
  QueryDeletedEvent,
  QueryImportedEvent,
  QueryPreviewedEvent,
  QueriesRunEvent,
} from "@budibase/types"

/* eslint-disable */

export const created = async (
  datasource: Datasource,
  query: Query,
  timestamp?: string | number
) => {
  const properties: QueryCreatedEvent = {
    queryId: query._id as string,
    datasourceId: datasource._id as string,
    source: datasource.source,
    queryVerb: query.queryVerb,
  }
  await publishEvent(Event.QUERY_CREATED, properties, timestamp)
}

export const updated = async (datasource: Datasource, query: Query) => {
  const properties: QueryUpdatedEvent = {
    queryId: query._id as string,
    datasourceId: datasource._id as string,
    source: datasource.source,
    queryVerb: query.queryVerb,
  }
  await publishEvent(Event.QUERY_UPDATED, properties)
}

export const deleted = async (datasource: Datasource, query: Query) => {
  const properties: QueryDeletedEvent = {
    queryId: query._id as string,
    datasourceId: datasource._id as string,
    source: datasource.source,
    queryVerb: query.queryVerb,
  }
  await publishEvent(Event.QUERY_DELETED, properties)
}

export const imported = async (
  datasource: Datasource,
  importSource: any,
  count: number
) => {
  const properties: QueryImportedEvent = {
    datasourceId: datasource._id as string,
    source: datasource.source,
    count,
    importSource,
  }
  await publishEvent(Event.QUERY_IMPORT, properties)
}

export const run = async (count: number, timestamp?: string | number) => {
  const properties: QueriesRunEvent = {
    count,
  }
  await publishEvent(Event.QUERIES_RUN, properties, timestamp)
}

export const previewed = async (datasource: Datasource, query: Query) => {
  const properties: QueryPreviewedEvent = {
    queryId: query._id as string,
    datasourceId: datasource._id as string,
    source: datasource.source,
    queryVerb: query.queryVerb,
  }
  await publishEvent(Event.QUERY_PREVIEWED, properties)
}
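Every query event carries the same identifying quadruple (queryId, datasourceId, source, queryVerb), so a caller only needs the two documents. A minimal sketch with stubbed documents that set just the fields the event code reads; the stubs and module path are assumptions:

import { Datasource, Query } from "@budibase/types"
import * as queries from "./query"

async function example() {
  // Hypothetical stubs, cast via unknown since most fields are omitted.
  const datasource = { _id: "ds_123", source: "POSTGRES" } as unknown as Datasource
  const query = { _id: "query_123", queryVerb: "read" } as unknown as Query

  await queries.created(datasource, query) // QUERY_CREATED
  await queries.previewed(datasource, query) // QUERY_PREVIEWED
  await queries.run(1) // QUERIES_RUN with a count of one
}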
@@ -0,0 +1,54 @@
import { publishEvent } from "../events"
import {
  Event,
  Role,
  RoleAssignedEvent,
  RoleCreatedEvent,
  RoleDeletedEvent,
  RoleUnassignedEvent,
  RoleUpdatedEvent,
  User,
} from "@budibase/types"

export async function created(role: Role, timestamp?: string | number) {
  const properties: RoleCreatedEvent = {
    roleId: role._id as string,
    permissionId: role.permissionId,
    inherits: role.inherits,
  }
  await publishEvent(Event.ROLE_CREATED, properties, timestamp)
}

export async function updated(role: Role) {
  const properties: RoleUpdatedEvent = {
    roleId: role._id as string,
    permissionId: role.permissionId,
    inherits: role.inherits,
  }
  await publishEvent(Event.ROLE_UPDATED, properties)
}

export async function deleted(role: Role) {
  const properties: RoleDeletedEvent = {
    roleId: role._id as string,
    permissionId: role.permissionId,
    inherits: role.inherits,
  }
  await publishEvent(Event.ROLE_DELETED, properties)
}

export async function assigned(user: User, roleId: string, timestamp?: number) {
  const properties: RoleAssignedEvent = {
    userId: user._id as string,
    roleId,
  }
  await publishEvent(Event.ROLE_ASSIGNED, properties, timestamp)
}

export async function unassigned(user: User, roleId: string) {
  const properties: RoleUnassignedEvent = {
    userId: user._id as string,
    roleId,
  }
  await publishEvent(Event.ROLE_UNASSIGNED, properties)
}
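The assigned/unassigned pair completes the role lifecycle alongside create, update and delete. A short caller sketch with a stubbed user; only `_id` is read when publishing, and the stub and module path are assumptions:

import { User } from "@budibase/types"
import * as roles from "./role"

async function reassign(oldRoleId: string, newRoleId: string) {
  // Hypothetical stub; only _id is used by the event code.
  const user = { _id: "user_123" } as unknown as User
  await roles.unassigned(user, oldRoleId) // ROLE_UNASSIGNED
  await roles.assigned(user, newRoleId) // ROLE_ASSIGNED
}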
Some files were not shown because too many files have changed in this diff.