Merge branch 'develop' of github.com:Budibase/budibase into global-bindings

This commit is contained in commit b8bc25a39a
@@ -10,4 +10,4 @@ packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/reports
packages/sdk/sdk
packages/sdk/sdk
@@ -5,7 +5,7 @@
    "jest": true,
    "node": true
  },
  "parser": "babel-eslint",
  "parser": "@babel/eslint-parser",
  "parserOptions": {
    "ecmaVersion": 2019,
    "sourceType": "module",
@@ -18,20 +18,24 @@
    "*.spec.js",
    "bundle.js"
  ],
  "plugins": ["svelte3"],
  "extends": ["eslint:recommended"],
  "overrides": [
    {
      "files": ["*.svelte"],
      "processor": "svelte3/svelte3"
      "files": ["**/*.svelte"],
      "extends": "plugin:svelte/recommended",
      "parser": "svelte-eslint-parser",
      "parserOptions": {
        "parser": "@babel/eslint-parser",
        "ecmaVersion": 2019,
        "sourceType": "module",
        "allowImportExportEverywhere": true
      }
    },
    {
      "files": ["**/*.ts"],
      "parser": "@typescript-eslint/parser",
      "plugins": [],
      "extends": [
        "eslint:recommended"
      ],
      "extends": ["eslint:recommended"],
      "rules": {
        "no-unused-vars": "off",
        "no-inner-declarations": "off",
@@ -43,7 +47,8 @@
    }
  ],
  "rules": {
    "no-self-assign": "off"
    "no-self-assign": "off",
    "no-unused-vars": ["error", { "varsIgnorePattern": "^_", "argsIgnorePattern": "^_", "destructuredArrayIgnorePattern": "^_" }]
  },
  "globals": {
    "GeolocationPositionError": true
@@ -1,19 +0,0 @@
# Configuration for probot-stale - https://github.com/probot/stale
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 60
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: false
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
  - roadmap
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
@@ -1,98 +1,212 @@
name: Budibase CI

on:
  # Trigger the workflow on push or pull request,
  # but only for the master branch
  push:
    branches:
      - master
      - develop
  pull_request:
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  # Trigger the workflow on push or pull request,
  # but only for the master branch
  push:
    branches:
      - master
      - develop
  workflow_dispatch:
  pull_request:
  workflow_dispatch:

env:
  BRANCH: ${{ github.event.pull_request.head.ref }}
  BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - run: yarn
      - run: yarn lint
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        if: github.repository == github.event.pull_request.head.repo.full_name
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Checkout repo only
        uses: actions/checkout@v3
        if: github.repository != github.event.pull_request.head.repo.full_name

      - name: Use Node.js 14.x
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
          cache: "yarn"
      - run: yarn
      - run: yarn lint

  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        if: github.repository == github.event.pull_request.head.repo.full_name
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Checkout repo only
        uses: actions/checkout@v3
        if: github.repository != github.event.pull_request.head.repo.full_name

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
          cache: "yarn"
      - run: yarn
      - run: yarn bootstrap
      # Run build all the projects
      - run: yarn build
      # Check the types of the projects built via esbuild
      - run: yarn check:types

  test:
  test-libraries:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        if: github.repository == github.event.pull_request.head.repo.full_name
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Checkout repo only
        uses: actions/checkout@v3
        if: github.repository != github.event.pull_request.head.repo.full_name

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
          cache: "yarn"
      - run: yarn
      - run: yarn bootstrap
      - run: yarn build
      - run: yarn test
      - run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
          name: codecov-umbrella
          verbose: true

  test-pro:
  test-services:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        if: github.repository == github.event.pull_request.head.repo.full_name
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Checkout repo only
        uses: actions/checkout@v3
        if: github.repository != github.event.pull_request.head.repo.full_name

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
          cache: "yarn"
      - run: yarn
      - run: yarn bootstrap
      - run: yarn test:pro
      - run: yarn test --scope=@budibase/worker --scope=@budibase/server
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
          name: codecov-umbrella
          verbose: true

  test-pro:
    runs-on: ubuntu-latest
    if: github.repository == github.event.pull_request.head.repo.full_name
    steps:
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}

      - name: Use Node.js 14.x
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
          cache: "yarn"
      - run: yarn
      - run: yarn test --scope=@budibase/pro

  integration-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        if: github.repository == github.event.pull_request.head.repo.full_name
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
      - name: Checkout repo only
        uses: actions/checkout@v3
        if: github.repository != github.event.pull_request.head.repo.full_name

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        uses: actions/setup-node@v3
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
      - run: yarn && yarn bootstrap && yarn build
      - run: |
          cache: "yarn"
      - run: yarn
      - run: yarn build
      - name: Run tests
        run: |
          cd qa-core
          yarn setup
          yarn test:ci
          yarn serve:test:self:ci
        env:
          BB_ADMIN_USER_EMAIL: admin
          BB_ADMIN_USER_PASSWORD: admin

  check-pro-submodule:
    runs-on: ubuntu-latest
    if: github.repository == github.event.pull_request.head.repo.full_name
    steps:
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 0
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}

      - name: Check pro commit
        id: get_pro_commits
        run: |
          cd packages/pro
          pro_commit=$(git rev-parse HEAD)

          branch="${{ github.base_ref || github.ref_name }}"
          echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"

          if [[ $branch == "master" ]]; then
            base_commit=$(git rev-parse origin/master)
          else
            base_commit=$(git rev-parse origin/develop)
          fi

          echo "pro_commit=$pro_commit"
          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
          echo "base_commit=$base_commit"
          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

      - name: Check submodule merged to develop
        uses: actions/github-script@v4
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
            const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';

            if (submoduleCommit !== baseCommit) {
              console.error('Submodule commit does not match the latest commit on the develop branch.');
              console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
              process.exit(1);
            } else {
              console.log('All good, the submodule had been merged and setup correctly!')
            }
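For anyone reproducing these checks before pushing, the jobs above map straight onto the repo's own yarn scripts — a minimal local sketch, assuming Node 14 and yarn are already installed:

```
yarn                              # install workspace dependencies
yarn lint                         # lint job
yarn build && yarn check:types    # build job
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro   # test-libraries job
```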
@@ -1,67 +1,48 @@
name: Budibase Deploy Production

on:
  workflow_dispatch:
    inputs:
      version:
        description: Budibase release version. For example - 1.0.0
        required: false
  workflow_dispatch:
    inputs:
      version:
        description: Budibase release version. For example - 1.0.0
        required: false

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1
      - name: Fail if not a tag
        run: |
          if [[ $GITHUB_REF != refs/tags/* ]]; then
            echo "Workflow Dispatch can only be run on tags"
            exit 1
          fi
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi

      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o values.production.yaml \
            -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/values.yaml
          wc -l values.production.yaml

      - name: Get the latest budibase release version
        id: version
        run: |
        run: |
          if [ -z "${{ github.event.inputs.version }}" ]; then
            release_version=$(cat lerna.json | jq -r '.version')
          else
            release_version=${{ github.event.inputs.version }}
          fi
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Deploy to EKS
        uses: craftech-io/eks-helm-deploy-action@v1
      - uses: passeidireto/trigger-external-workflow-action@main
        env:
          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS__KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1
          cluster-name: budibase-eks-production
          config-files: values.production.yaml
          chart-path: charts/budibase
          namespace: budibase
          values: globals.appVersion=v${{ env.RELEASE_VERSION }},services.couchdb.url=${{ secrets.PRODUCTION_COUCHDB_URL }},services.couchdb.password=${{ secrets.PRODUCTION_COUCHDB_PASSWORD }}
          name: budibase-prod

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Production Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Cloud."
          embed-title: ${{ env.RELEASE_VERSION }}

          repository: budibase/budibase-deploys
          event: budicloud-prod-deploy
          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
@@ -1,66 +1,41 @@
name: "deploy-preprod"
on:
  workflow_dispatch:
    inputs:
      version:
        description: Budibase release version. For example - 1.0.0
        required: false
  workflow_call:

jobs:
  deploy-to-legacy-preprod-env:
    runs-on: ubuntu-latest
    steps:
      - name: Fail if not a tag
        run: |
          if [[ $GITHUB_REF != refs/tags/* ]]; then
            echo "Workflow Dispatch can only be run on tags"
            exit 1
          fi

      - uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi

      - name: Get the latest budibase release version
        id: version
        run: |
          if [ -z "${{ github.event.inputs.version }}" ]; then
            git pull
            release_version=$(cat lerna.json | jq -r '.version')
          else
            release_version=${{ github.event.inputs.version }}
          fi
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o values.preprod.yaml \
            -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
          wc -l values.preprod.yaml
      - name: Deploy to Preprod Environment
        uses: budibase/helm@v1.8.0
        with:
          release: budibase-preprod
          namespace: budibase
          chart: charts/budibase
          token: ${{ github.token }}
          helm: helm3
          values: |
            globals:
              appVersion: v${{ env.RELEASE_VERSION }}
            ingress:
              enabled: true
              nginx: true
          value-files: >-
            [
              "values.preprod.yaml"
            ]
      - uses: passeidireto/trigger-external-workflow-action@main
        env:
          KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}'

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Preprod Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Pre-prod."
          embed-title: ${{ env.RELEASE_VERSION }}
          repository: budibase/budibase-deploys
          event: budicloud-preprod-deploy
          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
@@ -1,21 +1,13 @@
name: Budibase Prerelease
concurrency: release-prerelease
concurrency:
  group: release-prerelease
  cancel-in-progress: false

on:
  push:
    branches:
      - develop
    paths:
      - '.aws/**'
      - '.github/**'
      - 'charts/**'
      - 'packages/**'
      - 'scripts/**'
      - 'package.json'
      - 'yarn.lock'
      - 'package.json'
      - 'yarn.lock'
  workflow_dispatch:
on:
  push:
    tags:
      - "*-alpha.*"
  workflow_dispatch:

env:
  # Posthog token used by ui at build time
@@ -24,43 +16,56 @@ env:
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  FEATURE_PREVIEW_URL: https://budirelease.live

jobs:
  release-images:
    runs-on: ubuntu-latest
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not develop
        if: github.ref != 'refs/heads/develop'
        run: |
          echo "Ref is not develop, you must run this job from develop."
          exit 1
      - name: Fail if not a tag
        run: |
          if [[ $GITHUB_REF != refs/tags/* ]]; then
            echo "Workflow Dispatch can only be run on tags"
            exit 1
          fi

      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
          fetch-depth: 0

      - name: Fail if tag is not develop
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/develop; then
            echo "Tag is not in develop"
            exit 1
          fi

      - uses: actions/setup-node@v1
        with:
          node-version: 14.x

      - name: Install Pro
        run: yarn install:pro develop

      - run: yarn
      - run: yarn bootstrap
      - run: yarn install --frozen-lockfile
      - name: Update versions
        run: ./scripts/updateVersions.sh
      - run: yarn build
      - run: yarn build:sdk
      # - run: yarn test

      - name: Publish budibase packages to NPM
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
        run: |
        run: |
          # setup the username and email.
          git config --global user.name "Budibase Staging Release Bot"
          git config --global user.email "<>"
          git submodule foreach git commit -a -m 'Release process'
          git commit -a -m 'Release process'
          echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
          yarn release:develop

      - name: Build/release Docker images
        run: |
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build:docker:develop
        env:
@@ -84,7 +89,7 @@ jobs:
          git config user.name "Budibase Helm Bot"
          git config user.email "<>"
          git reset --hard
          git pull
          git fetch
          mkdir sync
          echo "Packaging chart to sync dir"
          helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
@@ -1,58 +1,46 @@
name: Budibase Release
concurrency: release
concurrency:
  group: release
  cancel-in-progress: false

on:
  push:
    branches:
      - master
    paths:
      - '.aws/**'
      - '.github/**'
      - 'charts/**'
      - 'packages/**'
      - 'scripts/**'
      - 'package.json'
      - 'yarn.lock'
      - 'package.json'
      - 'yarn.lock'
  workflow_dispatch:
    inputs:
      versioning:
        type: choice
        description: "Versioning type: patch, minor, major"
        default: patch
        options:
          - patch
          - minor
          - major
        required: true
  push:
    tags:
      - "[0-9]+.[0-9]+.[0-9]+"
      # Exclude all pre-releases
      - "!*[0-9]+.[0-9]+.[0-9]+-*"

env:
  # Posthog token used by ui at build time
  # Posthog token used by ui at build time
  POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}

jobs:
  release-images:
    runs-on: ubuntu-latest
    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
          fetch-depth: 0

      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi

      - uses: actions/setup-node@v1
        with:
          node-version: 14.x

      - name: Install Pro
        run: yarn install:pro master

      - run: yarn
      - run: yarn bootstrap
      - run: yarn install --frozen-lockfile
      - name: Update versions
        run: ./scripts/updateVersions.sh
      - run: yarn lint
      - run: yarn build
      - run: yarn build:sdk

@@ -60,26 +48,30 @@ jobs:
      - name: Publish budibase packages to NPM
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          RELEASE_VERSION_TYPE: ${{ github.event.inputs.versioning }}
        run: |
          # setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
          git config --global user.name "Budibase Release Bot"
          git config --global user.email "<>"
          git submodule foreach git commit -a -m 'Release process'
          git commit -a -m 'Release process'
          echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
          yarn release

      - name: 'Get Previous tag'
        id: previoustag
        uses: "WyriHaximus/github-action-get-previous-tag@v1"
      - name: "Get Current tag"
        id: currenttag
        run: |
          version=v$(./scripts/getCurrentVersion.sh)
          echo "Using tag $version"
          echo "::set-output name=tag::$version"

      - name: Build/release Docker images
        run: |
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build:docker
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
          BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.tag }}

  release-helm-chart:
    needs: [release-images]

@@ -103,10 +95,10 @@ jobs:
          git config user.name "Budibase Helm Bot"
          git config user.email "<>"
          git reset --hard
          git pull
          git fetch
          mkdir sync
          echo "Packaging chart to sync dir"
          helm package charts/budibase --version 0.0.0-master --app-version v"$RELEASE_VERSION" --destination sync
          helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync
          echo "Packaging successful"
          git checkout gh-pages
          echo "Indexing helm repo"

@@ -133,7 +125,6 @@ jobs:
      - name: Get the latest budibase release version
        id: version
        run: |
          git pull
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
@@ -1,22 +1,32 @@
name: Budibase Release Selfhost

on:
  workflow_dispatch:
  workflow_dispatch:

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
      - name: Fail if not a tag
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1
          if [[ $GITHUB_REF != refs/tags/* ]]; then
            echo "Workflow Dispatch can only be run on tags"
            exit 1
          fi

      - uses: actions/checkout@v2
        with:
          fetch_depth: 0
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
          fetch-depth: 0

      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1

@@ -30,10 +40,10 @@ jobs:
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Tag and release Docker images (Self Host)
        run: |
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD

          release_tag=v${{ env.RELEASE_VERSION }}
          release_tag=${{ env.RELEASE_VERSION }}

          # Pull apps and worker images
          docker pull budibase/apps:$release_tag

@@ -44,7 +54,7 @@ jobs:
          docker tag budibase/apps:$release_tag budibase/apps:$SELFHOST_TAG
          docker tag budibase/worker:$release_tag budibase/worker:$SELFHOST_TAG
          docker tag budibase/proxy:$release_tag budibase/proxy:$SELFHOST_TAG

          # Push images
          docker push budibase/apps:$SELFHOST_TAG
          docker push budibase/worker:$SELFHOST_TAG

@@ -66,19 +76,19 @@ jobs:
          yarn
          yarn specs
          popd

      - name: Setup Helm

      - name: Setup Helm
        uses: azure/setup-helm@v1
        id: helm-install

      # due to helm repo index issue: https://github.com/helm/helm/issues/7363
      # we need to create new package in a different dir, merge the index and move the package back
      - name: Build and release helm chart
        run: |
        run: |
          git config user.name "Budibase Helm Bot"
          git config user.email "<>"
          git reset --hard
          git pull
          git fetch
          mkdir sync
          echo "Packaging chart to sync dir"
          helm package charts/budibase --version "$RELEASE_VERSION" --app-version "$RELEASE_VERSION" --destination sync

@@ -98,8 +108,8 @@ jobs:
      - name: Perform Github Release
        uses: softprops/action-gh-release@v1
        with:
          name: v${{ env.RELEASE_VERSION }}
          tag_name: v${{ env.RELEASE_VERSION }}
          name: ${{ env.RELEASE_VERSION }}
          tag_name: ${{ env.RELEASE_VERSION }}
          generate_release_notes: true
          files: |
            packages/cli/build/cli-win.exe
@@ -5,7 +5,7 @@ on:

env:
  CI: true
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  REGISTRY_URL: registry.hub.docker.com
jobs:
  build:

@@ -15,13 +15,26 @@ jobs:
      matrix:
        node-version: [14.x]
    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1
      - name: Fail if not a tag
        run: |
          if [[ $GITHUB_REF != refs/tags/* ]]; then
            echo "Workflow Dispatch can only be run on tags"
            exit 1
          fi
      - name: "Checkout"
        uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
          fetch-depth: 0

      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
            exit 1
          fi

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v1
        with:

@@ -33,10 +46,12 @@ jobs:
        uses: docker/setup-buildx-action@v1
      - name: Run Yarn
        run: yarn
      - name: Run Yarn Bootstrap
        run: yarn bootstrap
      - name: Update versions
        run: ./scripts/updateVersions.sh
      - name: Run Yarn Lint
        run: yarn lint
      - name: Update versions
        run: ./scripts/updateVersions.sh
      - name: Run Yarn Build
        run: yarn build:docker:pre
      - name: Login to Docker Hub

@@ -56,7 +71,7 @@ jobs:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
          tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile
      - name: Tag and release Budibase Azure App Service docker image
        uses: docker/build-push-action@v2

@@ -65,5 +80,5 @@ jobs:
          push: true
          platforms: linux/amd64
          build-args: TARGETBUILD=aas
          tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }}
          tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile
@@ -0,0 +1,29 @@
name: Close stale issues and PRs # https://github.com/actions/stale
on:
  workflow_dispatch:
  schedule:
    - cron: '30 1 * * *' # 1:30 every morning

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          # stale rules
          days-before-stale: 60
          days-before-pr-stale: 7
          stale-issue-label: stale
          stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for 60 days."

          # close rules
          # days after being marked as stale to close
          days-before-close: 30
          close-issue-label: closed-stale
          close-issue-message: "This issue has been automatically closed because it has not had any activity in 90 days."
          days-before-pr-close: 7

          # exemptions
          exempt-pr-labels: pinned,security,roadmap
@@ -0,0 +1,42 @@
name: Tag prerelease
concurrency:
  group: tag-prerelease
  cancel-in-progress: false

on:
  push:
    branches:
      - develop
    paths:
      - ".aws/**"
      - ".github/**"
      - "charts/**"
      - "packages/**"
      - "scripts/**"
      - "package.json"
      - "yarn.lock"
  workflow_dispatch:

jobs:
  tag-prerelease:
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not develop
        if: github.ref != 'refs/heads/develop'
        run: |
          echo "Ref is not develop, you must run this job from develop."
          exit 1
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}

      - run: cd scripts && yarn
      - name: Tag prerelease
        run: |
          cd scripts
          # setup the username and email.
          git config --global user.name "Budibase Staging Release Bot"
          git config --global user.email "<>"
          ./versionCommit.sh prerelease
@@ -0,0 +1,54 @@
name: Tag release
concurrency:
  group: tag-release
  cancel-in-progress: false

on:
  push:
    branches:
      - master
    paths:
      - ".aws/**"
      - ".github/**"
      - "charts/**"
      - "packages/**"
      - "scripts/**"
      - "package.json"
      - "yarn.lock"
  workflow_dispatch:
    inputs:
      versioning:
        type: choice
        description: "Versioning type: patch, minor, major"
        default: patch
        options:
          - patch
          - minor
          - major
        required: true

jobs:
  tag-release:
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}

      - run: cd scripts && yarn
      - name: Tag release
        run: |
          cd scripts
          # setup the username and email.
          git config --global user.name "Budibase Staging Release Bot"
          git config --global user.email "<>"
          BUMP_TYPE_INPUT=${{ github.event.inputs.versioning }}
          BUMP_TYPE=${BUMP_TYPE_INPUT:-"patch"}
          ./versionCommit.sh $BUMP_TYPE
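For reference, the manual equivalent of the tagging step above — a minimal sketch, assuming it is run from a checkout of master with push rights:

```
cd scripts && yarn        # install the version-bump tooling
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
./versionCommit.sh minor  # bump type: patch | minor | major
```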
@@ -0,0 +1,3 @@
[submodule "packages/pro"]
	path = packages/pro
	url = git@github.com:Budibase/budibase-pro.git
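With the submodule registered, a checkout can pull the pro sources into place — the flow below matches the contributing guide later in this diff (maintainer access to budibase-pro is required):

```
# fetch the pro submodule into packages/pro
git submodule update --init packages/pro
# keep it in sync automatically on later checkouts/pulls
git config submodule.recurse true
```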
@@ -0,0 +1,4 @@
# .husky/post-checkout
# ...

git config submodule.recurse true
@@ -1 +1 @@
3.10.0
3.10.0
@@ -1,2 +1,2 @@
nodejs 14.20.1
nodejs 14.21.3
python 3.10.0
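`.tool-versions` is the pin file read by the asdf version manager (an inference from the file format, not stated in this diff); with asdf and the relevant plugins installed, the pinned runtimes can be fetched directly:

```
# install the runtimes pinned above (assumes asdf with nodejs/python plugins)
asdf install nodejs 14.21.3
asdf install python 3.10.0
```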
@@ -0,0 +1,3 @@
{
  "presets": [["@babel/preset-env", { "targets": { "node": "current" } }]]
}
@@ -40,6 +40,24 @@ spec:
      - image: budibase/proxy:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
        imagePullPolicy: Always
        name: proxy-service
        livenessProbe:
          httpGet:
            path: /health
            port: {{ .Values.services.proxy.port }}
          initialDelaySeconds: 0
          periodSeconds: 5
          successThreshold: 1
          failureThreshold: 2
          timeoutSeconds: 3
        readinessProbe:
          httpGet:
            path: /health
            port: {{ .Values.services.proxy.port }}
          initialDelaySeconds: 0
          periodSeconds: 5
          successThreshold: 1
          failureThreshold: 2
          timeoutSeconds: 3
        ports:
        - containerPort: {{ .Values.services.proxy.port }}
        env:
@@ -209,7 +209,7 @@ services:
# Override values in couchDB subchart
couchdb:
  ## clusterSize is the initial size of the CouchDB cluster.
  clusterSize: 3
  clusterSize: 1
  allowAdminParty: false

  # Secret Management
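Because the chart default drops to a single CouchDB node here, a clustered deployment now needs an explicit override — a minimal sketch, assuming the chart is installed from this repo's charts directory:

```
# restore a 3-node CouchDB cluster at install time
helm install budibase ./charts/budibase \
  --namespace budibase \
  --set couchdb.clusterSize=3
```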
|
@ -144,8 +144,6 @@ The following commands can be executed to manually get Budibase up and running (
|
|||
|
||||
`yarn` to install project dependencies
|
||||
|
||||
`yarn bootstrap` will install all budibase modules and symlink them together using lerna.
|
||||
|
||||
`yarn build` will build all budibase packages.
|
||||
|
||||
#### 4. Running
|
||||
|
@ -233,18 +231,33 @@ An overview of the CI pipelines can be found [here](../.github/workflows/README.
|
|||
|
||||
### Pro
|
||||
|
||||
@budibase/pro is the closed source package that supports licensed features in budibase. By default the package will be pulled from NPM and will not normally need to be touched in local development. If you require to update code inside the pro package it can be cloned to the same root level as budibase, e.g.
|
||||
@budibase/pro is the closed source package that supports licensed features in budibase. By default the package will be pulled from NPM and will not normally need to be touched in local development. If you need to make an update to pro and have access to the repo, then you can update your submodule within the mono-repo by running `git submodule update --init` - from here you can use normal submodule flow to develop a change within pro.
|
||||
|
||||
Once you have updated to use the pro submodule, it will be linked into all of your local dependencies by NX as with all other monorepo packages. If you have been using the NPM version of `@budibase/pro` then you may need to run a `git reset --hard` to fix all of the pro versions back to `0.0.0` to be monorepo aware.
|
||||
|
||||
From here - to develop a change in pro, you can follow the below flow:
|
||||
|
||||
```
|
||||
.
|
||||
|_ budibase
|
||||
|_ budibase-pro
|
||||
# enter the pro submodule
|
||||
cd packages/pro
|
||||
# get the base branch you are working from (same as monorepo)
|
||||
git fetch
|
||||
git checkout <develop | master>
|
||||
# create a branch, named the same as the branch in your monorepo
|
||||
git checkout -b <some branch>
|
||||
... make changes
|
||||
# commit the changes you've made, with a message for pro
|
||||
git commit <something>
|
||||
# within the monorepo, add the pro reference to your branch, commit it with a message like "Update pro ref"
|
||||
cd ../..
|
||||
git add packages/pro
|
||||
git commit <add the new reference to main repo>
|
||||
```
|
||||
|
||||
From here, you will have created a branch in the pro repository and commited the reference to your branch on the monorepo. When you eventually PR this work back into the mainline branch, you will need to first merge your pro PR to the pro mainline, then go into your PR in the monorepo and update the reference again to the new mainline.
|
||||
|
||||
Note that only budibase maintainers will be able to access the pro repo.
|
||||
|
||||
The `yarn bootstrap` command can be used to replace the NPM supplied dependency with the local source aware version. This is achieved using the `yarn link` command. To see specifically how dependencies are linked see [scripts/link-dependencies.sh](../scripts/link-dependencies.sh). The same link script is used to link dependencies to account-portal in local dev.
|
||||
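For reference, the linking that bootstrap performs boils down to roughly the following — a sketch only, with assumed paths; the real logic lives in scripts/link-dependencies.sh:

```
# register the local pro package with yarn (path assumed)
cd ../budibase-pro/packages/pro && yarn link
# point a consuming package at the local copy instead of the NPM release
cd ../../../budibase/packages/server && yarn link "@budibase/pro"
```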

### Troubleshooting

Sometimes, things go wrong. This can be due to incompatible updates on the budibase platform. To clear down your development environment and start again, follow **Step 6. Cleanup**, then proceed from **Step 3. Install and Build** in the setup guide above to create a fresh Budibase installation.
@@ -1,13 +1,17 @@
## Dev Environment on Debian 11

### Install NVM & Node 14

NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating

Install NVM

```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```

Install Node 14

```
nvm install 14
```

@@ -17,13 +21,16 @@ nvm install 14
```
npm install -g yarn jest lerna
```

### Install Docker and Docker Compose

```
apt install docker.io
pip3 install docker-compose
```

### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

@@ -44,10 +51,13 @@ This setup process was tested on Debian 11 (bullseye) with version numbers shown
cd budibase
yarn setup
```

The yarn setup command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```

So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.

@@ -55,6 +65,7 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin

### File descriptor issues with Vite and Chrome in Linux

If your dev environment stalls forever, with some network requests stuck in flight, it's likely that Chrome is trying to open more file descriptors than your system allows.
To fix this, apply the following tweaks.

@@ -62,4 +73,4 @@ Debian based distros:
Add `* - nofile 65536` to `/etc/security/limits.conf`.

Arch:
Add `DefaultLimitNOFILE=65536` to `/etc/systemd/system.conf`.
Add `DefaultLimitNOFILE=65536` to `/etc/systemd/system.conf`.
@@ -4,14 +4,14 @@

Install instructions [here](https://brew.sh/)

| **NOTE**: If you are working on an M1 Apple Silicon machine running Z shell, you may need to add
the `eval $(/opt/homebrew/bin/brew shellenv)` line to your `.zshrc`. This lets zsh find the apps you install
| **NOTE**: If you are working on an M1 Apple Silicon machine running Z shell, you may need to add
the `eval $(/opt/homebrew/bin/brew shellenv)` line to your `.zshrc`. This lets zsh find the apps you install
through brew.

### Install Node

Budibase requires a recent version of node 14:

```
brew install node npm
node -v

@@ -22,12 +22,15 @@ node -v
```
npm install -g yarn jest lerna
```

### Install Docker and Docker Compose

```
brew install docker docker-compose
```

### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

@@ -48,10 +51,13 @@ This setup process was tested on Mac OSX 12 (Monterey) with version numbers shown
cd budibase
yarn setup
```

The yarn setup command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```

So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.
@@ -1,13 +1,15 @@
## Dev Environment on Windows 10/11 (WSL2)

### Install WSL with Ubuntu LTS

Enable WSL 2 on Windows 10/11 for docker support.

```
wsl --set-default-version 2
```

Install Ubuntu LTS.

```
wsl --install Ubuntu
```

@@ -16,6 +18,7 @@ Or follow the instructions here:
https://learn.microsoft.com/en-us/windows/wsl/install

### Install Docker in Windows

Download the installer from Docker and install it.

Check this URL for more detailed instructions:

@@ -24,18 +27,21 @@ https://docs.docker.com/desktop/install/windows-install/
You should follow the next steps from within the Ubuntu terminal.

### Install NVM & Node 14

NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating

Install NVM

```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```

Install Node 14

```
nvm install 14
```

### Install npm requirements

```

@@ -43,6 +49,7 @@ npm install -g yarn jest lerna
```

### Clone the repo

```
git clone https://github.com/Budibase/budibase.git
```

@@ -63,10 +70,13 @@ This setup process was tested on Windows 11 with version numbers shown below. You
cd budibase
yarn setup
```

The yarn setup command runs several build steps, i.e.

```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```

So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.

The dev version will be available on port 10000, i.e.

@@ -74,8 +84,9 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin

### Working with the code

Here are the instructions to work on the application from within Visual Studio Code (in Windows) through WSL. All the commands and files are within the Ubuntu system and it should run as if you were working on a Linux machine.

https://code.visualstudio.com/docs/remote/wsl

Note you will be able to run the application from within the WSL terminal and you will be able to access the application from a browser in Windows.
Note you will be able to run the application from within the WSL terminal and you will be able to access the application from a browser in Windows.
|
@ -28,3 +28,4 @@ BB_ADMIN_USER_PASSWORD=
|
|||
|
||||
# A path that is watched for plugin bundles. Any bundles found are imported automatically/
|
||||
PLUGINS_DIR=
|
||||
ROLLING_LOG_MAX_SIZE=
|
|
@@ -5,8 +5,11 @@ ENV COUCHDB_PASSWORD admin
EXPOSE 5984

RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
    apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
    apt-get update && apt-get install -y --no-install-recommends openjdk-8-jre && \
    wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - && \
    apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
    apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
    apt-add-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ && \
    apt-get update && apt-get install -y --no-install-recommends adoptopenjdk-8-hotspot && \
    rm -rf /var/lib/apt/lists/

# setup clouseau
@@ -0,0 +1,77 @@
version: "3"

# optional ports are specified throughout for more advanced use cases.

services:
  app-service:
    build: ../packages/server
    container_name: build-bbapps
    environment:
      SELF_HOSTED: 1
      COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
      WORKER_URL: http://worker-service:4003
      MINIO_URL: http://minio-service:9000
      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
      INTERNAL_API_KEY: ${INTERNAL_API_KEY}
      BUDIBASE_ENVIRONMENT: ${BUDIBASE_ENVIRONMENT}
      PORT: 4002
      API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
      JWT_SECRET: ${JWT_SECRET}
      LOG_LEVEL: info
      SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
      ENABLE_ANALYTICS: "true"
      REDIS_URL: redis-service:6379
      REDIS_PASSWORD: ${REDIS_PASSWORD}
      BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
      BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
      PLUGINS_DIR: ${PLUGINS_DIR}
    depends_on:
      - worker-service
      - redis-service
    # volumes:
    #   - /some/path/to/plugins:/plugins

  worker-service:
    build: ../packages/worker
    container_name: build-bbworker
    environment:
      SELF_HOSTED: 1
      PORT: 4003
      CLUSTER_PORT: ${MAIN_PORT}
      API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
      JWT_SECRET: ${JWT_SECRET}
      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
      MINIO_URL: http://minio-service:9000
      APPS_URL: http://app-service:4002
      COUCH_DB_USERNAME: ${COUCH_DB_USER}
      COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
      COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
      SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
      INTERNAL_API_KEY: ${INTERNAL_API_KEY}
      REDIS_URL: redis-service:6379
      REDIS_PASSWORD: ${REDIS_PASSWORD}
    depends_on:
      - redis-service
      - minio-service

  proxy-service-docker:
    ports:
      - "${MAIN_PORT}:10000"
    container_name: build-bbproxy
    image: budibase/proxy
    environment:
      - PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
      - PROXY_RATE_LIMIT_API_PER_SECOND=20
      - APPS_UPSTREAM_URL=http://app-service:4002
      - WORKER_UPSTREAM_URL=http://worker-service:4003
      - MINIO_UPSTREAM_URL=http://minio-service:9000
      - COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
      - WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
      - RESOLVER=127.0.0.11
    depends_on:
      - minio-service
      - worker-service
      - app-service
      - couchdb-service
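A compose file like this is normally brought up with an env file supplying the `${...}` values — a hypothetical invocation, since the file name and path are not shown in this diff:

```
# build and start the local stack (file names assumed)
docker compose -f hosting/docker-compose.build.yaml --env-file hosting/.env up -d --build
```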
@@ -126,6 +126,16 @@ http {
        proxy_pass http://app-service;
    }

    location /embed {
        rewrite /embed/(.*) /app/$1 break;
        proxy_pass http://app-service;
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header x-budibase-embed "true";
        add_header x-budibase-embed "true";
        add_header Content-Security-Policy "frame-ancestors *";
    }

    location /builder {
        proxy_read_timeout 120s;
        proxy_connect_timeout 120s;
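The new `/embed` location rewrites to the app path and relaxes `frame-ancestors` so apps can be iframed; once the dev stack is running, the headers can be checked by hand (the port and app path below are assumptions):

```
# inspect the embed headers on a running local stack
curl -sI http://localhost:10000/embed/some-app \
  | grep -i -e 'x-budibase-embed' -e 'content-security-policy'
```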
@@ -55,7 +55,7 @@ http {
    set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
    set $csp_object "object-src 'none'";
    set $csp_base_uri "base-uri 'self'";
    set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
    set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
    set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
    set $csp_frame "frame-src 'self' https:";
    set $csp_img "img-src http: https: data: blob:";
@@ -82,10 +82,26 @@ http {
    set $couchdb ${COUCHDB_UPSTREAM_URL};
    set $watchtower ${WATCHTOWER_UPSTREAM_URL};

    location /health {
        access_log off;
        add_header 'Content-Type' 'application/json';
        return 200 '{ "status": "OK" }';
    }

    location /app {
        proxy_pass $apps;
    }

    location /embed {
        rewrite /embed/(.*) /app/$1 break;
        proxy_pass $apps;
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header x-budibase-embed "true";
        add_header x-budibase-embed "true";
        add_header Content-Security-Policy "frame-ancestors *";
    }

    location = / {
        proxy_pass $apps;
    }
@@ -222,9 +238,9 @@ http {
        rewrite ^/files/signed/(.*)$ /$1 break;
    }

    client_header_timeout 60;
    client_body_timeout 60;
    keepalive_timeout 60;
    client_header_timeout 120;
    client_body_timeout 120;
    keepalive_timeout 120;

    # gzip
    gzip on;
@@ -2,7 +2,9 @@ const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")

const IMAGES = {
const IS_SINGLE_IMAGE = process.env.SINGLE_IMAGE

let IMAGES = {
  worker: "budibase/worker",
  apps: "budibase/apps",
  proxy: "budibase/proxy",

@@ -10,7 +12,13 @@ const IMAGES = {
  couch: "ibmcom/couchdb3",
  curl: "curlimages/curl",
  redis: "redis",
  watchtower: "containrrr/watchtower"
  watchtower: "containrrr/watchtower",
}

if (IS_SINGLE_IMAGE) {
  IMAGES = {
    budibase: "budibase/budibase"
  }
}

const FILES = {

@@ -39,11 +47,10 @@ for (let image in IMAGES) {
}

// copy config files
copyFile(FILES.COMPOSE)
if (!IS_SINGLE_IMAGE) {
  copyFile(FILES.COMPOSE)
}
copyFile(FILES.ENV)

// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)

// clean up
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
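With the new flag, the same script can produce either bundle — a hypothetical invocation, since the script's path is not shown in this diff:

```
# full multi-image airgap bundle (script path assumed)
node scripts/airgapped.js
# single-image bundle; the compose file is skipped
SINGLE_IMAGE=1 node scripts/airgapped.js
```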
@@ -5,24 +5,28 @@ RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-reco

# add pin script
WORKDIR /
ADD scripts/pinVersions.js scripts/cleanup.sh ./
ADD scripts/cleanup.sh ./
RUN chmod +x /cleanup.sh

# build server
WORKDIR /app
ADD packages/server .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
COPY yarn.lock .
RUN yarn install --production=true
RUN /cleanup.sh

# build worker
WORKDIR /worker
ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
COPY yarn.lock .
RUN yarn install --production=true
RUN /cleanup.sh

FROM budibase/couchdb
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
# TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD

@@ -31,9 +35,15 @@ COPY --from=build /worker /worker

# install base dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server && \
    apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
    apt-get update
    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server

# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
    && apt update -y \
    && apt install postgresql-client-15 -y \
    && apt remove software-properties-common apt-transport-https gpg -y

# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
@@ -22,6 +22,16 @@ server {
    proxy_pass http://127.0.0.1:4001;
  }

  location /embed {
    rewrite /embed/(.*) /app/$1 break;
    proxy_pass http://127.0.0.1:4001;
    proxy_redirect off;
    proxy_set_header Host $host;
    proxy_set_header x-budibase-embed "true";
    add_header x-budibase-embed "true";
    add_header Content-Security-Policy "frame-ancestors *";
  }

  location = / {
    proxy_pass http://127.0.0.1:4001;
  }

@@ -17,6 +17,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
# export CUSTOM_DOMAIN=budi001.custom.com

# Azure App Service customisations

lerna.json
@@ -1,8 +1,10 @@
{
  "version": "2.5.6-alpha.37",
  "version": "2.8.22-alpha.3",
  "npmClient": "yarn",
  "useWorkspaces": true,
  "packages": ["packages/*"],
  "packages": [
    "packages/*"
  ],
  "useNx": true,
  "command": {
    "publish": {
      "ignoreChanges": [

@@ -17,4 +19,4 @@
    "loadEnvFiles": false
  }
}
}
}
}

nx.json
@@ -1,10 +1,21 @@
{
  "tasksRunnerOptions": {
    "default": {
      "runner": "nx/tasks-runners/default",
      "runner": "nx-cloud",
      "options": {
        "cacheableOperations": ["build", "test"]
        "cacheableOperations": ["build", "test"],
        "accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ="
      }
    }
  },
  "targetDefaults": {
    "dev:builder": {
      "dependsOn": [
        {
          "projects": ["@budibase/string-templates"],
          "target": "build"
        }
      ]
    }
  }
}

package.json
@@ -2,68 +2,79 @@
  "name": "root",
  "private": true,
  "devDependencies": {
    "@esbuild-plugins/tsconfig-paths": "^0.1.2",
    "@nx/js": "16.4.3",
    "@rollup/plugin-json": "^4.0.2",
    "@typescript-eslint/parser": "5.45.0",
    "babel-eslint": "^10.0.3",
    "eslint": "^7.28.0",
    "esbuild": "^0.17.18",
    "esbuild-node-externals": "^1.7.0",
    "eslint": "^8.44.0",
    "eslint-plugin-cypress": "^2.11.3",
    "eslint-plugin-svelte3": "^3.2.0",
    "husky": "^7.0.1",
    "husky": "^8.0.3",
    "js-yaml": "^4.1.0",
    "kill-port": "^1.6.1",
    "lerna": "^6.6.1",
    "lerna": "7.1.1",
    "madge": "^6.0.0",
    "prettier": "^2.3.1",
    "minimist": "^1.2.8",
    "nx": "16.4.3",
    "nx-cloud": "16.0.5",
    "prettier": "2.8.8",
    "prettier-plugin-svelte": "^2.3.0",
    "rimraf": "^3.0.2",
    "rollup-plugin-replace": "^2.2.0",
    "svelte": "^3.38.2",
    "typescript": "4.7.3"
    "typescript": "4.7.3",
    "@babel/core": "^7.22.5",
    "@babel/eslint-parser": "^7.22.5",
    "@babel/preset-env": "^7.22.5",
    "eslint-plugin-svelte": "^2.32.2",
    "svelte-eslint-parser": "^0.32.0"
  },
  "scripts": {
    "setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
    "bootstrap": "lerna link && ./scripts/link-dependencies.sh",
    "build": "lerna run --stream build",
    "build:dev": "lerna run --stream prebuild && tsc --build --watch --preserveWatchOutput",
    "preinstall": "node scripts/syncProPackage.js",
    "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
    "bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
    "build": "yarn nx run-many -t=build",
    "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
    "check:types": "lerna run check:types --skip-nx-cache",
    "backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
    "backend:build": "./scripts/scopeBackend.sh 'lerna run --stream build'",
    "build:sdk": "lerna run --stream build:sdk",
    "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
    "release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
    "release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
    "release:pro": "bash scripts/pro/release.sh",
    "release:pro:develop": "bash scripts/pro/release.sh develop",
    "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
    "release:develop": "yarn release --dist-tag develop",
    "restore": "yarn run clean && yarn run bootstrap && yarn run build",
    "nuke": "yarn run nuke:packages && yarn run nuke:docker",
    "nuke:packages": "yarn run restore",
    "nuke:docker": "lerna run --stream --parallel dev:stack:nuke",
    "nuke:docker": "lerna run --stream dev:stack:nuke",
    "clean": "lerna clean",
    "kill-builder": "kill-port 3000",
    "kill-server": "kill-port 4001 4002",
    "kill-all": "yarn run kill-builder && yarn run kill-server",
    "dev": "yarn run kill-all && lerna link && lerna run --stream --parallel dev:builder --concurrency 1 --stream",
    "dev:noserver": "yarn run kill-builder && lerna link && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
    "dev:server": "yarn run kill-server && lerna run --stream --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
    "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream --parallel dev:built",
    "dev": "yarn run kill-all && lerna run --stream dev:builder --stream",
    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
    "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
    "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
    "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
    "test": "lerna run --stream test --stream",
    "test:pro": "bash scripts/pro/test.sh",
    "lint:eslint": "eslint packages && eslint qa-core",
    "lint:eslint": "eslint packages qa-core --max-warnings=0",
    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
    "lint": "yarn run lint:eslint && yarn run lint:prettier",
    "lint:fix:eslint": "eslint --fix packages qa-core",
    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core",
    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
    "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
    "build:specs": "lerna run --stream specs",
    "build:docker": "lerna run --stream build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
    "build:docker:pre": "lerna run --stream build && lerna run --stream predocker",
    "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
    "build:docker:pre": "yarn build && lerna run --stream predocker",
    "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
    "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
    "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
    "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
    "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
    "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
    "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
    "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
    "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
    "build:docker:single": "npm run build:docker:pre && npm run build:docker:single:image",
    "build:docker:single": "yarn build && lerna run --concurrency 1 predocker && yarn build:docker:single:image",
    "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
    "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",

@@ -82,12 +93,20 @@
    "mode:account": "yarn mode:cloud && yarn env:account:enable",
    "security:audit": "node scripts/audit.js",
    "postinstall": "husky install",
    "install:pro": "bash scripts/pro/install.sh",
    "dep:clean": "yarn clean && yarn bootstrap"
    "dep:clean": "yarn clean -y && yarn bootstrap",
    "submodules:load": "git submodule init && git submodule update && yarn && yarn bootstrap",
    "submodules:unload": "git submodule deinit --all && yarn && yarn bootstrap"
  },
  "workspaces": {
    "packages": [
      "packages/*"
    ]
  }
  },
  "resolutions": {
    "@budibase/backend-core": "0.0.0",
    "@budibase/shared-core": "0.0.0",
    "@budibase/string-templates": "0.0.0",
    "@budibase/types": "0.0.0"
  },
  "dependencies": {}
}

@@ -31,4 +31,6 @@ const config: Config.InitialOptions = {
  coverageReporters: ["lcov", "json", "clover"],
}

process.env.DISABLE_PINO_LOGGER = "1"

export default config

@@ -1,6 +1,6 @@
{
  "name": "@budibase/backend-core",
  "version": "2.5.6-alpha.37",
  "version": "0.0.0",
  "description": "Budibase backend core libraries used in server and worker",
  "main": "dist/src/index.js",
  "types": "dist/src/index.d.ts",

@@ -15,8 +15,6 @@
    "prebuild": "rimraf dist/",
    "prepack": "cp package.json dist",
    "build": "tsc -p tsconfig.build.json",
    "build:pro": "../../scripts/pro/build.sh",
    "postbuild": "yarn run build:pro",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "test": "bash scripts/test.sh",
    "test:watch": "jest --watchAll"

@@ -24,18 +22,18 @@
  "dependencies": {
    "@budibase/nano": "10.1.2",
    "@budibase/pouchdb-replication-stream": "1.2.10",
    "@budibase/types": "2.5.6-alpha.37",
    "@budibase/types": "0.0.0",
    "@shopify/jest-koa-mocks": "5.0.1",
    "@techpass/passport-openidconnect": "0.3.2",
    "aws-cloudfront-sign": "2.2.0",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.0.1",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "correlation-id": "4.0.0",
    "dotenv": "16.0.1",
    "emitter-listener": "1.1.2",
    "ioredis": "4.28.0",
    "ioredis": "5.3.2",
    "joi": "17.6.0",
    "jsonwebtoken": "9.0.0",
    "koa-passport": "4.1.4",

@@ -53,6 +51,7 @@
    "pouchdb": "7.3.0",
    "pouchdb-find": "7.2.2",
    "redlock": "4.2.0",
    "rotating-file-stream": "3.1.0",
    "sanitize-s3-objectkey": "0.0.1",
    "semver": "7.3.7",
    "tar-fs": "2.1.1",

@@ -64,7 +63,6 @@
    "@swc/jest": "^0.2.24",
    "@trendyol/jest-testcontainers": "^2.1.1",
    "@types/chance": "1.1.3",
    "@types/ioredis": "4.28.0",
    "@types/jest": "29.5.0",
    "@types/koa": "2.13.4",
    "@types/lodash": "4.14.180",

@@ -76,7 +74,7 @@
    "@types/tar-fs": "2.0.1",
    "@types/uuid": "8.3.4",
    "chance": "1.1.8",
    "ioredis-mock": "5.8.0",
    "ioredis-mock": "8.7.0",
    "jest": "29.5.0",
    "jest-environment-node": "29.5.0",
    "jest-serial-runner": "^1.2.1",

@@ -90,5 +88,19 @@
    "tsconfig-paths": "4.0.0",
    "typescript": "4.7.3"
  },
  "nx": {
    "targets": {
      "build": {
        "dependsOn": [
          {
            "projects": [
              "@budibase/types"
            ],
            "target": "build"
          }
        ]
      }
    }
  },
  "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}

@@ -159,7 +159,7 @@ export async function updateUserOAuth(userId: string, oAuthConfig: any) {

  try {
    const db = getGlobalDB()
    const dbUser = await db.get(userId)
    const dbUser = await db.get<any>(userId)

    //Do not overwrite the refresh token if a valid one is not provided.
    if (typeof details.refreshToken !== "string") {

@@ -2,9 +2,14 @@ import { getAppClient } from "../redis/init"
import { doWithDB, DocumentType } from "../db"
import { Database, App } from "@budibase/types"

const AppState = {
  INVALID: "invalid",
export enum AppState {
  INVALID = "invalid",
}

export interface DeletedApp {
  state: AppState
}

const EXPIRY_SECONDS = 3600

/**

@@ -31,7 +36,7 @@ function isInvalid(metadata?: { state: string }) {
 * @param {string} appId the id of the app to get metadata from.
 * @returns {object} the app metadata.
 */
export async function getAppMetadata(appId: string) {
export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
  const client = await getAppClient()
  // try cache
  let metadata = await client.get(appId)

@@ -61,11 +66,8 @@ export async function getAppMetadata(appId: string) {
    }
    await client.store(appId, metadata, expiry)
  }
  // we've stored in the cache an object to tell us that it is currently invalid
  if (isInvalid(metadata)) {
    throw { status: 404, message: "No app metadata found" }
  }
  return metadata as App

  return metadata
}

/**

@ -72,16 +72,12 @@ describe("writethrough", () => {
|
|||
writethrough.put({ ...current, value: 4 }),
|
||||
])
|
||||
|
||||
// with a lock, this will work
|
||||
const newRev = responses.map(x => x.rev).find(x => x !== current._rev)
|
||||
expect(newRev).toBeDefined()
|
||||
expect(responses.map(x => x.rev)).toEqual(
|
||||
expect.arrayContaining([current._rev, current._rev, newRev])
|
||||
)
|
||||
expectFunctionWasCalledTimesWith(
|
||||
mocks.alerts.logWarn,
|
||||
2,
|
||||
"Ignoring redlock conflict in write-through cache"
|
||||
)
|
||||
|
||||
const output = await db.get(current._id)
|
||||
expect(output.value).toBe(4)
|
||||
|
|
|
@@ -12,7 +12,7 @@ const EXPIRY_SECONDS = 3600
 */
async function populateFromDB(userId: string, tenantId: string) {
  const db = tenancy.getTenantDB(tenantId)
  const user = await db.get(userId)
  const user = await db.get<any>(userId)
  user.budibaseAccess = true
  if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
    const account = await accounts.getAccount(user.email)

@@ -47,7 +47,7 @@ async function put(
      type: LockType.TRY_ONCE,
      name: LockName.PERSIST_WRITETHROUGH,
      resource: key,
      ttl: 1000,
      ttl: 15000,
    },
    async () => {
      const writeDb = async (toWrite: any) => {

@@ -71,6 +71,7 @@ async function put(
      }
    }
  )

  if (!lockResponse.executed) {
    logWarn(`Ignoring redlock conflict in write-through cache`)
  }

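The ttl bump and the `executed` check above change the failure mode: a contended TRY_ONCE lock is now logged and skipped rather than surfaced as an error. A minimal TypeScript sketch of that pattern, assuming backend-core's `locks` export and a caller-supplied write callback (the wrapper function itself is hypothetical):

import { locks } from "@budibase/backend-core"
import { LockName, LockType } from "@budibase/types"

// hypothetical wrapper - mirrors the opts shape used in the hunk above
async function persistThrough(key: string, write: () => Promise<void>) {
  const lockResponse = await locks.doWithLock(
    {
      type: LockType.TRY_ONCE,
      name: LockName.PERSIST_WRITETHROUGH,
      resource: key,
      ttl: 15000,
    },
    write
  )
  if (!lockResponse.executed) {
    // another node held the lock - treat as a benign conflict, not an error
    console.warn("Ignoring redlock conflict in write-through cache")
  }
}
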
@@ -5,6 +5,7 @@ import {
  GoogleInnerConfig,
  OIDCConfig,
  OIDCInnerConfig,
  OIDCLogosConfig,
  SCIMConfig,
  SCIMInnerConfig,
  SettingsConfig,

@@ -191,6 +192,10 @@ export function getDefaultGoogleConfig(): GoogleInnerConfig | undefined {

// OIDC

export async function getOIDCLogosDoc(): Promise<OIDCLogosConfig | undefined> {
  return getConfig<OIDCLogosConfig>(ConfigType.OIDC_LOGOS)
}

async function getOIDCConfigDoc(): Promise<OIDCConfig | undefined> {
  return getConfig<OIDCConfig>(ConfigType.OIDC)
}

@@ -21,7 +21,7 @@ export enum ViewName {
  AUTOMATION_LOGS = "automation_logs",
  ACCOUNT_BY_EMAIL = "account_by_email",
  PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
  USER_BY_GROUP = "by_group_user",
  USER_BY_GROUP = "user_by_group",
  APP_BACKUP_BY_TRIGGER = "by_trigger",
}

@@ -16,9 +16,12 @@ export enum Header {
  LICENSE_KEY = "x-budibase-license-key",
  API_VER = "x-budibase-api-version",
  APP_ID = "x-budibase-app-id",
  SESSION_ID = "x-budibase-session-id",
  TYPE = "x-budibase-type",
  PREVIEW_ROLE = "x-budibase-role",
  TENANT_ID = "x-budibase-tenant-id",
  VERIFICATION_CODE = "x-budibase-verification-code",
  RETURN_VERIFICATION_CODE = "x-budibase-return-verification-code",
  TOKEN = "x-budibase-token",
  CSRF_TOKEN = "x-csrf-token",
  CORRELATION_ID = "x-budibase-correlation-id",

@@ -104,6 +104,22 @@ async function newContext(updates: ContextMap, task: any) {
  return Context.run(context, task)
}

export async function doInAutomationContext(params: {
  appId: string
  automationId: string
  task: any
}): Promise<any> {
  const tenantId = getTenantIDFromAppID(params.appId)
  return newContext(
    {
      tenantId,
      appId: params.appId,
      automationId: params.automationId,
    },
    params.task
  )
}

export async function doInContext(appId: string, task: any): Promise<any> {
  const tenantId = getTenantIDFromAppID(appId)
  return newContext(

@@ -187,6 +203,11 @@ export function getTenantId(): string {
  return tenantId
}

export function getAutomationId(): string | undefined {
  const context = Context.get()
  return context?.automationId
}

export function getAppId(): string | undefined {
  const context = Context.get()
  const foundId = context?.appId

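A rough usage sketch for the new automation context helpers - the import path is assumed and the task body is purely illustrative:

import * as context from "@budibase/backend-core/context"

async function executeAutomation(appId: string, automationId: string) {
  return context.doInAutomationContext({
    appId,
    automationId,
    task: async () => {
      // anything logged or queried in here can resolve the automation ID
      // from async-local context instead of threading it through arguments
      const currentId = context.getAutomationId() // === automationId
      console.log("running automation", { automationId: currentId })
    },
  })
}
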
@@ -7,4 +7,5 @@ export type ContextMap = {
  identity?: IdentityContext
  environmentVariables?: Record<string, string>
  isScim?: boolean
  automationId?: string
}

@@ -57,6 +57,9 @@ class Replication {
  appReplicateOpts() {
    return {
      filter: (doc: any) => {
        if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
          return false
        }
        return doc._id !== DocumentType.APP_METADATA
      },
    }

@@ -12,7 +12,7 @@ import {
  isDocument,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchCall } from "./utils"
import { directCouchUrlCall } from "./utils"
import { getPouchDB } from "./pouchDB"
import { WriteStream, ReadStream } from "fs"
import { newid } from "../../docIds/newid"

@@ -46,6 +46,8 @@ export class DatabaseImpl implements Database {
  private readonly instanceNano?: Nano.ServerScope
  private readonly pouchOpts: DatabaseOpts

  private readonly couchInfo = getCouchInfo()

  constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) {
    if (dbName == null) {
      throw new Error("Database name cannot be undefined.")

@@ -53,8 +55,8 @@ export class DatabaseImpl implements Database {
    this.name = dbName
    this.pouchOpts = opts || {}
    if (connection) {
      const couchInfo = getCouchInfo(connection)
      this.instanceNano = buildNano(couchInfo)
      this.couchInfo = getCouchInfo(connection)
      this.instanceNano = buildNano(this.couchInfo)
    }
    if (!DatabaseImpl.nano) {
      DatabaseImpl.init()

@@ -67,7 +69,11 @@ export class DatabaseImpl implements Database {
  }

  async exists() {
    let response = await directCouchCall(`/${this.name}`, "HEAD")
    const response = await directCouchUrlCall({
      url: `${this.couchInfo.url}/${this.name}`,
      method: "HEAD",
      cookie: this.couchInfo.cookie,
    })
    return response.status === 200
  }

@@ -4,21 +4,21 @@ export const getCouchInfo = (connection?: string) => {
  const urlInfo = getUrlInfo(connection)
  let username
  let password
  if (env.COUCH_DB_USERNAME) {
    // set from env
    username = env.COUCH_DB_USERNAME
  } else if (urlInfo.auth.username) {
  if (urlInfo.auth?.username) {
    // set from url
    username = urlInfo.auth.username
  } else if (env.COUCH_DB_USERNAME) {
    // set from env
    username = env.COUCH_DB_USERNAME
  } else if (!env.isTest()) {
    throw new Error("CouchDB username not set")
  }
  if (env.COUCH_DB_PASSWORD) {
    // set from env
    password = env.COUCH_DB_PASSWORD
  } else if (urlInfo.auth.password) {
  if (urlInfo.auth?.password) {
    // set from url
    password = urlInfo.auth.password
  } else if (env.COUCH_DB_PASSWORD) {
    // set from env
    password = env.COUCH_DB_PASSWORD
  } else if (!env.isTest()) {
    throw new Error("CouchDB password not set")
  }

@@ -9,6 +9,20 @@ export async function directCouchCall(
) {
  let { url, cookie } = getCouchInfo()
  const couchUrl = `${url}/${path}`
  return await directCouchUrlCall({ url: couchUrl, cookie, method, body })
}

export async function directCouchUrlCall({
  url,
  cookie,
  method,
  body,
}: {
  url: string
  cookie: string
  method: string
  body?: any
}) {
  const params: any = {
    method: method,
    headers: {

@@ -19,7 +33,7 @@ export async function directCouchCall(
    params.body = JSON.stringify(body)
    params.headers["Content-Type"] = "application/json"
  }
  return await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
  return await fetch(checkSlashesInUrl(encodeURI(url)), params)
}

export async function directCouchQuery(

@@ -0,0 +1,14 @@
export function checkErrorCode(error: any, code: number) {
  const stringCode = code.toString()
  if (typeof error === "object") {
    return error.status === code || error.message?.includes(stringCode)
  } else if (typeof error === "number") {
    return error === code
  } else if (typeof error === "string") {
    return error.includes(stringCode)
  }
}

export function isDocumentConflictError(error: any) {
  return checkErrorCode(error, 409)
}

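A plausible caller for the new helpers - retrying a CouchDB write once when the 409 conflict check fires. The upsert wrapper is hypothetical; db.put/db.get mirror the Database interface used elsewhere in this diff:

import { isDocumentConflictError } from "./errors"

async function upsert(db: any, doc: { _id: string; _rev?: string }) {
  try {
    return await db.put(doc)
  } catch (err: any) {
    if (!isDocumentConflictError(err)) {
      throw err
    }
    // 409 means our _rev is stale - fetch the latest revision and retry once
    const latest = await db.get(doc._id)
    return await db.put({ ...doc, _rev: latest._rev })
  }
}
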
@ -9,3 +9,4 @@ export * from "../constants/db"
|
|||
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
||||
export * from "./lucene"
|
||||
export * as searchIndexes from "./searchIndexes"
|
||||
export * from "./errors"
|
||||
|
|
|
@ -343,6 +343,9 @@ export class QueryBuilder<T> {
|
|||
}
|
||||
|
||||
const oneOf = (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return `*:*`
|
||||
}
|
||||
if (!Array.isArray(value)) {
|
||||
if (typeof value === "string") {
|
||||
value = value.split(",")
|
||||
|
@ -430,11 +433,14 @@ export class QueryBuilder<T> {
|
|||
if (!value) {
|
||||
return null
|
||||
}
|
||||
if (typeof value === "boolean") {
|
||||
return `(*:* AND !${key}:${value})`
|
||||
}
|
||||
return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`
|
||||
})
|
||||
}
|
||||
if (this.#query.empty) {
|
||||
build(this.#query.empty, (key: string) => `!${key}:["" TO *]`)
|
||||
build(this.#query.empty, (key: string) => `(*:* -${key}:["" TO *])`)
|
||||
}
|
||||
if (this.#query.notEmpty) {
|
||||
build(this.#query.notEmpty, (key: string) => `${key}:["" TO *]`)
|
||||
|
|
|
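The `empty` builder change reflects a Lucene constraint: a clause that is only a negation, like `!property:["" TO *]`, matches nothing on its own, so the new form subtracts populated rows from the match-all set `*:*`. Roughly:

// old: bare negation - Lucene cannot evaluate this standalone
const oldEmptyClause = (key: string) => `!${key}:["" TO *]`

// new: match everything, then exclude rows where the field has a value
const newEmptyClause = (key: string) => `(*:* -${key}:["" TO *])`

console.log(newEmptyClause("property")) // (*:* -property:["" TO *])
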
@@ -5,7 +5,7 @@ export async function createUserIndex() {
  const db = getGlobalDB()
  let designDoc
  try {
    designDoc = await db.get("_design/database")
    designDoc = await db.get<any>("_design/database")
  } catch (err: any) {
    if (err.status === 404) {
      designDoc = { _id: "_design/database" }

@ -114,6 +114,25 @@ describe("lucene", () => {
|
|||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
|
||||
it("should return all rows when doing a one of search against falsey value", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addOneOf("property", null)
|
||||
let resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", undefined)
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", "")
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
|
||||
builder.addOneOf("property", [])
|
||||
resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(0)
|
||||
})
|
||||
|
||||
it("should be able to perform a contains search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addContains("property", ["word"])
|
||||
|
|
|
@ -2,7 +2,7 @@ import env from "../environment"
|
|||
import { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from "../constants"
|
||||
import { getTenantId, getGlobalDBName } from "../context"
|
||||
import { doWithDB, directCouchAllDbs } from "./db"
|
||||
import { getAppMetadata } from "../cache/appMetadata"
|
||||
import { AppState, DeletedApp, getAppMetadata } from "../cache/appMetadata"
|
||||
import { isDevApp, isDevAppID, getProdAppID } from "../docIds/conversions"
|
||||
import { App, Database } from "@budibase/types"
|
||||
import { getStartEndKeyURL } from "../docIds"
|
||||
|
@ -101,7 +101,9 @@ export async function getAllApps({
|
|||
const response = await Promise.allSettled(appPromises)
|
||||
const apps = response
|
||||
.filter(
|
||||
(result: any) => result.status === "fulfilled" && result.value != null
|
||||
(result: any) =>
|
||||
result.status === "fulfilled" &&
|
||||
result.value?.state !== AppState.INVALID
|
||||
)
|
||||
.map(({ value }: any) => value)
|
||||
if (!all) {
|
||||
|
@ -126,7 +128,11 @@ export async function getAppsByIDs(appIds: string[]) {
|
|||
)
|
||||
// have to list the apps which exist, some may have been deleted
|
||||
return settled
|
||||
.filter(promise => promise.status === "fulfilled")
|
||||
.filter(
|
||||
promise =>
|
||||
promise.status === "fulfilled" &&
|
||||
(promise.value as DeletedApp).state !== AppState.INVALID
|
||||
)
|
||||
.map(promise => (promise as PromiseFulfilledResult<App>).value)
|
||||
}
|
||||
|
||||
|
|
|
@@ -81,8 +81,19 @@ export function generateAppUserID(prodAppId: string, userId: string) {
 * Generates a new role ID.
 * @returns {string} The new role ID which the role doc can be stored under.
 */
export function generateRoleID(id?: any) {
  return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
export function generateRoleID(name: string) {
  const prefix = `${DocumentType.ROLE}${SEPARATOR}`
  if (name.startsWith(prefix)) {
    return name
  }
  return `${prefix}${name}`
}

/**
 * Utility function to be more verbose.
 */
export function prefixRoleID(name: string) {
  return generateRoleID(name)
}

/**

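The rewritten generateRoleID is idempotent, so callers can pass either a bare role name or an already prefixed ID without double-prefixing. Assuming DocumentType.ROLE plus SEPARATOR renders as "role_":

generateRoleID("power")      // "role_power"
generateRoleID("role_power") // "role_power" - returned unchanged
prefixRoleID("power")        // same result, just a more verbose call site
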
@@ -47,7 +47,10 @@ function httpLogging() {
  return process.env.HTTP_LOGGING
}

function findVersion() {
function getPackageJsonFields(): {
  VERSION: string
  SERVICE_NAME: string
} {
  function findFileInAncestors(
    fileName: string,
    currentDir: string

@@ -69,10 +72,14 @@ function findVersion() {
  try {
    const packageJsonFile = findFileInAncestors("package.json", process.cwd())
    const content = readFileSync(packageJsonFile!, "utf-8")
    const version = JSON.parse(content).version
    return version
    const parsedContent = JSON.parse(content)
    return {
      VERSION: parsedContent.version,
      SERVICE_NAME: parsedContent.name,
    }
  } catch {
    throw new Error("Cannot find a valid version in its package.json")
    // throwing an error here is confusing/causes backend-core to be hard to import
    return { VERSION: "", SERVICE_NAME: "" }
  }
}

@@ -95,8 +102,8 @@ const environment = {
  GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,
  SALT_ROUNDS: process.env.SALT_ROUNDS,
  REDIS_URL: process.env.REDIS_URL || "localhost:6379",
  REDIS_PASSWORD: process.env.REDIS_PASSWORD || "budibase",
  MOCK_REDIS: process.env.MOCK_REDIS,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
  AWS_REGION: process.env.AWS_REGION,

@@ -128,6 +135,7 @@ const environment = {
  PLUGIN_BUCKET_NAME:
    process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
  USE_COUCH: process.env.USE_COUCH || true,
  MOCK_REDIS: process.env.MOCK_REDIS,
  DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
  SERVICE: process.env.SERVICE || "budibase",
  LOG_LEVEL: process.env.LOG_LEVEL || "info",

@@ -153,12 +161,15 @@ const environment = {
  ENABLE_SSO_MAINTENANCE_MODE: selfHosted
    ? process.env.ENABLE_SSO_MAINTENANCE_MODE
    : false,
  VERSION: findVersion(),
  ...getPackageJsonFields(),
  DISABLE_PINO_LOGGER: process.env.DISABLE_PINO_LOGGER,
  OFFLINE_MODE: process.env.OFFLINE_MODE,
  _set(key: any, value: any) {
    process.env[key] = value
    // @ts-ignore
    environment[key] = value
  },
  ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
}

// clean up any environment variable edge cases

@@ -55,6 +55,18 @@ export class HTTPError extends BudibaseError {
  }
}

export class NotFoundError extends HTTPError {
  constructor(message: string) {
    super(message, 404)
  }
}

export class BadRequestError extends HTTPError {
  constructor(message: string) {
    super(message, 400)
  }
}

// LICENSING

export class UsageLimitError extends HTTPError {

@@ -86,6 +86,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
      installationId,
      tenantId,
      environment,
      realTenantId: context.getTenantId(),
      hostInfo: userContext.hostInfo,
    }
  } else {

@@ -263,7 +264,7 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
  }
}

const getUniqueTenantId = async (tenantId: string): Promise<string> => {
export const getUniqueTenantId = async (tenantId: string): Promise<string> => {
  // make sure this tenantId always matches the tenantId in context
  return context.doInTenant(tenantId, () => {
    return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {

@@ -3,7 +3,6 @@ import {
  Event,
  LicenseActivatedEvent,
  LicensePlanChangedEvent,
  LicenseTierChangedEvent,
  PlanType,
  Account,
  LicensePortalOpenedEvent,

@@ -11,22 +10,23 @@ import {
  LicenseCheckoutOpenedEvent,
  LicensePaymentFailedEvent,
  LicensePaymentRecoveredEvent,
  PriceDuration,
} from "@budibase/types"

async function tierChanged(account: Account, from: number, to: number) {
  const properties: LicenseTierChangedEvent = {
    accountId: account.accountId,
    to,
    from,
async function planChanged(
  account: Account,
  opts: {
    from: PlanType
    to: PlanType
    fromQuantity: number | undefined
    toQuantity: number | undefined
    fromDuration: PriceDuration | undefined
    toDuration: PriceDuration | undefined
  }
  await publishEvent(Event.LICENSE_TIER_CHANGED, properties)
}

async function planChanged(account: Account, from: PlanType, to: PlanType) {
) {
  const properties: LicensePlanChangedEvent = {
    accountId: account.accountId,
    to,
    from,
    ...opts,
  }
  await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)
}

@@ -74,7 +74,6 @@ async function paymentRecovered(account: Account) {
}

export default {
  tierChanged,
  planChanged,
  activated,
  checkoutOpened,

@@ -14,10 +14,15 @@ async function servedBuilder(timezone: string) {
  await publishEvent(Event.SERVED_BUILDER, properties)
}

async function servedApp(app: App, timezone: string) {
async function servedApp(
  app: App,
  timezone: string,
  embed?: boolean | undefined
) {
  const properties: AppServedEvent = {
    appVersion: app.version,
    timezone,
    embed: embed === true,
  }
  await publishEvent(Event.SERVED_APP, properties)
}

@@ -3,6 +3,7 @@ import {
  Event,
  User,
  UserCreatedEvent,
  UserDataCollaborationEvent,
  UserDeletedEvent,
  UserInviteAcceptedEvent,
  UserInvitedEvent,

@@ -173,6 +174,15 @@ async function passwordReset(user: User) {
  await publishEvent(Event.USER_PASSWORD_RESET, properties)
}

// COLLABORATION

async function dataCollaboration(users: number) {
  const properties: UserDataCollaborationEvent = {
    users,
  }
  await publishEvent(Event.USER_DATA_COLLABORATION, properties)
}

export default {
  created,
  updated,

@@ -188,4 +198,5 @@ export default {
  passwordUpdated,
  passwordResetRequested,
  passwordReset,
  dataCollaboration,
}

@@ -21,6 +21,7 @@ export * as context from "./context"
export * as cache from "./cache"
export * as objectStore from "./objectStore"
export * as redis from "./redis"
export { Client as RedisClient } from "./redis"
export * as locks from "./redis/redlockImpl"
export * as utils from "./utils"
export * as errors from "./errors"

@@ -21,6 +21,6 @@ export function logAlertWithInfo(
  logAlert(message, error)
}

export function logWarn(message: string) {
  console.warn(`bb-warn: ${message}`)
export function logWarn(message: string, e?: any) {
  console.warn(`bb-warn: ${message}`, e)
}

@@ -1,6 +1,4 @@
export * as correlation from "./correlation/correlation"
export { logger, disableLogger } from "./pino/logger"
export { logger } from "./pino/logger"
export * from "./alerts"

// turn off or on context logging i.e. tenantId, appId etc
export let LOG_CONTEXT = true
export * as system from "./system"

@@ -1,188 +1,232 @@
import env from "../../environment"
import pino, { LoggerOptions } from "pino"
import pinoPretty from "pino-pretty"

import { IdentityType } from "@budibase/types"
import env from "../../environment"
import * as context from "../../context"
import * as correlation from "../correlation"
import { IdentityType } from "@budibase/types"
import { LOG_CONTEXT } from "../index"

// CORE LOGGERS - for disabling

const BUILT_INS = {
  log: console.log,
  error: console.error,
  info: console.info,
  warn: console.warn,
  trace: console.trace,
  debug: console.debug,
}
import { localFileDestination } from "../system"

// LOGGER

const pinoOptions: LoggerOptions = {
  level: env.LOG_LEVEL,
  formatters: {
    level: label => {
      return { level: label.toUpperCase() }
    },
    bindings: () => {
      return {}
    },
  },
  timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`,
}

if (env.isDev()) {
  pinoOptions.transport = {
    target: "pino-pretty",
    options: {
      singleLine: true,
let pinoInstance: pino.Logger | undefined
if (!env.DISABLE_PINO_LOGGER) {
  const level = env.LOG_LEVEL
  const pinoOptions: LoggerOptions = {
    level,
    formatters: {
      level: level => {
        return { level: level.toUpperCase() }
      },
      bindings: () => {
        if (env.SELF_HOSTED) {
          // "service" is being injected in datadog using the pod names,
          // so we should leave it blank to allow the default behaviour if it's not running self-hosted
          return {
            service: env.SERVICE_NAME,
          }
        } else {
          return {}
        }
      },
    },
    timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`,
  }
}

export const logger = pino(pinoOptions)
  const destinations: pino.StreamEntry[] = []

export function disableLogger() {
  console.log = BUILT_INS.log
  console.error = BUILT_INS.error
  console.info = BUILT_INS.info
  console.warn = BUILT_INS.warn
  console.trace = BUILT_INS.trace
  console.debug = BUILT_INS.debug
}
  destinations.push(
    env.isDev()
      ? {
          stream: pinoPretty({ singleLine: true }),
          level: level as pino.Level,
        }
      : { stream: process.stdout, level: level as pino.Level }
  )

// CONSOLE OVERRIDES
  if (env.SELF_HOSTED) {
    destinations.push({
      stream: localFileDestination(),
      level: level as pino.Level,
    })
  }

interface MergingObject {
  objects?: any[]
  tenantId?: string
  appId?: string
  identityId?: string
  identityType?: IdentityType
  correlationId?: string
  err?: Error
}
  pinoInstance = destinations.length
    ? pino(pinoOptions, pino.multistream(destinations))
    : pino(pinoOptions)

function isPlainObject(obj: any) {
  return typeof obj === "object" && obj !== null && !(obj instanceof Error)
}
  // CONSOLE OVERRIDES

function isError(obj: any) {
  return obj instanceof Error
}
  interface MergingObject {
    objects?: any[]
    tenantId?: string
    appId?: string
    automationId?: string
    identityId?: string
    identityType?: IdentityType
    correlationId?: string
    err?: Error
  }

function isMessage(obj: any) {
  return typeof obj === "string"
}
  function isPlainObject(obj: any) {
    return typeof obj === "object" && obj !== null && !(obj instanceof Error)
  }

/**
 * Backwards compatibility between console logging statements
 * and pino logging requirements.
 */
function getLogParams(args: any[]): [MergingObject, string] {
  let error = undefined
  let objects: any[] = []
  let message = ""
  function isError(obj: any) {
    return obj instanceof Error
  }

  args.forEach(arg => {
    if (isMessage(arg)) {
      message = `${message} ${arg}`.trimStart()
    }
    if (isPlainObject(arg)) {
      objects.push(arg)
    }
    if (isError(arg)) {
      error = arg
    }
  })
  function isMessage(obj: any) {
    return typeof obj === "string"
  }

  const identity = getIdentity()
  /**
   * Backwards compatibility between console logging statements
   * and pino logging requirements.
   */
  function getLogParams(args: any[]): [MergingObject, string] {
    let error = undefined
    let objects: any[] = []
    let message = ""

  let contextObject = {}
    args.forEach(arg => {
      if (isMessage(arg)) {
        message = `${message} ${arg}`.trimStart()
      }
      if (isPlainObject(arg)) {
        objects.push(arg)
      }
      if (isError(arg)) {
        error = arg
      }
    })

    const identity = getIdentity()

    let contextObject = {}

    if (LOG_CONTEXT) {
      contextObject = {
        tenantId: getTenantId(),
        appId: getAppId(),
        automationId: getAutomationId(),
        identityId: identity?._id,
        identityType: identity?.type,
        correlationId: correlation.getId(),
      }

    const mergingObject: any = {
      err: error,
      pid: process.pid,
      ...contextObject,
    }

    if (objects.length) {
      // init generic data object for params supplied that don't have a
      // '_logKey' field. This prints an object using argument index as the key
      // e.g. { 0: {}, 1: {} }
      const data: any = {}
      let dataIndex = 0

      for (let i = 0; i < objects.length; i++) {
        const object = objects[i]
        // the object has specified a log key
        // use this instead of generic key
        const logKey = object._logKey
        if (logKey) {
          delete object._logKey
          mergingObject[logKey] = object
        } else {
          data[dataIndex] = object
          dataIndex++
        }
      }

      if (Object.keys(data).length) {
        mergingObject.data = data
      }
    }

    return [mergingObject, message]
  }

  const mergingObject = {
    objects: objects.length ? objects : undefined,
    err: error,
    ...contextObject,
  console.log = (...arg: any[]) => {
    const [obj, msg] = getLogParams(arg)
    pinoInstance?.info(obj, msg)
  }
  console.info = (...arg: any[]) => {
    const [obj, msg] = getLogParams(arg)
    pinoInstance?.info(obj, msg)
  }
  console.warn = (...arg: any[]) => {
    const [obj, msg] = getLogParams(arg)
    pinoInstance?.warn(obj, msg)
  }
  console.error = (...arg: any[]) => {
    const [obj, msg] = getLogParams(arg)
    pinoInstance?.error(obj, msg)
  }

  return [mergingObject, message]
}

console.log = (...arg: any[]) => {
  const [obj, msg] = getLogParams(arg)
  logger.info(obj, msg)
}
console.info = (...arg: any[]) => {
  const [obj, msg] = getLogParams(arg)
  logger.info(obj, msg)
}
console.warn = (...arg: any[]) => {
  const [obj, msg] = getLogParams(arg)
  logger.warn(obj, msg)
}
console.error = (...arg: any[]) => {
  const [obj, msg] = getLogParams(arg)
  logger.error(obj, msg)
}

/**
 * custom trace impl - this resembles the node trace behaviour rather
 * than traditional trace logging
 * @param arg
 */
console.trace = (...arg: any[]) => {
  const [obj, msg] = getLogParams(arg)
  if (!obj.err) {
    // to get stack trace
    obj.err = new Error()
  /**
   * custom trace impl - this resembles the node trace behaviour rather
   * than traditional trace logging
   * @param arg
   */
  console.trace = (...arg: any[]) => {
    const [obj, msg] = getLogParams(arg)
    if (!obj.err) {
      // to get stack trace
      obj.err = new Error()
    }
    pinoInstance?.trace(obj, msg)
  }
  logger.trace(obj, msg)
}

console.debug = (...arg: any) => {
  const [obj, msg] = getLogParams(arg)
  logger.debug(obj, msg)
}

// CONTEXT

const getTenantId = () => {
  let tenantId
  try {
    tenantId = context.getTenantId()
  } catch (e: any) {
    // do nothing
  console.debug = (...arg: any) => {
    const [obj, msg] = getLogParams(arg)
    pinoInstance?.debug(obj, msg)
  }

  // CONTEXT

  const getTenantId = () => {
    let tenantId
    try {
      tenantId = context.getTenantId()
    } catch (e: any) {
      // do nothing
    }
    return tenantId
  }

  const getAppId = () => {
    let appId
    try {
      appId = context.getAppId()
    } catch (e) {
      // do nothing
    }
    return appId
  }

  const getAutomationId = () => {
    let appId
    try {
      appId = context.getAutomationId()
    } catch (e) {
      // do nothing
    }
    return appId
  }

  const getIdentity = () => {
    let identity
    try {
      identity = context.getIdentity()
    } catch (e) {
      // do nothing
    }
    return identity
  }
  return tenantId
}

const getAppId = () => {
  let appId
  try {
    appId = context.getAppId()
  } catch (e) {
    // do nothing
  }
  return appId
}

const getIdentity = () => {
  let identity
  try {
    identity = context.getIdentity()
  } catch (e) {
    // do nothing
  }
  return identity
}
export const logger = pinoInstance

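With the console overrides above installed, extra objects are folded into pino's merging object; an object carrying a `_logKey` field is promoted to a named property while the rest land in a positional `data` map. An illustrative call (output shape approximate):

console.log(
  "job failed",
  { _logKey: "bull", jobId: 42 }, // promoted to a top-level "bull" field
  { attempt: 3 }                  // no _logKey, so stored as data[0]
)
// emitted record (roughly): { bull: { jobId: 42 }, data: { 0: { attempt: 3 } },
//   pid, tenantId, appId, ... } with message "job failed"
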
@@ -0,0 +1,81 @@
import fs from "fs"
import path from "path"
import * as rfs from "rotating-file-stream"

import env from "../environment"
import { budibaseTempDir } from "../objectStore"

const logsFileName = `budibase.log`
const budibaseLogsHistoryFileName = "budibase-logs-history.txt"

const logsPath = path.join(budibaseTempDir(), "systemlogs")

function getFullPath(fileName: string) {
  return path.join(logsPath, fileName)
}

export function getSingleFileMaxSizeInfo(totalMaxSize: string) {
  const regex = /(\d+)([A-Za-z])/
  const match = totalMaxSize?.match(regex)
  if (!match) {
    console.warn(`totalMaxSize does not have a valid value`, {
      totalMaxSize,
    })
    return undefined
  }

  const size = +match[1]
  const unit = match[2]
  if (size === 1) {
    switch (unit) {
      case "B":
        return { size: `${size}B`, totalHistoryFiles: 1 }
      case "K":
        return { size: `${(size * 1000) / 2}B`, totalHistoryFiles: 1 }
      case "M":
        return { size: `${(size * 1000) / 2}K`, totalHistoryFiles: 1 }
      case "G":
        return { size: `${(size * 1000) / 2}M`, totalHistoryFiles: 1 }
      default:
        return undefined
    }
  }

  if (size % 2 === 0) {
    return { size: `${size / 2}${unit}`, totalHistoryFiles: 1 }
  }

  return { size: `1${unit}`, totalHistoryFiles: size - 1 }
}

export function localFileDestination() {
  const fileInfo = getSingleFileMaxSizeInfo(env.ROLLING_LOG_MAX_SIZE)
  const outFile = rfs.createStream(logsFileName, {
    // As we have a rolling size, we want to half the max size
    size: fileInfo?.size,
    path: logsPath,
    maxFiles: fileInfo?.totalHistoryFiles || 1,
    immutable: true,
    history: budibaseLogsHistoryFileName,
    initialRotation: false,
  })

  return outFile
}

export function getLogReadStream() {
  const streams = []
  const historyFile = getFullPath(budibaseLogsHistoryFileName)
  if (fs.existsSync(historyFile)) {
    const fileContent = fs.readFileSync(historyFile, "utf-8")
    const historyFiles = fileContent.split("\n")
    for (const historyFile of historyFiles.filter(x => x)) {
      streams.push(fs.readFileSync(historyFile))
    }
  }

  streams.push(fs.readFileSync(getFullPath(logsFileName)))

  const combinedContent = Buffer.concat(streams)
  return combinedContent
}

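getSingleFileMaxSizeInfo splits ROLLING_LOG_MAX_SIZE between the active file and rotated history so the total on disk stays near the configured cap. Expected outputs, matching the tests that follow:

getSingleFileMaxSizeInfo("10M") // { size: "5M", totalHistoryFiles: 1 }
getSingleFileMaxSizeInfo("3G")  // { size: "1G", totalHistoryFiles: 2 }
getSingleFileMaxSizeInfo("1K")  // { size: "500B", totalHistoryFiles: 1 }
getSingleFileMaxSizeInfo("50")  // undefined - no unit, logs a warning
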
@@ -0,0 +1,61 @@
import { getSingleFileMaxSizeInfo } from "../system"

describe("system", () => {
  describe("getSingleFileMaxSizeInfo", () => {
    it.each([
      ["100B", "50B"],
      ["200K", "100K"],
      ["20M", "10M"],
      ["4G", "2G"],
    ])(
      "Halving even number (%s) returns halved size and 1 history file (%s)",
      (totalValue, expectedMaxSize) => {
        const result = getSingleFileMaxSizeInfo(totalValue)
        expect(result).toEqual({
          size: expectedMaxSize,
          totalHistoryFiles: 1,
        })
      }
    )

    it.each([
      ["5B", "1B", 4],
      ["17K", "1K", 16],
      ["21M", "1M", 20],
      ["3G", "1G", 2],
    ])(
      "Halving an odd number (%s) returns as many files as size (-1) (%s)",
      (totalValue, expectedMaxSize, totalHistoryFiles) => {
        const result = getSingleFileMaxSizeInfo(totalValue)
        expect(result).toEqual({
          size: expectedMaxSize,
          totalHistoryFiles,
        })
      }
    )

    it.each([
      ["1B", "1B"],
      ["1K", "500B"],
      ["1M", "500K"],
      ["1G", "500M"],
    ])(
      "Halving '%s' returns halved unit (%s)",
      (totalValue, expectedMaxSize) => {
        const result = getSingleFileMaxSizeInfo(totalValue)
        expect(result).toEqual({
          size: expectedMaxSize,
          totalHistoryFiles: 1,
        })
      }
    )

    it.each([[undefined], [""], ["50"], ["wrongvalue"]])(
      "Halving wrongly formatted value ('%s') returns undefined",
      totalValue => {
        const result = getSingleFileMaxSizeInfo(totalValue!)
        expect(result).toBeUndefined()
      }
    )
  })
})

@@ -1,10 +1,11 @@
import * as google from "../sso/google"
import { Cookie } from "../../../constants"
import { clearCookie, getCookie } from "../../../utils"
import { doWithDB } from "../../../db"
import * as configs from "../../../configs"
import { BBContext, Database, SSOProfile } from "@budibase/types"
import * as cache from "../../../cache"
import * as utils from "../../../utils"
import { UserCtx, SSOProfile } from "@budibase/types"
import { ssoSaveUserNoOp } from "../sso/sso"

const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy

type Passport = {

@@ -22,7 +23,7 @@ async function fetchGoogleCreds() {

export async function preAuth(
  passport: Passport,
  ctx: BBContext,
  ctx: UserCtx,
  next: Function
) {
  // get the relevant config

@@ -36,8 +37,8 @@ export async function preAuth(
    ssoSaveUserNoOp
  )

  if (!ctx.query.appId || !ctx.query.datasourceId) {
    ctx.throw(400, "appId and datasourceId query params not present.")
  if (!ctx.query.appId) {
    ctx.throw(400, "appId query param not present.")
  }

  return passport.authenticate(strategy, {

@@ -49,7 +50,7 @@ export async function preAuth(

export async function postAuth(
  passport: Passport,
  ctx: BBContext,
  ctx: UserCtx,
  next: Function
) {
  // get the relevant config

@@ -57,7 +58,7 @@ export async function postAuth(
  const platformUrl = await configs.getPlatformUrl({ tenantAware: false })

  let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
  const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth)
  const authStateCookie = utils.getCookie(ctx, Cookie.DatasourceAuth)

  return passport.authenticate(
    new GoogleStrategy(

@@ -69,33 +70,26 @@ export async function postAuth(
      (
        accessToken: string,
        refreshToken: string,
        profile: SSOProfile,
        _profile: SSOProfile,
        done: Function
      ) => {
        clearCookie(ctx, Cookie.DatasourceAuth)
        utils.clearCookie(ctx, Cookie.DatasourceAuth)
        done(null, { accessToken, refreshToken })
      }
    ),
    { successRedirect: "/", failureRedirect: "/error" },
    async (err: any, tokens: string[]) => {
      const baseUrl = `/builder/app/${authStateCookie.appId}/data`
      // update the DB for the datasource with all the user info
      await doWithDB(authStateCookie.appId, async (db: Database) => {
        let datasource
        try {
          datasource = await db.get(authStateCookie.datasourceId)
        } catch (err: any) {
          if (err.status === 404) {
            ctx.redirect(baseUrl)
          }

      const id = utils.newid()
      await cache.store(
        `datasource:creation:${authStateCookie.appId}:google:${id}`,
        {
          tokens,
        }
        if (!datasource.config) {
          datasource.config = {}
        }
        datasource.config.auth = { type: "google", ...tokens }
        await db.put(datasource)
        ctx.redirect(`${baseUrl}/datasource/${authStateCookie.datasourceId}`)
      })
      )

      ctx.redirect(`${baseUrl}/new?continue_google_setup=${id}`)
    }
  )(ctx, next)
}

@@ -128,6 +128,7 @@ class InMemoryQueue {

  on() {
    // do nothing
    return this
  }

  async waitForCompletion() {

@@ -1,5 +1,6 @@
 import { Job, JobId, Queue } from "bull"
 import { JobQueue } from "./constants"
+import * as context from "../context"
 
 export type StalledFn = (job: Job) => Promise<void>
 
@@ -31,71 +32,164 @@ function handleStalled(queue: Queue, removeStalledCb?: StalledFn) {
   })
 }
 
-function logging(queue: Queue, jobQueue: JobQueue) {
-  let eventType: string
-  switch (jobQueue) {
-    case JobQueue.AUTOMATION:
-      eventType = "automation-event"
-      break
-    case JobQueue.APP_BACKUP:
-      eventType = "app-backup-event"
-      break
+function getLogParams(
+  eventType: QueueEventType,
+  event: BullEvent,
+  opts: {
+    job?: Job
+    jobId?: JobId
+    error?: Error
+  } = {},
+  extra: any = {}
+) {
+  const message = `[BULL] ${eventType}=${event}`
+  const err = opts.error
+
+  const bullLog = {
+    _logKey: "bull",
+    eventType,
+    event,
+    job: opts.job,
+    jobId: opts.jobId || opts.job?.id,
+    ...extra,
+  }
+
+  let automationLog
+  if (opts.job?.data?.automation) {
+    automationLog = {
+      _logKey: "automation",
+      trigger: opts.job
+        ? opts.job.data.automation.definition.trigger.event
+        : undefined,
+    }
+  }
+
+  return [message, err, bullLog, automationLog]
+}
+
+enum BullEvent {
+  ERROR = "error",
+  WAITING = "waiting",
+  ACTIVE = "active",
+  STALLED = "stalled",
+  PROGRESS = "progress",
+  COMPLETED = "completed",
+  FAILED = "failed",
+  PAUSED = "paused",
+  RESUMED = "resumed",
+  CLEANED = "cleaned",
+  DRAINED = "drained",
+  REMOVED = "removed",
+}
+
+enum QueueEventType {
+  AUTOMATION_EVENT = "automation-event",
+  APP_BACKUP_EVENT = "app-backup-event",
+  AUDIT_LOG_EVENT = "audit-log-event",
+  SYSTEM_EVENT = "system-event",
+}
+
+const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
+  [JobQueue.AUTOMATION]: QueueEventType.AUTOMATION_EVENT,
+  [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT,
+  [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
+  [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
+}
+
+function logging(queue: Queue, jobQueue: JobQueue) {
+  const eventType = EventTypeMap[jobQueue]
+
+  function doInJobContext(job: Job, task: any) {
+    // if this is an automation job try to get the app id
+    const appId = job.data.event?.appId
+    if (appId) {
+      return context.doInContext(appId, task)
+    } else {
+      task()
+    }
   }
+
+  queue
+    .on(BullEvent.STALLED, async (job: Job) => {
+      // A job has been marked as stalled. This is useful for debugging job
+      // workers that crash or pause the event loop.
+      await doInJobContext(job, () => {
+        console.error(...getLogParams(eventType, BullEvent.STALLED, { job }))
+      })
+    })
+    .on(BullEvent.ERROR, (error: any) => {
+      // An error occurred.
+      console.error(...getLogParams(eventType, BullEvent.ERROR, { error }))
+    })
+
   if (process.env.NODE_DEBUG?.includes("bull")) {
     queue
-      .on("error", (error: any) => {
-        // An error occurred.
-        console.error(`${eventType}=error error=${JSON.stringify(error)}`)
-      })
-      .on("waiting", (jobId: JobId) => {
+      .on(BullEvent.WAITING, (jobId: JobId) => {
         // A Job is waiting to be processed as soon as a worker is idling.
-        console.log(`${eventType}=waiting jobId=${jobId}`)
+        console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))
       })
-      .on("active", (job: Job, jobPromise: any) => {
+      .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {
         // A job has started. You can use `jobPromise.cancel()`` to abort it.
-        console.log(`${eventType}=active jobId=${job.id}`)
+        await doInJobContext(job, () => {
+          console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))
+        })
       })
-      .on("stalled", (job: Job) => {
-        // A job has been marked as stalled. This is useful for debugging job
-        // workers that crash or pause the event loop.
-        console.error(
-          `${eventType}=stalled jobId=${job.id} job=${JSON.stringify(job)}`
-        )
+      .on(BullEvent.PROGRESS, async (job: Job, progress: any) => {
+        // A job's progress was updated
+        await doInJobContext(job, () => {
+          console.info(
+            ...getLogParams(
+              eventType,
+              BullEvent.PROGRESS,
+              { job },
+              { progress }
+            )
+          )
+        })
       })
-      .on("progress", (job: Job, progress: any) => {
-        // A job's progress was updated!
-        console.log(
-          `${eventType}=progress jobId=${job.id} progress=${progress}`
-        )
-      })
-      .on("completed", (job: Job, result) => {
+      .on(BullEvent.COMPLETED, async (job: Job, result) => {
         // A job successfully completed with a `result`.
-        console.log(`${eventType}=completed jobId=${job.id} result=${result}`)
+        await doInJobContext(job, () => {
+          console.info(
+            ...getLogParams(eventType, BullEvent.COMPLETED, { job }, { result })
+          )
+        })
       })
-      .on("failed", (job, err: any) => {
+      .on(BullEvent.FAILED, async (job: Job, error: any) => {
         // A job failed with reason `err`!
-        console.log(`${eventType}=failed jobId=${job.id} error=${err}`)
+        await doInJobContext(job, () => {
+          console.error(
+            ...getLogParams(eventType, BullEvent.FAILED, { job, error })
+          )
+        })
       })
-      .on("paused", () => {
+      .on(BullEvent.PAUSED, () => {
         // The queue has been paused.
-        console.log(`${eventType}=paused`)
+        console.info(...getLogParams(eventType, BullEvent.PAUSED))
       })
-      .on("resumed", (job: Job) => {
+      .on(BullEvent.RESUMED, () => {
         // The queue has been resumed.
-        console.log(`${eventType}=paused jobId=${job.id}`)
+        console.info(...getLogParams(eventType, BullEvent.RESUMED))
       })
-      .on("cleaned", (jobs: Job[], type: string) => {
+      .on(BullEvent.CLEANED, (jobs: Job[], type: string) => {
         // Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
         // jobs, and `type` is the type of jobs cleaned.
-        console.log(`${eventType}=cleaned length=${jobs.length} type=${type}`)
+        console.info(
+          ...getLogParams(
+            eventType,
+            BullEvent.CLEANED,
+            {},
+            { length: jobs.length, type }
+          )
+        )
       })
-      .on("drained", () => {
+      .on(BullEvent.DRAINED, () => {
         // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)
-        console.log(`${eventType}=drained`)
+        console.info(...getLogParams(eventType, BullEvent.DRAINED))
       })
-      .on("removed", (job: Job) => {
+      .on(BullEvent.REMOVED, (job: Job) => {
         // A job successfully removed.
-        console.log(`${eventType}=removed jobId=${job.id}`)
+        console.info(...getLogParams(eventType, BullEvent.REMOVED, { job }))
       })
   }
 }
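Since getLogParams returns a `[message, err, bullLog, automationLog]` tuple, every handler above spreads it into the console call. A minimal sketch of that pattern in isolation, assuming a bull `Job` named `job` is in scope (the result value is illustrative):

// equivalent to console.info(message, err, bullLog, automationLog)
console.info(
  ...getLogParams(
    QueueEventType.AUTOMATION_EVENT,
    BullEvent.COMPLETED,
    { job },
    { result: "ok" }
  )
)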
@@ -6,7 +6,8 @@ let userClient: Client,
   appClient: Client,
   cacheClient: Client,
   writethroughClient: Client,
-  lockClient: Client
+  lockClient: Client,
+  socketClient: Client
 
 async function init() {
   userClient = await new Client(utils.Databases.USER_CACHE).init()
@@ -14,9 +15,10 @@ async function init() {
   appClient = await new Client(utils.Databases.APP_METADATA).init()
   cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
   lockClient = await new Client(utils.Databases.LOCKS).init()
-  writethroughClient = await new Client(
-    utils.Databases.WRITE_THROUGH,
-    utils.SelectableDatabase.WRITE_THROUGH
+  writethroughClient = await new Client(utils.Databases.WRITE_THROUGH).init()
+  socketClient = await new Client(
+    utils.Databases.SOCKET_IO,
+    utils.SelectableDatabase.SOCKET_IO
   ).init()
 }
 
@@ -27,6 +29,7 @@ export async function shutdown() {
   if (cacheClient) await cacheClient.finish()
   if (writethroughClient) await writethroughClient.finish()
   if (lockClient) await lockClient.finish()
+  if (socketClient) await socketClient.finish()
 }
 
 process.on("exit", async () => {
@@ -74,3 +77,10 @@ export async function getLockClient() {
   }
   return lockClient
 }
+
+export async function getSocketClient() {
+  if (!socketClient) {
+    await init()
+  }
+  return socketClient
+}
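getSocketClient follows the same lazy-init pattern as getLockClient: the first caller triggers init(). A minimal usage sketch (the import path is an assumption):

import { getSocketClient } from "./redis/init" // path assumed

const socketClient = await getSocketClient()
// the wrapper exposes the raw ioredis connection when lower-level access is needed
const redis = socketClient.getClient()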
@@ -1,6 +1,15 @@
 import env from "../environment"
-// ioredis mock is all in memory
-const Redis = env.MOCK_REDIS ? require("ioredis-mock") : require("ioredis")
+import Redis from "ioredis"
+// mock-redis doesn't have any typing
+let MockRedis: any | undefined
+if (env.MOCK_REDIS) {
+  try {
+    // ioredis mock is all in memory
+    MockRedis = require("ioredis-mock")
+  } catch (err) {
+    console.log("Mock redis unavailable")
+  }
+}
 import {
   addDbPrefix,
   removeDbPrefix,
@@ -12,13 +21,13 @@ import * as timers from "../timers"
 
 const RETRY_PERIOD_MS = 2000
 const STARTUP_TIMEOUT_MS = 5000
-const CLUSTERED = false
+const CLUSTERED = env.REDIS_CLUSTERED
 const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
 
 // for testing just generate the client once
 let CLOSED = false
 let CLIENTS: { [key: number]: any } = {}
 let CONNECTED = false
 
 // mock redis always connected
@@ -55,6 +64,7 @@ function connectionError(
  * will return the ioredis client which will be ready to use.
  */
 function init(selectDb = DEFAULT_SELECT_DB) {
+  const RedisCore = env.MOCK_REDIS && MockRedis ? MockRedis : Redis
   let timeout: NodeJS.Timeout
   CLOSED = false
   let client = pickClient(selectDb)
@@ -64,7 +74,7 @@ function init(selectDb = DEFAULT_SELECT_DB) {
   }
   // testing uses a single in memory client
   if (env.MOCK_REDIS) {
-    CLIENTS[selectDb] = new Redis(getRedisOptions())
+    CLIENTS[selectDb] = new RedisCore(getRedisOptions())
   }
   // start the timer - only allowed 5 seconds to connect
   timeout = setTimeout(() => {
@@ -81,14 +91,14 @@ function init(selectDb = DEFAULT_SELECT_DB) {
   if (client) {
     client.disconnect()
   }
-  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
+  const { redisProtocolUrl, opts, host, port } = getRedisOptions()
 
   if (CLUSTERED) {
-    client = new Redis.Cluster([{ host, port }], opts)
+    client = new RedisCore.Cluster([{ host, port }], opts)
   } else if (redisProtocolUrl) {
-    client = new Redis(redisProtocolUrl)
+    client = new RedisCore(redisProtocolUrl)
   } else {
-    client = new Redis(opts)
+    client = new RedisCore(opts)
   }
   // attach handlers
   client.on("end", (err: Error) => {
@@ -183,6 +193,9 @@ class RedisWrapper {
     CLOSED = false
     init(this._select)
     await waitForConnection(this._select)
+    if (this._select && !env.isTest()) {
+      this.getClient().select(this._select)
+    }
     return this
   }
 
@@ -209,6 +222,11 @@ class RedisWrapper {
     return this.getClient().keys(addDbPrefix(db, pattern))
   }
 
+  async exists(key: string) {
+    const db = this._db
+    return await this.getClient().exists(addDbPrefix(db, key))
+  }
+
   async get(key: string) {
     const db = this._db
     let response = await this.getClient().get(addDbPrefix(db, key))
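The new exists() mirrors get() and keys() in prefixing the wrapper's database name onto the key, so callers pass bare keys. A short sketch against any initialised RedisWrapper (here the lock client; the key name is illustrative):

const client = await getLockClient()
if (await client.exists("migration-flag")) {
  const value = await client.get("migration-flag")
}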
@@ -4,10 +4,10 @@ import { LockOptions, LockType } from "@budibase/types"
 import * as context from "../context"
 import env from "../environment"
 
-const getClient = async (
+async function getClient(
   type: LockType,
   opts?: Redlock.Options
-): Promise<Redlock> => {
+): Promise<Redlock> {
   if (type === LockType.CUSTOM) {
     return newRedlock(opts)
   }
@@ -18,6 +18,9 @@ const getClient = async (
     case LockType.TRY_ONCE: {
       return newRedlock(OPTIONS.TRY_ONCE)
     }
+    case LockType.TRY_TWICE: {
+      return newRedlock(OPTIONS.TRY_TWICE)
+    }
     case LockType.DEFAULT: {
       return newRedlock(OPTIONS.DEFAULT)
     }
@@ -35,6 +38,9 @@ const OPTIONS = {
     // immediately throws an error if the lock is already held
     retryCount: 0,
   },
+  TRY_TWICE: {
+    retryCount: 1,
+  },
   TEST: {
     // higher retry count in unit tests
     // due to high contention.
@@ -62,7 +68,7 @@ const OPTIONS = {
   },
 }
 
-const newRedlock = async (opts: Redlock.Options = {}) => {
+export async function newRedlock(opts: Redlock.Options = {}) {
   let options = { ...OPTIONS.DEFAULT, ...opts }
   const redisWrapper = await getLockClient()
   const client = redisWrapper.getClient()
@@ -81,22 +87,26 @@ type RedlockExecution<T> =
   | SuccessfulRedlockExecution<T>
   | UnsuccessfulRedlockExecution
 
-export const doWithLock = async <T>(
+function getLockName(opts: LockOptions) {
+  // determine lock name
+  // by default use the tenantId for uniqueness, unless using a system lock
+  const prefix = opts.systemLock ? "system" : context.getTenantId()
+  let name: string = `lock:${prefix}_${opts.name}`
+  // add additional unique name if required
+  if (opts.resource) {
+    name = name + `_${opts.resource}`
+  }
+  return name
+}
+
+export async function doWithLock<T>(
   opts: LockOptions,
   task: () => Promise<T>
-): Promise<RedlockExecution<T>> => {
-  const redlock = await getClient(opts.type)
+): Promise<RedlockExecution<T>> {
+  const redlock = await getClient(opts.type, opts.customOptions)
   let lock
   try {
-    // determine lock name
-    // by default use the tenantId for uniqueness, unless using a system lock
-    const prefix = opts.systemLock ? "system" : context.getTenantId()
-    let name: string = `lock:${prefix}_${opts.name}`
-
-    // add additional unique name if required
-    if (opts.resource) {
-      name = name + `_${opts.resource}`
-    }
+    const name = getLockName(opts)
 
     // create the lock
     lock = await redlock.lock(name, opts.ttl)
@@ -112,7 +122,6 @@ export const doWithLock = async <T>(
     if (opts.type === LockType.TRY_ONCE) {
       // don't throw for try-once locks, they will always error
      // due to retry count (0) exceeded
-      console.warn(e)
       return { executed: false }
     } else {
       console.error(e)
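A sketch of calling the reworked doWithLock, using the TRY_ONCE behaviour shown above (a lock that is already held now returns `{ executed: false }` without a console.warn); the lock name and ttl are illustrative:

const result = await doWithLock(
  { type: LockType.TRY_ONCE, name: "app-migration", ttl: 30000 },
  async () => {
    // critical section runs only if the lock was acquired
    return "done"
  }
)
if (!result.executed) {
  // another holder already had the lock
}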
@@ -27,6 +27,7 @@ export enum Databases {
   GENERIC_CACHE = "data_cache",
   WRITE_THROUGH = "writeThrough",
   LOCKS = "locks",
+  SOCKET_IO = "socket_io",
 }
 
 /**
@@ -40,7 +41,7 @@ export enum Databases {
  */
 export enum SelectableDatabase {
   DEFAULT = 0,
-  WRITE_THROUGH = 1,
+  SOCKET_IO = 1,
   UNUSED_1 = 2,
   UNUSED_2 = 3,
   UNUSED_3 = 4,
@@ -57,7 +58,7 @@ export enum SelectableDatabase {
   UNUSED_14 = 15,
 }
 
-export function getRedisOptions(clustered = false) {
+export function getRedisOptions() {
   let password = env.REDIS_PASSWORD
   let url: string[] | string = env.REDIS_URL.split("//")
   // get rid of the protocol
@@ -83,7 +84,7 @@ export function getRedisOptions(clustered = false) {
   const opts: any = {
     connectTimeout: CONNECT_TIMEOUT_MS,
   }
-  if (clustered) {
+  if (env.REDIS_CLUSTERED) {
     opts.redisOptions = {}
     opts.redisOptions.tls = {}
     opts.redisOptions.password = password
@@ -94,7 +95,7 @@ export function getRedisOptions(clustered = false) {
     opts.port = port
     opts.password = password
   }
-  return { opts, host, port, redisProtocolUrl }
+  return { opts, host, port: parseInt(port), redisProtocolUrl }
 }
 
 export function addDbPrefix(db: string, key: string) {
@@ -1,12 +1,17 @@
 import crypto from "crypto"
+import fs from "fs"
+import zlib from "zlib"
 import env from "../environment"
+import { join } from "path"
 
 const ALGO = "aes-256-ctr"
 const SEPARATOR = "-"
 const ITERATIONS = 10000
-const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
 
+const SALT_LENGTH = 16
+const IV_LENGTH = 16
+
 export enum SecretOption {
   API = "api",
   ENCRYPTION = "encryption",
@@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
   return secret
 }
 
-function stretchString(string: string, salt: Buffer) {
-  return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
+function stretchString(secret: string, salt: Buffer) {
+  return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }
 
 export function encrypt(
   input: string,
   secretOption: SecretOption = SecretOption.API
 ) {
-  const salt = crypto.randomBytes(RANDOM_BYTES)
+  const salt = crypto.randomBytes(SALT_LENGTH)
   const stretched = stretchString(getSecret(secretOption), salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
@@ -60,3 +65,115 @@ export function decrypt(
   const final = decipher.final()
   return Buffer.concat([base, final]).toString()
 }
+
+export async function encryptFile(
+  { dir, filename }: { dir: string; filename: string },
+  secret: string
+) {
+  const outputFileName = `${filename}.enc`
+
+  const filePath = join(dir, filename)
+  const inputFile = fs.createReadStream(filePath)
+  const outputFile = fs.createWriteStream(join(dir, outputFileName))
+
+  const salt = crypto.randomBytes(SALT_LENGTH)
+  const iv = crypto.randomBytes(IV_LENGTH)
+  const stretched = stretchString(secret, salt)
+  const cipher = crypto.createCipheriv(ALGO, stretched, iv)
+
+  outputFile.write(salt)
+  outputFile.write(iv)
+
+  inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
+
+  return new Promise<{ filename: string; dir: string }>(r => {
+    outputFile.on("finish", () => {
+      r({
+        filename: outputFileName,
+        dir,
+      })
+    })
+  })
+}
+
+async function getSaltAndIV(path: string) {
+  const fileStream = fs.createReadStream(path)
+
+  const salt = await readBytes(fileStream, SALT_LENGTH)
+  const iv = await readBytes(fileStream, IV_LENGTH)
+  fileStream.close()
+  return { salt, iv }
+}
+
+export async function decryptFile(
+  inputPath: string,
+  outputPath: string,
+  secret: string
+) {
+  const { salt, iv } = await getSaltAndIV(inputPath)
+  const inputFile = fs.createReadStream(inputPath, {
+    start: SALT_LENGTH + IV_LENGTH,
+  })
+
+  const outputFile = fs.createWriteStream(outputPath)
+
+  const stretched = stretchString(secret, salt)
+  const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
+
+  const unzip = zlib.createGunzip()
+
+  inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
+
+  return new Promise<void>((res, rej) => {
+    outputFile.on("finish", () => {
+      outputFile.close()
+      res()
+    })
+
+    inputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    decipher.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    unzip.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    outputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+  })
+}
+
+function readBytes(stream: fs.ReadStream, length: number) {
+  return new Promise<Buffer>((resolve, reject) => {
+    let bytesRead = 0
+    const data: Buffer[] = []
+
+    stream.on("readable", () => {
+      let chunk
+
+      while ((chunk = stream.read(length - bytesRead)) !== null) {
+        data.push(chunk)
+        bytesRead += chunk.length
+      }
+
+      resolve(Buffer.concat(data))
+    })
+
+    stream.on("end", () => {
+      reject(new Error("Insufficient data in the stream."))
+    })
+
+    stream.on("error", error => {
+      reject(error)
+    })
+  })
+}
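A round-trip sketch through the new file helpers: encryptFile writes `<filename>.enc` next to the input with a salt-plus-IV header followed by gzipped ciphertext, and decryptFile reads that header back before deciphering. Paths and the secret are illustrative:

const secret = "example-secret"
const { dir, filename } = await encryptFile(
  { dir: "/tmp", filename: "backup.txt" },
  secret
)
// filename is now "backup.txt.enc"
await decryptFile(join(dir, filename), "/tmp/backup.roundtrip.txt", secret)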
@@ -1,5 +1,5 @@
 import { BuiltinPermissionID, PermissionLevel } from "./permissions"
-import { generateRoleID, getRoleParams, DocumentType, SEPARATOR } from "../db"
+import { prefixRoleID, getRoleParams, DocumentType, SEPARATOR } from "../db"
 import { getAppDB } from "../context"
 import { doWithDB } from "../db"
 import { Screen, Role as RoleDoc } from "@budibase/types"
@@ -25,18 +25,28 @@ const EXTERNAL_BUILTIN_ROLE_IDS = [
   BUILTIN_IDS.PUBLIC,
 ]
 
+export const RoleIDVersion = {
+  // original version, with a UUID based ID
+  UUID: undefined,
+  // new version - with name based ID
+  NAME: "name",
+}
+
 export class Role implements RoleDoc {
   _id: string
   _rev?: string
   name: string
   permissionId: string
   inherits?: string
+  version?: string
   permissions = {}
 
   constructor(id: string, name: string, permissionId: string) {
     this._id = id
     this.name = name
     this.permissionId = permissionId
+    // version for managing the ID - removing the role_ when responding
+    this.version = RoleIDVersion.NAME
   }
 
   addInheritance(inherits: string) {
@@ -140,9 +150,13 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
  * Gets the role object, this is mainly useful for two purposes, to check if the level exists and
  * to check if the role inherits any others.
  * @param {string|null} roleId The level ID to lookup.
+ * @param {object|null} opts options for the function, like whether to halt errors, instead return public.
  * @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
  */
-export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
+export async function getRole(
+  roleId?: string,
+  opts?: { defaultPublic?: boolean }
+): Promise<RoleDoc | undefined> {
   if (!roleId) {
     return undefined
   }
@@ -153,14 +167,20 @@ export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
     role = cloneDeep(
       Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
     )
+  } else {
+    // make sure has the prefix (if it has it then it won't be added)
+    roleId = prefixRoleID(roleId)
   }
   try {
     const db = getAppDB()
     const dbRole = await db.get(getDBRoleID(roleId))
     role = Object.assign(role, dbRole)
     // finalise the ID
-    role._id = getExternalRoleID(role._id)
+    role._id = getExternalRoleID(role._id, role.version)
   } catch (err) {
+    if (!isBuiltin(roleId) && opts?.defaultPublic) {
+      return cloneDeep(BUILTIN_ROLES.PUBLIC)
+    }
     // only throw an error if there is no role at all
     if (Object.keys(role).length === 0) {
       throw err
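With the new opts parameter, a missing custom role can fall back to the builtin public role instead of throwing. A one-line sketch (the role ID is illustrative):

// returns BUILTIN_ROLES.PUBLIC if "sales_rep" is neither builtin nor in the app DB
const role = await getRole("sales_rep", { defaultPublic: true })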
@@ -254,6 +274,9 @@ export async function getAllRoles(appId?: string) {
       })
     )
     roles = body.rows.map((row: any) => row.doc)
+    roles.forEach(
+      role => (role._id = getExternalRoleID(role._id!, role.version))
+    )
   }
   const builtinRoles = getBuiltinRoles()
 
@@ -261,14 +284,15 @@ export async function getAllRoles(appId?: string) {
   for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {
     const builtinRole = builtinRoles[builtinRoleId]
     const dbBuiltin = roles.filter(
-      dbRole => getExternalRoleID(dbRole._id) === builtinRoleId
+      dbRole =>
+        getExternalRoleID(dbRole._id!, dbRole.version) === builtinRoleId
     )[0]
     if (dbBuiltin == null) {
       roles.push(builtinRole || builtinRoles.BASIC)
     } else {
       // remove role and all back after combining with the builtin
       roles = roles.filter(role => role._id !== dbBuiltin._id)
-      dbBuiltin._id = getExternalRoleID(dbBuiltin._id)
+      dbBuiltin._id = getExternalRoleID(dbBuiltin._id!, dbBuiltin.version)
       roles.push(Object.assign(builtinRole, dbBuiltin))
     }
   }
@@ -374,19 +398,22 @@ export class AccessController {
 /**
  * Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
  */
-export function getDBRoleID(roleId?: string) {
-  if (roleId?.startsWith(DocumentType.ROLE)) {
-    return roleId
+export function getDBRoleID(roleName: string) {
+  if (roleName?.startsWith(DocumentType.ROLE)) {
+    return roleName
   }
-  return generateRoleID(roleId)
+  return prefixRoleID(roleName)
 }
 
 /**
  * Remove the "role_" from builtin role IDs that have been written to the DB (for permissions).
  */
-export function getExternalRoleID(roleId?: string) {
+export function getExternalRoleID(roleId: string, version?: string) {
   // for built-in roles we want to remove the DB role ID element (role_)
-  if (roleId?.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
+  if (
+    (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) ||
+    version === RoleIDVersion.NAME
+  ) {
     return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
   }
   return roleId
@@ -67,9 +67,9 @@ export const bulkUpdateGlobalUsers = async (users: User[]) => {
 
 export async function getById(id: string, opts?: GetOpts): Promise<User> {
   const db = context.getGlobalDB()
-  let user = await db.get(id)
+  let user = await db.get<User>(id)
   if (opts?.cleanup) {
-    user = removeUserPassword(user)
+    user = removeUserPassword(user) as User
   }
   return user
 }
@@ -5,6 +5,7 @@ import * as db from "../../db"
 import { Header } from "../../constants"
 import { newid } from "../../utils"
 import env from "../../environment"
+import { BBContext } from "@budibase/types"
 
 describe("utils", () => {
   const config = new DBTestConfiguration()
@@ -106,4 +107,85 @@ describe("utils", () => {
       expect(actual).toBe(undefined)
     })
   })
+
+  describe("isServingBuilder", () => {
+    let ctx: BBContext
+
+    const expectResult = (result: boolean) =>
+      expect(utils.isServingBuilder(ctx)).toBe(result)
+
+    beforeEach(() => {
+      ctx = structures.koa.newContext()
+    })
+
+    it("returns true if current path is in builder", async () => {
+      ctx.path = "/builder/app/app_"
+      expectResult(true)
+    })
+
+    it("returns false if current path doesn't have '/' suffix", async () => {
+      ctx.path = "/builder/app"
+      expectResult(false)
+
+      ctx.path = "/xx"
+      expectResult(false)
+    })
+  })
+
+  describe("isServingBuilderPreview", () => {
+    let ctx: BBContext
+
+    const expectResult = (result: boolean) =>
+      expect(utils.isServingBuilderPreview(ctx)).toBe(result)
+
+    beforeEach(() => {
+      ctx = structures.koa.newContext()
+    })
+
+    it("returns true if current path is in builder preview", async () => {
+      ctx.path = "/app/preview/xx"
+      expectResult(true)
+    })
+
+    it("returns false if current path is not in builder preview", async () => {
+      ctx.path = "/builder"
+      expectResult(false)
+
+      ctx.path = "/xx"
+      expectResult(false)
+    })
+  })
+
+  describe("isPublicAPIRequest", () => {
+    let ctx: BBContext
+
+    const expectResult = (result: boolean) =>
+      expect(utils.isPublicApiRequest(ctx)).toBe(result)
+
+    beforeEach(() => {
+      ctx = structures.koa.newContext()
+    })
+
+    it("returns true if current path remains to public API", async () => {
+      ctx.path = "/api/public/v1/invoices"
+      expectResult(true)
+
+      ctx.path = "/api/public/v1"
+      expectResult(true)
+
+      ctx.path = "/api/public/v2"
+      expectResult(true)
+
+      ctx.path = "/api/public/v21"
+      expectResult(true)
+    })
+
+    it("returns false if current path doesn't remain to public API", async () => {
+      ctx.path = "/api/public"
+      expectResult(false)
+
+      ctx.path = "/xx"
+      expectResult(false)
+    })
+  })
 })
@@ -1,11 +1,5 @@
-import { getAllApps, queryGlobalView } from "../db"
-import {
-  Header,
-  MAX_VALID_DATE,
-  DocumentType,
-  SEPARATOR,
-  ViewName,
-} from "../constants"
+import { getAllApps } from "../db"
+import { Header, MAX_VALID_DATE, DocumentType, SEPARATOR } from "../constants"
 import env from "../environment"
 import * as tenancy from "../tenancy"
 import * as context from "../context"
@@ -23,7 +17,9 @@ const APP_PREFIX = DocumentType.APP + SEPARATOR
 const PROD_APP_PREFIX = "/app/"
 
 const BUILDER_PREVIEW_PATH = "/app/preview"
-const BUILDER_REFERER_PREFIX = "/builder/app/"
+const BUILDER_PREFIX = "/builder"
+const BUILDER_APP_PREFIX = `${BUILDER_PREFIX}/app/`
+const PUBLIC_API_PREFIX = "/api/public/v"
 
 function confirmAppId(possibleAppId: string | undefined) {
   return possibleAppId && possibleAppId.startsWith(APP_PREFIX)
@@ -69,6 +65,18 @@ export function isServingApp(ctx: Ctx) {
   return false
 }
 
+export function isServingBuilder(ctx: Ctx): boolean {
+  return ctx.path.startsWith(BUILDER_APP_PREFIX)
+}
+
+export function isServingBuilderPreview(ctx: Ctx): boolean {
+  return ctx.path.startsWith(BUILDER_PREVIEW_PATH)
+}
+
+export function isPublicApiRequest(ctx: Ctx): boolean {
+  return ctx.path.startsWith(PUBLIC_API_PREFIX)
+}
+
 /**
  * Given a request tries to find the appId, which can be located in various places
  * @param {object} ctx The main request body to look through.
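A sketch of the three new predicates in a Koa middleware, matching the prefixes defined above (ctx is a Koa context):

if (isServingBuilder(ctx)) {
  // "/builder/app/..." requests
} else if (isServingBuilderPreview(ctx)) {
  // "/app/preview..." requests
} else if (isPublicApiRequest(ctx)) {
  // "/api/public/v..." requests, any version suffix
}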
@@ -110,7 +118,7 @@ export async function getAppIdFromCtx(ctx: Ctx) {
   // make sure this is performed after prod app url resolution, in case the
   // referer header is present from a builder redirect
   const referer = ctx.request.headers.referer
-  if (!appId && referer?.includes(BUILDER_REFERER_PREFIX)) {
+  if (!appId && referer?.includes(BUILDER_APP_PREFIX)) {
     const refererId = parseAppIdFromUrl(ctx.request.headers.referer)
     appId = confirmAppId(refererId)
   }
@@ -123,7 +123,6 @@ beforeAll(async () => {
   jest.spyOn(events.plugin, "imported")
   jest.spyOn(events.plugin, "deleted")
 
-  jest.spyOn(events.license, "tierChanged")
   jest.spyOn(events.license, "planChanged")
   jest.spyOn(events.license, "activated")
   jest.spyOn(events.license, "checkoutOpened")
@@ -90,6 +90,10 @@ export const useScimIntegration = () => {
   return useFeature(Feature.SCIM)
 }
 
+export const useSyncAutomations = () => {
+  return useFeature(Feature.SYNC_AUTOMATIONS)
+}
+
 // QUOTAS
 
 export const setAutomationLogsQuota = (value: number) => {
@@ -13,7 +13,7 @@ import {
 } from "@budibase/types"
 import _ from "lodash"
 
-export const account = (): Account => {
+export const account = (partial: Partial<Account> = {}): Account => {
   return {
     accountId: uuid(),
     tenantId: generator.word(),
@@ -29,6 +29,7 @@ export const account = (): Account => {
     size: "10+",
     profession: "Software Engineer",
     quotaUsage: quotas.usage(),
+    ...partial,
   }
 }
 
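The new Partial<Account> parameter lets tests override individual fields while keeping the generated defaults, for example (values illustrative):

const testAccount = account({ tenantId: "tenant-123", profession: "QA" })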
@@ -1,4 +1,4 @@
-import { structures } from ".."
+import { generator } from "./generator"
 import { newid } from "../../../../src/docIds/newid"
 
 export function id() {
@@ -6,7 +6,7 @@ export function id() {
 }
 
 export function rev() {
-  return `${structures.generator.character({
+  return `${generator.character({
     numeric: true,
-  })}-${structures.uuid().replace(/-/, "")}`
+  })}-${generator.guid().replace(/-/, "")}`
 }
@@ -0,0 +1 @@
+export * from "./platform"
@@ -0,0 +1 @@
+export * as installation from "./installation"
@@ -0,0 +1,12 @@
+import { generator } from "../../generator"
+import { Installation } from "@budibase/types"
+import * as db from "../../db"
+
+export function install(): Installation {
+  return {
+    _id: "install",
+    _rev: db.rev(),
+    installId: generator.guid(),
+    version: generator.string(),
+  }
+}
@@ -2,6 +2,7 @@ export * from "./common"
 export * as accounts from "./accounts"
 export * as apps from "./apps"
 export * as db from "./db"
+export * as docs from "./documents"
 export * as koa from "./koa"
 export * as licenses from "./licenses"
 export * as plugins from "./plugins"
@@ -3,20 +3,36 @@ import {
   Customer,
   Feature,
   License,
+  OfflineIdentifier,
+  OfflineLicense,
   PlanModel,
   PlanType,
   PriceDuration,
   PurchasedPlan,
+  PurchasedPrice,
   Quotas,
   Subscription,
 } from "@budibase/types"
 import { generator } from "./generator"
 
+export function price(): PurchasedPrice {
+  return {
+    amount: 10000,
+    amountMonthly: 10000,
+    currency: "usd",
+    duration: PriceDuration.MONTHLY,
+    priceId: "price_123",
+    dayPasses: undefined,
+    isPerUser: true,
+  }
+}
+
 export const plan = (type: PlanType = PlanType.FREE): PurchasedPlan => {
   return {
     type,
+    usesInvoicing: false,
     minUsers: 1,
     model: PlanModel.PER_USER,
+    price: type !== PlanType.FREE ? price() : undefined,
   }
 }
@@ -114,15 +130,15 @@ export function subscription(): Subscription {
   }
 }
 
-export const license = (
-  opts: {
-    quotas?: Quotas
-    plan?: PurchasedPlan
-    planType?: PlanType
-    features?: Feature[]
-    billing?: Billing
-  } = {}
-): License => {
+interface GenerateLicenseOpts {
+  quotas?: Quotas
+  plan?: PurchasedPlan
+  planType?: PlanType
+  features?: Feature[]
+  billing?: Billing
+}
+
+export const license = (opts: GenerateLicenseOpts = {}): License => {
   return {
     features: opts.features || [],
     quotas: opts.quotas || quotas(),
@@ -130,3 +146,22 @@ export const license = (
     billing: opts.billing || billing(),
   }
 }
+
+export function offlineLicense(opts: GenerateLicenseOpts = {}): OfflineLicense {
+  const base = license(opts)
+  return {
+    ...base,
+    expireAt: new Date().toISOString(),
+    identifier: offlineIdentifier(),
+  }
+}
+
+export function offlineIdentifier(
+  installId: string = generator.guid(),
+  tenantId: string = generator.guid()
+): OfflineIdentifier {
+  return {
+    installId,
+    tenantId,
+  }
+}
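A sketch of the new offline-license structure: it is a normal generated license plus an expiry timestamp and an install/tenant identifier (the feature override is illustrative):

const lic = offlineLicense({ features: [Feature.SCIM] })
// lic.expireAt is an ISO timestamp; lic.identifier holds fresh installId/tenantId guids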
@@ -7,11 +7,6 @@
       "@budibase/types": ["../types/src"]
     }
   },
-  "references": [
-    { "path": "../types" }
-  ],
-  "exclude": [
-    "node_modules",
-    "dist",
-  ]
-}
+  "exclude": ["node_modules", "dist"]
+}
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "2.5.6-alpha.37",
+  "version": "0.0.0",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -38,8 +38,8 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "1.2.1",
-    "@budibase/shared-core": "2.5.6-alpha.37",
-    "@budibase/string-templates": "2.5.6-alpha.37",
+    "@budibase/shared-core": "0.0.0",
+    "@budibase/string-templates": "0.0.0",
     "@spectrum-css/accordion": "3.0.24",
     "@spectrum-css/actionbutton": "1.0.1",
     "@spectrum-css/actiongroup": "1.0.1",
@@ -84,11 +84,25 @@
     "@spectrum-css/vars": "3.0.1",
     "dayjs": "^1.10.4",
     "easymde": "^2.16.1",
-    "svelte-flatpickr": "^3.3.2",
+    "svelte-flatpickr": "3.2.3",
     "svelte-portal": "^1.0.0"
   },
   "resolutions": {
     "loader-utils": "1.4.1"
   },
+  "nx": {
+    "targets": {
+      "build": {
+        "dependsOn": [
+          {
+            "projects": [
+              "@budibase/string-templates"
+            ],
+            "target": "build"
+          }
+        ]
+      }
+    }
+  },
   "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
 }
@@ -102,7 +102,9 @@
     margin-left: 0;
     transition: color ease-out 130ms;
   }
-  .is-selected:not(.spectrum-ActionButton--emphasized):not(.spectrum-ActionButton--quiet) {
+  .is-selected:not(.spectrum-ActionButton--emphasized):not(
+      .spectrum-ActionButton--quiet
+    ) {
     background: var(--spectrum-global-color-gray-300);
     border-color: var(--spectrum-global-color-gray-500);
   }
@@ -1,4 +1,8 @@
-const ignoredClasses = [".flatpickr-calendar", ".spectrum-Popover"]
+const ignoredClasses = [
+  ".flatpickr-calendar",
+  ".spectrum-Popover",
+  ".download-js-link",
+]
 let clickHandlers = []
 
 /**
@@ -22,8 +26,8 @@ const handleClick = event => {
   }
 
   // Ignore clicks for modals, unless the handler is registered from a modal
-  const sourceInModal = handler.anchor.closest(".spectrum-Modal") != null
-  const clickInModal = event.target.closest(".spectrum-Modal") != null
+  const sourceInModal = handler.anchor.closest(".spectrum-Underlay") != null
+  const clickInModal = event.target.closest(".spectrum-Underlay") != null
   if (clickInModal && !sourceInModal) {
     return
   }
@@ -56,6 +56,8 @@ export default function positionDropdown(element, opts) {
     styles.left = anchorBounds.left + anchorBounds.width - elementBounds.width
   } else if (align === "right-outside") {
     styles.left = anchorBounds.right + offset
+  } else if (align === "left-outside") {
+    styles.left = anchorBounds.left - elementBounds.width - offset
   } else {
     styles.left = anchorBounds.left
   }
@@ -13,10 +13,12 @@
   export let url = ""
   export let disabled = false
   export let initials = "JD"
+  export let color = null
 
   const DefaultColor = "#3aab87"
 
-  $: color = getColor(initials)
+  $: avatarColor = color || getColor(initials)
+  $: style = getStyle(size, avatarColor)
 
   const getColor = initials => {
     if (!initials?.length) {
@@ -26,6 +28,12 @@
     const hue = ((code % 26) / 26) * 360
     return `hsl(${hue}, 50%, 50%)`
   }
+
+  const getStyle = (sizeKey, color) => {
+    const size = `var(${sizes.get(sizeKey)})`
+    const fontSize = `calc(${size} / 2)`
+    return `width:${size}; height:${size}; font-size:${fontSize}; background:${color};`
+  }
 </script>
 
 {#if url}
@@ -37,13 +45,7 @@
     style="width: var({sizes.get(size)}); height: var({sizes.get(size)});"
   />
 {:else}
-  <div
-    class="spectrum-Avatar"
-    class:is-disabled={disabled}
-    style="width: var({sizes.get(size)}); height: var({sizes.get(
-      size
-    )}); font-size: calc(var({sizes.get(size)}) / 2); background: {color};"
-  >
+  <div class="spectrum-Avatar" class:is-disabled={disabled} {style}>
     {initials || ""}
   </div>
 {/if}
@@ -1,7 +1,9 @@
 <script>
   import "@spectrum-css/button/dist/index-vars.css"
-  import Tooltip from "../Tooltip/Tooltip.svelte"
+  import AbsTooltip from "../Tooltip/AbsTooltip.svelte"
+  import { createEventDispatcher } from "svelte"
 
+  export let type
   export let disabled = false
   export let size = "M"
   export let cta = false
@@ -16,63 +18,52 @@
   export let newStyles = true
   export let id
 
-  let showTooltip = false
+  const dispatch = createEventDispatcher()
 </script>
 
-<button
-  {id}
-  class:spectrum-Button--cta={cta}
-  class:spectrum-Button--primary={primary}
-  class:spectrum-Button--secondary={secondary}
-  class:spectrum-Button--warning={warning}
-  class:spectrum-Button--overBackground={overBackground}
-  class:spectrum-Button--quiet={quiet}
-  class:new-styles={newStyles}
-  class:active
-  class:disabled
-  class="spectrum-Button spectrum-Button--size{size.toUpperCase()}"
-  {disabled}
-  on:click|preventDefault
-  on:mouseover={() => (showTooltip = true)}
-  on:focus={() => (showTooltip = true)}
-  on:mouseleave={() => (showTooltip = false)}
->
-  {#if icon}
-    <svg
-      class="spectrum-Icon spectrum-Icon--size{size.toUpperCase()}"
-      focusable="false"
-      aria-hidden="true"
-      aria-label={icon}
-    >
-      <use xlink:href="#spectrum-icon-18-{icon}" />
-    </svg>
-  {/if}
-  {#if $$slots}
-    <span class="spectrum-Button-label"><slot /></span>
-  {/if}
-  {#if !disabled && tooltip}
-    <div class="tooltip-icon">
+<AbsTooltip text={tooltip}>
+  <button
+    {id}
+    {type}
+    class:spectrum-Button--cta={cta}
+    class:spectrum-Button--primary={primary}
+    class:spectrum-Button--secondary={secondary}
+    class:spectrum-Button--warning={warning}
+    class:spectrum-Button--overBackground={overBackground}
+    class:spectrum-Button--quiet={quiet}
+    class:new-styles={newStyles}
+    class:active
+    class:is-disabled={disabled}
+    class="spectrum-Button spectrum-Button--size{size.toUpperCase()}"
+    on:click|preventDefault={() => {
+      if (!disabled) {
+        dispatch("click")
+      }
+    }}
+  >
+    {#if icon}
       <svg
         class="spectrum-Icon spectrum-Icon--size{size.toUpperCase()}"
         focusable="false"
         aria-hidden="true"
-        aria-label="Info"
+        aria-label={icon}
       >
-        <use xlink:href="#spectrum-icon-18-InfoOutline" />
+        <use xlink:href="#spectrum-icon-18-{icon}" />
       </svg>
-    </div>
-  {/if}
-  {#if showTooltip && tooltip}
-    <div class="tooltip">
-      <Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
-    </div>
-  {/if}
-</button>
+    {/if}
+    {#if $$slots}
+      <span class="spectrum-Button-label"><slot /></span>
+    {/if}
+  </button>
+</AbsTooltip>
 
 <style>
   button {
     position: relative;
   }
+  button.is-disabled {
+    cursor: default;
+  }
   .spectrum-Button-label {
     white-space: nowrap;
     overflow: hidden;
@@ -81,21 +72,6 @@
   .active {
     color: var(--spectrum-global-color-blue-600) !important;
   }
-  .tooltip {
-    position: absolute;
-    display: flex;
-    justify-content: center;
-    z-index: 100;
-    width: 160px;
-    text-align: center;
-    transform: translateX(-50%);
-    left: 50%;
-    top: calc(100% - 3px);
-  }
-  .tooltip-icon {
-    padding-left: var(--spacing-m);
-    line-height: 0;
-  }
   .spectrum-Button--primary.new-styles {
     background: var(--spectrum-global-color-gray-800);
     border-color: transparent;
@@ -109,10 +85,10 @@
     border-color: transparent;
     color: var(--spectrum-global-color-gray-900);
   }
-  .spectrum-Button--secondary.new-styles:not(.disabled):hover {
+  .spectrum-Button--secondary.new-styles:not(.is-disabled):hover {
     background: var(--spectrum-global-color-gray-300);
   }
-  .spectrum-Button--secondary.new-styles.disabled {
+  .spectrum-Button--secondary.new-styles.is-disabled {
    color: var(--spectrum-global-color-gray-500);
   }
 </style>
Some files were not shown because too many files have changed in this diff.