Merge branch 'master' of github.com:Budibase/budibase into fix/budi-7827

This commit is contained in:
mike12345567 2023-12-12 15:00:27 +00:00
commit b2b51a03f8
13 changed files with 168 additions and 318 deletions


@ -1,194 +0,0 @@
{
"files": [
"README.md"
],
"imageSize": 100,
"commit": false,
"contributors": [
{
"login": "shogunpurple",
"name": "Martin McKeaveney",
"avatar_url": "https://avatars1.githubusercontent.com/u/11256663?v=4",
"profile": "http://martinmck.com",
"contributions": [
"code",
"doc",
"test",
"infra"
]
},
{
"login": "mike12345567",
"name": "Michael Drury",
"avatar_url": "https://avatars2.githubusercontent.com/u/4407001?v=4",
"profile": "http://www.michaeldrury.co.uk/",
"contributions": [
"doc",
"code",
"test",
"infra"
]
},
{
"login": "aptkingston",
"name": "Andrew Kingston",
"avatar_url": "https://avatars3.githubusercontent.com/u/9075550?v=4",
"profile": "https://github.com/aptkingston",
"contributions": [
"doc",
"code",
"test",
"design"
]
},
{
"login": "mjashanks",
"name": "Michael Shanks",
"avatar_url": "https://avatars3.githubusercontent.com/u/3524181?v=4",
"profile": "https://budibase.com/",
"contributions": [
"doc",
"code",
"test"
]
},
{
"login": "kevmodrome",
"name": "Kevin Åberg Kultalahti",
"avatar_url": "https://avatars3.githubusercontent.com/u/534488?v=4",
"profile": "https://github.com/kevmodrome",
"contributions": [
"doc",
"code",
"test"
]
},
{
"login": "joebudi",
"name": "Joe",
"avatar_url": "https://avatars2.githubusercontent.com/u/49767913?v=4",
"profile": "https://www.budibase.com/",
"contributions": [
"doc",
"code",
"content",
"design"
]
},
{
"login": "Rory-Powell",
"name": "Rory Powell",
"avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
"profile": "https://github.com/Rory-Powell",
"contributions": [
"code",
"doc",
"test"
]
},
{
"login": "PClmnt",
"name": "Peter Clement",
"avatar_url": "https://avatars.githubusercontent.com/u/5665926?v=4",
"profile": "https://github.com/PClmnt",
"contributions": [
"code",
"doc",
"test"
]
},
{
"login": "Conor-Mack",
"name": "Conor_Mack",
"avatar_url": "https://avatars1.githubusercontent.com/u/36074859?v=4",
"profile": "https://github.com/Conor-Mack",
"contributions": [
"code",
"test"
]
},
{
"login": "pngwn",
"name": "pngwn",
"avatar_url": "https://avatars1.githubusercontent.com/u/12937446?v=4",
"profile": "https://github.com/pngwn",
"contributions": [
"code",
"test"
]
},
{
"login": "HugoLd",
"name": "HugoLd",
"avatar_url": "https://avatars0.githubusercontent.com/u/26521848?v=4",
"profile": "https://github.com/HugoLd",
"contributions": [
"code"
]
},
{
"login": "victoriasloan",
"name": "victoriasloan",
"avatar_url": "https://avatars.githubusercontent.com/u/9913651?v=4",
"profile": "https://github.com/victoriasloan",
"contributions": [
"code"
]
},
{
"login": "yashank09",
"name": "yashank09",
"avatar_url": "https://avatars.githubusercontent.com/u/37672190?v=4",
"profile": "https://github.com/yashank09",
"contributions": [
"code"
]
},
{
"login": "SOVLOOKUP",
"name": "SOVLOOKUP",
"avatar_url": "https://avatars.githubusercontent.com/u/53158137?v=4",
"profile": "https://github.com/SOVLOOKUP",
"contributions": [
"code"
]
},
{
"login": "seoulaja",
"name": "seoulaja",
"avatar_url": "https://avatars.githubusercontent.com/u/15101654?v=4",
"profile": "https://github.com/seoulaja",
"contributions": [
"translation"
]
},
{
"login": "mslourens",
"name": "Maurits Lourens",
"avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",
"profile": "https://github.com/mslourens",
"contributions": [
"test",
"code"
]
},
{
"login": "Rory-Powell",
"name": "Rory Powell",
"avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
"profile": "https://github.com/Rory-Powell",
"contributions": [
"infra",
"test",
"code"
]
}
],
"contributorsPerLine": 7,
"projectName": "budibase",
"projectOwner": "Budibase",
"repoType": "github",
"repoHost": "https://github.com",
"skipCi": true,
"commitConvention": "none"
}


@ -1,139 +1,45 @@
# Budibase CI Pipelines
Welcome to the budibase CI pipelines directory. This document details what each of the CI pipelines are for, and come common combinations.
Welcome to the Budibase CI pipelines directory. This document details what each of the CI pipelines is for, and some common combinations.
## All CI Pipelines
### Note
- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs. The exception to this case is the `deploy-release` job which requires the develop branch.
### Standard CI Build Job (budibase_ci.yml)
Triggers:
- PR or push to develop
- PR or push to master
The standard CI Build job is what runs when you raise a PR to develop or master.
The standard CI Build job is what runs when you raise a PR to master.
- Installs all dependencies
- Builds the project
- Runs the unit tests
- Generates test coverage metrics with Codecov
- Runs the integration tests
- Checks that the pro and account portal submodules are pointing to the latest master head
### Release Develop Job (release-develop.yml)
### Release Job (tag-release.yml)
Triggers:
- Push to develop
- Manually triggered
The job responsible for building, tagging and pushing docker images out to the test and release environments.
This job is responsible for building and pushing all the production services, packages and images. This is done via [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml).
- Installs all dependencies
- builds the project
- run the unit tests
- publish the budibase JS packages under a prerelease tag to NPM
- build, tag and push docker images under the `develop` tag to docker hub
An input is required, indicating if the new version will be a `patch`, `minor` or `major` bump.
These images will then be pulled by the test and release environments, which update to the latest version automatically. Discord notifications are sent to the #infra channel when this occurs.
### Release Job (release.yml)
Triggers:
- Push to master
This job is responsible for building and pushing the latest code to NPM and docker hub, so that it can be deployed.
- Installs all dependencies
- builds the project
- run the unit tests
- publish the budibase JS packages under a release tag to NPM (always incremented by patch versions)
- build, tag and push docker images under the `v.x.x.x` (the tag of the NPM release) tag to docker hub
### Release Selfhost Job (release-selfhost.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for delivering the latest version of budibase to those that are self-hosting.
This job relies on the release job to have run first, so the latest image is pushed to dockerhub. This job then will pull the latest version from `lerna.json` and try to find an image in dockerhub corresponding to that version. For example, if the version in `lerna.json` is `1.0.0`:
- Pull the images for all budibase services tagged `v1.0.0` from dockerhub
- Tag these images as `latest`
- Push them back to dockerhub. This now means anyone who pulls `latest` (self hosters using docker-compose) will get the latest version.
- Build and release the budibase helm chart for kubernetes users
- Perform a github release with the latest version. You can see previous releases here (https://github.com/Budibase/budibase/releases)
### Deploy Release (deploy-release.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our release, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:
- Checks out the release branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preproduction EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
### Deploy Preprod (deploy-preprod.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our preprod, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:
- Checks out the master branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preprod EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
### Deploy Production (deploy-cloud.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our production, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. You can also manually enter a version number for this job, so you can perform rollbacks or upgrade to a specific version. After kicking off this job, the following will occur:
- Checks out the master branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our production EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
More documentation can be found here: https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release
## Common Workflows
### Deploy Changes to Production (Release)
- Merge `develop` into `master`
- Wait for budibase CI job and release job to run
- Run cloud deploy job
- Run release selfhost job
### Deploy Changes to Production (Hotfix)
- Branch off `master`
- Perform your hotfix
- Merge back into `master`
- Wait for budibase CI job and release job to run
- Run cloud deploy job
- Run release selfhost job
- Merge your changes into `master`
- Run `tag-release.yml`
- Check the progress in [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml)
### Rollback A Bad Cloud Deployment
- Kick off cloud deploy job
- Ensure you are running off master
- Enter the version number of the last known good version of budibase. For example `1.0.0`
Rollback documentation can be found here:
https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release#Rollback


@ -246,7 +246,7 @@ jobs:
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md')
process.exit(1);
} else {
console.log('All good, the submodule had been merged and setup correctly!')

.github/workflows/force-release.yml (new file, 46 lines)

@ -0,0 +1,46 @@
name: Forced release
concurrency:
group: tag-release
cancel-in-progress: false
on:
workflow_dispatch:
jobs:
ensure-is-master-tag:
name: Ensure is a master tag
runs-on: qa-arc-runner-set
steps:
- name: Checkout monorepo
uses: actions/checkout@v4
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-tags: true
fetch-depth: 0
- name: Fail if ref is not a tag
run: |
if ! git show-ref -q --verify "refs/tags/${{ github.ref_name }}" 2>/dev/null; then
echo "'${{ github.ref_name }}' is not a valid tag."
exit 1
fi
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.ref_name }} origin/master; then
echo "Tag is not in master. Release can only execute tags that are present on the master branch"
exit 1
fi
trigger-release:
needs: [ensure-is-master-tag]
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event-type: release-prod
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"TAG": "${{ github.ref_name }}"
}


@ -84,7 +84,7 @@ Component libraries are collections of components as well as the definition of t
- If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
- Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.
- Once your work is completed, please raise a PR against the `master` branch with some information about what has changed and why.
### Getting Started For Contributors
@ -246,7 +246,7 @@ From here - to develop a change in pro, you can follow the below flow:
cd packages/pro
# get the base branch you are working from (same as monorepo)
git fetch
git checkout <develop | master>
git checkout master
# create a branch, named the same as the branch in your monorepo
git checkout -b <some branch>
... make changes


@ -1,5 +1,5 @@
{
"version": "2.13.36",
"version": "2.13.37",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit 62c4ed384670406576a2d52ce8dafd683cc0a571
Subproject commit a0b13270c36dd188e2a953d026b4560a1208008e


@ -260,12 +260,12 @@ export async function listAllObjects(bucketName: string, path: string) {
}
/**
* Generate a presigned url with a default TTL of 36 hours
* Generate a presigned url with a default TTL of 1 hour
*/
export function getPresignedUrl(
bucketName: string,
key: string,
durationSeconds: number = 129600
durationSeconds: number = 3600
) {
const objectStore = ObjectStore(bucketName, { presigning: true })
const params = {
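For context, a minimal sketch of what the shorter default means for callers. The bucket and key names and the import path are illustrative assumptions; only the `getPresignedUrl` signature comes from the diff above.

```ts
import { objectStore } from "@budibase/backend-core" // assumed import path

// Uses the new default TTL of 1 hour (3600 seconds).
const shortLived = objectStore.getPresignedUrl("app-assets", "app_123/logo.png")

// Callers that still need the old 36 hour window (129600 seconds)
// must now ask for it explicitly.
const longLived = objectStore.getPresignedUrl(
  "app-assets",
  "app_123/logo.png",
  129600
)
```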


@ -39,7 +39,7 @@ describe("Google Sheets Integration", () => {
let cleanupEnv: () => void
beforeAll(() => {
cleanupEnv = config.setEnv({
cleanupEnv = config.setCoreEnv({
GOOGLE_CLIENT_ID: "test",
GOOGLE_CLIENT_SECRET: "test",
})


@ -218,20 +218,45 @@ class TestConfiguration {
*/
setEnv(newEnvVars: Partial<typeof env>): () => void {
const oldEnv = cloneDeep(env)
const oldCoreEnv = cloneDeep(coreEnv)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {
env._set(key, newEnvVars[key])
coreEnv._set(key, newEnvVars[key])
}
return () => {
for (const [key, value] of Object.entries(oldEnv)) {
env._set(key, value)
}
}
}
for (const [key, value] of Object.entries(oldCoreEnv)) {
coreEnv._set(key, value)
}
async withCoreEnv(
newEnvVars: Partial<typeof coreEnv>,
f: () => Promise<void>
) {
let cleanup = this.setCoreEnv(newEnvVars)
try {
await f()
} finally {
cleanup()
}
}
/*
* Sets the environment variables to the given values and returns a function
* that can be called to reset the environment variables to their original values.
*/
setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
const oldEnv = cloneDeep(coreEnv)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {
coreEnv._set(key, newEnvVars[key])
}
return () => {
for (const [key, value] of Object.entries(oldEnv)) {
coreEnv._set(key, value)
}
}
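A short usage sketch of the split helpers, based on the signatures above; the env var names are taken from the Google Sheets test earlier in this commit, and `config` is a `TestConfiguration` instance.

```ts
// Manual style: set core env vars and restore them yourself in teardown.
const cleanup = config.setCoreEnv({
  GOOGLE_CLIENT_ID: "test",
  GOOGLE_CLIENT_SECRET: "test",
})
try {
  // ...assertions that depend on the core environment...
} finally {
  cleanup()
}

// Scoped style: withCoreEnv applies the vars, awaits the callback and
// restores the previous values even if the callback throws.
await config.withCoreEnv({ GOOGLE_CLIENT_ID: "test" }, async () => {
  // ...assertions that depend on the core environment...
})
```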


@ -26,6 +26,7 @@ import {
migrations,
platform,
tenancy,
db,
} from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users"
import { isEmailConfigured } from "../../../utilities/email"
@ -185,9 +186,27 @@ export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
export const search = async (ctx: Ctx<SearchUsersRequest>) => {
const body = ctx.request.body
// TODO: for now only one supported search key, string.email
if (body?.query && !userSdk.core.isSupportedUserSearch(body.query)) {
ctx.throw(501, "Can only search by string.email or equal._id")
// TODO: for now only two supported search keys; string.email and equal._id
if (body?.query) {
// Clean numeric prefixing. This will overwrite duplicate search fields,
// but this is fine because we only support a single custom search on
// email and id
for (let filters of Object.values(body.query)) {
if (filters && typeof filters === "object") {
for (let [field, value] of Object.entries(filters)) {
delete filters[field]
const cleanedField = db.removeKeyNumbering(field)
if (filters[cleanedField] !== undefined) {
ctx.throw(400, "Only 1 filter per field is supported")
}
filters[cleanedField] = value
}
}
}
// Validate we aren't trying to search on any illegal fields
if (!userSdk.core.isSupportedUserSearch(body.query)) {
ctx.throw(400, "Can only search by string.email or equal._id")
}
}
if (body.paginate === false) {
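To make the cleaning step concrete, here is a standalone sketch of the same loop applied to a sample query. The sample values are illustrative; `db.removeKeyNumbering` is the backend-core helper imported above, which strips the `<n>:` prefix from filter keys.

```ts
import { db } from "@budibase/backend-core"

// Mirrors the loop above: strip any numeric prefix from each filter key and
// reject two filters that collapse onto the same cleaned field.
function cleanQuery(query: Record<string, Record<string, any> | undefined>) {
  for (const filters of Object.values(query)) {
    if (filters && typeof filters === "object") {
      for (const [field, value] of Object.entries(filters)) {
        delete filters[field]
        const cleaned = db.removeKeyNumbering(field)
        if (filters[cleaned] !== undefined) {
          throw new Error("Only 1 filter per field is supported")
        }
        filters[cleaned] = value
      }
    }
  }
  return query
}

cleanQuery({ string: { ["999:email"]: "test@example.com" } })
// => { string: { email: "test@example.com" } }
```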


@ -590,6 +590,15 @@ describe("/api/global/users", () => {
expect(response.body.data[0].email).toBe(user.email)
})
it("should be able to search by email with numeric prefixing", async () => {
const user = await config.createUser()
const response = await config.api.users.searchUsers({
query: { string: { ["999:email"]: user.email } },
})
expect(response.body.data.length).toBe(1)
expect(response.body.data[0].email).toBe(user.email)
})
it("should be able to search by _id", async () => {
const user = await config.createUser()
const response = await config.api.users.searchUsers({
@ -599,13 +608,52 @@ describe("/api/global/users", () => {
expect(response.body.data[0]._id).toBe(user._id)
})
it("should be able to search by _id with numeric prefixing", async () => {
const user = await config.createUser()
const response = await config.api.users.searchUsers({
query: { equal: { ["1:_id"]: user._id } },
})
expect(response.body.data.length).toBe(1)
expect(response.body.data[0]._id).toBe(user._id)
})
it("should throw an error when using multiple filters on the same field", async () => {
const user = await config.createUser()
await config.api.users.searchUsers(
{
query: {
string: {
["1:email"]: user.email,
["2:email"]: "something else",
},
},
},
{ status: 400 }
)
})
it("should throw an error when using multiple filters on the same field without prefixes", async () => {
const user = await config.createUser()
await config.api.users.searchUsers(
{
query: {
string: {
["_id"]: user.email,
["999:_id"]: "something else",
},
},
},
{ status: 400 }
)
})
it("should throw an error when unimplemented options used", async () => {
const user = await config.createUser()
await config.api.users.searchUsers(
{
query: { equal: { firstName: user.firstName } },
},
{ status: 501 }
{ status: 400 }
)
})


@ -39,7 +39,7 @@ getDistro
if [[ $OS == "Darwin" ]];
then
echo "This script is not setup for your machine type:" $OS
echo "Please use the manual steps described in https://github.com/Budibase/budibase/blob/develop/docs/CONTRIBUTING.md#getting-started-for-contributors"
echo "Please use the manual steps described in https://github.com/Budibase/budibase/blob/master/docs/CONTRIBUTING.md#getting-started-for-contributors"
exit 1
fi