Merge branch 'master' of github.com:Budibase/budibase into develop

commit 88edc9edf9

@@ -60,9 +60,9 @@ jobs:
      - name: "Get Current tag"
        id: currenttag
        run: |
-          version=v$(./scripts/getCurrentVersion.sh)
-          echo 'Using tag $version'
-          echo "::set-output name=tag::$resversionult"
+          version=$(./scripts/getCurrentVersion.sh)
+          echo "Using tag $version"
+          echo "version=$version" >> "$GITHUB_OUTPUT"

      - name: Build/release Docker images
        run: |
@@ -71,7 +71,7 @@ jobs:
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.tag }}
+          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}

  release-helm-chart:
    needs: [release-images]
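
The tag step above drops the deprecated ::set-output workflow command (which also echoed a mangled variable name) in favour of appending version=<value> to the file at $GITHUB_OUTPUT, which later steps read as steps.currenttag.outputs.version. A minimal sketch of the pattern as used here (the final run line is only a placeholder):

      - name: "Get Current tag"
        id: currenttag
        run: |
          version=$(./scripts/getCurrentVersion.sh)
          echo "Using tag $version"
          echo "version=$version" >> "$GITHUB_OUTPUT"

      - name: Build/release Docker images
        env:
          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
        run: echo "Releasing $BUDIBASE_RELEASE_VERSION"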

@@ -8,7 +8,7 @@ env:
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  REGISTRY_URL: registry.hub.docker.com
jobs:
-  build-amd64:
+  build-amd64-arm64:
    name: "build-amd64"
    runs-on: ubuntu-latest
    strategy:

@@ -68,81 +68,7 @@ jobs:
        with:
          context: .
          push: true
-          platforms: linux/amd64
-          tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
-
-      - name: Tag and release Budibase Azure App Service docker image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          push: true
-          platforms: linux/amd64
-          build-args: TARGETBUILD=aas
-          tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
-
-  build-arm64:
-    name: "build-arm64"
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        node-version: [14.x]
-    steps:
-      - name: Fail if not a tag
-        run: |
-          if [[ $GITHUB_REF != refs/tags/* ]]; then
-            echo "Workflow Dispatch can only be run on tags"
-            exit 1
-          fi
-      - name: "Checkout"
-        uses: actions/checkout@v2
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-          fetch-depth: 0
-      - name: Fail if tag is not in master
-        run: |
-          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
-            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
-            exit 1
-          fi
-      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v1
-        with:
-          node-version: ${{ matrix.node-version }}
-      - name: Setup QEMU
-        uses: docker/setup-qemu-action@v1
-      - name: Setup Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v1
-      - name: Run Yarn
-        run: yarn
-      - name: Update versions
-        run: ./scripts/updateVersions.sh
-      - name: Runt Yarn Lint
-        run: yarn lint
-      - name: Update versions
-        run: ./scripts/updateVersions.sh
-      - name: Run Yarn Build
-        run: yarn build:docker:pre
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_API_KEY }}
-      - name: Get the latest release version
-        id: version
-        run: |
-          release_version=$(cat lerna.json | jq -r '.version')
-          echo $release_version
-          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
-      - name: Tag and release Budibase service docker image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          push: true
-          platforms: linux/arm64
+          platforms: linux/amd64,linux/arm64
          tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile
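
The removed build-arm64 job duplicated most of the amd64 job just to publish an arm64 image; with QEMU and Buildx already configured, a single build-push-action invocation can publish one multi-arch manifest instead. A condensed sketch of the surviving job's relevant steps, reusing the action versions and parameters shown above:

      - name: Setup QEMU
        uses: docker/setup-qemu-action@v1
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Tag and release Budibase service docker image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile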

@@ -120,6 +120,8 @@ spec:
        {{ end }}
        - name: MULTI_TENANCY
          value: {{ .Values.globals.multiTenancy | quote }}
+        - name: OFFLINE_MODE
+          value: {{ .Values.globals.offlineMode | quote }}
        - name: LOG_LEVEL
          value: {{ .Values.services.apps.logLevel | quote }}
        - name: REDIS_PASSWORD

@@ -116,6 +116,8 @@ spec:
          value: {{ .Values.services.worker.port | quote }}
        - name: MULTI_TENANCY
          value: {{ .Values.globals.multiTenancy | quote }}
+        - name: OFFLINE_MODE
+          value: {{ .Values.globals.offlineMode | quote }}
        - name: LOG_LEVEL
          value: {{ .Values.services.worker.logLevel | quote }}
        - name: REDIS_PASSWORD

@@ -82,6 +82,7 @@ globals:
  posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
  selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
  multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
+  offlineMode: "0" # set to 1 to enable offline mode
  accountPortalUrl: ""
  accountPortalApiKey: ""
  cookieDomain: ""
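
The new offlineMode value defaults to "0" and feeds the OFFLINE_MODE variable added to both the apps and worker deployments above; the | quote filter keeps the rendered value a string. A rough sketch of how an override and the rendered env entry line up, assuming the usual helm --set syntax:

  # install/upgrade with offline mode enabled:
  #   helm upgrade --install budibase . --set globals.offlineMode="1"
  #
  # rendered container env entry:
  - name: OFFLINE_MODE
    value: "1"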

@@ -1,5 +1,5 @@
 {
-  "version": "2.8.32-alpha.7",
+  "version": "2.9.7",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -19,4 +19,4 @@
       "loadEnvFiles": false
     }
   }
 }

@@ -2,16 +2,16 @@
   "name": "@budibase/cli",
   "version": "0.0.0",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
-  "main": "dist/src/index.js",
+  "main": "dist/index.js",
   "bin": {
-    "budi": "dist/src/index.js"
+    "budi": "dist/index.js"
   },
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
     "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r ../../node_modules/leveldown/prebuilds prebuilds",
     "rename": "renamer --find .node --replace .fake 'prebuilds/**'",
-    "tsc": "tsc -p tsconfig.build.json",
+    "tsc": "node ../../scripts/build.js",
     "pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
     "build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null",
@@ -19,12 +19,11 @@
   },
   "pkg": {
     "targets": [
-      "node16-linux",
-      "node16-win",
-      "node16-macos"
+      "node18-linux",
+      "node18-win",
+      "node18-macos"
     ],
     "assets": [
-      "node_modules/@budibase/backend-core/dist/**/*",
       "prebuilds/**/*"
     ],
     "outputPath": "build"

@@ -1,5 +1,6 @@
 import util from "util"
-const runCommand = util.promisify(require("child_process").exec)
+import childProcess from "child_process"
+const runCommand = util.promisify(childProcess.exec)

 export async function exec(command: string, dir = "./") {
   const { stdout } = await runCommand(command, { cwd: dir })
@@ -16,12 +17,12 @@ export async function utilityInstalled(utilName: string) {
 }

 export async function runPkgCommand(command: string, dir = "./") {
-  const yarn = await exports.utilityInstalled("yarn")
-  const npm = await exports.utilityInstalled("npm")
+  const yarn = await utilityInstalled("yarn")
+  const npm = await utilityInstalled("npm")
   if (!yarn && !npm) {
     throw new Error("Must have yarn or npm installed to run build.")
   }
   const npmCmd = command === "install" ? `npm ${command}` : `npm run ${command}`
   const cmd = yarn ? `yarn ${command} --ignore-engines` : npmCmd
-  await exports.exec(cmd, dir)
+  await exec(cmd, dir)
 }
|
@ -5,7 +5,7 @@ import { error } from "./utils"
|
||||||
|
|
||||||
const PREBUILDS = "prebuilds"
|
const PREBUILDS = "prebuilds"
|
||||||
const ARCH = `${os.platform()}-${os.arch()}`
|
const ARCH = `${os.platform()}-${os.arch()}`
|
||||||
const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
|
const PREBUILD_DIR = join(process.execPath, "..", "cli", PREBUILDS, ARCH)
|
||||||
|
|
||||||
// running as built CLI pkg bundle
|
// running as built CLI pkg bundle
|
||||||
if (!process.argv[0].includes("node")) {
|
if (!process.argv[0].includes("node")) {
|
||||||
|
@ -13,17 +13,19 @@ if (!process.argv[0].includes("node")) {
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkForBinaries() {
|
function checkForBinaries() {
|
||||||
const readDir = join(__filename, "..", "..", "..", PREBUILDS, ARCH)
|
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
|
||||||
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
|
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const natives = fs.readdirSync(readDir)
|
const natives = fs.readdirSync(readDir)
|
||||||
if (fs.existsSync(readDir)) {
|
if (fs.existsSync(readDir)) {
|
||||||
fs.mkdirSync(PREBUILD_DIR, { recursive: true })
|
const writePath = join(process.execPath, PREBUILDS, ARCH)
|
||||||
|
fs.mkdirSync(writePath, { recursive: true })
|
||||||
for (let native of natives) {
|
for (let native of natives) {
|
||||||
const filename = `${native.split(".fake")[0]}.node`
|
const filename = `${native.split(".fake")[0]}.node`
|
||||||
fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
|
fs.cpSync(join(readDir, native), join(writePath, filename))
|
||||||
}
|
}
|
||||||
|
console.log("copied something")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -39,8 +41,9 @@ function cleanup(evt?: number) {
|
||||||
)
|
)
|
||||||
console.error(error(evt))
|
console.error(error(evt))
|
||||||
}
|
}
|
||||||
if (fs.existsSync(PREBUILD_DIR)) {
|
const path = join(process.execPath, PREBUILDS)
|
||||||
fs.rmSync(PREBUILD_DIR, { recursive: true })
|
if (fs.existsSync(path)) {
|
||||||
|
fs.rmSync(path, { recursive: true })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|

@@ -10,7 +10,12 @@
     "incremental": true,
     "types": [ "node", "jest" ],
     "outDir": "dist",
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "paths": {
+      "@budibase/types": ["../types/src"],
+      "@budibase/backend-core": ["../backend-core/src"],
+      "@budibase/backend-core/*": ["../backend-core/*"]
+    }
   },
   "include": [
     "src/**/*"

@@ -5,12 +5,7 @@
     "declaration": true,
     "sourceMap": true,
     "baseUrl": ".",
-    "resolveJsonModule": true,
-    "paths": {
-      "@budibase/types": ["../types/src"],
-      "@budibase/backend-core": ["../backend-core/src"],
-      "@budibase/backend-core/*": ["../backend-core/*"]
-    }
+    "resolveJsonModule": true
   },
   "ts-node": {
     "require": ["tsconfig-paths/register"],

@@ -44,7 +44,7 @@ export async function handleRequest(

 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const tableId = ctx.params.tableId
-  const { id, ...rowData } = ctx.request.body
+  const { _id, ...rowData } = ctx.request.body

   const validateResult = await sdk.rows.utils.validate({
     row: rowData,
@@ -54,10 +54,10 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     throw { validation: validateResult.errors }
   }
   const response = await handleRequest(Operation.UPDATE, tableId, {
-    id: breakRowIdField(id),
+    id: breakRowIdField(_id),
     row: rowData,
   })
-  const row = await sdk.rows.external.getRow(tableId, id, {
+  const row = await sdk.rows.external.getRow(tableId, _id, {
     relationships: true,
   })
   const table = await sdk.tables.getTable(tableId)
@@ -104,9 +104,9 @@ export async function find(ctx: UserCtx) {

 export async function destroy(ctx: UserCtx) {
   const tableId = ctx.params.tableId
-  const id = ctx.request.body._id
+  const _id = ctx.request.body._id
   const { row } = (await handleRequest(Operation.DELETE, tableId, {
-    id: breakRowIdField(id),
+    id: breakRowIdField(_id),
     includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })) as { row: Row }
   return { response: { ok: true }, row }

@@ -486,10 +486,13 @@ class Orchestrator {
     const end = performance.now()
     const executionTime = end - start

-    console.info(`Execution time: ${executionTime} milliseconds`, {
-      _logKey: "automation",
-      executionTime,
-    })
+    console.info(
+      `Automation ID: ${automation._id} Execution time: ${executionTime} milliseconds`,
+      {
+        _logKey: "automation",
+        executionTime,
+      }
+    )

     // store the logs for the automation run
     try {