diff --git a/.github/workflows/release-singleimage-test.yml b/.github/workflows/release-singleimage-test.yml deleted file mode 100644 index 79b9afdd44..0000000000 --- a/.github/workflows/release-singleimage-test.yml +++ /dev/null @@ -1,69 +0,0 @@ -name: Test - -on: - workflow_dispatch: - -env: - CI: true - PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - REGISTRY_URL: registry.hub.docker.com - NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }} -jobs: - build: - name: "build" - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [18.x] - steps: - - name: "Checkout" - uses: actions/checkout@v4 - with: - submodules: true - token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - cache: "yarn" - - name: Setup QEMU - uses: docker/setup-qemu-action@v3 - - name: Setup Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - name: Run Yarn - run: yarn - - name: Run Yarn Build - run: yarn build --scope @budibase/server --scope @budibase/worker - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_API_KEY }} - - name: Get the latest release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo $release_version - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - - name: Tag and release Budibase service docker image - uses: docker/build-push-action@v5 - with: - context: . - push: true - pull: true - platforms: linux/amd64,linux/arm64 - tags: budibase/budibase-test:test - file: ./hosting/single/Dockerfile.v2 - cache-from: type=registry,ref=budibase/budibase-test:test - cache-to: type=inline - - name: Tag and release Budibase Azure App Service docker image - uses: docker/build-push-action@v2 - with: - context: . - push: true - platforms: linux/amd64 - build-args: TARGETBUILD=aas - tags: budibase/budibase-test:aas - file: ./hosting/single/Dockerfile.v2 diff --git a/hosting/docker-compose.build.yaml b/hosting/docker-compose.build.yaml index e192620b59..7ead001a1c 100644 --- a/hosting/docker-compose.build.yaml +++ b/hosting/docker-compose.build.yaml @@ -7,6 +7,8 @@ services: build: context: .. dockerfile: packages/server/Dockerfile.v2 + args: + - BUDIBASE_VERSION=0.0.0+dev-docker container_name: build-bbapps environment: SELF_HOSTED: 1 @@ -30,13 +32,13 @@ services: depends_on: - worker-service - redis-service - # volumes: - # - /some/path/to/plugins:/plugins worker-service: build: context: .. 
dockerfile: packages/worker/Dockerfile.v2 + args: + - BUDIBASE_VERSION=0.0.0+dev-docker container_name: build-bbworker environment: SELF_HOSTED: 1 diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index 365765ccbb..6da2e4a1c3 100644 --- a/hosting/proxy/nginx.prod.conf +++ b/hosting/proxy/nginx.prod.conf @@ -51,7 +51,7 @@ http { proxy_buffering off; set $csp_default "default-src 'self'"; - set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io"; + set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net"; set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com"; set $csp_object "object-src 'none'"; set $csp_base_uri "base-uri 'self'"; diff --git a/hosting/single/Dockerfile.v2 b/hosting/single/Dockerfile.v2 index a983f10044..5b07a51b27 100644 --- a/hosting/single/Dockerfile.v2 +++ b/hosting/single/Dockerfile.v2 @@ -118,6 +118,10 @@ EXPOSE 443 EXPOSE 2222 VOLUME /data +ARG BUDIBASE_VERSION +# Ensuring the version argument is sent +RUN test -n "$BUDIBASE_VERSION" +ENV BUDIBASE_VERSION=$BUDIBASE_VERSION HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh" diff --git a/lerna.json b/lerna.json index 4b2bbb05d5..384473120b 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.11.41", + "version": "2.11.45", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/package.json b/package.json index 100a306a35..d3f4903e6c 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,6 @@ "build:sdk": "lerna run --stream build:sdk", "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular", "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset", - "release:develop": "yarn release --dist-tag develop", "restore": "yarn run clean && yarn && yarn run build", "nuke": "yarn run nuke:packages && yarn run nuke:docker", "nuke:packages": "yarn run restore", diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts index c331d791a6..e64c116663 100644 --- a/packages/backend-core/src/cache/writethrough.ts +++ b/packages/backend-core/src/cache/writethrough.ts @@ -119,8 +119,8 @@ export class Writethrough { this.writeRateMs = writeRateMs } - async put(doc: any, writeRateMs: number = this.writeRateMs) { - return put(this.db, doc, writeRateMs) + async put(doc: any) { + return put(this.db, doc, this.writeRateMs) } async get(id: string) { diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 3894bdd0f7..ed882fe96a 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -75,12 +75,12 @@ function getPackageJsonFields(): { const content = readFileSync(packageJsonFile!, "utf-8") const parsedContent = JSON.parse(content) return { - VERSION: parsedContent.version, + VERSION: process.env.BUDIBASE_VERSION || parsedContent.version, SERVICE_NAME: parsedContent.name, } } catch { // throwing an error here is confusing/causes backend-core to be hard to import - return { VERSION: "", SERVICE_NAME: "" } + return { 
VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" } } } diff --git a/packages/backend-core/src/objectStore/buckets/app.ts b/packages/backend-core/src/objectStore/buckets/app.ts index be9fddeaa6..43bc965c65 100644 --- a/packages/backend-core/src/objectStore/buckets/app.ts +++ b/packages/backend-core/src/objectStore/buckets/app.ts @@ -1,37 +1,50 @@ import env from "../../environment" import * as objectStore from "../objectStore" import * as cloudfront from "../cloudfront" +import qs from "querystring" +import { DEFAULT_TENANT_ID, getTenantId } from "../../context" + +export function clientLibraryPath(appId: string) { + return `${objectStore.sanitizeKey(appId)}/budibase-client.js` +} /** - * In production the client library is stored in the object store, however in development - * we use the symlinked version produced by lerna, located in node modules. We link to this - * via a specific endpoint (under /api/assets/client). - * @param appId In production we need the appId to look up the correct bucket, as the - * version of the client lib may differ between apps. - * @param version The version to retrieve. - * @return The URL to be inserted into appPackage response or server rendered - * app index file. + * Previously we used to serve the client library directly from Cloudfront, however + * due to issues with the domain we were unable to continue doing this - keeping + * incase we are able to switch back to CDN path again in future. */ -export const clientLibraryUrl = (appId: string, version: string) => { - if (env.isProd()) { - let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js` - if (env.CLOUDFRONT_CDN) { - // append app version to bust the cache - if (version) { - file += `?v=${version}` - } - // don't need to use presigned for client with cloudfront - // file is public - return cloudfront.getUrl(file) - } else { - return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file) +export function clientLibraryCDNUrl(appId: string, version: string) { + let file = clientLibraryPath(appId) + if (env.CLOUDFRONT_CDN) { + // append app version to bust the cache + if (version) { + file += `?v=${version}` } + // don't need to use presigned for client with cloudfront + // file is public + return cloudfront.getUrl(file) } else { - return `/api/assets/client` + return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file) } } -export const getAppFileUrl = (s3Key: string) => { +export function clientLibraryUrl(appId: string, version: string) { + let tenantId, qsParams: { appId: string; version: string; tenantId?: string } + try { + tenantId = getTenantId() + } finally { + qsParams = { + appId, + version, + } + } + if (tenantId && tenantId !== DEFAULT_TENANT_ID) { + qsParams.tenantId = tenantId + } + return `/api/assets/client?${qs.encode(qsParams)}` +} + +export function getAppFileUrl(s3Key: string) { if (env.CLOUDFRONT_CDN) { return cloudfront.getPresignedUrl(s3Key) } else { diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts index f7721afb23..6f1b7116ae 100644 --- a/packages/backend-core/src/objectStore/buckets/plugins.ts +++ b/packages/backend-core/src/objectStore/buckets/plugins.ts @@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types" // URLS -export const enrichPluginURLs = (plugins: Plugin[]) => { +export function enrichPluginURLs(plugins: Plugin[]) { if (!plugins || !plugins.length) { return [] } @@ -17,12 +17,12 @@ export const enrichPluginURLs = (plugins: Plugin[]) => { }) } -const 
getPluginJSUrl = (plugin: Plugin) => { +function getPluginJSUrl(plugin: Plugin) { const s3Key = getPluginJSKey(plugin) return getPluginUrl(s3Key) } -const getPluginIconUrl = (plugin: Plugin): string | undefined => { +function getPluginIconUrl(plugin: Plugin): string | undefined { const s3Key = getPluginIconKey(plugin) if (!s3Key) { return @@ -30,7 +30,7 @@ const getPluginIconUrl = (plugin: Plugin): string | undefined => { return getPluginUrl(s3Key) } -const getPluginUrl = (s3Key: string) => { +function getPluginUrl(s3Key: string) { if (env.CLOUDFRONT_CDN) { return cloudfront.getPresignedUrl(s3Key) } else { @@ -40,11 +40,11 @@ const getPluginUrl = (s3Key: string) => { // S3 KEYS -export const getPluginJSKey = (plugin: Plugin) => { +export function getPluginJSKey(plugin: Plugin) { return getPluginS3Key(plugin, "plugin.min.js") } -export const getPluginIconKey = (plugin: Plugin) => { +export function getPluginIconKey(plugin: Plugin) { // stored iconUrl is deprecated - hardcode to icon.svg in this case const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName if (!iconFileName) { @@ -53,12 +53,12 @@ export const getPluginIconKey = (plugin: Plugin) => { return getPluginS3Key(plugin, iconFileName) } -const getPluginS3Key = (plugin: Plugin, fileName: string) => { +function getPluginS3Key(plugin: Plugin, fileName: string) { const s3Key = getPluginS3Dir(plugin.name) return `${s3Key}/${fileName}` } -export const getPluginS3Dir = (pluginName: string) => { +export function getPluginS3Dir(pluginName: string) { let s3Key = `${pluginName}` if (env.MULTI_TENANCY) { const tenantId = context.getTenantId() diff --git a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts index aaa07ec9d3..cbbbee6255 100644 --- a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts +++ b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts @@ -1,5 +1,4 @@ import * as app from "../app" -import { getAppFileUrl } from "../app" import { testEnv } from "../../../../tests/extra" describe("app", () => { @@ -7,6 +6,15 @@ describe("app", () => { testEnv.nodeJest() }) + function baseCheck(url: string, tenantId?: string) { + expect(url).toContain("/api/assets/client") + if (tenantId) { + expect(url).toContain(`tenantId=${tenantId}`) + } + expect(url).toContain("appId=app_123") + expect(url).toContain("version=2.0.0") + } + describe("clientLibraryUrl", () => { function getClientUrl() { return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0") @@ -20,31 +28,19 @@ describe("app", () => { it("gets url in dev", () => { testEnv.nodeDev() const url = getClientUrl() - expect(url).toBe("/api/assets/client") - }) - - it("gets url with embedded minio", () => { - testEnv.withMinio() - const url = getClientUrl() - expect(url).toBe( - "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js" - ) + baseCheck(url) }) it("gets url with custom S3", () => { testEnv.withS3() const url = getClientUrl() - expect(url).toBe( - "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js" - ) + baseCheck(url) }) it("gets url with cloudfront + s3", () => { testEnv.withCloudfront() const url = getClientUrl() - expect(url).toBe( - "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0" - ) + baseCheck(url) }) }) @@ -57,7 +53,7 @@ describe("app", () => { testEnv.nodeDev() await testEnv.withTenant(tenantId => { const url = getClientUrl() - 
expect(url).toBe("/api/assets/client") + baseCheck(url, tenantId) }) }) @@ -65,9 +61,7 @@ describe("app", () => { await testEnv.withTenant(tenantId => { testEnv.withMinio() const url = getClientUrl() - expect(url).toBe( - "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js" - ) + baseCheck(url, tenantId) }) }) @@ -75,9 +69,7 @@ describe("app", () => { await testEnv.withTenant(tenantId => { testEnv.withS3() const url = getClientUrl() - expect(url).toBe( - "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js" - ) + baseCheck(url, tenantId) }) }) @@ -85,9 +77,7 @@ describe("app", () => { await testEnv.withTenant(tenantId => { testEnv.withCloudfront() const url = getClientUrl() - expect(url).toBe( - "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0" - ) + baseCheck(url, tenantId) }) }) }) diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts index c36a09915e..76d2dd6689 100644 --- a/packages/backend-core/src/objectStore/objectStore.ts +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -1,6 +1,6 @@ const sanitize = require("sanitize-s3-objectkey") import AWS from "aws-sdk" -import stream from "stream" +import stream, { Readable } from "stream" import fetch from "node-fetch" import tar from "tar-fs" import zlib from "zlib" @@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) { * @return an S3 object store object, check S3 Nodejs SDK for usage. * @constructor */ -export const ObjectStore = ( +export function ObjectStore( bucket: string, opts: { presigning: boolean } = { presigning: false } -) => { +) { const config: any = { s3ForcePathStyle: true, signatureVersion: "v4", @@ -104,7 +104,7 @@ export const ObjectStore = ( * Given an object store and a bucket name this will make sure the bucket exists, * if it does not exist then it will create it. */ -export const makeSureBucketExists = async (client: any, bucketName: string) => { +export async function makeSureBucketExists(client: any, bucketName: string) { bucketName = sanitizeBucket(bucketName) try { await client @@ -139,13 +139,13 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => { * Uploads the contents of a file given the required parameters, useful when * temp files in use (for example file uploaded as an attachment). */ -export const upload = async ({ +export async function upload({ bucket: bucketName, filename, path, type, metadata, -}: UploadParams) => { +}: UploadParams) { const extension = filename.split(".").pop() const fileBytes = fs.readFileSync(path) @@ -180,12 +180,12 @@ export const upload = async ({ * Similar to the upload function but can be used to send a file stream * through to the object store. */ -export const streamUpload = async ( +export async function streamUpload( bucketName: string, filename: string, stream: any, extra = {} -) => { +) { const objectStore = ObjectStore(bucketName) await makeSureBucketExists(objectStore, bucketName) @@ -215,7 +215,7 @@ export const streamUpload = async ( * retrieves the contents of a file from the object store, if it is a known content type it * will be converted, otherwise it will be returned as a buffer stream. 
*/ -export const retrieve = async (bucketName: string, filepath: string) => { +export async function retrieve(bucketName: string, filepath: string) { const objectStore = ObjectStore(bucketName) const params = { Bucket: sanitizeBucket(bucketName), @@ -230,7 +230,7 @@ export const retrieve = async (bucketName: string, filepath: string) => { } } -export const listAllObjects = async (bucketName: string, path: string) => { +export async function listAllObjects(bucketName: string, path: string) { const objectStore = ObjectStore(bucketName) const list = (params: ListParams = {}) => { return objectStore @@ -261,11 +261,11 @@ export const listAllObjects = async (bucketName: string, path: string) => { /** * Generate a presigned url with a default TTL of 1 hour */ -export const getPresignedUrl = ( +export function getPresignedUrl( bucketName: string, key: string, durationSeconds: number = 3600 -) => { +) { const objectStore = ObjectStore(bucketName, { presigning: true }) const params = { Bucket: sanitizeBucket(bucketName), @@ -291,7 +291,7 @@ export const getPresignedUrl = ( /** * Same as retrieval function but puts to a temporary file. */ -export const retrieveToTmp = async (bucketName: string, filepath: string) => { +export async function retrieveToTmp(bucketName: string, filepath: string) { bucketName = sanitizeBucket(bucketName) filepath = sanitizeKey(filepath) const data = await retrieve(bucketName, filepath) @@ -300,7 +300,7 @@ export const retrieveToTmp = async (bucketName: string, filepath: string) => { return outputPath } -export const retrieveDirectory = async (bucketName: string, path: string) => { +export async function retrieveDirectory(bucketName: string, path: string) { let writePath = join(budibaseTempDir(), v4()) fs.mkdirSync(writePath) const objects = await listAllObjects(bucketName, path) @@ -324,7 +324,7 @@ export const retrieveDirectory = async (bucketName: string, path: string) => { /** * Delete a single file. */ -export const deleteFile = async (bucketName: string, filepath: string) => { +export async function deleteFile(bucketName: string, filepath: string) { const objectStore = ObjectStore(bucketName) await makeSureBucketExists(objectStore, bucketName) const params = { @@ -334,7 +334,7 @@ export const deleteFile = async (bucketName: string, filepath: string) => { return objectStore.deleteObject(params).promise() } -export const deleteFiles = async (bucketName: string, filepaths: string[]) => { +export async function deleteFiles(bucketName: string, filepaths: string[]) { const objectStore = ObjectStore(bucketName) await makeSureBucketExists(objectStore, bucketName) const params = { @@ -349,10 +349,10 @@ export const deleteFiles = async (bucketName: string, filepaths: string[]) => { /** * Delete a path, including everything within. 
*/ -export const deleteFolder = async ( +export async function deleteFolder( bucketName: string, folder: string -): Promise => { +): Promise { bucketName = sanitizeBucket(bucketName) folder = sanitizeKey(folder) const client = ObjectStore(bucketName) @@ -383,11 +383,11 @@ export const deleteFolder = async ( } } -export const uploadDirectory = async ( +export async function uploadDirectory( bucketName: string, localPath: string, bucketPath: string -) => { +) { bucketName = sanitizeBucket(bucketName) let uploads = [] const files = fs.readdirSync(localPath, { withFileTypes: true }) @@ -404,11 +404,11 @@ export const uploadDirectory = async ( return files } -export const downloadTarballDirect = async ( +export async function downloadTarballDirect( url: string, path: string, headers = {} -) => { +) { path = sanitizeKey(path) const response = await fetch(url, { headers }) if (!response.ok) { @@ -418,11 +418,11 @@ export const downloadTarballDirect = async ( await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path)) } -export const downloadTarball = async ( +export async function downloadTarball( url: string, bucketName: string, path: string -) => { +) { bucketName = sanitizeBucket(bucketName) path = sanitizeKey(path) const response = await fetch(url) @@ -438,3 +438,17 @@ export const downloadTarball = async ( // return the temporary path incase there is a use for it return tmpPath } + +export async function getReadStream( + bucketName: string, + path: string +): Promise { + bucketName = sanitizeBucket(bucketName) + path = sanitizeKey(path) + const client = ObjectStore(bucketName) + const params = { + Bucket: bucketName, + Key: path, + } + return client.getObject(params).createReadStream() +} diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts index daa09bee6f..a2539e836e 100644 --- a/packages/backend-core/src/users/db.ts +++ b/packages/backend-core/src/users/db.ts @@ -25,17 +25,12 @@ import { import { getAccountHolderFromUserIds, isAdmin, - isCreator, validateUniqueUser, } from "./utils" import { searchExistingEmails } from "./lookup" import { hash } from "../utils" -type QuotaUpdateFn = ( - change: number, - creatorsChange: number, - cb?: () => Promise -) => Promise +type QuotaUpdateFn = (change: number, cb?: () => Promise) => Promise type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise type FeatureFn = () => Promise type GroupGetFn = (ids: string[]) => Promise @@ -250,8 +245,7 @@ export class UserDB { } const change = dbUser ? 0 : 1 // no change if there is existing user - const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 
1 : 0 - return UserDB.quotas.addUsers(change, creatorsChange, async () => { + return UserDB.quotas.addUsers(change, async () => { await validateUniqueUser(email, tenantId) let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser) @@ -313,7 +307,6 @@ export class UserDB { let usersToSave: any[] = [] let newUsers: any[] = [] - let newCreators: any[] = [] const emails = newUsersRequested.map((user: User) => user.email) const existingEmails = await searchExistingEmails(emails) @@ -334,66 +327,59 @@ export class UserDB { } newUser.userGroups = groups newUsers.push(newUser) - if (isCreator(newUser)) { - newCreators.push(newUser) - } } const account = await accountSdk.getAccountByTenantId(tenantId) - return UserDB.quotas.addUsers( - newUsers.length, - newCreators.length, - async () => { - // create the promises array that will be called by bulkDocs - newUsers.forEach((user: any) => { - usersToSave.push( - UserDB.buildUser( - user, - { - hashPassword: true, - requirePassword: user.requirePassword, - }, - tenantId, - undefined, // no dbUser - account - ) + return UserDB.quotas.addUsers(newUsers.length, async () => { + // create the promises array that will be called by bulkDocs + newUsers.forEach((user: any) => { + usersToSave.push( + UserDB.buildUser( + user, + { + hashPassword: true, + requirePassword: user.requirePassword, + }, + tenantId, + undefined, // no dbUser + account ) - }) + ) + }) - const usersToBulkSave = await Promise.all(usersToSave) - await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) + const usersToBulkSave = await Promise.all(usersToSave) + await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) - // Post-processing of bulk added users, e.g. events and cache operations - for (const user of usersToBulkSave) { - // TODO: Refactor to bulk insert users into the info db - // instead of relying on looping tenant creation - await platform.users.addUser(tenantId, user._id, user.email) - await eventHelpers.handleSaveEvents(user, undefined) - } - - const saved = usersToBulkSave.map(user => { - return { - _id: user._id, - email: user.email, - } - }) - - // now update the groups - if (Array.isArray(saved) && groups) { - const groupPromises = [] - const createdUserIds = saved.map(user => user._id) - for (let groupId of groups) { - groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) - } - await Promise.all(groupPromises) - } - - return { - successful: saved, - unsuccessful, - } + // Post-processing of bulk added users, e.g. 
events and cache operations + for (const user of usersToBulkSave) { + // TODO: Refactor to bulk insert users into the info db + // instead of relying on looping tenant creation + await platform.users.addUser(tenantId, user._id, user.email) + await eventHelpers.handleSaveEvents(user, undefined) } - ) + + const saved = usersToBulkSave.map(user => { + return { + _id: user._id, + email: user.email, + } + }) + + // now update the groups + if (Array.isArray(saved) && groups) { + const groupPromises = [] + const createdUserIds = saved.map(user => user._id) + for (let groupId of groups) { + groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) + } + await Promise.all(groupPromises) + } + + return { + successful: saved, + unsuccessful, + } + }) } static async bulkDelete(userIds: string[]): Promise { @@ -433,12 +419,11 @@ export class UserDB { _deleted: true, })) const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete) - const creatorsToDelete = usersToDelete.filter(isCreator) + await UserDB.quotas.removeUsers(toDelete.length) for (let user of usersToDelete) { await bulkDeleteProcessing(user) } - await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length) // Build Response // index users by id @@ -487,8 +472,7 @@ export class UserDB { await db.remove(userId, dbUser._rev) - const creatorsToDelete = isCreator(dbUser) ? 1 : 0 - await UserDB.quotas.removeUsers(1, creatorsToDelete) + await UserDB.quotas.removeUsers(1) await eventHelpers.handleDeleteEvents(dbUser) await cache.user.invalidateUser(userId) await sessions.invalidateSessions(userId, { reason: "deletion" }) diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts index bad108ab84..6237c23972 100644 --- a/packages/backend-core/src/users/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -14,11 +14,11 @@ import { } from "../db" import { BulkDocsResponse, + ContextUser, SearchQuery, SearchQueryOperators, SearchUsersRequest, User, - ContextUser, DatabaseQueryOpts, } from "@budibase/types" import { getGlobalDB } from "../context" diff --git a/packages/backend-core/tests/core/users/users.spec.js b/packages/backend-core/tests/core/users/users.spec.js deleted file mode 100644 index ae7109344a..0000000000 --- a/packages/backend-core/tests/core/users/users.spec.js +++ /dev/null @@ -1,54 +0,0 @@ -const _ = require('lodash/fp') -const {structures} = require("../../../tests") - -jest.mock("../../../src/context") -jest.mock("../../../src/db") - -const context = require("../../../src/context") -const db = require("../../../src/db") - -const {getCreatorCount} = require('../../../src/users/users') - -describe("Users", () => { - - let getGlobalDBMock - let getGlobalUserParamsMock - let paginationMock - - beforeEach(() => { - jest.resetAllMocks() - - getGlobalDBMock = jest.spyOn(context, "getGlobalDB") - getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams") - paginationMock = jest.spyOn(db, "pagination") - }) - - it("Retrieves the number of creators", async () => { - const getUsers = (offset, limit, creators = false) => { - const range = _.range(offset, limit) - const opts = creators ? 
{builder: {global: true}} : undefined - return range.map(() => structures.users.user(opts)) - } - const page1Data = getUsers(0, 8) - const page2Data = getUsers(8, 12, true) - getGlobalDBMock.mockImplementation(() => ({ - name : "fake-db", - allDocs: () => ({ - rows: [...page1Data, ...page2Data] - }) - })) - paginationMock.mockImplementationOnce(() => ({ - data: page1Data, - hasNextPage: true, - nextPage: "1" - })) - paginationMock.mockImplementation(() => ({ - data: page2Data, - hasNextPage: false, - nextPage: undefined - })) - const creatorsCount = await getCreatorCount() - expect(creatorsCount).toBe(4) - expect(paginationMock).toHaveBeenCalledTimes(2) - }) -}) diff --git a/packages/backend-core/tests/core/utilities/structures/licenses.ts b/packages/backend-core/tests/core/utilities/structures/licenses.ts index bb452f9ad5..0e34f2e9bb 100644 --- a/packages/backend-core/tests/core/utilities/structures/licenses.ts +++ b/packages/backend-core/tests/core/utilities/structures/licenses.ts @@ -123,10 +123,6 @@ export function customer(): Customer { export function subscription(): Subscription { return { amount: 10000, - amounts: { - user: 10000, - creator: 0, - }, cancelAt: undefined, currency: "usd", currentPeriodEnd: 0, @@ -135,10 +131,6 @@ export function subscription(): Subscription { duration: PriceDuration.MONTHLY, pastDueAt: undefined, quantity: 0, - quantities: { - user: 0, - creator: 0, - }, status: "active", } } diff --git a/packages/bbui/src/Table/Table.svelte b/packages/bbui/src/Table/Table.svelte index 529d1144ee..2610d6106c 100644 --- a/packages/bbui/src/Table/Table.svelte +++ b/packages/bbui/src/Table/Table.svelte @@ -106,6 +106,13 @@ name: fieldName, } } + + // Delete numeric only widths as these are grid widths and should be + // ignored + const width = fixedSchema[fieldName].width + if (width != null && `${width}`.trim().match(/^[0-9]+$/)) { + delete fixedSchema[fieldName].width + } }) return fixedSchema } diff --git a/packages/builder/src/builderStore/componentUtils.js b/packages/builder/src/builderStore/componentUtils.js index 16b972058e..522dbae416 100644 --- a/packages/builder/src/builderStore/componentUtils.js +++ b/packages/builder/src/builderStore/componentUtils.js @@ -5,6 +5,7 @@ import { encodeJSBinding, findHBSBlocks, } from "@budibase/string-templates" +import { capitalise } from "helpers" /** * Recursively searches for a specific component ID @@ -235,3 +236,13 @@ export const makeComponentUnique = component => { // Recurse on all children return JSON.parse(definition) } + +export const getComponentText = component => { + if (component?._instanceName) { + return component._instanceName + } + const type = + component._component.replace("@budibase/standard-components/", "") || + "component" + return capitalise(type) +} diff --git a/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js b/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js index b17bd99e10..59bcd0d5e8 100644 --- a/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js +++ b/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js @@ -2,14 +2,14 @@ import sanitizeUrl from "./utils/sanitizeUrl" import { Screen } from "./utils/Screen" import { Component } from "./utils/Component" -export default function (datasources) { +export default function (datasources, mode = "table") { if (!Array.isArray(datasources)) { return [] } return datasources.map(datasource => { return { name: `${datasource.label} - List`, - create: () => 
createScreen(datasource), + create: () => createScreen(datasource, mode), id: ROW_LIST_TEMPLATE, resourceId: datasource.resourceId, } @@ -40,10 +40,24 @@ const generateTableBlock = datasource => { return tableBlock } -const createScreen = datasource => { +const generateGridBlock = datasource => { + const gridBlock = new Component("@budibase/standard-components/gridblock") + gridBlock + .customProps({ + table: datasource, + }) + .instanceName(`${datasource.label} - Grid block`) + return gridBlock +} + +const createScreen = (datasource, mode) => { return new Screen() .route(rowListUrl(datasource)) .instanceName(`${datasource.label} - List`) - .addChild(generateTableBlock(datasource)) + .addChild( + mode === "table" + ? generateTableBlock(datasource) + : generateGridBlock(datasource) + ) .json() } diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 7b51e6c839..467ae413c3 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -777,7 +777,8 @@ disabled={deleteColName !== originalName} >

- Are you sure you wish to delete the column {originalName}? + Are you sure you wish to delete the column + <b on:click={() => (deleteColName = originalName)}>{originalName}?</b> Your data will be deleted and this action cannot be undone - enter the column name to confirm.

@@ -810,4 +811,11 @@ gap: 8px; display: flex; } + b { + transition: color 130ms ease-out; + } + b:hover { + cursor: pointer; + color: var(--spectrum-global-color-gray-900); + } diff --git a/packages/builder/src/components/design/Panel.svelte b/packages/builder/src/components/design/Panel.svelte index 91ea3f98ad..3d5938c174 100644 --- a/packages/builder/src/components/design/Panel.svelte +++ b/packages/builder/src/components/design/Panel.svelte @@ -16,6 +16,7 @@ export let closeButtonIcon = "Close" $: customHeaderContent = $$slots["panel-header-content"] + $: customTitleContent = $$slots["panel-title-content"]
{/if}
- {title} + {#if customTitleContent} + + {:else} + {title || ""} + {/if}
{#if showAddButton}
@@ -134,4 +139,7 @@ .custom-content-wrap { border-bottom: var(--border-light); } + .title { + display: flex; + } diff --git a/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte b/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte index 8e3079101a..9e53f7f1cf 100644 --- a/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte +++ b/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte @@ -16,7 +16,11 @@
- + focusedCellId.set(null)} + on:keydown={onInputKeyDown} + data-grid-ignore + /> + {/if} + +
+ +
+
+ +
+
{column.label}
- {#if sortedBy} -
- + + {#if searching} +
+ +
+ {:else} + {#if sortedBy} +
+ +
+ {/if} +
(open = true)}> +
{/if} -
(open = true)}> - -
@@ -235,7 +336,7 @@ disabled={!canBeSortColumn(column.schema.type) || (column.name === $sort.column && $sort.order === "ascending")} > - Sort {ascendingLabel} + Sort {sortingLabels.ascending} - Sort {descendingLabel} + Sort {sortingLabels.descending} Move left @@ -283,6 +384,29 @@ background: var(--grid-background-alt); } + /* Icon colors */ + .header-cell :global(.spectrum-Icon) { + color: var(--spectrum-global-color-gray-600); + } + .header-cell :global(.spectrum-Icon.hoverable:hover) { + color: var(--spectrum-global-color-gray-800) !important; + cursor: pointer; + } + + /* Search icon */ + .search-icon { + display: none; + } + .header-cell.searchable:not(.open):hover .search-icon, + .header-cell.searchable.searching .search-icon { + display: block; + } + .header-cell.searchable:not(.open):hover .column-icon, + .header-cell.searchable.searching .column-icon { + display: none; + } + + /* Main center content */ .name { flex: 1 1 auto; width: 0; @@ -290,23 +414,45 @@ text-overflow: ellipsis; overflow: hidden; } + .header-cell.searching .name { + opacity: 0; + pointer-events: none; + } + input { + display: none; + font-family: var(--font-sans); + outline: none; + border: 1px solid transparent; + background: transparent; + color: var(--spectrum-global-color-gray-800); + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + padding: 0 30px; + border-radius: 2px; + } + input:focus { + border: 1px solid var(--accent-color); + } + input:not(:focus) { + background: var(--spectrum-global-color-gray-200); + } + .header-cell.searching input { + display: block; + } - .more { + /* Right icons */ + .more-icon { display: none; padding: 4px; margin: 0 -4px; } - .header-cell.open .more, - .header-cell:hover .more { + .header-cell.open .more-icon, + .header-cell:hover .more-icon { display: block; } - .more:hover { - cursor: pointer; - } - .more:hover :global(.spectrum-Icon) { - color: var(--spectrum-global-color-gray-800) !important; - } - .header-cell.open .sort-indicator, .header-cell:hover .sort-indicator { display: none; diff --git a/packages/frontend-core/src/components/grid/layout/GridBody.svelte b/packages/frontend-core/src/components/grid/layout/GridBody.svelte index 762985a4db..0bb2a51fb4 100644 --- a/packages/frontend-core/src/components/grid/layout/GridBody.svelte +++ b/packages/frontend-core/src/components/grid/layout/GridBody.svelte @@ -7,7 +7,7 @@ const { bounds, renderedRows, - renderedColumns, + visibleColumns, rowVerticalInversionIndex, hoveredRowId, dispatch, @@ -17,7 +17,7 @@ let body - $: renderColumnsWidth = $renderedColumns.reduce( + $: columnsWidth = $visibleColumns.reduce( (total, col) => (total += col.width), 0 ) @@ -47,7 +47,7 @@
($hoveredRowId = BlankRowID)} on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} on:click={() => dispatch("add-row-inline")} diff --git a/packages/frontend-core/src/components/grid/layout/GridRow.svelte b/packages/frontend-core/src/components/grid/layout/GridRow.svelte index 4754d493bf..4a0db40ee8 100644 --- a/packages/frontend-core/src/components/grid/layout/GridRow.svelte +++ b/packages/frontend-core/src/components/grid/layout/GridRow.svelte @@ -10,7 +10,7 @@ focusedCellId, reorder, selectedRows, - renderedColumns, + visibleColumns, hoveredRowId, selectedCellMap, focusedRow, @@ -19,6 +19,7 @@ isDragging, dispatch, rows, + columnRenderMap, } = getContext("grid") $: rowSelected = !!$selectedRows[row._id] @@ -34,7 +35,7 @@ on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))} > - {#each $renderedColumns as column, columnIdx (column.name)} + {#each $visibleColumns as column, columnIdx} {@const cellId = `${row._id}-${column.name}`}
diff --git a/packages/frontend-core/src/components/grid/layout/GridScrollWrapper.svelte b/packages/frontend-core/src/components/grid/layout/GridScrollWrapper.svelte index 05bd261721..2a131809a9 100644 --- a/packages/frontend-core/src/components/grid/layout/GridScrollWrapper.svelte +++ b/packages/frontend-core/src/components/grid/layout/GridScrollWrapper.svelte @@ -11,7 +11,6 @@ maxScrollLeft, bounds, hoveredRowId, - hiddenColumnsWidth, menu, } = getContext("grid") @@ -23,10 +22,10 @@ let initialTouchX let initialTouchY - $: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth) + $: style = generateStyle($scroll, $rowHeight) - const generateStyle = (scroll, rowHeight, hiddenWidths) => { - const offsetX = scrollHorizontally ? -1 * scroll.left + hiddenWidths : 0 + const generateStyle = (scroll, rowHeight) => { + const offsetX = scrollHorizontally ? -1 * scroll.left : 0 const offsetY = scrollVertically ? -1 * (scroll.top % rowHeight) : 0 return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);` } diff --git a/packages/frontend-core/src/components/grid/layout/HeaderRow.svelte b/packages/frontend-core/src/components/grid/layout/HeaderRow.svelte index 97b7d054f3..b8655b98b3 100644 --- a/packages/frontend-core/src/components/grid/layout/HeaderRow.svelte +++ b/packages/frontend-core/src/components/grid/layout/HeaderRow.svelte @@ -5,14 +5,14 @@ import HeaderCell from "../cells/HeaderCell.svelte" import { TempTooltip, TooltipType } from "@budibase/bbui" - const { renderedColumns, config, hasNonAutoColumn, datasource, loading } = + const { visibleColumns, config, hasNonAutoColumn, datasource, loading } = getContext("grid")
- {#each $renderedColumns as column, idx} + {#each $visibleColumns as column, idx} diff --git a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte index d131df26e5..46e9b40fb6 100644 --- a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte +++ b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte @@ -2,17 +2,16 @@ import { getContext, onMount } from "svelte" import { Icon, Popover, clickOutside } from "@budibase/bbui" - const { renderedColumns, scroll, hiddenColumnsWidth, width, subscribe } = - getContext("grid") + const { visibleColumns, scroll, width, subscribe } = getContext("grid") let anchor let open = false - $: columnsWidth = $renderedColumns.reduce( + $: columnsWidth = $visibleColumns.reduce( (total, col) => (total += col.width), 0 ) - $: end = $hiddenColumnsWidth + columnsWidth - 1 - $scroll.left + $: end = columnsWidth - 1 - $scroll.left $: left = Math.min($width - 40, end) const close = () => { @@ -34,7 +33,7 @@ {#if !visible && !selectedRowCount && $config.canAddRows} @@ -209,29 +212,28 @@
- {#each $renderedColumns as column, columnIdx} + {#each $visibleColumns as column, columnIdx} {@const cellId = `new-${column.name}`} - {#key cellId} - = $columnHorizontalInversionIndex} - {invertY} - > - {#if column?.schema?.autocolumn} -
Can't edit auto column
- {/if} - {#if isAdding} -
- {/if} - - {/key} + = $columnHorizontalInversionIndex} + {invertY} + hidden={!$columnRenderMap[column.name]} + > + {#if column?.schema?.autocolumn} +
Can't edit auto column
+ {/if} + {#if isAdding} +
+ {/if} + {/each}
diff --git a/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte b/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte index cd23f154b5..8b0a0f0942 100644 --- a/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte +++ b/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte @@ -21,6 +21,7 @@ const ignoredOriginSelectors = [ ".spectrum-Modal", "#builder-side-panel-container", + "[data-grid-ignore]", ] // Global key listener which intercepts all key events diff --git a/packages/frontend-core/src/components/grid/overlays/ResizeOverlay.svelte b/packages/frontend-core/src/components/grid/overlays/ResizeOverlay.svelte index 13e158b300..9e584ab610 100644 --- a/packages/frontend-core/src/components/grid/overlays/ResizeOverlay.svelte +++ b/packages/frontend-core/src/components/grid/overlays/ResizeOverlay.svelte @@ -2,7 +2,7 @@ import { getContext } from "svelte" import { GutterWidth } from "../lib/constants" - const { resize, renderedColumns, stickyColumn, isReordering, scrollLeft } = + const { resize, visibleColumns, stickyColumn, isReordering, scrollLeft } = getContext("grid") $: offset = GutterWidth + ($stickyColumn?.width || 0) @@ -26,7 +26,7 @@
{/if} - {#each $renderedColumns as column} + {#each $visibleColumns as column}
{ - const definition = writable(null) + const definition = memo(null) return { definition, @@ -10,10 +11,15 @@ export const createStores = () => { } export const deriveStores = context => { - const { definition, schemaOverrides, columnWhitelist, datasource } = context + const { API, definition, schemaOverrides, columnWhitelist, datasource } = + context const schema = derived(definition, $definition => { - let schema = $definition?.schema + let schema = getDatasourceSchema({ + API, + datasource: get(datasource), + definition: $definition, + }) if (!schema) { return null } diff --git a/packages/frontend-core/src/components/grid/stores/datasources/nonPlus.js b/packages/frontend-core/src/components/grid/stores/datasources/nonPlus.js index a05e1f7d37..017c16a03c 100644 --- a/packages/frontend-core/src/components/grid/stores/datasources/nonPlus.js +++ b/packages/frontend-core/src/components/grid/stores/datasources/nonPlus.js @@ -66,6 +66,8 @@ export const initialise = context => { datasource, sort, filter, + inlineFilters, + allFilters, nonPlus, initialFilter, initialSortColumn, @@ -87,6 +89,7 @@ export const initialise = context => { // Wipe state filter.set(get(initialFilter)) + inlineFilters.set([]) sort.set({ column: get(initialSortColumn), order: get(initialSortOrder) || "ascending", @@ -94,14 +97,14 @@ export const initialise = context => { // Update fetch when filter changes unsubscribers.push( - filter.subscribe($filter => { + allFilters.subscribe($allFilters => { // Ensure we're updating the correct fetch const $fetch = get(fetch) if (!isSameDatasource($fetch?.options?.datasource, $datasource)) { return } $fetch.update({ - filter: $filter, + filter: $allFilters, }) }) ) diff --git a/packages/frontend-core/src/components/grid/stores/datasources/table.js b/packages/frontend-core/src/components/grid/stores/datasources/table.js index 9ced1530ba..2f49ab1d38 100644 --- a/packages/frontend-core/src/components/grid/stores/datasources/table.js +++ b/packages/frontend-core/src/components/grid/stores/datasources/table.js @@ -71,6 +71,8 @@ export const initialise = context => { datasource, fetch, filter, + inlineFilters, + allFilters, sort, table, initialFilter, @@ -93,6 +95,7 @@ export const initialise = context => { // Wipe state filter.set(get(initialFilter)) + inlineFilters.set([]) sort.set({ column: get(initialSortColumn), order: get(initialSortOrder) || "ascending", @@ -100,14 +103,14 @@ export const initialise = context => { // Update fetch when filter changes unsubscribers.push( - filter.subscribe($filter => { + allFilters.subscribe($allFilters => { // Ensure we're updating the correct fetch const $fetch = get(fetch) if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) { return } $fetch.update({ - filter: $filter, + filter: $allFilters, }) }) ) diff --git a/packages/frontend-core/src/components/grid/stores/datasources/viewV2.js b/packages/frontend-core/src/components/grid/stores/datasources/viewV2.js index f0572003c2..35f57a5fc4 100644 --- a/packages/frontend-core/src/components/grid/stores/datasources/viewV2.js +++ b/packages/frontend-core/src/components/grid/stores/datasources/viewV2.js @@ -73,6 +73,8 @@ export const initialise = context => { sort, rows, filter, + inlineFilters, + allFilters, subscribe, viewV2, initialFilter, @@ -97,6 +99,7 @@ export const initialise = context => { // Reset state for new view filter.set(get(initialFilter)) + inlineFilters.set([]) sort.set({ column: get(initialSortColumn), order: get(initialSortOrder) || "ascending", @@ -143,21 +146,19 @@ 
export const initialise = context => { order: $sort.order || "ascending", }, }) - await rows.actions.refreshData() } } - // Otherwise just update the fetch - else { - // Ensure we're updating the correct fetch - const $fetch = get(fetch) - if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) { - return - } - $fetch.update({ - sortOrder: $sort.order || "ascending", - sortColumn: $sort.column, - }) + + // Also update the fetch to ensure the new sort is respected. + // Ensure we're updating the correct fetch. + const $fetch = get(fetch) + if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) { + return } + $fetch.update({ + sortOrder: $sort.order, + sortColumn: $sort.column, + }) }) ) @@ -176,20 +177,25 @@ export const initialise = context => { ...$view, query: $filter, }) - await rows.actions.refreshData() } } - // Otherwise just update the fetch - else { - // Ensure we're updating the correct fetch - const $fetch = get(fetch) - if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) { - return - } - $fetch.update({ - filter: $filter, - }) + }) + ) + + // Keep fetch up to date with filters. + // If we're able to save filters against the view then we only need to apply + // inline filters to the fetch, as saved filters are applied server side. + // If we can't save filters, then all filters must be applied to the fetch. + unsubscribers.push( + allFilters.subscribe($allFilters => { + // Ensure we're updating the correct fetch + const $fetch = get(fetch) + if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) { + return } + $fetch.update({ + filter: $allFilters, + }) }) ) diff --git a/packages/frontend-core/src/components/grid/stores/filter.js b/packages/frontend-core/src/components/grid/stores/filter.js index a59c98ccdd..a16b101bbb 100644 --- a/packages/frontend-core/src/components/grid/stores/filter.js +++ b/packages/frontend-core/src/components/grid/stores/filter.js @@ -1,13 +1,79 @@ -import { writable, get } from "svelte/store" +import { writable, get, derived } from "svelte/store" +import { FieldType } from "@budibase/types" export const createStores = context => { const { props } = context // Initialise to default props const filter = writable(get(props).initialFilter) + const inlineFilters = writable([]) return { filter, + inlineFilters, + } +} + +export const deriveStores = context => { + const { filter, inlineFilters } = context + + const allFilters = derived( + [filter, inlineFilters], + ([$filter, $inlineFilters]) => { + return [...($filter || []), ...$inlineFilters] + } + ) + + return { + allFilters, + } +} + +export const createActions = context => { + const { filter, inlineFilters } = context + + const addInlineFilter = (column, value) => { + const filterId = `inline-${column.name}` + const type = column.schema.type + let inlineFilter = { + field: column.name, + id: filterId, + operator: "string", + valueType: "value", + type, + value, + } + + // Add overrides specific so the certain column type + if (type === FieldType.NUMBER) { + inlineFilter.value = parseFloat(value) + inlineFilter.operator = "equal" + } else if (type === FieldType.BIGINT) { + inlineFilter.operator = "equal" + } else if (type === FieldType.ARRAY) { + inlineFilter.operator = "contains" + } + + // Add this filter + inlineFilters.update($inlineFilters => { + // Remove any existing inline filter for this column + $inlineFilters = $inlineFilters?.filter(x => x.id !== filterId) + + // Add new one if a value exists + if (value) { + $inlineFilters.push(inlineFilter) + } + 
return $inlineFilters + }) + } + + return { + filter: { + ...filter, + actions: { + addInlineFilter, + }, + }, } } diff --git a/packages/frontend-core/src/components/grid/stores/rows.js b/packages/frontend-core/src/components/grid/stores/rows.js index 49adb62936..51c46f8263 100644 --- a/packages/frontend-core/src/components/grid/stores/rows.js +++ b/packages/frontend-core/src/components/grid/stores/rows.js @@ -8,6 +8,7 @@ export const createStores = () => { const rows = writable([]) const loading = writable(false) const loaded = writable(false) + const refreshing = writable(false) const rowChangeCache = writable({}) const inProgressChanges = writable({}) const hasNextPage = writable(false) @@ -53,6 +54,7 @@ export const createStores = () => { fetch, rowLookupMap, loaded, + refreshing, loading, rowChangeCache, inProgressChanges, @@ -66,7 +68,7 @@ export const createActions = context => { rows, rowLookupMap, definition, - filter, + allFilters, loading, sort, datasource, @@ -82,6 +84,7 @@ export const createActions = context => { notifications, fetch, isDatasourcePlus, + refreshing, } = context const instanceLoaded = writable(false) @@ -108,7 +111,7 @@ export const createActions = context => { // Tick to allow other reactive logic to update stores when datasource changes // before proceeding. This allows us to wipe filters etc if needed. await tick() - const $filter = get(filter) + const $allFilters = get(allFilters) const $sort = get(sort) // Determine how many rows to fetch per page @@ -120,7 +123,7 @@ export const createActions = context => { API, datasource: $datasource, options: { - filter: $filter, + filter: $allFilters, sortColumn: $sort.column, sortOrder: $sort.order, limit, @@ -176,6 +179,9 @@ export const createActions = context => { // Notify that we're loaded loading.set(false) } + + // Update refreshing state + refreshing.set($fetch.loading) }) fetch.set(newFetch) diff --git a/packages/frontend-core/src/components/grid/stores/viewport.js b/packages/frontend-core/src/components/grid/stores/viewport.js index 6c0c4708b9..8df8acd0f4 100644 --- a/packages/frontend-core/src/components/grid/stores/viewport.js +++ b/packages/frontend-core/src/components/grid/stores/viewport.js @@ -1,4 +1,4 @@ -import { derived, get } from "svelte/store" +import { derived } from "svelte/store" import { MaxCellRenderHeight, MaxCellRenderWidthOverflow, @@ -50,12 +50,11 @@ export const deriveStores = context => { const interval = MinColumnWidth return Math.round($scrollLeft / interval) * interval }) - const renderedColumns = derived( + const columnRenderMap = derived( [visibleColumns, scrollLeftRounded, width], - ([$visibleColumns, $scrollLeft, $width], set) => { + ([$visibleColumns, $scrollLeft, $width]) => { if (!$visibleColumns.length) { - set([]) - return + return {} } let startColIdx = 0 let rightEdge = $visibleColumns[0].width @@ -75,34 +74,16 @@ export const deriveStores = context => { leftEdge += $visibleColumns[endColIdx].width endColIdx++ } - // Render an additional column on either side to account for - // debounce column updates based on scroll position - const next = $visibleColumns.slice( - Math.max(0, startColIdx - 1), - endColIdx + 1 - ) - const current = get(renderedColumns) - if (JSON.stringify(next) !== JSON.stringify(current)) { - set(next) - } - } - ) - const hiddenColumnsWidth = derived( - [renderedColumns, visibleColumns], - ([$renderedColumns, $visibleColumns]) => { - const idx = $visibleColumns.findIndex( - col => col.name === $renderedColumns[0]?.name - ) - let width = 0 - if (idx > 
0) { - for (let i = 0; i < idx; i++) { - width += $visibleColumns[i].width - } - } - return width - }, - 0 + // Only update the store if different + let next = {} + $visibleColumns + .slice(Math.max(0, startColIdx), endColIdx) + .forEach(col => { + next[col.name] = true + }) + return next + } ) // Determine the row index at which we should start vertically inverting cell @@ -130,12 +111,12 @@ export const deriveStores = context => { // Determine the column index at which we should start horizontally inverting // cell dropdowns const columnHorizontalInversionIndex = derived( - [renderedColumns, scrollLeft, width], - ([$renderedColumns, $scrollLeft, $width]) => { + [visibleColumns, scrollLeft, width], + ([$visibleColumns, $scrollLeft, $width]) => { const cutoff = $width + $scrollLeft - ScrollBarSize * 3 - let inversionIdx = $renderedColumns.length - for (let i = $renderedColumns.length - 1; i >= 0; i--, inversionIdx--) { - const rightEdge = $renderedColumns[i].left + $renderedColumns[i].width + let inversionIdx = $visibleColumns.length + for (let i = $visibleColumns.length - 1; i >= 0; i--, inversionIdx--) { + const rightEdge = $visibleColumns[i].left + $visibleColumns[i].width if (rightEdge + MaxCellRenderWidthOverflow <= cutoff) { break } @@ -148,8 +129,7 @@ export const deriveStores = context => { scrolledRowCount, visualRowCapacity, renderedRows, - renderedColumns, - hiddenColumnsWidth, + columnRenderMap, rowVerticalInversionIndex, columnHorizontalInversionIndex, } diff --git a/packages/frontend-core/src/fetch/ViewV2Fetch.js b/packages/frontend-core/src/fetch/ViewV2Fetch.js index b9eaf4bdf7..9d2f8c103a 100644 --- a/packages/frontend-core/src/fetch/ViewV2Fetch.js +++ b/packages/frontend-core/src/fetch/ViewV2Fetch.js @@ -35,9 +35,28 @@ export default class ViewV2Fetch extends DataFetch { } async getData() { - const { datasource, limit, sortColumn, sortOrder, sortType, paginate } = - this.options - const { cursor, query } = get(this.store) + const { + datasource, + limit, + sortColumn, + sortOrder, + sortType, + paginate, + filter, + } = this.options + const { cursor, query, definition } = get(this.store) + + // If sort/filter params are not defined, update options to store the + // params built in to this view. This ensures that we can accurately + // compare old and new params and skip a redundant API call. 
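// Illustrative sketch (not part of the diff): the change below copies the view's own
// sort/filter into the fetch options when none were supplied, so later option
// comparisons see identical values and a redundant refetch can be skipped. The
// interfaces here are simplified stand-ins, not the real Budibase types.
interface ViewDefinitionLike {
  sort?: { field: string; order?: string }
  query?: unknown[]
}
interface FetchOptionsLike {
  sortColumn?: string
  sortOrder?: string
  filter?: unknown[]
}

function withViewDefaults(
  options: FetchOptionsLike,
  definition: ViewDefinitionLike
): FetchOptionsLike {
  const next = { ...options }
  // fall back to the sort baked into the view definition
  if (!next.sortColumn && definition.sort?.field) {
    next.sortColumn = definition.sort.field
    next.sortOrder = definition.sort.order
  }
  // fall back to the filter (query) baked into the view definition
  if (!next.filter?.length && definition.query?.length) {
    next.filter = definition.query
  }
  return next
}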
+ if (!sortColumn && definition.sort?.field) { + this.options.sortColumn = definition.sort.field + this.options.sortOrder = definition.sort.order + } + if (!filter?.length && definition.query?.length) { + this.options.filter = definition.query + } + try { const res = await this.API.viewV2.fetch({ viewId: datasource.id, diff --git a/packages/frontend-core/src/fetch/index.js b/packages/frontend-core/src/fetch/index.js index d133942bb7..a41a859351 100644 --- a/packages/frontend-core/src/fetch/index.js +++ b/packages/frontend-core/src/fetch/index.js @@ -32,12 +32,24 @@ export const fetchData = ({ API, datasource, options }) => { return new Fetch({ API, datasource, ...options }) } -// Fetches the definition of any type of datasource -export const getDatasourceDefinition = async ({ API, datasource }) => { +// Creates an empty fetch instance with no datasource configured, so no data +// will initially be loaded +const createEmptyFetchInstance = ({ API, datasource }) => { const handler = DataFetchMap[datasource?.type] if (!handler) { return null } - const instance = new handler({ API }) - return await instance.getDefinition(datasource) + return new handler({ API }) +} + +// Fetches the definition of any type of datasource +export const getDatasourceDefinition = async ({ API, datasource }) => { + const instance = createEmptyFetchInstance({ API, datasource }) + return await instance?.getDefinition(datasource) +} + +// Fetches the schema of any type of datasource +export const getDatasourceSchema = ({ API, datasource, definition }) => { + const instance = createEmptyFetchInstance({ API, datasource }) + return instance?.getSchema(datasource, definition) } diff --git a/packages/pro b/packages/pro index 56d968bfe6..d24c0dc3a3 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 56d968bfe6998e1077d3fce4eb1c9e483d1d6fc9 +Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376 diff --git a/packages/server/Dockerfile.v2 b/packages/server/Dockerfile.v2 index d5a86b037d..f737570fcd 100644 --- a/packages/server/Dockerfile.v2 +++ b/packages/server/Dockerfile.v2 @@ -44,7 +44,7 @@ RUN chmod +x ./scripts/removeWorkspaceDependencies.sh WORKDIR /string-templates COPY packages/string-templates/package.json package.json RUN ../scripts/removeWorkspaceDependencies.sh package.json -RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true +RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 COPY packages/string-templates . @@ -57,7 +57,7 @@ COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies. RUN chmod +x ./scripts/removeWorkspaceDependencies.sh RUN ./scripts/removeWorkspaceDependencies.sh package.json -RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true \ +RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 \ # Remove unneeded data from file system to reduce image size && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python jq \ && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp @@ -67,6 +67,11 @@ COPY packages/server/docker_run.sh . 
COPY packages/server/builder/ builder/ COPY packages/server/client/ client/ +ARG BUDIBASE_VERSION +# Ensuring the version argument is sent +RUN test -n "$BUDIBASE_VERSION" +ENV BUDIBASE_VERSION=$BUDIBASE_VERSION + EXPOSE 4001 # have to add node environment production after install diff --git a/packages/server/package.json b/packages/server/package.json index 0bc3618a08..4a858f3be9 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -18,7 +18,7 @@ "test": "bash scripts/test.sh", "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit", "test:watch": "jest --watch", - "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION", + "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION", "run:docker": "node dist/index.js", "run:docker:cluster": "pm2-runtime start pm2.config.js", "dev:stack:up": "node scripts/dev/manage.js up", diff --git a/packages/server/scripts/dev/manage.js b/packages/server/scripts/dev/manage.js index 5db45040bf..13639b6bfd 100644 --- a/packages/server/scripts/dev/manage.js +++ b/packages/server/scripts/dev/manage.js @@ -47,6 +47,7 @@ async function init() { TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR", HTTP_MIGRATIONS: "0", HTTP_LOGGING: "0", + VERSION: "0.0.0+local", } let envFile = "" Object.keys(envFileJson).forEach(key => { diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index ed23009706..3697bbe925 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -1,4 +1,10 @@ -import { context, db as dbCore, events, roles } from "@budibase/backend-core" +import { + context, + db as dbCore, + events, + roles, + Header, +} from "@budibase/backend-core" import { getUserMetadataParams, InternalTables } from "../../db/utils" import { Database, Role, UserCtx, UserRoles } from "@budibase/types" import { sdk as sharedSdk } from "@budibase/shared-core" @@ -143,4 +149,20 @@ export async function accessible(ctx: UserCtx) { } else { ctx.body = await roles.getUserRoleIdHierarchy(roleId!) 
} + + // If a custom role is provided in the header, filter out higher level roles + const roleHeader = ctx.header?.[Header.PREVIEW_ROLE] as string + if (roleHeader && !Object.keys(roles.BUILTIN_ROLE_IDS).includes(roleHeader)) { + const inherits = (await roles.getRole(roleHeader))?.inherits + const orderedRoles = ctx.body.reverse() + let filteredRoles = [roleHeader] + for (let role of orderedRoles) { + filteredRoles = [role, ...filteredRoles] + if (role === inherits) { + break + } + } + filteredRoles.pop() + ctx.body = [roleHeader, ...filteredRoles] + } } diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index bbf9dd34f5..984cb16c06 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -16,7 +16,7 @@ import AWS from "aws-sdk" import fs from "fs" import sdk from "../../../sdk" import * as pro from "@budibase/pro" -import { App } from "@budibase/types" +import { App, Ctx } from "@budibase/types" const send = require("koa-send") @@ -39,7 +39,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) { } } -export const toggleBetaUiFeature = async function (ctx: any) { +export const toggleBetaUiFeature = async function (ctx: Ctx) { const cookieName = `beta:${ctx.params.feature}` if (ctx.cookies.get(cookieName)) { @@ -67,16 +67,14 @@ export const toggleBetaUiFeature = async function (ctx: any) { } } -export const serveBuilder = async function (ctx: any) { +export const serveBuilder = async function (ctx: Ctx) { const builderPath = join(TOP_LEVEL_PATH, "builder") await send(ctx, ctx.file, { root: builderPath }) } -export const uploadFile = async function (ctx: any) { - let files = - ctx.request.files.file.length > 1 - ? Array.from(ctx.request.files.file) - : [ctx.request.files.file] +export const uploadFile = async function (ctx: Ctx) { + const file = ctx.request?.files?.file + let files = file && Array.isArray(file) ? 
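For context, a minimal sketch (assumed client code, not part of the diff) of calling the accessible-roles endpoint while previewing as a custom role; the header name matches the x-budibase-role header exercised in role.spec.js further down, and the auth headers are placeholders:

// Hypothetical preview-mode request against the updated /api/roles/accessible endpoint.
async function getAccessibleRolesForPreview(authHeaders: Record<string, string>) {
  const res = await fetch("/api/roles/accessible", {
    headers: { ...authHeaders, "x-budibase-role": "CUSTOM_ROLE" },
  })
  // With a custom role that inherits BASIC, the filtered response is
  // ["CUSTOM_ROLE", "BASIC", "PUBLIC"] rather than the full role hierarchy.
  return (await res.json()) as string[]
}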
Array.from(file) : [file] const uploads = files.map(async (file: any) => { const fileExtension = [...file.name.split(".")].pop() @@ -93,14 +91,14 @@ export const uploadFile = async function (ctx: any) { ctx.body = await Promise.all(uploads) } -export const deleteObjects = async function (ctx: any) { +export const deleteObjects = async function (ctx: Ctx) { ctx.body = await objectStore.deleteFiles( ObjectStoreBuckets.APPS, ctx.request.body.keys ) } -export const serveApp = async function (ctx: any) { +export const serveApp = async function (ctx: Ctx) { const bbHeaderEmbed = ctx.request.get("x-budibase-embed")?.toLowerCase() === "true" @@ -181,7 +179,7 @@ export const serveApp = async function (ctx: any) { } } -export const serveBuilderPreview = async function (ctx: any) { +export const serveBuilderPreview = async function (ctx: Ctx) { const db = context.getAppDB({ skip_setup: true }) const appInfo = await db.get(DocumentType.APP_METADATA) @@ -197,18 +195,30 @@ export const serveBuilderPreview = async function (ctx: any) { } } -export const serveClientLibrary = async function (ctx: any) { +export const serveClientLibrary = async function (ctx: Ctx) { + const appId = context.getAppId() || (ctx.request.query.appId as string) let rootPath = join(NODE_MODULES_PATH, "@budibase", "client", "dist") - // incase running from TS directly - if (env.isDev() && !fs.existsSync(rootPath)) { - rootPath = join(require.resolve("@budibase/client"), "..") + if (!appId) { + ctx.throw(400, "No app ID provided - cannot fetch client library.") + } + if (env.isProd()) { + ctx.body = await objectStore.getReadStream( + ObjectStoreBuckets.APPS, + objectStore.clientLibraryPath(appId!) + ) + ctx.set("Content-Type", "application/javascript") + } else if (env.isDev()) { + // incase running from TS directly + const tsPath = join(require.resolve("@budibase/client"), "..") + return send(ctx, "budibase-client.js", { + root: !fs.existsSync(rootPath) ? tsPath : rootPath, + }) + } else { + ctx.throw(500, "Unable to retrieve client library.") } - return send(ctx, "budibase-client.js", { - root: rootPath, - }) } -export const getSignedUploadURL = async function (ctx: any) { +export const getSignedUploadURL = async function (ctx: Ctx) { // Ensure datasource is valid let datasource try { @@ -247,7 +257,7 @@ export const getSignedUploadURL = async function (ctx: any) { const params = { Bucket: bucket, Key: key } signedUrl = s3.getSignedUrl("putObject", params) publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}` - } catch (error) { + } catch (error: any) { ctx.throw(400, error) } } diff --git a/packages/server/src/api/routes/static.ts b/packages/server/src/api/routes/static.ts index 0012764b40..bd3f1aba2e 100644 --- a/packages/server/src/api/routes/static.ts +++ b/packages/server/src/api/routes/static.ts @@ -27,15 +27,9 @@ router.param("file", async (file: any, ctx: any, next: any) => { return next() }) -// only used in development for retrieving the client library, -// in production the client lib is always stored in the object store. 
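For context, a minimal sketch (assumed consumer code, not part of the diff) of fetching the client library through the /api/assets/client route that is now registered in all environments; the app ID is a placeholder:

// Hypothetical fetch of the client library bundle for a given app.
async function loadClientLibrary(appId: string): Promise<string> {
  // In production this streams budibase-client.js from the object store;
  // in dev it is served from the local @budibase/client build.
  const res = await fetch(`/api/assets/client?appId=${encodeURIComponent(appId)}`)
  if (!res.ok) {
    throw new Error(`Failed to load client library: ${res.status}`)
  }
  return res.text()
}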
-if (env.isDev()) { - router.get("/api/assets/client", controller.serveClientLibrary) -} - router - // TODO: for now this builder endpoint is not authorized/secured, will need to be .get("/builder/:file*", controller.serveBuilder) + .get("/api/assets/client", controller.serveClientLibrary) .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile) .post( "/api/attachments/delete", diff --git a/packages/server/src/api/routes/tests/debug.spec.ts b/packages/server/src/api/routes/tests/debug.spec.ts index 23ee43fc73..26e98d93f9 100644 --- a/packages/server/src/api/routes/tests/debug.spec.ts +++ b/packages/server/src/api/routes/tests/debug.spec.ts @@ -41,7 +41,7 @@ describe("/component", () => { .expect("Content-Type", /json/) .expect(200) expect(res.body).toEqual({ - budibaseVersion: "0.0.0", + budibaseVersion: "0.0.0+jest", cpuArch: "arm64", cpuCores: 1, cpuInfo: "test", diff --git a/packages/server/src/api/routes/tests/dev.spec.js b/packages/server/src/api/routes/tests/dev.spec.js index 111f3dbd5b..af1dc82a9d 100644 --- a/packages/server/src/api/routes/tests/dev.spec.js +++ b/packages/server/src/api/routes/tests/dev.spec.js @@ -1,6 +1,6 @@ const setup = require("./utilities") const { events } = require("@budibase/backend-core") -const version = require("../../../../package.json").version + describe("/dev", () => { let request = setup.getRequest() @@ -32,9 +32,9 @@ describe("/dev", () => { .expect("Content-Type", /json/) .expect(200) - expect(res.body.version).toBe(version) + expect(res.body.version).toBe('0.0.0+jest') expect(events.installation.versionChecked).toBeCalledTimes(1) - expect(events.installation.versionChecked).toBeCalledWith(version) + expect(events.installation.versionChecked).toBeCalledWith('0.0.0+jest') }) }) }) \ No newline at end of file diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js index c8e383d5ed..d133a69d64 100644 --- a/packages/server/src/api/routes/tests/role.spec.js +++ b/packages/server/src/api/routes/tests/role.spec.js @@ -158,5 +158,25 @@ describe("/roles", () => { expect(res.body.length).toBe(1) expect(res.body[0]).toBe("PUBLIC") }) + + it("should not fetch higher level accessible roles when a custom role header is provided", async () => { + await createRole({ + name: `CUSTOM_ROLE`, + inherits: roles.BUILTIN_ROLE_IDS.BASIC, + permissionId: permissions.BuiltinPermissionID.READ_ONLY, + version: "name", + }) + const res = await request + .get("/api/roles/accessible") + .set({ + ...config.defaultHeaders(), + "x-budibase-role": "CUSTOM_ROLE" + }) + .expect(200) + expect(res.body.length).toBe(3) + expect(res.body[0]).toBe("CUSTOM_ROLE") + expect(res.body[1]).toBe("BASIC") + expect(res.body[2]).toBe("PUBLIC") + }) }) }) diff --git a/packages/server/src/api/routes/tests/routing.spec.js b/packages/server/src/api/routes/tests/routing.spec.js index ff6d7aba1d..4076f4879c 100644 --- a/packages/server/src/api/routes/tests/routing.spec.js +++ b/packages/server/src/api/routes/tests/routing.spec.js @@ -1,5 +1,5 @@ const setup = require("./utilities") -const { basicScreen } = setup.structures +const { basicScreen, powerScreen } = setup.structures const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions") const { roles } = require("@budibase/backend-core") const { BUILTIN_ROLE_IDS } = roles @@ -12,19 +12,14 @@ const route = "/test" describe("/routing", () => { let request = setup.getRequest() let config = setup.getConfig() - let screen, screen2 + let basic, power 
afterAll(setup.afterAll) beforeAll(async () => { await config.init() - screen = basicScreen() - screen.routing.route = route - screen = await config.createScreen(screen) - screen2 = basicScreen() - screen2.routing.roleId = BUILTIN_ROLE_IDS.POWER - screen2.routing.route = route - screen2 = await config.createScreen(screen2) + basic = await config.createScreen(basicScreen(route)) + power = await config.createScreen(powerScreen(route)) await config.publish() }) @@ -61,8 +56,8 @@ describe("/routing", () => { expect(res.body.routes[route]).toEqual({ subpaths: { [route]: { - screenId: screen._id, - roleId: screen.routing.roleId + screenId: basic._id, + roleId: basic.routing.roleId } } }) @@ -80,8 +75,8 @@ describe("/routing", () => { expect(res.body.routes[route]).toEqual({ subpaths: { [route]: { - screenId: screen2._id, - roleId: screen2.routing.roleId + screenId: power._id, + roleId: power.routing.roleId } } }) @@ -101,8 +96,8 @@ describe("/routing", () => { expect(res.body.routes).toBeDefined() expect(res.body.routes[route].subpaths[route]).toBeDefined() const subpath = res.body.routes[route].subpaths[route] - expect(subpath.screens[screen2.routing.roleId]).toEqual(screen2._id) - expect(subpath.screens[screen.routing.roleId]).toEqual(screen._id) + expect(subpath.screens[power.routing.roleId]).toEqual(power._id) + expect(subpath.screens[basic.routing.roleId]).toEqual(basic._id) }) it("make sure it is a builder only endpoint", async () => { diff --git a/packages/server/src/constants/screens.ts b/packages/server/src/constants/screens.ts index 23e36a65b8..6c88b0f957 100644 --- a/packages/server/src/constants/screens.ts +++ b/packages/server/src/constants/screens.ts @@ -1,7 +1,15 @@ import { roles } from "@budibase/backend-core" import { BASE_LAYOUT_PROP_IDS } from "./layouts" -export function createHomeScreen() { +export function createHomeScreen( + config: { + roleId: string + route: string + } = { + roleId: roles.BUILTIN_ROLE_IDS.BASIC, + route: "/", + } +) { return { description: "", url: "", @@ -40,8 +48,8 @@ export function createHomeScreen() { gap: "M", }, routing: { - route: "/", - roleId: roles.BUILTIN_ROLE_IDS.BASIC, + route: config.route, + roleId: config.roleId, }, name: "home-screen", } diff --git a/packages/server/src/tests/jestEnv.ts b/packages/server/src/tests/jestEnv.ts index 34c51009aa..4763208c54 100644 --- a/packages/server/src/tests/jestEnv.ts +++ b/packages/server/src/tests/jestEnv.ts @@ -9,3 +9,4 @@ process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error" process.env.MOCK_REDIS = "1" process.env.PLATFORM_URL = "http://localhost:10000" process.env.REDIS_PASSWORD = "budibase" +process.env.BUDIBASE_VERSION = "0.0.0+jest" diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts index d3e92ea34d..6d236062a8 100644 --- a/packages/server/src/tests/utilities/structures.ts +++ b/packages/server/src/tests/utilities/structures.ts @@ -20,6 +20,7 @@ import { SourceName, Table, } from "@budibase/types" +const { BUILTIN_ROLE_IDS } = roles export function basicTable(): Table { return { @@ -322,8 +323,22 @@ export function basicUser(role: string) { } } -export function basicScreen() { - return createHomeScreen() +export function basicScreen(route: string = "/") { + return createHomeScreen({ + roleId: BUILTIN_ROLE_IDS.BASIC, + route, + }) +} + +export function powerScreen(route: string = "/") { + return createHomeScreen({ + roleId: BUILTIN_ROLE_IDS.POWER, + route, + }) +} + +export function customScreen(config: { roleId: string; route: 
string }) { + return createHomeScreen(config) } export function basicLayout() { diff --git a/packages/types/src/sdk/featureFlag.ts b/packages/types/src/sdk/featureFlag.ts index e3935bc7ee..53aa4842c4 100644 --- a/packages/types/src/sdk/featureFlag.ts +++ b/packages/types/src/sdk/featureFlag.ts @@ -1,8 +1,5 @@ export enum FeatureFlag { LICENSING = "LICENSING", - // Feature IDs in Posthog - PER_CREATOR_PER_USER_PRICE = "18873", - PER_CREATOR_PER_USER_PRICE_ALERT = "18530", } export interface TenantFeatureFlags { diff --git a/packages/types/src/sdk/licensing/billing.ts b/packages/types/src/sdk/licensing/billing.ts index bcbc7abd18..35f366c811 100644 --- a/packages/types/src/sdk/licensing/billing.ts +++ b/packages/types/src/sdk/licensing/billing.ts @@ -5,17 +5,10 @@ export interface Customer { currency: string | null | undefined } -export interface SubscriptionItems { - user: number | undefined - creator: number | undefined -} - export interface Subscription { amount: number - amounts: SubscriptionItems | undefined currency: string quantity: number - quantities: SubscriptionItems | undefined duration: PriceDuration cancelAt: number | null | undefined currentPeriodStart: number diff --git a/packages/types/src/sdk/licensing/plan.ts b/packages/types/src/sdk/licensing/plan.ts index 1604dfb8af..3e214a01ff 100644 --- a/packages/types/src/sdk/licensing/plan.ts +++ b/packages/types/src/sdk/licensing/plan.ts @@ -4,9 +4,7 @@ export enum PlanType { PRO = "pro", /** @deprecated */ TEAM = "team", - /** @deprecated */ PREMIUM = "premium", - PREMIUM_PLUS = "premium_plus", BUSINESS = "business", ENTERPRISE = "enterprise", } @@ -28,12 +26,10 @@ export interface AvailablePrice { currency: string duration: PriceDuration priceId: string - type?: string } export enum PlanModel { PER_USER = "perUser", - PER_CREATOR_PER_USER = "per_creator_per_user", DAY_PASS = "dayPass", } diff --git a/packages/types/src/sdk/migrations.ts b/packages/types/src/sdk/migrations.ts index 4667ed0c8f..0692b27f8e 100644 --- a/packages/types/src/sdk/migrations.ts +++ b/packages/types/src/sdk/migrations.ts @@ -46,7 +46,7 @@ export enum MigrationName { GLOBAL_INFO_SYNC_USERS = "global_info_sync_users", TABLE_SETTINGS_LINKS_TO_ACTIONS = "table_settings_links_to_actions", // increment this number to re-activate this migration - SYNC_QUOTAS = "sync_quotas_1", + SYNC_QUOTAS = "sync_quotas_2", } export interface MigrationDefinition { diff --git a/packages/worker/Dockerfile.v2 b/packages/worker/Dockerfile.v2 index 0d60db6fc5..4706ca155a 100644 --- a/packages/worker/Dockerfile.v2 +++ b/packages/worker/Dockerfile.v2 @@ -19,7 +19,7 @@ RUN chmod +x ./scripts/removeWorkspaceDependencies.sh WORKDIR /string-templates COPY packages/string-templates/package.json package.json RUN ../scripts/removeWorkspaceDependencies.sh package.json -RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true +RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 COPY packages/string-templates . 
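For context, a minimal sketch (assumed test code, not part of the diff) of seeding screens with the new structure helpers shown above; the import path, route and custom role ID are placeholders:

// Hypothetical test setup creating screens gated behind different minimum roles.
import { basicScreen, powerScreen, customScreen } from "../../tests/utilities/structures"

async function seedScreens(config: any) {
  const route = "/test"
  const basic = await config.createScreen(basicScreen(route))
  const power = await config.createScreen(powerScreen(route))
  const custom = await config.createScreen(
    customScreen({ roleId: "CUSTOM_ROLE", route })
  )
  await config.publish()
  return { basic, power, custom }
}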
@@ -30,7 +30,7 @@ RUN cd ../string-templates && yarn link && cd - && yarn link @budibase/string-te RUN ../scripts/removeWorkspaceDependencies.sh package.json -RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true +RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 # Remove unneeded data from file system to reduce image size RUN apk del .gyp \ && yarn cache clean @@ -50,4 +50,9 @@ ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR ENV ACCOUNT_PORTAL_URL=https://account.budibase.app +ARG BUDIBASE_VERSION +# Ensuring the version argument is sent +RUN test -n "$BUDIBASE_VERSION" +ENV BUDIBASE_VERSION=$BUDIBASE_VERSION + CMD ["./docker_run.sh"] diff --git a/packages/worker/package.json b/packages/worker/package.json index 1eee3f020f..205bf3309a 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -20,7 +20,7 @@ "run:docker": "node dist/index.js", "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js", "run:docker:cluster": "pm2-runtime start pm2.config.js", - "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION", + "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION", "dev:stack:init": "node ./scripts/dev/manage.js init", "dev:builder": "npm run dev:stack:init && nodemon", "dev:built": "yarn run dev:stack:init && yarn run run:docker", diff --git a/packages/worker/scripts/dev/manage.js b/packages/worker/scripts/dev/manage.js index ecf5defd47..9e6a57d4bf 100644 --- a/packages/worker/scripts/dev/manage.js +++ b/packages/worker/scripts/dev/manage.js @@ -31,6 +31,7 @@ async function init() { TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR", ENABLE_EMAIL_TEST_MODE: 1, HTTP_LOGGING: 0, + VERSION: "0.0.0+local", } let envFile = "" Object.keys(envFileJson).forEach(key => { diff --git a/packages/worker/src/tests/jestEnv.ts b/packages/worker/src/tests/jestEnv.ts index 9153676b8e..9bd258c43c 100644 --- a/packages/worker/src/tests/jestEnv.ts +++ b/packages/worker/src/tests/jestEnv.ts @@ -10,3 +10,4 @@ process.env.PLATFORM_URL = "http://localhost:10000" process.env.INTERNAL_API_KEY = "tet" process.env.DISABLE_ACCOUNT_PORTAL = "0" process.env.MOCK_REDIS = "1" +process.env.BUDIBASE_VERSION = "0.0.0+jest" diff --git a/qa-core/package.json b/qa-core/package.json index 3c789d89e6..d266ca9def 100644 --- a/qa-core/package.json +++ b/qa-core/package.json @@ -17,7 +17,7 @@ "test:notify": "node scripts/testResultsWebhook", "test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.", "test:cloud:qa": "yarn run test", - "test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\.", + "test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. 
\\.license\\.", "serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci", "serve": "start-server-and-test dev:built http://localhost:4001/health", "dev:built": "cd ../ && yarn dev:built" diff --git a/qa-core/src/account-api/api/AccountInternalAPI.ts b/qa-core/src/account-api/api/AccountInternalAPI.ts index 3813ad2c9e..f89bf556f2 100644 --- a/qa-core/src/account-api/api/AccountInternalAPI.ts +++ b/qa-core/src/account-api/api/AccountInternalAPI.ts @@ -1,5 +1,5 @@ import AccountInternalAPIClient from "./AccountInternalAPIClient" -import { AccountAPI, LicenseAPI, AuthAPI } from "./apis" +import { AccountAPI, LicenseAPI, AuthAPI, StripeAPI } from "./apis" import { State } from "../../types" export default class AccountInternalAPI { @@ -8,11 +8,13 @@ export default class AccountInternalAPI { auth: AuthAPI accounts: AccountAPI licenses: LicenseAPI + stripe: StripeAPI constructor(state: State) { this.client = new AccountInternalAPIClient(state) this.auth = new AuthAPI(this.client) this.accounts = new AccountAPI(this.client) this.licenses = new LicenseAPI(this.client) + this.stripe = new StripeAPI(this.client) } } diff --git a/qa-core/src/account-api/api/apis/LicenseAPI.ts b/qa-core/src/account-api/api/apis/LicenseAPI.ts index 44579f867b..b371f00f05 100644 --- a/qa-core/src/account-api/api/apis/LicenseAPI.ts +++ b/qa-core/src/account-api/api/apis/LicenseAPI.ts @@ -2,21 +2,19 @@ import AccountInternalAPIClient from "../AccountInternalAPIClient" import { Account, CreateOfflineLicenseRequest, + GetLicenseKeyResponse, GetOfflineLicenseResponse, UpdateLicenseRequest, } from "@budibase/types" import { Response } from "node-fetch" import BaseAPI from "./BaseAPI" import { APIRequestOpts } from "../../../types" - export default class LicenseAPI extends BaseAPI { client: AccountInternalAPIClient - constructor(client: AccountInternalAPIClient) { super() this.client = client } - async updateLicense( accountId: string, body: UpdateLicenseRequest, @@ -29,9 +27,7 @@ export default class LicenseAPI extends BaseAPI { }) }, opts) } - // TODO: Better approach for setting tenant id header - async createOfflineLicense( accountId: string, tenantId: string, @@ -51,7 +47,6 @@ export default class LicenseAPI extends BaseAPI { expect(response.status).toBe(opts.status ? opts.status : 201) return response } - async getOfflineLicense( accountId: string, tenantId: string, @@ -69,4 +64,74 @@ export default class LicenseAPI extends BaseAPI { expect(response.status).toBe(opts.status ? 
opts.status : 200) return [response, json] } + async getLicenseKey( + opts: { status?: number } = {} + ): Promise<[Response, GetLicenseKeyResponse]> { + const [response, json] = await this.client.get(`/api/license/key`) + expect(response.status).toBe(opts.status || 200) + return [response, json] + } + async activateLicense( + apiKey: string, + tenantId: string, + licenseKey: string, + opts: APIRequestOpts = { status: 200 } + ) { + return this.doRequest(() => { + return this.client.post(`/api/license/activate`, { + body: { + apiKey: apiKey, + tenantId: tenantId, + licenseKey: licenseKey, + }, + }) + }, opts) + } + async regenerateLicenseKey(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.post(`/api/license/key/regenerate`, {}) + }, opts) + } + + async getPlans(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.get(`/api/plans`) + }, opts) + } + + async updatePlan(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.put(`/api/license/plan`) + }, opts) + } + + async refreshAccountLicense( + accountId: string, + opts: { status?: number } = {} + ): Promise { + const [response, json] = await this.client.post( + `/api/accounts/${accountId}/license/refresh`, + { + internal: true, + } + ) + expect(response.status).toBe(opts.status ? opts.status : 201) + return response + } + + async getLicenseUsage(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.get(`/api/license/usage`) + }, opts) + } + + async licenseUsageTriggered( + opts: { status?: number } = {} + ): Promise { + const [response, json] = await this.client.post( + `/api/license/usage/triggered` + ) + expect(response.status).toBe(opts.status ? 
opts.status : 201) + return response + } } diff --git a/qa-core/src/account-api/api/apis/StripeAPI.ts b/qa-core/src/account-api/api/apis/StripeAPI.ts new file mode 100644 index 0000000000..c9c776e89b --- /dev/null +++ b/qa-core/src/account-api/api/apis/StripeAPI.ts @@ -0,0 +1,64 @@ +import AccountInternalAPIClient from "../AccountInternalAPIClient" +import BaseAPI from "./BaseAPI" +import { APIRequestOpts } from "../../../types" + +export default class StripeAPI extends BaseAPI { + client: AccountInternalAPIClient + + constructor(client: AccountInternalAPIClient) { + super() + this.client = client + } + + async createCheckoutSession( + priceId: string, + opts: APIRequestOpts = { status: 200 } + ) { + return this.doRequest(() => { + return this.client.post(`/api/stripe/checkout-session`, { + body: { priceId }, + }) + }, opts) + } + + async checkoutSuccess(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.post(`/api/stripe/checkout-success`) + }, opts) + } + + async createPortalSession( + stripeCustomerId: string, + opts: APIRequestOpts = { status: 200 } + ) { + return this.doRequest(() => { + return this.client.post(`/api/stripe/portal-session`, { + body: { stripeCustomerId }, + }) + }, opts) + } + + async linkStripeCustomer(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.post(`/api/stripe/link`) + }, opts) + } + + async getInvoices(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.get(`/api/stripe/invoices`) + }, opts) + } + + async getUpcomingInvoice(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.get(`/api/stripe/upcoming-invoice`) + }, opts) + } + + async getStripeCustomers(opts: APIRequestOpts = { status: 200 }) { + return this.doRequest(() => { + return this.client.get(`/api/stripe/customers`) + }, opts) + } +} diff --git a/qa-core/src/account-api/api/apis/index.ts b/qa-core/src/account-api/api/apis/index.ts index 1137ac3e36..5b0cf55110 100644 --- a/qa-core/src/account-api/api/apis/index.ts +++ b/qa-core/src/account-api/api/apis/index.ts @@ -1,3 +1,4 @@ export { default as AuthAPI } from "./AuthAPI" export { default as AccountAPI } from "./AccountAPI" export { default as LicenseAPI } from "./LicenseAPI" +export { default as StripeAPI } from "./StripeAPI" diff --git a/qa-core/src/account-api/tests/licensing/license.activate.spec.ts b/qa-core/src/account-api/tests/licensing/license.activate.spec.ts new file mode 100644 index 0000000000..96c6eaea2a --- /dev/null +++ b/qa-core/src/account-api/tests/licensing/license.activate.spec.ts @@ -0,0 +1,68 @@ +import TestConfiguration from "../../config/TestConfiguration" +import * as fixures from "../../fixtures" +import { Feature, Hosting } from "@budibase/types" + +describe("license activation", () => { + const config = new TestConfiguration() + + beforeAll(async () => { + await config.beforeAll() + }) + + afterAll(async () => { + await config.afterAll() + }) + + it("creates, activates and deletes online license - self host", async () => { + // Remove existing license key + await config.internalApi.license.deleteLicenseKey() + + // Verify license key not found + await config.internalApi.license.getLicenseKey({ status: 404 }) + + // Create self host account + const createAccountRequest = fixures.accounts.generateAccount({ + hosting: Hosting.SELF, + }) + const [createAccountRes, account] = + await config.accountsApi.accounts.create(createAccountRequest, { + 
autoVerify: true, + }) + + let licenseKey: string = " " + await config.doInNewState(async () => { + await config.loginAsAccount(createAccountRequest) + // Retrieve license key + const [res, body] = await config.accountsApi.licenses.getLicenseKey() + licenseKey = body.licenseKey + }) + + const accountId = account.accountId! + + // Update license to have paid feature + const [res, acc] = await config.accountsApi.licenses.updateLicense( + accountId, + { + overrides: { + features: [Feature.APP_BACKUPS], + }, + } + ) + + // Activate license key + await config.internalApi.license.activateLicenseKey({ licenseKey }) + + // Verify license updated with new feature + await config.doInNewState(async () => { + await config.loginAsAccount(createAccountRequest) + const [selfRes, body] = await config.api.accounts.self() + expect(body.license.features[0]).toBe("appBackups") + }) + + // Remove license key + await config.internalApi.license.deleteLicenseKey() + + // Verify license key not found + await config.internalApi.license.getLicenseKey({ status: 404 }) + }) +}) diff --git a/qa-core/src/internal-api/api/apis/LicenseAPI.ts b/qa-core/src/internal-api/api/apis/LicenseAPI.ts index 4c9d14c55e..ef322e069a 100644 --- a/qa-core/src/internal-api/api/apis/LicenseAPI.ts +++ b/qa-core/src/internal-api/api/apis/LicenseAPI.ts @@ -1,17 +1,19 @@ import { Response } from "node-fetch" import { + ActivateLicenseKeyRequest, ActivateOfflineLicenseTokenRequest, + GetLicenseKeyResponse, GetOfflineIdentifierResponse, GetOfflineLicenseTokenResponse, } from "@budibase/types" import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient" import BaseAPI from "./BaseAPI" +import { APIRequestOpts } from "../../../types" export default class LicenseAPI extends BaseAPI { constructor(client: BudibaseInternalAPIClient) { super(client) } - async getOfflineLicenseToken( opts: { status?: number } = {} ): Promise<[Response, GetOfflineLicenseTokenResponse]> { @@ -21,19 +23,16 @@ export default class LicenseAPI extends BaseAPI { ) return [response, body] } - async deleteOfflineLicenseToken(): Promise<[Response]> { const [response] = await this.del(`/global/license/offline`, 204) return [response] } - async activateOfflineLicenseToken( body: ActivateOfflineLicenseTokenRequest ): Promise<[Response]> { const [response] = await this.post(`/global/license/offline`, body) return [response] } - async getOfflineIdentifier(): Promise< [Response, GetOfflineIdentifierResponse] > { @@ -42,4 +41,23 @@ export default class LicenseAPI extends BaseAPI { ) return [response, body] } + + async getLicenseKey( + opts: { status?: number } = {} + ): Promise<[Response, GetLicenseKeyResponse]> { + const [response, body] = await this.get(`/global/license/key`, opts.status) + return [response, body] + } + + async activateLicenseKey( + body: ActivateLicenseKeyRequest + ): Promise<[Response]> { + const [response] = await this.post(`/global/license/key`, body) + return [response] + } + + async deleteLicenseKey(): Promise<[Response]> { + const [response] = await this.del(`/global/license/key`, 204) + return [response] + } } diff --git a/scripts/build-single-image.sh b/scripts/build-single-image.sh index cb2c29deac..ed3d9a8ed6 100755 --- a/scripts/build-single-image.sh +++ b/scripts/build-single-image.sh @@ -1,3 +1,4 @@ #!/bin/bash yarn build --scope @budibase/server --scope @budibase/worker -docker build -f hosting/single/Dockerfile.v2 -t budibase:latest . 
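For context, a minimal sketch (assumed qa-core test code, not part of the diff) of exercising the new StripeAPI wrapper registered on AccountInternalAPI above; it assumes the test config exposes that client as accountsApi, as in license.activate.spec.ts, and the price ID is a placeholder:

// Hypothetical smoke test for the Stripe endpoints wrapped by StripeAPI.
async function exerciseStripeEndpoints(config: any, priceId: string) {
  await config.accountsApi.stripe.createCheckoutSession(priceId)
  await config.accountsApi.stripe.checkoutSuccess()
  const [, invoices] = await config.accountsApi.stripe.getInvoices()
  return invoices
}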
+version=$(./scripts/getCurrentVersion.sh) +docker build -f hosting/single/Dockerfile.v2 -t budibase:latest --build-arg BUDIBASE_VERSION=$version . diff --git a/scripts/updateWorkspaceVersions.V2.sh b/scripts/updateWorkspaceVersions.V2.sh new file mode 100755 index 0000000000..634bcbcfb0 --- /dev/null +++ b/scripts/updateWorkspaceVersions.V2.sh @@ -0,0 +1,8 @@ +#!/bin/bash +version=$1 +echo "Setting version $version" +yarn lerna exec "yarn version --no-git-tag-version --new-version=$version" +echo "Updating dependencies" +node scripts/syncLocalDependencies.js $version +echo "Syncing yarn workspace" +yarn