diff --git a/.github/workflows/release-singleimage-test.yml b/.github/workflows/release-singleimage-test.yml
index 79b9afdd44..c3a14226ce 100644
--- a/.github/workflows/release-singleimage-test.yml
+++ b/.github/workflows/release-singleimage-test.yml
@@ -54,6 +54,7 @@ jobs:
           push: true
           pull: true
           platforms: linux/amd64,linux/arm64
+          build-args: BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:test
           file: ./hosting/single/Dockerfile.v2
           cache-from: type=registry,ref=budibase/budibase-test:test
@@ -64,6 +65,8 @@ jobs:
           context: .
           push: true
           platforms: linux/amd64
-          build-args: TARGETBUILD=aas
+          build-args: |
+            TARGETBUILD=aas
+            BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:aas
           file: ./hosting/single/Dockerfile.v2
diff --git a/hosting/docker-compose.build.yaml b/hosting/docker-compose.build.yaml
index e192620b59..7ead001a1c 100644
--- a/hosting/docker-compose.build.yaml
+++ b/hosting/docker-compose.build.yaml
@@ -7,6 +7,8 @@ services:
     build:
       context: ..
       dockerfile: packages/server/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbapps
     environment:
       SELF_HOSTED: 1
@@ -30,13 +32,13 @@ services:
     depends_on:
       - worker-service
       - redis-service
-    # volumes:
-    #   - /some/path/to/plugins:/plugins

   worker-service:
     build:
       context: ..
       dockerfile: packages/worker/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbworker
     environment:
       SELF_HOSTED: 1
diff --git a/hosting/single/Dockerfile.v2 b/hosting/single/Dockerfile.v2
index a983f10044..5b07a51b27 100644
--- a/hosting/single/Dockerfile.v2
+++ b/hosting/single/Dockerfile.v2
@@ -118,6 +118,10 @@ EXPOSE 443
 EXPOSE 2222
 VOLUME /data

+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION

 HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
diff --git a/lerna.json b/lerna.json
index 75db3b9b55..11e4e7627a 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "2.11.40",
+  "version": "2.11.43",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index c331d791a6..e64c116663 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }

-  async put(doc: any, writeRateMs: number = this.writeRateMs) {
-    return put(this.db, doc, writeRateMs)
+  async put(doc: any) {
+    return put(this.db, doc, this.writeRateMs)
   }

   async get(id: string) {
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 3894bdd0f7..ed882fe96a 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -75,12 +75,12 @@ function getPackageJsonFields(): {
     const content = readFileSync(packageJsonFile!, "utf-8")
     const parsedContent = JSON.parse(content)
     return {
-      VERSION: parsedContent.version,
+      VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
       SERVICE_NAME: parsedContent.name,
     }
   } catch {
     // throwing an error here is confusing/causes backend-core to be hard to import
-    return { VERSION: "", SERVICE_NAME: "" }
+    return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
   }
 }
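The environment.ts change above is a simple fallback chain: the `BUDIBASE_VERSION` value injected as a Docker build arg (and surfaced as an env var by the Dockerfile) wins over the version read from package.json. A minimal sketch of the resolution order, assuming a parsed package.json shape of `{ version?: string }` (the `resolveVersion` helper is illustrative, not part of the diff):

```ts
// Sketch: env override first, then package.json, then the empty-string fallback
// used when package.json cannot be read.
function resolveVersion(pkg: { version?: string }): string {
  return process.env.BUDIBASE_VERSION || pkg.version || ""
}
```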
diff --git a/packages/backend-core/src/objectStore/buckets/app.ts b/packages/backend-core/src/objectStore/buckets/app.ts
index be9fddeaa6..43bc965c65 100644
--- a/packages/backend-core/src/objectStore/buckets/app.ts
+++ b/packages/backend-core/src/objectStore/buckets/app.ts
@@ -1,37 +1,50 @@
 import env from "../../environment"
 import * as objectStore from "../objectStore"
 import * as cloudfront from "../cloudfront"
+import qs from "querystring"
+import { DEFAULT_TENANT_ID, getTenantId } from "../../context"
+
+export function clientLibraryPath(appId: string) {
+  return `${objectStore.sanitizeKey(appId)}/budibase-client.js`
+}

 /**
- * In production the client library is stored in the object store, however in development
- * we use the symlinked version produced by lerna, located in node modules. We link to this
- * via a specific endpoint (under /api/assets/client).
- * @param appId In production we need the appId to look up the correct bucket, as the
- * version of the client lib may differ between apps.
- * @param version The version to retrieve.
- * @return The URL to be inserted into appPackage response or server rendered
- * app index file.
+ * Previously we used to serve the client library directly from Cloudfront, however
+ * due to issues with the domain we were unable to continue doing this - keeping
+ * incase we are able to switch back to CDN path again in future.
  */
-export const clientLibraryUrl = (appId: string, version: string) => {
-  if (env.isProd()) {
-    let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
-    if (env.CLOUDFRONT_CDN) {
-      // append app version to bust the cache
-      if (version) {
-        file += `?v=${version}`
-      }
-      // don't need to use presigned for client with cloudfront
-      // file is public
-      return cloudfront.getUrl(file)
-    } else {
-      return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
+export function clientLibraryCDNUrl(appId: string, version: string) {
+  let file = clientLibraryPath(appId)
+  if (env.CLOUDFRONT_CDN) {
+    // append app version to bust the cache
+    if (version) {
+      file += `?v=${version}`
     }
+    // don't need to use presigned for client with cloudfront
+    // file is public
+    return cloudfront.getUrl(file)
   } else {
-    return `/api/assets/client`
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
 }

-export const getAppFileUrl = (s3Key: string) => {
+export function clientLibraryUrl(appId: string, version: string) {
+  let tenantId, qsParams: { appId: string; version: string; tenantId?: string }
+  try {
+    tenantId = getTenantId()
+  } finally {
+    qsParams = {
+      appId,
+      version,
+    }
+  }
+  if (tenantId && tenantId !== DEFAULT_TENANT_ID) {
+    qsParams.tenantId = tenantId
+  }
+  return `/api/assets/client?${qs.encode(qsParams)}`
+}
+
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
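After this change, `clientLibraryUrl` always routes through the backend endpoint and encodes its parameters as a query string. A sketch of the URL shape it produces, assuming an active tenant of `"acme"` (a hypothetical value):

```ts
import qs from "querystring"

// qs.encode is an alias of qs.stringify in Node's querystring module; keys
// are emitted in insertion order.
const params = qs.encode({ appId: "app_123", version: "2.0.0", tenantId: "acme" })
const url = `/api/assets/client?${params}`
// => "/api/assets/client?appId=app_123&version=2.0.0&tenantId=acme"
```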
diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts
index f7721afb23..6f1b7116ae 100644
--- a/packages/backend-core/src/objectStore/buckets/plugins.ts
+++ b/packages/backend-core/src/objectStore/buckets/plugins.ts
@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"

 // URLS

-export const enrichPluginURLs = (plugins: Plugin[]) => {
+export function enrichPluginURLs(plugins: Plugin[]) {
   if (!plugins || !plugins.length) {
     return []
   }
@@ -17,12 +17,12 @@
   })
 }

-const getPluginJSUrl = (plugin: Plugin) => {
+function getPluginJSUrl(plugin: Plugin) {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }

-const getPluginIconUrl = (plugin: Plugin): string | undefined => {
+function getPluginIconUrl(plugin: Plugin): string | undefined {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -30,7 +30,7 @@
   return getPluginUrl(s3Key)
 }

-const getPluginUrl = (s3Key: string) => {
+function getPluginUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
@@ -40,11 +40,11 @@

 // S3 KEYS

-export const getPluginJSKey = (plugin: Plugin) => {
+export function getPluginJSKey(plugin: Plugin) {
   return getPluginS3Key(plugin, "plugin.min.js")
 }

-export const getPluginIconKey = (plugin: Plugin) => {
+export function getPluginIconKey(plugin: Plugin) {
   // stored iconUrl is deprecated - hardcode to icon.svg in this case
   const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
   if (!iconFileName) {
@@ -53,12 +53,12 @@
   return getPluginS3Key(plugin, iconFileName)
 }

-const getPluginS3Key = (plugin: Plugin, fileName: string) => {
+function getPluginS3Key(plugin: Plugin, fileName: string) {
   const s3Key = getPluginS3Dir(plugin.name)
   return `${s3Key}/${fileName}`
 }

-export const getPluginS3Dir = (pluginName: string) => {
+export function getPluginS3Dir(pluginName: string) {
   let s3Key = `${pluginName}`
   if (env.MULTI_TENANCY) {
     const tenantId = context.getTenantId()
diff --git a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
index aaa07ec9d3..cbbbee6255 100644
--- a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
+++ b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
@@ -1,5 +1,4 @@
 import * as app from "../app"
-import { getAppFileUrl } from "../app"
 import { testEnv } from "../../../../tests/extra"

 describe("app", () => {
@@ -7,6 +6,15 @@ describe("app", () => {
     testEnv.nodeJest()
   })

+  function baseCheck(url: string, tenantId?: string) {
+    expect(url).toContain("/api/assets/client")
+    if (tenantId) {
+      expect(url).toContain(`tenantId=${tenantId}`)
+    }
+    expect(url).toContain("appId=app_123")
+    expect(url).toContain("version=2.0.0")
+  }
+
   describe("clientLibraryUrl", () => {
     function getClientUrl() {
       return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
@@ -20,31 +28,19 @@
     it("gets url in dev", () => {
       testEnv.nodeDev()
       const url = getClientUrl()
-      expect(url).toBe("/api/assets/client")
-    })
-
-    it("gets url with embedded minio", () => {
-      testEnv.withMinio()
-      const url = getClientUrl()
-      expect(url).toBe(
-        "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-      )
+      baseCheck(url)
     })

     it("gets url with custom S3", () => {
       testEnv.withS3()
       const url = getClientUrl()
-      expect(url).toBe(
-        "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-      )
+      baseCheck(url)
     })

     it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
       const url = getClientUrl()
-      expect(url).toBe(
-        "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
-      )
+      baseCheck(url)
     })
   })

@@ -57,7 +53,7 @@
       testEnv.nodeDev()
       await testEnv.withTenant(tenantId => {
         const url = getClientUrl()
-        expect(url).toBe("/api/assets/client")
+        baseCheck(url, tenantId)
       })
     })

@@ -65,9 +61,7 @@
       await testEnv.withTenant(tenantId => {
         testEnv.withMinio()
         const url = getClientUrl()
-        expect(url).toBe(
-          "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-        )
+        baseCheck(url, tenantId)
       })
     })

@@ -75,9 +69,7 @@
       await testEnv.withTenant(tenantId => {
         testEnv.withS3()
         const url = getClientUrl()
-        expect(url).toBe(
-          "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-        )
+        baseCheck(url, tenantId)
       })
     })

@@ -85,9 +77,7 @@
       await testEnv.withTenant(tenantId => {
         testEnv.withCloudfront()
         const url = getClientUrl()
-        expect(url).toBe(
-          "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
-        )
+        baseCheck(url, tenantId)
       })
     })
   })
diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts
index c36a09915e..76d2dd6689 100644
--- a/packages/backend-core/src/objectStore/objectStore.ts
+++ b/packages/backend-core/src/objectStore/objectStore.ts
@@ -1,6 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
 import AWS from "aws-sdk"
-import stream from "stream"
+import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
 import zlib from "zlib"
@@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) {
  * @return an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
-export const ObjectStore = (
+export function ObjectStore(
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
-) => {
+) {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -104,7 +104,7 @@
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export const makeSureBucketExists = async (client: any, bucketName: string) => {
+export async function makeSureBucketExists(client: any, bucketName: string) {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -139,13 +139,13 @@
  * Uploads the contents of a file given the required parameters, useful when
  * temp files in use (for example file uploaded as an attachment).
  */
-export const upload = async ({
+export async function upload({
   bucket: bucketName,
   filename,
   path,
   type,
   metadata,
-}: UploadParams) => {
+}: UploadParams) {
   const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)
@@ -180,12 +180,12 @@
  * Similar to the upload function but can be used to send a file stream
  * through to the object store.
  */
-export const streamUpload = async (
+export async function streamUpload(
   bucketName: string,
   filename: string,
   stream: any,
   extra = {}
-) => {
+) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
@@ -215,7 +215,7 @@
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export const retrieve = async (bucketName: string, filepath: string) => {
+export async function retrieve(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -230,7 +230,7 @@
   }
 }

-export const listAllObjects = async (bucketName: string, path: string) => {
+export async function listAllObjects(bucketName: string, path: string) {
   const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
     return objectStore
@@ -261,11 +261,11 @@
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export const getPresignedUrl = (
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds: number = 3600
-) => {
+) {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -291,7 +291,7 @@
 /**
  * Same as retrieval function but puts to a temporary file.
  */
-export const retrieveToTmp = async (bucketName: string, filepath: string) => {
+export async function retrieveToTmp(bucketName: string, filepath: string) {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
@@ -300,7 +300,7 @@
   return outputPath
 }

-export const retrieveDirectory = async (bucketName: string, path: string) => {
+export async function retrieveDirectory(bucketName: string, path: string) {
   let writePath = join(budibaseTempDir(), v4())
   fs.mkdirSync(writePath)
   const objects = await listAllObjects(bucketName, path)
@@ -324,7 +324,7 @@
 /**
  * Delete a single file.
  */
-export const deleteFile = async (bucketName: string, filepath: string) => {
+export async function deleteFile(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -334,7 +334,7 @@
   return objectStore.deleteObject(params).promise()
 }

-export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
+export async function deleteFiles(bucketName: string, filepaths: string[]) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -349,10 +349,10 @@
 /**
  * Delete a path, including everything within.
  */
-export const deleteFolder = async (
+export async function deleteFolder(
   bucketName: string,
   folder: string
-): Promise<any> => {
+): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
   const client = ObjectStore(bucketName)
@@ -383,11 +383,11 @@
   }
 }

-export const uploadDirectory = async (
+export async function uploadDirectory(
   bucketName: string,
   localPath: string,
   bucketPath: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
   const files = fs.readdirSync(localPath, { withFileTypes: true })
@@ -404,11 +404,11 @@
   return files
 }

-export const downloadTarballDirect = async (
+export async function downloadTarballDirect(
   url: string,
   path: string,
   headers = {}
-) => {
+) {
   path = sanitizeKey(path)
   const response = await fetch(url, { headers })
   if (!response.ok) {
@@ -418,11 +418,11 @@
   await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
 }

-export const downloadTarball = async (
+export async function downloadTarball(
   url: string,
   bucketName: string,
   path: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)
@@ -438,3 +438,17 @@
   // return the temporary path incase there is a use for it
   return tmpPath
 }
+
+export async function getReadStream(
+  bucketName: string,
+  path: string
+): Promise<Readable> {
+  bucketName = sanitizeBucket(bucketName)
+  path = sanitizeKey(path)
+  const client = ObjectStore(bucketName)
+  const params = {
+    Bucket: bucketName,
+    Key: path,
+  }
+  return client.getObject(params).createReadStream()
+}
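A possible consumer of the new `getReadStream` helper, streaming an object to any writable destination without buffering it in memory (the `pipeObject` wrapper and its arguments are hypothetical; `getReadStream` is declared here only so the sketch stands alone):

```ts
import { Readable } from "stream"
import { pipeline } from "stream/promises"

// Assumption: in real code this is imported from the objectStore module above.
declare function getReadStream(bucketName: string, path: string): Promise<Readable>

// Sketch: pipeline() propagates errors and handles stream cleanup.
async function pipeObject(bucket: string, key: string, dest: NodeJS.WritableStream) {
  const readable = await getReadStream(bucket, key)
  await pipeline(readable, dest)
}
```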
diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts
index 8bb6300d4e..a2539e836e 100644
--- a/packages/backend-core/src/users/db.ts
+++ b/packages/backend-core/src/users/db.ts
@@ -25,17 +25,12 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
-  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"

-type QuotaUpdateFn = (
-  change: number,
-  creatorsChange: number,
-  cb?: () => Promise<any>
-) => Promise<any>
+type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -164,14 +159,14 @@ export class UserDB {
     }
   }

-  static async getUsersByAppAccess(appId?: string) {
-    const opts: any = {
+  static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
+    const params: any = {
       include_docs: true,
-      limit: 50,
+      limit: opts.limit || 50,
     }
     let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
-      appId,
-      opts
+      opts.appId,
+      params
     )
     return response
   }
@@ -250,8 +245,7 @@
     }

     const change = dbUser ? 0 : 1 // no change if there is existing user
-    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
-    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
+    return UserDB.quotas.addUsers(change, async () => {
       await validateUniqueUser(email, tenantId)

       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -313,7 +307,6 @@
     let usersToSave: any[] = []
     let newUsers: any[] = []
-    let newCreators: any[] = []

     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -334,66 +327,59 @@
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
-      if (isCreator(newUser)) {
-        newCreators.push(newUser)
-      }
     }

     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(
-      newUsers.length,
-      newCreators.length,
-      async () => {
-        // create the promises array that will be called by bulkDocs
-        newUsers.forEach((user: any) => {
-          usersToSave.push(
-            UserDB.buildUser(
-              user,
-              {
-                hashPassword: true,
-                requirePassword: user.requirePassword,
-              },
-              tenantId,
-              undefined, // no dbUser
-              account
-            )
+    return UserDB.quotas.addUsers(newUsers.length, async () => {
+      // create the promises array that will be called by bulkDocs
+      newUsers.forEach((user: any) => {
+        usersToSave.push(
+          UserDB.buildUser(
+            user,
+            {
+              hashPassword: true,
+              requirePassword: user.requirePassword,
+            },
+            tenantId,
+            undefined, // no dbUser
+            account
           )
-        })
+        )
+      })

-        const usersToBulkSave = await Promise.all(usersToSave)
-        await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
+      const usersToBulkSave = await Promise.all(usersToSave)
+      await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)

-        // Post-processing of bulk added users, e.g. events and cache operations
-        for (const user of usersToBulkSave) {
-          // TODO: Refactor to bulk insert users into the info db
-          // instead of relying on looping tenant creation
-          await platform.users.addUser(tenantId, user._id, user.email)
-          await eventHelpers.handleSaveEvents(user, undefined)
-        }
-
-        const saved = usersToBulkSave.map(user => {
-          return {
-            _id: user._id,
-            email: user.email,
-          }
-        })
-
-        // now update the groups
-        if (Array.isArray(saved) && groups) {
-          const groupPromises = []
-          const createdUserIds = saved.map(user => user._id)
-          for (let groupId of groups) {
-            groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
-          }
-          await Promise.all(groupPromises)
-        }
-
-        return {
-          successful: saved,
-          unsuccessful,
-        }
+      // Post-processing of bulk added users, e.g. events and cache operations
+      for (const user of usersToBulkSave) {
+        // TODO: Refactor to bulk insert users into the info db
+        // instead of relying on looping tenant creation
+        await platform.users.addUser(tenantId, user._id, user.email)
+        await eventHelpers.handleSaveEvents(user, undefined)
       }
-    )
+
+      const saved = usersToBulkSave.map(user => {
+        return {
+          _id: user._id,
+          email: user.email,
+        }
+      })
+
+      // now update the groups
+      if (Array.isArray(saved) && groups) {
+        const groupPromises = []
+        const createdUserIds = saved.map(user => user._id)
+        for (let groupId of groups) {
+          groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
+        }
+        await Promise.all(groupPromises)
+      }
+
+      return {
+        successful: saved,
+        unsuccessful,
+      }
+    })
   }

   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -433,12 +419,11 @@
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
-    const creatorsToDelete = usersToDelete.filter(isCreator)
+    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
-    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)

     // Build Response
     // index users by id
@@ -487,8 +472,7 @@
     await db.remove(userId, dbUser._rev)

-    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
-    await UserDB.quotas.removeUsers(1, creatorsToDelete)
+    await UserDB.quotas.removeUsers(1)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })
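`getUsersByAppAccess` now takes a single options object rather than a bare `appId`, which keeps the new `limit` parameter optional at call sites. The same options-object-with-defaults pattern in a standalone sketch (`AccessOpts` and `buildParams` are illustrative names, not part of the diff):

```ts
type AccessOpts = { appId?: string; limit?: number }

// Mirrors the diff: a caller-supplied limit overrides the default of 50.
function buildParams(opts: AccessOpts) {
  return { include_docs: true, limit: opts.limit || 50 }
}

buildParams({ appId: "app_123" })             // => { include_docs: true, limit: 50 }
buildParams({ appId: "app_123", limit: 100 }) // => { include_docs: true, limit: 100 }
```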
diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts
index a64997224e..6237c23972 100644
--- a/packages/backend-core/src/users/users.ts
+++ b/packages/backend-core/src/users/users.ts
@@ -14,11 +14,12 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
+  DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
 import * as context from "../context"
@@ -241,12 +242,14 @@ export const paginatedUsers = async ({
   bookmark,
   query,
   appId,
+  limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
+  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
   // get one extra document, to have the next page
-  const opts: any = {
+  const opts: DatabaseQueryOpts = {
     include_docs: true,
-    limit: PAGE_LIMIT + 1,
+    limit: pageLimit,
   }
   // add a startkey if the page was specified (anchor)
   if (bookmark) {
@@ -269,7 +272,7 @@
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, PAGE_LIMIT, {
+  return pagination(userList, pageLimit, {
     paginate: true,
     property,
     getKey,
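The pagination change fetches one extra document beyond the requested page size; the presence of that extra row is what signals a next page. The trick in isolation (a generic sketch, not the `pagination` helper itself):

```ts
// Ask the database for limit + 1 rows; receiving all of them means at least
// one more page exists, and the extra row is dropped from the response.
function page<T>(rows: T[], limit: number): { data: T[]; hasNextPage: boolean } {
  const hasNextPage = rows.length > limit
  return { data: hasNextPage ? rows.slice(0, limit) : rows, hasNextPage }
}
```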
diff --git a/packages/backend-core/tests/core/users/users.spec.js b/packages/backend-core/tests/core/users/users.spec.js
deleted file mode 100644
index ae7109344a..0000000000
--- a/packages/backend-core/tests/core/users/users.spec.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const _ = require('lodash/fp')
-const {structures} = require("../../../tests")
-
-jest.mock("../../../src/context")
-jest.mock("../../../src/db")
-
-const context = require("../../../src/context")
-const db = require("../../../src/db")
-
-const {getCreatorCount} = require('../../../src/users/users')
-
-describe("Users", () => {
-
-  let getGlobalDBMock
-  let getGlobalUserParamsMock
-  let paginationMock
-
-  beforeEach(() => {
-    jest.resetAllMocks()
-
-    getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
-    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
-    paginationMock = jest.spyOn(db, "pagination")
-  })
-
-  it("Retrieves the number of creators", async () => {
-    const getUsers = (offset, limit, creators = false) => {
-      const range = _.range(offset, limit)
-      const opts = creators ? {builder: {global: true}} : undefined
-      return range.map(() => structures.users.user(opts))
-    }
-    const page1Data = getUsers(0, 8)
-    const page2Data = getUsers(8, 12, true)
-    getGlobalDBMock.mockImplementation(() => ({
-      name : "fake-db",
-      allDocs: () => ({
-        rows: [...page1Data, ...page2Data]
-      })
-    }))
-    paginationMock.mockImplementationOnce(() => ({
-      data: page1Data,
-      hasNextPage: true,
-      nextPage: "1"
-    }))
-    paginationMock.mockImplementation(() => ({
-      data: page2Data,
-      hasNextPage: false,
-      nextPage: undefined
-    }))
-    const creatorsCount = await getCreatorCount()
-    expect(creatorsCount).toBe(4)
-    expect(paginationMock).toHaveBeenCalledTimes(2)
-  })
-})
diff --git a/packages/backend-core/tests/core/utilities/structures/licenses.ts b/packages/backend-core/tests/core/utilities/structures/licenses.ts
index bb452f9ad5..0e34f2e9bb 100644
--- a/packages/backend-core/tests/core/utilities/structures/licenses.ts
+++ b/packages/backend-core/tests/core/utilities/structures/licenses.ts
@@ -123,10 +123,6 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
-    amounts: {
-      user: 10000,
-      creator: 0,
-    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -135,10 +131,6 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
-    quantities: {
-      user: 0,
-      creator: 0,
-    },
     status: "active",
   }
 }
diff --git a/packages/bbui/src/Table/Table.svelte b/packages/bbui/src/Table/Table.svelte
index 529d1144ee..2610d6106c 100644
--- a/packages/bbui/src/Table/Table.svelte
+++ b/packages/bbui/src/Table/Table.svelte
@@ -106,6 +106,13 @@
           name: fieldName,
         }
       }
+
+      // Delete numeric only widths as these are grid widths and should be
+      // ignored
+      const width = fixedSchema[fieldName].width
+      if (width != null && `${width}`.trim().match(/^[0-9]+$/)) {
+        delete fixedSchema[fieldName].width
+      }
     })
     return fixedSchema
   }
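The width check added above distinguishes grid widths (purely numeric) from CSS widths. The same predicate in isolation (the `isGridWidth` name is illustrative):

```ts
// Purely numeric widths come from the grid component and are dropped;
// CSS-style widths such as "200px" or "20%" are kept.
const isGridWidth = (width: unknown): boolean =>
  width != null && `${width}`.trim().match(/^[0-9]+$/) != null

isGridWidth(200)     // true  -> width deleted from the schema
isGridWidth("200px") // false -> width kept
isGridWidth(null)    // false
```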
diff --git a/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte b/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte
index 8e3079101a..9e53f7f1cf 100644
--- a/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte
+++ b/packages/builder/src/components/design/settings/controls/ColumnEditor/CellDrawer.svelte
@@ -16,7 +16,11 @@
-
+