Merge remote-tracking branch 'origin/master' into fix/view1-sortable-column-config

Dean committed 2023-10-25 09:15:37 +01:00
commit 66fda3db09
51 changed files with 600 additions and 352 deletions

View File

@@ -54,6 +54,7 @@ jobs:
           push: true
           pull: true
           platforms: linux/amd64,linux/arm64
+          build-args: BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:test
           file: ./hosting/single/Dockerfile.v2
           cache-from: type=registry,ref=budibase/budibase-test:test
@@ -64,6 +65,8 @@ jobs:
           context: .
           push: true
           platforms: linux/amd64
-          build-args: TARGETBUILD=aas
+          build-args: |
+            TARGETBUILD=aas
+            BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:aas
           file: ./hosting/single/Dockerfile.v2

View File

@@ -7,6 +7,8 @@ services:
     build:
       context: ..
       dockerfile: packages/server/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbapps
     environment:
       SELF_HOSTED: 1
@@ -30,13 +32,13 @@ services:
     depends_on:
       - worker-service
       - redis-service
-    # volumes:
-    #   - /some/path/to/plugins:/plugins
   worker-service:
     build:
       context: ..
       dockerfile: packages/worker/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbworker
     environment:
       SELF_HOSTED: 1

View File

@@ -118,6 +118,10 @@ EXPOSE 443
 EXPOSE 2222
 VOLUME /data
+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
 HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.40",
+  "version": "2.11.43",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View File

@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
-  async put(doc: any, writeRateMs: number = this.writeRateMs) {
-    return put(this.db, doc, writeRateMs)
+  async put(doc: any) {
+    return put(this.db, doc, this.writeRateMs)
   }
   async get(id: string) {
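
For reference, a rough sketch of the calling pattern after this change: the write rate is now fixed when the Writethrough is constructed and can no longer be overridden per put call. The import path, the db handle and the 30s value below are illustrative, not taken from this diff.

    import { Writethrough } from "@budibase/backend-core/cache" // illustrative import path
    import { Database } from "@budibase/types"

    declare const db: Database // some app database handle

    // the write rate can only be set once, when the cache is created
    const cache = new Writethrough(db, 30000)
    // every put now uses the constructor's writeRateMs
    await cache.put({ _id: "app_metadata", theme: "dark" })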

View File

@@ -75,12 +75,12 @@ function getPackageJsonFields(): {
     const content = readFileSync(packageJsonFile!, "utf-8")
     const parsedContent = JSON.parse(content)
     return {
-      VERSION: parsedContent.version,
+      VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
       SERVICE_NAME: parsedContent.name,
     }
   } catch {
     // throwing an error here is confusing/causes backend-core to be hard to import
-    return { VERSION: "", SERVICE_NAME: "" }
+    return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
   }
 }

View File

@@ -1,20 +1,20 @@
 import env from "../../environment"
 import * as objectStore from "../objectStore"
 import * as cloudfront from "../cloudfront"
+import qs from "querystring"
+import { DEFAULT_TENANT_ID, getTenantId } from "../../context"
+
+export function clientLibraryPath(appId: string) {
+  return `${objectStore.sanitizeKey(appId)}/budibase-client.js`
+}
 /**
- * In production the client library is stored in the object store, however in development
- * we use the symlinked version produced by lerna, located in node modules. We link to this
- * via a specific endpoint (under /api/assets/client).
- * @param appId In production we need the appId to look up the correct bucket, as the
- * version of the client lib may differ between apps.
- * @param version The version to retrieve.
- * @return The URL to be inserted into appPackage response or server rendered
- * app index file.
+ * Previously we used to serve the client library directly from Cloudfront, however
+ * due to issues with the domain we were unable to continue doing this - keeping
+ * incase we are able to switch back to CDN path again in future.
 */
-export const clientLibraryUrl = (appId: string, version: string) => {
-  if (env.isProd()) {
-    let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
+export function clientLibraryCDNUrl(appId: string, version: string) {
+  let file = clientLibraryPath(appId)
   if (env.CLOUDFRONT_CDN) {
     // append app version to bust the cache
     if (version) {
@@ -26,12 +26,25 @@ export const clientLibraryUrl = (appId: string, version: string) => {
   } else {
     return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
-  } else {
-    return `/api/assets/client`
-  }
 }
-export const getAppFileUrl = (s3Key: string) => {
+export function clientLibraryUrl(appId: string, version: string) {
+  let tenantId, qsParams: { appId: string; version: string; tenantId?: string }
+  try {
+    tenantId = getTenantId()
+  } finally {
+    qsParams = {
+      appId,
+      version,
+    }
+  }
+  if (tenantId && tenantId !== DEFAULT_TENANT_ID) {
+    qsParams.tenantId = tenantId
+  }
+  return `/api/assets/client?${qs.encode(qsParams)}`
+}
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
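
For reference, a sketch of the URLs the reworked clientLibraryUrl now produces (the tenant name and parameter ordering are illustrative; qs.encode determines the exact encoding):

    // inside a non-default tenant, e.g. "acme"
    clientLibraryUrl("app_123", "2.0.0")
    // -> "/api/assets/client?appId=app_123&version=2.0.0&tenantId=acme"

    // in the default tenant, or when no tenant context is available, tenantId is omitted
    clientLibraryUrl("app_123", "2.0.0")
    // -> "/api/assets/client?appId=app_123&version=2.0.0"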

View File

@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"
 // URLS
-export const enrichPluginURLs = (plugins: Plugin[]) => {
+export function enrichPluginURLs(plugins: Plugin[]) {
   if (!plugins || !plugins.length) {
     return []
   }
@@ -17,12 +17,12 @@ export const enrichPluginURLs = (plugins: Plugin[]) => {
   })
 }
-const getPluginJSUrl = (plugin: Plugin) => {
+function getPluginJSUrl(plugin: Plugin) {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }
-const getPluginIconUrl = (plugin: Plugin): string | undefined => {
+function getPluginIconUrl(plugin: Plugin): string | undefined {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -30,7 +30,7 @@ const getPluginIconUrl = (plugin: Plugin): string | undefined => {
   return getPluginUrl(s3Key)
 }
-const getPluginUrl = (s3Key: string) => {
+function getPluginUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
@@ -40,11 +40,11 @@ const getPluginUrl = (s3Key: string) => {
 // S3 KEYS
-export const getPluginJSKey = (plugin: Plugin) => {
+export function getPluginJSKey(plugin: Plugin) {
   return getPluginS3Key(plugin, "plugin.min.js")
 }
-export const getPluginIconKey = (plugin: Plugin) => {
+export function getPluginIconKey(plugin: Plugin) {
   // stored iconUrl is deprecated - hardcode to icon.svg in this case
   const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
   if (!iconFileName) {
@@ -53,12 +53,12 @@ export const getPluginIconKey = (plugin: Plugin) => {
   return getPluginS3Key(plugin, iconFileName)
 }
-const getPluginS3Key = (plugin: Plugin, fileName: string) => {
+function getPluginS3Key(plugin: Plugin, fileName: string) {
   const s3Key = getPluginS3Dir(plugin.name)
   return `${s3Key}/${fileName}`
 }
-export const getPluginS3Dir = (pluginName: string) => {
+export function getPluginS3Dir(pluginName: string) {
   let s3Key = `${pluginName}`
   if (env.MULTI_TENANCY) {
     const tenantId = context.getTenantId()

View File

@@ -1,5 +1,4 @@
 import * as app from "../app"
-import { getAppFileUrl } from "../app"
 import { testEnv } from "../../../../tests/extra"
 describe("app", () => {
@@ -7,6 +6,15 @@ describe("app", () => {
     testEnv.nodeJest()
   })
+  function baseCheck(url: string, tenantId?: string) {
+    expect(url).toContain("/api/assets/client")
+    if (tenantId) {
+      expect(url).toContain(`tenantId=${tenantId}`)
+    }
+    expect(url).toContain("appId=app_123")
+    expect(url).toContain("version=2.0.0")
+  }
   describe("clientLibraryUrl", () => {
     function getClientUrl() {
       return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
@@ -20,31 +28,19 @@ describe("app", () => {
     it("gets url in dev", () => {
       testEnv.nodeDev()
       const url = getClientUrl()
-      expect(url).toBe("/api/assets/client")
-    })
-
-    it("gets url with embedded minio", () => {
-      testEnv.withMinio()
-      const url = getClientUrl()
-      expect(url).toBe(
-        "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-      )
+      baseCheck(url)
     })
     it("gets url with custom S3", () => {
       testEnv.withS3()
       const url = getClientUrl()
-      expect(url).toBe(
-        "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-      )
+      baseCheck(url)
     })
     it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
       const url = getClientUrl()
-      expect(url).toBe(
-        "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
-      )
+      baseCheck(url)
     })
   })
@@ -57,7 +53,7 @@ describe("app", () => {
       testEnv.nodeDev()
       await testEnv.withTenant(tenantId => {
         const url = getClientUrl()
-        expect(url).toBe("/api/assets/client")
+        baseCheck(url, tenantId)
       })
     })
@@ -65,9 +61,7 @@ describe("app", () => {
       await testEnv.withTenant(tenantId => {
         testEnv.withMinio()
         const url = getClientUrl()
-        expect(url).toBe(
-          "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-        )
+        baseCheck(url, tenantId)
       })
     })
@@ -75,9 +69,7 @@ describe("app", () => {
      await testEnv.withTenant(tenantId => {
         testEnv.withS3()
         const url = getClientUrl()
-        expect(url).toBe(
-          "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
-        )
+        baseCheck(url, tenantId)
       })
     })
@@ -85,9 +77,7 @@ describe("app", () => {
       await testEnv.withTenant(tenantId => {
         testEnv.withCloudfront()
         const url = getClientUrl()
-        expect(url).toBe(
-          "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
-        )
+        baseCheck(url, tenantId)
       })
     })
   })

View File

@@ -1,6 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
 import AWS from "aws-sdk"
-import stream from "stream"
+import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
 import zlib from "zlib"
@@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) {
 * @return an S3 object store object, check S3 Nodejs SDK for usage.
 * @constructor
 */
-export const ObjectStore = (
+export function ObjectStore(
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
-) => {
+) {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -104,7 +104,7 @@ export const ObjectStore = (
 * Given an object store and a bucket name this will make sure the bucket exists,
 * if it does not exist then it will create it.
 */
-export const makeSureBucketExists = async (client: any, bucketName: string) => {
+export async function makeSureBucketExists(client: any, bucketName: string) {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -139,13 +139,13 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => {
 * Uploads the contents of a file given the required parameters, useful when
 * temp files in use (for example file uploaded as an attachment).
 */
-export const upload = async ({
+export async function upload({
   bucket: bucketName,
   filename,
   path,
   type,
   metadata,
-}: UploadParams) => {
+}: UploadParams) {
   const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)
@@ -180,12 +180,12 @@ export const upload = async ({
 * Similar to the upload function but can be used to send a file stream
 * through to the object store.
 */
-export const streamUpload = async (
+export async function streamUpload(
   bucketName: string,
   filename: string,
   stream: any,
   extra = {}
-) => {
+) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
@@ -215,7 +215,7 @@ export const streamUpload = async (
 * retrieves the contents of a file from the object store, if it is a known content type it
 * will be converted, otherwise it will be returned as a buffer stream.
 */
-export const retrieve = async (bucketName: string, filepath: string) => {
+export async function retrieve(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -230,7 +230,7 @@ export const retrieve = async (bucketName: string, filepath: string) => {
   }
 }
-export const listAllObjects = async (bucketName: string, path: string) => {
+export async function listAllObjects(bucketName: string, path: string) {
   const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
     return objectStore
@@ -261,11 +261,11 @@ export const listAllObjects = async (bucketName: string, path: string) => {
 /**
 * Generate a presigned url with a default TTL of 1 hour
 */
-export const getPresignedUrl = (
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds: number = 3600
-) => {
+) {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -291,7 +291,7 @@ export const getPresignedUrl = (
 /**
 * Same as retrieval function but puts to a temporary file.
 */
-export const retrieveToTmp = async (bucketName: string, filepath: string) => {
+export async function retrieveToTmp(bucketName: string, filepath: string) {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
@@ -300,7 +300,7 @@ export const retrieveToTmp = async (bucketName: string, filepath: string) => {
   return outputPath
 }
-export const retrieveDirectory = async (bucketName: string, path: string) => {
+export async function retrieveDirectory(bucketName: string, path: string) {
   let writePath = join(budibaseTempDir(), v4())
   fs.mkdirSync(writePath)
   const objects = await listAllObjects(bucketName, path)
@@ -324,7 +324,7 @@ export const retrieveDirectory = async (bucketName: string, path: string) => {
 /**
 * Delete a single file.
 */
-export const deleteFile = async (bucketName: string, filepath: string) => {
+export async function deleteFile(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -334,7 +334,7 @@ export const deleteFile = async (bucketName: string, filepath: string) => {
   return objectStore.deleteObject(params).promise()
 }
-export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
+export async function deleteFiles(bucketName: string, filepaths: string[]) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -349,10 +349,10 @@ export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
 /**
 * Delete a path, including everything within.
 */
-export const deleteFolder = async (
+export async function deleteFolder(
   bucketName: string,
   folder: string
-): Promise<any> => {
+): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
   const client = ObjectStore(bucketName)
@@ -383,11 +383,11 @@ export const deleteFolder = async (
   }
 }
-export const uploadDirectory = async (
+export async function uploadDirectory(
   bucketName: string,
   localPath: string,
   bucketPath: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
   const files = fs.readdirSync(localPath, { withFileTypes: true })
@@ -404,11 +404,11 @@ export const uploadDirectory = async (
   return files
 }
-export const downloadTarballDirect = async (
+export async function downloadTarballDirect(
   url: string,
   path: string,
   headers = {}
-) => {
+) {
   path = sanitizeKey(path)
   const response = await fetch(url, { headers })
   if (!response.ok) {
@@ -418,11 +418,11 @@ export const downloadTarballDirect = async (
   await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
 }
-export const downloadTarball = async (
+export async function downloadTarball(
   url: string,
   bucketName: string,
   path: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)
@@ -438,3 +438,17 @@ export const downloadTarball = async (
   // return the temporary path incase there is a use for it
   return tmpPath
 }
+export async function getReadStream(
+  bucketName: string,
+  path: string
+): Promise<Readable> {
+  bucketName = sanitizeBucket(bucketName)
+  path = sanitizeKey(path)
+  const client = ObjectStore(bucketName)
+  const params = {
+    Bucket: bucketName,
+    Key: path,
+  }
+  return client.getObject(params).createReadStream()
+}
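
A minimal sketch of consuming the new getReadStream helper, streaming an object through a Koa response in the way the server's serveClientLibrary endpoint does later in this commit; the import path, bucket and key values are illustrative assumptions.

    import { objectStore } from "@budibase/backend-core" // illustrative import path
    import Koa from "koa"

    const app = new Koa()
    app.use(async ctx => {
      // stream the stored client library straight through to the HTTP response
      ctx.body = await objectStore.getReadStream(
        "prod-budi-app-assets",
        "app_123/budibase-client.js"
      )
      ctx.set("Content-Type", "application/javascript")
    })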

View File

@@ -25,17 +25,12 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
-  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
-type QuotaUpdateFn = (
-  change: number,
-  creatorsChange: number,
-  cb?: () => Promise<any>
-) => Promise<any>
+type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -164,14 +159,14 @@ export class UserDB {
     }
   }
-  static async getUsersByAppAccess(appId?: string) {
-    const opts: any = {
+  static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
+    const params: any = {
       include_docs: true,
-      limit: 50,
+      limit: opts.limit || 50,
     }
     let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
-      appId,
-      opts
+      opts.appId,
+      params
     )
     return response
   }
@@ -250,8 +245,7 @@ export class UserDB {
     }
     const change = dbUser ? 0 : 1 // no change if there is existing user
-    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
-    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
+    return UserDB.quotas.addUsers(change, async () => {
      await validateUniqueUser(email, tenantId)
       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -313,7 +307,6 @@ export class UserDB {
     let usersToSave: any[] = []
     let newUsers: any[] = []
-    let newCreators: any[] = []
     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -334,16 +327,10 @@ export class UserDB {
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
-      if (isCreator(newUser)) {
-        newCreators.push(newUser)
-      }
     }
     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(
-      newUsers.length,
-      newCreators.length,
-      async () => {
+    return UserDB.quotas.addUsers(newUsers.length, async () => {
       // create the promises array that will be called by bulkDocs
       newUsers.forEach((user: any) => {
         usersToSave.push(
@@ -392,8 +379,7 @@ export class UserDB {
         successful: saved,
         unsuccessful,
       }
-      }
-    )
+    })
   }
   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -433,12 +419,11 @@ export class UserDB {
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
-    const creatorsToDelete = usersToDelete.filter(isCreator)
+    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
-    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
     // Build Response
     // index users by id
@@ -487,8 +472,7 @@ export class UserDB {
     await db.remove(userId, dbUser._rev)
-    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
-    await UserDB.quotas.removeUsers(1, creatorsToDelete)
+    await UserDB.quotas.removeUsers(1)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })

View File

@@ -14,11 +14,12 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
+  DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
 import * as context from "../context"
@@ -241,12 +242,14 @@ export const paginatedUsers = async ({
   bookmark,
   query,
   appId,
+  limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
+  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
   // get one extra document, to have the next page
-  const opts: any = {
+  const opts: DatabaseQueryOpts = {
     include_docs: true,
-    limit: PAGE_LIMIT + 1,
+    limit: pageLimit,
   }
   // add a startkey if the page was specified (anchor)
   if (bookmark) {
@@ -269,7 +272,7 @@ export const paginatedUsers = async ({
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, PAGE_LIMIT, {
+  return pagination(userList, pageLimit, {
     paginate: true,
     property,
     getKey,
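
For reference, a short sketch of the effect of the new limit option (the call site is illustrative): the page size still defaults to PAGE_LIMIT but can now be capped by the caller, and the query over-fetches by one document so the next-page flag can be derived.

    // fetch at most 50 users per page; without limit the old PAGE_LIMIT default applies
    const page = await paginatedUsers({ limit: 50 })
    // internally: pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
    console.log(page.data.length, page.hasNextPage)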

View File

@@ -1,54 +0,0 @@
const _ = require('lodash/fp')
const {structures} = require("../../../tests")
jest.mock("../../../src/context")
jest.mock("../../../src/db")
const context = require("../../../src/context")
const db = require("../../../src/db")
const {getCreatorCount} = require('../../../src/users/users')
describe("Users", () => {
let getGlobalDBMock
let getGlobalUserParamsMock
let paginationMock
beforeEach(() => {
jest.resetAllMocks()
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
paginationMock = jest.spyOn(db, "pagination")
})
it("Retrieves the number of creators", async () => {
const getUsers = (offset, limit, creators = false) => {
const range = _.range(offset, limit)
const opts = creators ? {builder: {global: true}} : undefined
return range.map(() => structures.users.user(opts))
}
const page1Data = getUsers(0, 8)
const page2Data = getUsers(8, 12, true)
getGlobalDBMock.mockImplementation(() => ({
name : "fake-db",
allDocs: () => ({
rows: [...page1Data, ...page2Data]
})
}))
paginationMock.mockImplementationOnce(() => ({
data: page1Data,
hasNextPage: true,
nextPage: "1"
}))
paginationMock.mockImplementation(() => ({
data: page2Data,
hasNextPage: false,
nextPage: undefined
}))
const creatorsCount = await getCreatorCount()
expect(creatorsCount).toBe(4)
expect(paginationMock).toHaveBeenCalledTimes(2)
})
})

View File

@@ -123,10 +123,6 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
-    amounts: {
-      user: 10000,
-      creator: 0,
-    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -135,10 +131,6 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
-    quantities: {
-      user: 0,
-      creator: 0,
-    },
     status: "active",
   }
 }

View File

@@ -106,6 +106,13 @@
         name: fieldName,
       }
     }
+    // Delete numeric only widths as these are grid widths and should be
+    // ignored
+    const width = fixedSchema[fieldName].width
+    if (width != null && `${width}`.trim().match(/^[0-9]+$/)) {
+      delete fixedSchema[fieldName].width
+    }
   })
   return fixedSchema
 }
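
To illustrate the intent of this check (the values are hypothetical): widths that are purely numeric came from the grid and are dropped, while CSS-style widths are preserved.

    // restatement of the predicate used above, for illustration only
    const isGridWidth = (width: unknown) =>
      width != null && `${width}`.trim().match(/^[0-9]+$/) != null

    isGridWidth("200")   // true  -> the width is deleted from the column schema
    isGridWidth(" 150 ") // true  -> trimmed before matching, so still deleted
    isGridWidth("200px") // false -> kept
    isGridWidth("50%")   // false -> kept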

View File

@@ -16,7 +16,11 @@
 <DrawerContent>
   <div class="container">
     <Layout noPadding gap="S">
-      <Input bind:value={column.width} label="Width" placeholder="Auto" />
+      <Input
+        bind:value={column.width}
+        label="Width (must include a unit like px or %)"
+        placeholder="Auto"
+      />
       <Select
         label="Alignment"
         bind:value={column.align}

View File

@@ -114,8 +114,9 @@
       query: {
         appId: query || !filterByAppAccess ? null : prodAppId,
         email: query,
-        paginated: query || !filterByAppAccess ? null : false,
       },
+      limit: 50,
+      paginate: query || !filterByAppAccess ? null : false,
     })
     await usersFetch.refresh()

View File

@@ -5305,6 +5305,12 @@
       "key": "title",
       "nested": true
     },
+    {
+      "type": "text",
+      "label": "Description",
+      "key": "description",
+      "nested": true
+    },
     {
       "section": true,
       "dependsOn": {

View File

@@ -81,6 +81,7 @@
       sortOrder: $fetch.sortOrder,
     },
     limit,
+    primaryDisplay: $fetch.definition?.primaryDisplay,
   }
   const createFetch = datasource => {

View File

@@ -12,6 +12,7 @@
   export let fields
   export let labelPosition
   export let title
+  export let description
   export let showDeleteButton
   export let showSaveButton
   export let saveButtonLabel
@@ -98,6 +99,7 @@
     fields: fieldsOrDefault,
     labelPosition,
     title,
+    description,
     saveButtonLabel: saveLabel,
     deleteButtonLabel: deleteLabel,
     schema,

View File

@@ -11,6 +11,7 @@
   export let fields
   export let labelPosition
   export let title
+  export let description
   export let saveButtonLabel
   export let deleteButtonLabel
   export let schema
@@ -157,6 +158,14 @@
       }}
     >
       {#if renderHeader}
+        <BlockComponent
+          type="container"
+          props={{
+            direction: "column",
+            gap: "S",
+          }}
+          order={0}
+        >
         <BlockComponent
           type="container"
           props={{
@@ -211,6 +220,14 @@
         </BlockComponent>
       {/if}
     </BlockComponent>
+    {#if description}
+      <BlockComponent
+        type="text"
+        props={{ text: description }}
+        order={1}
+      />
+    {/if}
+  </BlockComponent>
 {/if}
 {#key fields}
   <BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>

View File

@@ -32,7 +32,8 @@
   $: loading = dataProvider?.loading ?? false
   $: data = dataProvider?.rows || []
   $: fullSchema = dataProvider?.schema ?? {}
-  $: fields = getFields(fullSchema, columns, false)
+  $: primaryDisplay = dataProvider?.primaryDisplay
+  $: fields = getFields(fullSchema, columns, false, primaryDisplay)
   $: schema = getFilteredSchema(fullSchema, fields, hasChildren)
   $: setSorting = getAction(
     dataProvider?.id,
@@ -55,18 +56,13 @@
     }
   }
-  const getFields = (schema, customColumns, showAutoColumns) => {
-    // Check for an invalid column selection
-    let invalid = false
-    customColumns?.forEach(column => {
-      const columnName = typeof column === "string" ? column : column.name
-      if (schema[columnName] == null) {
-        invalid = true
-      }
-    })
-    // Use column selection if it exists
-    if (!invalid && customColumns?.length) {
+  const getFields = (
+    schema,
+    customColumns,
+    showAutoColumns,
+    primaryDisplay
+  ) => {
+    if (customColumns?.length) {
      return customColumns
     }
@@ -74,13 +70,38 @@
     let columns = []
     let autoColumns = []
     Object.entries(schema).forEach(([field, fieldSchema]) => {
+      if (fieldSchema.visible === false) {
+        return
+      }
       if (!fieldSchema?.autocolumn) {
         columns.push(field)
       } else if (showAutoColumns) {
         autoColumns.push(field)
       }
     })
-    return columns.concat(autoColumns)
+    // Sort columns to respect grid metadata
+    const allCols = columns.concat(autoColumns)
+    return allCols.sort((a, b) => {
+      if (a === primaryDisplay) {
+        return -1
+      }
+      if (b === primaryDisplay) {
+        return 1
+      }
+      const aOrder = schema[a].order
+      const bOrder = schema[b].order
+      if (aOrder === bOrder) {
+        return 0
+      }
+      if (aOrder == null) {
+        return 1
+      }
+      if (bOrder == null) {
+        return -1
+      }
+      return aOrder < bOrder ? -1 : 1
+    })
   }
   const getFilteredSchema = (schema, fields, hasChildren) => {
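
A worked example of the new column ordering (the schema and primary display below are hypothetical): the primary display column sorts first, explicitly ordered columns follow in ascending order, and columns without an order go last.

    // restatement of the comparator from the diff, applied to sample data
    const schema: Record<string, { order?: number }> = {
      name: {},
      age: { order: 2 },
      city: { order: 1 },
      notes: {},
    }
    const primaryDisplay = "name"

    const sorted = Object.keys(schema).sort((a, b) => {
      if (a === primaryDisplay) return -1
      if (b === primaryDisplay) return 1
      const aOrder = schema[a].order
      const bOrder = schema[b].order
      if (aOrder === bOrder) return 0
      if (aOrder == null) return 1
      if (bOrder == null) return -1
      return aOrder < bOrder ? -1 : 1
    })
    // sorted -> ["name", "city", "age", "notes"]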

@@ -1 +1 @@
-Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76
+Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376

View File

@@ -67,6 +67,11 @@ COPY packages/server/docker_run.sh .
 COPY packages/server/builder/ builder/
 COPY packages/server/client/ client/
+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
 EXPOSE 4001
 # have to add node environment production after install

View File

@@ -18,7 +18,7 @@
     "test": "bash scripts/test.sh",
     "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
     "test:watch": "jest --watch",
-    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
     "run:docker": "node dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:up": "node scripts/dev/manage.js up",

View File

@@ -47,6 +47,7 @@ async function init() {
     TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
     HTTP_MIGRATIONS: "0",
     HTTP_LOGGING: "0",
+    VERSION: "0.0.0+local",
   }
   let envFile = ""
   Object.keys(envFileJson).forEach(key => {

View File

@@ -16,7 +16,7 @@ import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
-import { App } from "@budibase/types"
+import { App, Ctx } from "@budibase/types"
 const send = require("koa-send")
@@ -39,7 +39,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
   }
 }
-export const toggleBetaUiFeature = async function (ctx: any) {
+export const toggleBetaUiFeature = async function (ctx: Ctx) {
   const cookieName = `beta:${ctx.params.feature}`
   if (ctx.cookies.get(cookieName)) {
@@ -67,16 +67,14 @@ export const toggleBetaUiFeature = async function (ctx: any) {
   }
 }
-export const serveBuilder = async function (ctx: any) {
+export const serveBuilder = async function (ctx: Ctx) {
   const builderPath = join(TOP_LEVEL_PATH, "builder")
   await send(ctx, ctx.file, { root: builderPath })
 }
-export const uploadFile = async function (ctx: any) {
-  let files =
-    ctx.request.files.file.length > 1
-      ? Array.from(ctx.request.files.file)
-      : [ctx.request.files.file]
+export const uploadFile = async function (ctx: Ctx) {
+  const file = ctx.request?.files?.file
+  let files = file && Array.isArray(file) ? Array.from(file) : [file]
   const uploads = files.map(async (file: any) => {
     const fileExtension = [...file.name.split(".")].pop()
@@ -93,14 +91,14 @@ export const uploadFile = async function (ctx: any) {
   ctx.body = await Promise.all(uploads)
 }
-export const deleteObjects = async function (ctx: any) {
+export const deleteObjects = async function (ctx: Ctx) {
   ctx.body = await objectStore.deleteFiles(
     ObjectStoreBuckets.APPS,
     ctx.request.body.keys
   )
 }
-export const serveApp = async function (ctx: any) {
+export const serveApp = async function (ctx: Ctx) {
   const bbHeaderEmbed =
     ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
@@ -181,7 +179,7 @@ export const serveApp = async function (ctx: any) {
   }
 }
-export const serveBuilderPreview = async function (ctx: any) {
+export const serveBuilderPreview = async function (ctx: Ctx) {
   const db = context.getAppDB({ skip_setup: true })
   const appInfo = await db.get<App>(DocumentType.APP_METADATA)
@@ -197,18 +195,30 @@
   }
 }
-export const serveClientLibrary = async function (ctx: any) {
+export const serveClientLibrary = async function (ctx: Ctx) {
+  const appId = context.getAppId() || (ctx.request.query.appId as string)
   let rootPath = join(NODE_MODULES_PATH, "@budibase", "client", "dist")
-  // incase running from TS directly
-  if (env.isDev() && !fs.existsSync(rootPath)) {
-    rootPath = join(require.resolve("@budibase/client"), "..")
+  if (!appId) {
+    ctx.throw(400, "No app ID provided - cannot fetch client library.")
   }
+  if (env.isProd()) {
+    ctx.body = await objectStore.getReadStream(
+      ObjectStoreBuckets.APPS,
+      objectStore.clientLibraryPath(appId!)
+    )
+    ctx.set("Content-Type", "application/javascript")
+  } else if (env.isDev()) {
+    // incase running from TS directly
+    const tsPath = join(require.resolve("@budibase/client"), "..")
     return send(ctx, "budibase-client.js", {
-      root: rootPath,
+      root: !fs.existsSync(rootPath) ? tsPath : rootPath,
     })
+  } else {
+    ctx.throw(500, "Unable to retrieve client library.")
+  }
 }
-export const getSignedUploadURL = async function (ctx: any) {
+export const getSignedUploadURL = async function (ctx: Ctx) {
   // Ensure datasource is valid
   let datasource
   try {
@@ -247,7 +257,7 @@
     const params = { Bucket: bucket, Key: key }
     signedUrl = s3.getSignedUrl("putObject", params)
     publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}`
-  } catch (error) {
+  } catch (error: any) {
     ctx.throw(400, error)
   }
 }
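
For reference, a rough sketch of how the client library is now requested over HTTP (the app ID and version values are illustrative): in production the appId query parameter is required, since the file is looked up in the object store per app.

    // fetch the client bundle for a specific app
    const res = await fetch("/api/assets/client?appId=app_123&version=2.0.0")
    if (!res.ok) {
      throw new Error(`failed to fetch client library: ${res.status}`)
    }
    const clientLib = await res.text() // served as application/javascript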

View File

@@ -27,15 +27,9 @@ router.param("file", async (file: any, ctx: any, next: any) => {
   return next()
 })
-// only used in development for retrieving the client library,
-// in production the client lib is always stored in the object store.
-if (env.isDev()) {
-  router.get("/api/assets/client", controller.serveClientLibrary)
-}
 router
+  // TODO: for now this builder endpoint is not authorized/secured, will need to be
   .get("/builder/:file*", controller.serveBuilder)
+  .get("/api/assets/client", controller.serveClientLibrary)
   .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
   .post(
     "/api/attachments/delete",

View File

@@ -41,7 +41,7 @@ describe("/component", () => {
       .expect("Content-Type", /json/)
       .expect(200)
     expect(res.body).toEqual({
-      budibaseVersion: "0.0.0",
+      budibaseVersion: "0.0.0+jest",
       cpuArch: "arm64",
       cpuCores: 1,
       cpuInfo: "test",

View File

@@ -1,6 +1,6 @@
 const setup = require("./utilities")
 const { events } = require("@budibase/backend-core")
-const version = require("../../../../package.json").version
 describe("/dev", () => {
   let request = setup.getRequest()
@@ -32,9 +32,9 @@ describe("/dev", () => {
         .expect("Content-Type", /json/)
         .expect(200)
-      expect(res.body.version).toBe(version)
+      expect(res.body.version).toBe('0.0.0+jest')
       expect(events.installation.versionChecked).toBeCalledTimes(1)
-      expect(events.installation.versionChecked).toBeCalledWith(version)
+      expect(events.installation.versionChecked).toBeCalledWith('0.0.0+jest')
     })
   })
 })

View File

@@ -9,3 +9,4 @@ process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
 process.env.MOCK_REDIS = "1"
 process.env.PLATFORM_URL = "http://localhost:10000"
 process.env.REDIS_PASSWORD = "budibase"
+process.env.BUDIBASE_VERSION = "0.0.0+jest"

View File

@@ -55,6 +55,7 @@ export interface SearchUsersRequest {
   bookmark?: string
   query?: SearchQuery
   appId?: string
+  limit?: number
   paginate?: boolean
 }

View File

@@ -1,8 +1,5 @@
 export enum FeatureFlag {
   LICENSING = "LICENSING",
-  // Feature IDs in Posthog
-  PER_CREATOR_PER_USER_PRICE = "18873",
-  PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
 }
 export interface TenantFeatureFlags {

View File

@@ -5,17 +5,10 @@ export interface Customer {
   currency: string | null | undefined
 }
-export interface SubscriptionItems {
-  user: number | undefined
-  creator: number | undefined
-}
 export interface Subscription {
   amount: number
-  amounts: SubscriptionItems | undefined
   currency: string
   quantity: number
-  quantities: SubscriptionItems | undefined
   duration: PriceDuration
   cancelAt: number | null | undefined
   currentPeriodStart: number

View File

@@ -4,9 +4,7 @@ export enum PlanType {
   PRO = "pro",
   /** @deprecated */
   TEAM = "team",
-  /** @deprecated */
   PREMIUM = "premium",
-  PREMIUM_PLUS = "premium_plus",
   BUSINESS = "business",
   ENTERPRISE = "enterprise",
 }
@@ -28,12 +26,10 @@ export interface AvailablePrice {
   currency: string
   duration: PriceDuration
   priceId: string
-  type?: string
 }
 export enum PlanModel {
   PER_USER = "perUser",
-  PER_CREATOR_PER_USER = "per_creator_per_user",
   DAY_PASS = "dayPass",
 }

View File

@@ -50,4 +50,9 @@ ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
 ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
 ENV ACCOUNT_PORTAL_URL=https://account.budibase.app
+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
 CMD ["./docker_run.sh"]

View File

@@ -20,7 +20,7 @@
     "run:docker": "node dist/index.js",
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
-    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
     "dev:stack:init": "node ./scripts/dev/manage.js init",
     "dev:builder": "npm run dev:stack:init && nodemon",
     "dev:built": "yarn run dev:stack:init && yarn run run:docker",

View File

@@ -31,6 +31,7 @@ async function init() {
     TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
     ENABLE_EMAIL_TEST_MODE: 1,
     HTTP_LOGGING: 0,
+    VERSION: "0.0.0+local",
   }
   let envFile = ""
   Object.keys(envFileJson).forEach(key => {

View File

@@ -189,7 +189,10 @@ export const destroy = async (ctx: any) => {
 export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
   const body = ctx.request.body
-  const users = await userSdk.db.getUsersByAppAccess(body?.appId)
+  const users = await userSdk.db.getUsersByAppAccess({
+    appId: body.appId,
+    limit: body.limit,
+  })
   ctx.body = { data: users }
 }

View File

@@ -569,9 +569,13 @@ describe("/api/global/users", () => {
         {
           query: { equal: { firstName: user.firstName } },
         },
-        501
+        { status: 501 }
       )
     })
+    it("should throw an error if public query performed", async () => {
+      await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
+    })
   })
   describe("DELETE /api/global/users/:userId", () => {

View File

@@ -72,7 +72,8 @@ router
   )
   .get("/api/global/users", auth.builderOrAdmin, controller.fetch)
-  .post("/api/global/users/search", auth.builderOrAdmin, controller.search)
+  // search can be used by any user now, to retrieve users for user column
+  .post("/api/global/users/search", controller.search)
   .delete("/api/global/users/:id", auth.adminOnly, controller.destroy)
   .get(
     "/api/global/users/count/:appId",

View File

@@ -134,13 +134,19 @@ export class UserAPI extends TestAPI {
       .expect(status ? status : 200)
   }
-  searchUsers = ({ query }: { query?: SearchQuery }, status = 200) => {
-    return this.request
+  searchUsers = (
+    { query }: { query?: SearchQuery },
+    opts?: { status?: number; noHeaders?: boolean }
+  ) => {
+    const req = this.request
       .post("/api/global/users/search")
-      .set(this.config.defaultHeaders())
       .send({ query })
       .expect("Content-Type", /json/)
-      .expect(status ? status : 200)
+      .expect(opts?.status ? opts.status : 200)
+    if (!opts?.noHeaders) {
+      req.set(this.config.defaultHeaders())
+    }
+    return req
   }
   getUser = (userId: string, opts?: TestAPIOpts) => {

View File

@@ -10,3 +10,4 @@ process.env.PLATFORM_URL = "http://localhost:10000"
 process.env.INTERNAL_API_KEY = "tet"
 process.env.DISABLE_ACCOUNT_PORTAL = "0"
 process.env.MOCK_REDIS = "1"
+process.env.BUDIBASE_VERSION = "0.0.0+jest"

View File

@@ -17,7 +17,7 @@
     "test:notify": "node scripts/testResultsWebhook",
     "test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
     "test:cloud:qa": "yarn run test",
-    "test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\.",
+    "test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.license\\.",
     "serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
     "serve": "start-server-and-test dev:built http://localhost:4001/health",
     "dev:built": "cd ../ && yarn dev:built"

View File

@@ -1,5 +1,5 @@
 import AccountInternalAPIClient from "./AccountInternalAPIClient"
-import { AccountAPI, LicenseAPI, AuthAPI } from "./apis"
+import { AccountAPI, LicenseAPI, AuthAPI, StripeAPI } from "./apis"
 import { State } from "../../types"
 export default class AccountInternalAPI {
@@ -8,11 +8,13 @@ export default class AccountInternalAPI {
   auth: AuthAPI
   accounts: AccountAPI
   licenses: LicenseAPI
+  stripe: StripeAPI
   constructor(state: State) {
     this.client = new AccountInternalAPIClient(state)
     this.auth = new AuthAPI(this.client)
     this.accounts = new AccountAPI(this.client)
     this.licenses = new LicenseAPI(this.client)
+    this.stripe = new StripeAPI(this.client)
   }
 }

View File

@@ -2,21 +2,19 @@ import AccountInternalAPIClient from "../AccountInternalAPIClient"
 import {
   Account,
   CreateOfflineLicenseRequest,
+  GetLicenseKeyResponse,
   GetOfflineLicenseResponse,
   UpdateLicenseRequest,
 } from "@budibase/types"
 import { Response } from "node-fetch"
 import BaseAPI from "./BaseAPI"
 import { APIRequestOpts } from "../../../types"
 export default class LicenseAPI extends BaseAPI {
   client: AccountInternalAPIClient
   constructor(client: AccountInternalAPIClient) {
     super()
     this.client = client
   }
   async updateLicense(
     accountId: string,
     body: UpdateLicenseRequest,
@@ -29,9 +27,7 @@ export default class LicenseAPI extends BaseAPI {
       })
     }, opts)
   }
   // TODO: Better approach for setting tenant id header
   async createOfflineLicense(
     accountId: string,
     tenantId: string,
@@ -51,7 +47,6 @@ export default class LicenseAPI extends BaseAPI {
     expect(response.status).toBe(opts.status ? opts.status : 201)
     return response
   }
   async getOfflineLicense(
     accountId: string,
     tenantId: string,
@@ -69,4 +64,74 @@ export default class LicenseAPI extends BaseAPI {
     expect(response.status).toBe(opts.status ? opts.status : 200)
     return [response, json]
   }
+  async getLicenseKey(
+    opts: { status?: number } = {}
+  ): Promise<[Response, GetLicenseKeyResponse]> {
+    const [response, json] = await this.client.get(`/api/license/key`)
+    expect(response.status).toBe(opts.status || 200)
+    return [response, json]
+  }
+  async activateLicense(
+    apiKey: string,
+    tenantId: string,
+    licenseKey: string,
+    opts: APIRequestOpts = { status: 200 }
+  ) {
+    return this.doRequest(() => {
+      return this.client.post(`/api/license/activate`, {
+        body: {
+          apiKey: apiKey,
+          tenantId: tenantId,
+          licenseKey: licenseKey,
+        },
+      })
+    }, opts)
+  }
+  async regenerateLicenseKey(opts: APIRequestOpts = { status: 200 }) {
+    return this.doRequest(() => {
+      return this.client.post(`/api/license/key/regenerate`, {})
+    }, opts)
+  }
+  async getPlans(opts: APIRequestOpts = { status: 200 }) {
+    return this.doRequest(() => {
+      return this.client.get(`/api/plans`)
+    }, opts)
+  }
+  async updatePlan(opts: APIRequestOpts = { status: 200 }) {
+    return this.doRequest(() => {
+      return this.client.put(`/api/license/plan`)
+    }, opts)
+  }
+  async refreshAccountLicense(
+    accountId: string,
+    opts: { status?: number } = {}
+  ): Promise<Response> {
+    const [response, json] = await this.client.post(
+      `/api/accounts/${accountId}/license/refresh`,
+      {
+        internal: true,
+      }
+    )
+    expect(response.status).toBe(opts.status ? opts.status : 201)
+    return response
+  }
+  async getLicenseUsage(opts: APIRequestOpts = { status: 200 }) {
+    return this.doRequest(() => {
+      return this.client.get(`/api/license/usage`)
+    }, opts)
+  }
+  async licenseUsageTriggered(
+    opts: { status?: number } = {}
+  ): Promise<Response> {
+    const [response, json] = await this.client.post(
+      `/api/license/usage/triggered`
+    )
+    expect(response.status).toBe(opts.status ? opts.status : 201)
+    return response
+  }
 }
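
A short usage sketch of the new license key helpers, following the flow of the activation test added later in this commit (the config object and its accountsApi/internalApi clients come from the qa-core test harness):

    // read the current key from the account portal, activate it against the installation,
    // then remove it again and confirm it is gone
    const [keyRes, keyBody] = await config.accountsApi.licenses.getLicenseKey()
    await config.internalApi.license.activateLicenseKey({ licenseKey: keyBody.licenseKey })
    await config.internalApi.license.deleteLicenseKey()
    await config.internalApi.license.getLicenseKey({ status: 404 })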

View File

@@ -0,0 +1,64 @@
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"
export default class StripeAPI extends BaseAPI {
client: AccountInternalAPIClient
constructor(client: AccountInternalAPIClient) {
super()
this.client = client
}
async createCheckoutSession(
priceId: string,
opts: APIRequestOpts = { status: 200 }
) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/checkout-session`, {
body: { priceId },
})
}, opts)
}
async checkoutSuccess(opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/checkout-success`)
}, opts)
}
async createPortalSession(
stripeCustomerId: string,
opts: APIRequestOpts = { status: 200 }
) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/portal-session`, {
body: { stripeCustomerId },
})
}, opts)
}
async linkStripeCustomer(opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/link`)
}, opts)
}
async getInvoices(opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.get(`/api/stripe/invoices`)
}, opts)
}
async getUpcomingInvoice(opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.get(`/api/stripe/upcoming-invoice`)
}, opts)
}
async getStripeCustomers(opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.get(`/api/stripe/customers`)
}, opts)
}
}

View File

@@ -1,3 +1,4 @@
 export { default as AuthAPI } from "./AuthAPI"
 export { default as AccountAPI } from "./AccountAPI"
 export { default as LicenseAPI } from "./LicenseAPI"
+export { default as StripeAPI } from "./StripeAPI"

View File

@@ -0,0 +1,68 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixures from "../../fixtures"
import { Feature, Hosting } from "@budibase/types"
describe("license activation", () => {
const config = new TestConfiguration()
beforeAll(async () => {
await config.beforeAll()
})
afterAll(async () => {
await config.afterAll()
})
it("creates, activates and deletes online license - self host", async () => {
// Remove existing license key
await config.internalApi.license.deleteLicenseKey()
// Verify license key not found
await config.internalApi.license.getLicenseKey({ status: 404 })
// Create self host account
const createAccountRequest = fixures.accounts.generateAccount({
hosting: Hosting.SELF,
})
const [createAccountRes, account] =
await config.accountsApi.accounts.create(createAccountRequest, {
autoVerify: true,
})
let licenseKey: string = " "
await config.doInNewState(async () => {
await config.loginAsAccount(createAccountRequest)
// Retrieve license key
const [res, body] = await config.accountsApi.licenses.getLicenseKey()
licenseKey = body.licenseKey
})
const accountId = account.accountId!
// Update license to have paid feature
const [res, acc] = await config.accountsApi.licenses.updateLicense(
accountId,
{
overrides: {
features: [Feature.APP_BACKUPS],
},
}
)
// Activate license key
await config.internalApi.license.activateLicenseKey({ licenseKey })
// Verify license updated with new feature
await config.doInNewState(async () => {
await config.loginAsAccount(createAccountRequest)
const [selfRes, body] = await config.api.accounts.self()
expect(body.license.features[0]).toBe("appBackups")
})
// Remove license key
await config.internalApi.license.deleteLicenseKey()
// Verify license key not found
await config.internalApi.license.getLicenseKey({ status: 404 })
})
})

View File

@@ -1,17 +1,19 @@
 import { Response } from "node-fetch"
 import {
+  ActivateLicenseKeyRequest,
   ActivateOfflineLicenseTokenRequest,
+  GetLicenseKeyResponse,
   GetOfflineIdentifierResponse,
   GetOfflineLicenseTokenResponse,
 } from "@budibase/types"
 import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
 import BaseAPI from "./BaseAPI"
 import { APIRequestOpts } from "../../../types"
 export default class LicenseAPI extends BaseAPI {
   constructor(client: BudibaseInternalAPIClient) {
     super(client)
   }
   async getOfflineLicenseToken(
     opts: { status?: number } = {}
   ): Promise<[Response, GetOfflineLicenseTokenResponse]> {
@@ -21,19 +23,16 @@ export default class LicenseAPI extends BaseAPI {
     )
     return [response, body]
   }
   async deleteOfflineLicenseToken(): Promise<[Response]> {
     const [response] = await this.del(`/global/license/offline`, 204)
     return [response]
   }
   async activateOfflineLicenseToken(
     body: ActivateOfflineLicenseTokenRequest
   ): Promise<[Response]> {
     const [response] = await this.post(`/global/license/offline`, body)
     return [response]
   }
   async getOfflineIdentifier(): Promise<
     [Response, GetOfflineIdentifierResponse]
   > {
@@ -42,4 +41,23 @@ export default class LicenseAPI extends BaseAPI {
     )
     return [response, body]
   }
+  async getLicenseKey(
+    opts: { status?: number } = {}
+  ): Promise<[Response, GetLicenseKeyResponse]> {
+    const [response, body] = await this.get(`/global/license/key`, opts.status)
+    return [response, body]
+  }
+  async activateLicenseKey(
+    body: ActivateLicenseKeyRequest
+  ): Promise<[Response]> {
+    const [response] = await this.post(`/global/license/key`, body)
+    return [response]
+  }
+  async deleteLicenseKey(): Promise<[Response]> {
+    const [response] = await this.del(`/global/license/key`, 204)
+    return [response]
+  }
 }

View File

@@ -1,3 +1,4 @@
 #!/bin/bash
 yarn build --scope @budibase/server --scope @budibase/worker
-docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .
+version=$(./scripts/getCurrentVersion.sh)
+docker build -f hosting/single/Dockerfile.v2 -t budibase:latest --build-arg BUDIBASE_VERSION=$version .