Merge branch 'master' into fix-attachments-remote-urls
commit a3756d00f3
@@ -63,6 +63,9 @@ jobs:
           echo "Using tag $version"
           echo "version=$version" >> "$GITHUB_OUTPUT"
 
+      - name: Setup Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
       - name: Build/release Docker images
         run: |
           docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
@@ -67,7 +67,7 @@ jobs:
           push: true
           platforms: linux/amd64,linux/arm64
           tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2
       - name: Tag and release Budibase Azure App Service docker image
         uses: docker/build-push-action@v2
         with:
@@ -76,4 +76,4 @@ jobs:
           platforms: linux/amd64
           build-args: TARGETBUILD=aas
           tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2
@@ -1,5 +1,5 @@
 {
-  "version": "2.11.45",
+  "version": "2.12.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
 
-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }
 
   async get(id: string) {
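Note: the Writethrough change above makes the write interval overridable per call instead of being fixed at construction time. A minimal standalone sketch of the same default-parameter pattern (the CacheWriter class and the millisecond values are illustrative, not Budibase's actual implementation):

// Illustrative only: default parameter falling back to an instance field,
// as in the new put(doc, writeRateMs = this.writeRateMs) signature.
class CacheWriter {
  constructor(private readonly defaultWriteRateMs: number = 10000) {}

  async put(doc: object, writeRateMs: number = this.defaultWriteRateMs) {
    console.log(`writing ${JSON.stringify(doc)} at most every ${writeRateMs}ms`)
  }
}

const writer = new CacheWriter()
writer.put({ _id: "doc1" }) // uses the 10000ms default
writer.put({ _id: "doc2" }, 500) // per-call override, as the new put() allows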
@@ -25,12 +25,17 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
+  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
 
-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
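Note: the widened QuotaUpdateFn above threads a separate creator delta through the quota layer alongside the overall user delta. A hedged, self-contained sketch of a function matching that callback shape (everything except the type itself is illustrative):

type QuotaUpdateFn = (
  change: number,
  creatorsChange: number,
  cb?: () => Promise<any>
) => Promise<any>

// Illustrative updater: a real implementation would persist both counters.
const addUsers: QuotaUpdateFn = async (change, creatorsChange, cb) => {
  console.log(`users quota +${change}, creators quota +${creatorsChange}`)
  return cb ? cb() : undefined
}

// Adding two users, one of whom is a creator, inside the quota callback.
addUsers(2, 1, async () => console.log("users written"))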
@@ -245,7 +250,8 @@ export class UserDB {
     }
 
     const change = dbUser ? 0 : 1 // no change if there is existing user
-    return UserDB.quotas.addUsers(change, async () => {
+    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
       await validateUniqueUser(email, tenantId)
 
       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
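Note: the creatorsChange expression above charges (or refunds) a creator seat only when a save actually flips a user's creator status. A small sketch of that delta logic with a simplified stand-in for isCreator (the real helper in ./utils inspects more of the user document):

type MaybeUser = { builder?: { global?: boolean } } | undefined

// Simplified stand-in for the repo's isCreator() from ./utils.
const isCreator = (user: MaybeUser): boolean => !!user?.builder?.global

// 1 when creator status flips in either direction, 0 when it is unchanged.
function creatorsDelta(dbUser: MaybeUser, incoming: MaybeUser): number {
  return isCreator(dbUser) !== isCreator(incoming) ? 1 : 0
}

console.log(creatorsDelta(undefined, { builder: { global: true } })) // 1: new creator
console.log(creatorsDelta({ builder: { global: true } }, {})) // 1: downgraded
console.log(creatorsDelta({}, {})) // 0: no change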
@@ -307,6 +313,7 @@ export class UserDB {
 
     let usersToSave: any[] = []
     let newUsers: any[] = []
+    let newCreators: any[] = []
 
     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -327,59 +334,66 @@
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
+      if (isCreator(newUser)) {
+        newCreators.push(newUser)
+      }
     }
 
     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(newUsers.length, async () => {
-      // create the promises array that will be called by bulkDocs
-      newUsers.forEach((user: any) => {
-        usersToSave.push(
-          UserDB.buildUser(
-            user,
-            {
-              hashPassword: true,
-              requirePassword: user.requirePassword,
-            },
-            tenantId,
-            undefined, // no dbUser
-            account
-          )
-        )
-      })
-
-      const usersToBulkSave = await Promise.all(usersToSave)
-      await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
-
-      // Post-processing of bulk added users, e.g. events and cache operations
-      for (const user of usersToBulkSave) {
-        // TODO: Refactor to bulk insert users into the info db
-        // instead of relying on looping tenant creation
-        await platform.users.addUser(tenantId, user._id, user.email)
-        await eventHelpers.handleSaveEvents(user, undefined)
-      }
-
-      const saved = usersToBulkSave.map(user => {
-        return {
-          _id: user._id,
-          email: user.email,
-        }
-      })
-
-      // now update the groups
-      if (Array.isArray(saved) && groups) {
-        const groupPromises = []
-        const createdUserIds = saved.map(user => user._id)
-        for (let groupId of groups) {
-          groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
-        }
-        await Promise.all(groupPromises)
-      }
-
-      return {
-        successful: saved,
-        unsuccessful,
-      }
-    })
+    return UserDB.quotas.addUsers(
+      newUsers.length,
+      newCreators.length,
+      async () => {
+        // create the promises array that will be called by bulkDocs
+        newUsers.forEach((user: any) => {
+          usersToSave.push(
+            UserDB.buildUser(
+              user,
+              {
+                hashPassword: true,
+                requirePassword: user.requirePassword,
+              },
+              tenantId,
+              undefined, // no dbUser
+              account
+            )
+          )
+        })
+
+        const usersToBulkSave = await Promise.all(usersToSave)
+        await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
+
+        // Post-processing of bulk added users, e.g. events and cache operations
+        for (const user of usersToBulkSave) {
+          // TODO: Refactor to bulk insert users into the info db
+          // instead of relying on looping tenant creation
+          await platform.users.addUser(tenantId, user._id, user.email)
+          await eventHelpers.handleSaveEvents(user, undefined)
+        }
+
+        const saved = usersToBulkSave.map(user => {
+          return {
+            _id: user._id,
+            email: user.email,
+          }
+        })
+
+        // now update the groups
+        if (Array.isArray(saved) && groups) {
+          const groupPromises = []
+          const createdUserIds = saved.map(user => user._id)
+          for (let groupId of groups) {
+            groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
+          }
+          await Promise.all(groupPromises)
+        }
+
+        return {
+          successful: saved,
+          unsuccessful,
+        }
+      }
+    )
   }
 
   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -419,11 +433,12 @@ export class UserDB {
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+    const creatorsToDelete = usersToDelete.filter(isCreator)
 
-    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
+    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
 
     // Build Response
     // index users by id
@@ -472,7 +487,8 @@ export class UserDB {
 
     await db.remove(userId, dbUser._rev)
 
-    await UserDB.quotas.removeUsers(1)
+    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+    await UserDB.quotas.removeUsers(1, creatorsToDelete)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })
@@ -14,11 +14,11 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
   DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
@@ -0,0 +1,54 @@
+const _ = require('lodash/fp')
+const {structures} = require("../../../tests")
+
+jest.mock("../../../src/context")
+jest.mock("../../../src/db")
+
+const context = require("../../../src/context")
+const db = require("../../../src/db")
+
+const {getCreatorCount} = require('../../../src/users/users')
+
+describe("Users", () => {
+
+  let getGlobalDBMock
+  let getGlobalUserParamsMock
+  let paginationMock
+
+  beforeEach(() => {
+    jest.resetAllMocks()
+
+    getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
+    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
+    paginationMock = jest.spyOn(db, "pagination")
+  })
+
+  it("Retrieves the number of creators", async () => {
+    const getUsers = (offset, limit, creators = false) => {
+      const range = _.range(offset, limit)
+      const opts = creators ? {builder: {global: true}} : undefined
+      return range.map(() => structures.users.user(opts))
+    }
+    const page1Data = getUsers(0, 8)
+    const page2Data = getUsers(8, 12, true)
+    getGlobalDBMock.mockImplementation(() => ({
+      name : "fake-db",
+      allDocs: () => ({
+        rows: [...page1Data, ...page2Data]
+      })
+    }))
+    paginationMock.mockImplementationOnce(() => ({
+      data: page1Data,
+      hasNextPage: true,
+      nextPage: "1"
+    }))
+    paginationMock.mockImplementation(() => ({
+      data: page2Data,
+      hasNextPage: false,
+      nextPage: undefined
+    }))
+    const creatorsCount = await getCreatorCount()
+    expect(creatorsCount).toBe(4)
+    expect(paginationMock).toHaveBeenCalledTimes(2)
+  })
+})
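Note: the new test above mocks the pagination helper to return two pages (8 non-creators, then 4 creators) and expects getCreatorCount to accumulate the total across both. A self-contained sketch of that accumulate-over-pages pattern (the Page shape, countMatching helper, and creator flag are simplified stand-ins, not the repo's actual helpers):

interface Page<T> {
  data: T[]
  hasNextPage: boolean
  nextPage?: string
}

type FetchPage<T> = (bookmark?: string) => Promise<Page<T>>

// Walk every page and count the items matching a predicate.
async function countMatching<T>(
  fetchPage: FetchPage<T>,
  predicate: (item: T) => boolean
): Promise<number> {
  let count = 0
  let bookmark: string | undefined = undefined
  do {
    const page: Page<T> = await fetchPage(bookmark)
    count += page.data.filter(predicate).length
    bookmark = page.nextPage
    if (!page.hasNextPage) break
  } while (bookmark !== undefined)
  return count
}

// Two fake pages mirroring the test: 8 non-creators, then 4 creators.
const pages: Page<{ creator: boolean }>[] = [
  { data: Array(8).fill({ creator: false }), hasNextPage: true, nextPage: "1" },
  { data: Array(4).fill({ creator: true }), hasNextPage: false },
]
let call = 0
countMatching(() => Promise.resolve(pages[call++]), u => u.creator).then(n =>
  console.log(n) // 4, matching the test's expectation
)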
@@ -123,6 +123,10 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
+    amounts: {
+      user: 10000,
+      creator: 0,
+    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -131,6 +135,10 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
+    quantities: {
+      user: 0,
+      creator: 0,
+    },
     status: "active",
   }
 }
@@ -1 +1 @@
-Subproject commit 4506399e0d0297554cacbef1f436884aabdb9741
+Subproject commit 3820c0c93a3e448e10a60a9feb5396844b537ca8
@@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
 
 COPY package.json .
 COPY dist/yarn.lock .
-RUN yarn install --production=true \
+RUN yarn install --production=true --network-timeout 1000000 \
     # Remove unneeded data from file system to reduce image size
     && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
    && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
@@ -18,7 +18,7 @@
     "test": "bash scripts/test.sh",
     "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
     "test:watch": "jest --watch",
-    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn nx build && docker buildx build ../.. -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION -f Dockerfile.v2 --platform linux/amd64,linux/arm64",
     "run:docker": "node dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:up": "node scripts/dev/manage.js up",
@@ -1,5 +1,8 @@
 export enum FeatureFlag {
   LICENSING = "LICENSING",
+  // Feature IDs in Posthog
+  PER_CREATOR_PER_USER_PRICE = "18873",
+  PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
 }
 
 export interface TenantFeatureFlags {
@@ -5,10 +5,17 @@ export interface Customer {
   currency: string | null | undefined
 }
 
+export interface SubscriptionItems {
+  user: number | undefined
+  creator: number | undefined
+}
+
 export interface Subscription {
   amount: number
+  amounts: SubscriptionItems | undefined
   currency: string
   quantity: number
+  quantities: SubscriptionItems | undefined
   duration: PriceDuration
   cancelAt: number | null | undefined
   currentPeriodStart: number
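Note: the Subscription type above now carries optional per-seat breakdowns for users and creators. A short illustrative example of consuming those undefined-tolerant fields (the local interfaces mirror the diff; the totalSeats helper is hypothetical and not part of the package):

interface SubscriptionItems {
  user: number | undefined
  creator: number | undefined
}

interface SubscriptionLike {
  quantity: number
  quantities: SubscriptionItems | undefined
}

// Hypothetical helper: fall back to the flat quantity when no breakdown exists.
function totalSeats(sub: SubscriptionLike): number {
  if (!sub.quantities) {
    return sub.quantity
  }
  return (sub.quantities.user ?? 0) + (sub.quantities.creator ?? 0)
}

console.log(totalSeats({ quantity: 5, quantities: undefined })) // 5
console.log(totalSeats({ quantity: 5, quantities: { user: 4, creator: 1 } })) // 5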
@@ -4,7 +4,9 @@ export enum PlanType {
   PRO = "pro",
   /** @deprecated */
   TEAM = "team",
+  /** @deprecated */
   PREMIUM = "premium",
+  PREMIUM_PLUS = "premium_plus",
   BUSINESS = "business",
   ENTERPRISE = "enterprise",
 }
@@ -26,10 +28,12 @@ export interface AvailablePrice {
   currency: string
   duration: PriceDuration
   priceId: string
+  type?: string
 }
 
 export enum PlanModel {
   PER_USER = "perUser",
+  PER_CREATOR_PER_USER = "per_creator_per_user",
   DAY_PASS = "dayPass",
 }
 
@@ -14,7 +14,7 @@ RUN yarn global add pm2
 
 COPY package.json .
 COPY dist/yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true --network-timeout 1000000
 # Remove unneeded data from file system to reduce image size
 RUN apk del .gyp \
     && yarn cache clean
@@ -20,7 +20,7 @@
     "run:docker": "node dist/index.js",
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
-    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn nx build && docker buildx build ../.. -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION -f Dockerfile.v2 --platform linux/amd64,linux/arm64",
     "dev:stack:init": "node ./scripts/dev/manage.js init",
     "dev:builder": "npm run dev:stack:init && nodemon",
     "dev:built": "yarn run dev:stack:init && yarn run run:docker",