Merge branch 'master' into BUDI-7580/account_portal_submodule
commit 18b8d6d3ef
@@ -36,6 +36,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: 18.x
+          cache: yarn

       - run: yarn install --frozen-lockfile
       - name: Update versions

@@ -63,14 +64,64 @@ jobs:
           echo "Using tag $version"
           echo "version=$version" >> "$GITHUB_OUTPUT"
 
-      - name: Build/release Docker images
+      - name: Setup Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Docker login
         run: |
           docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker
         env:
           DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+      - name: Build worker docker
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./packages/worker/Dockerfile.v2
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/worker
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+      - name: Build server docker
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./packages/server/Dockerfile.v2
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/apps
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+      - name: Build proxy docker
+        uses: docker/build-push-action@v5
+        with:
+          context: ./hosting/proxy
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./hosting/proxy/Dockerfile
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/proxy
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
 
   release-helm-chart:
     needs: [release-images]

@@ -67,7 +67,7 @@ jobs:
           push: true
           platforms: linux/amd64,linux/arm64
           tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2
       - name: Tag and release Budibase Azure App Service docker image
         uses: docker/build-push-action@v2
         with:

@@ -76,4 +76,4 @@ jobs:
           platforms: linux/amd64
           build-args: TARGETBUILD=aas
           tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2

@@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:
 
 - [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
 
-<p align="center">
-  <img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
-</p>
-<br /><br />
-
-<br /><br /><br />
-
 ## 🏁 Get started
 
 Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.

@@ -1,18 +0,0 @@
-#!/bin/bash
-
-tag=$1
-
-if [[ ! "$tag" ]]; then
-  echo "No tag present. You must pass a tag to this script"
-  exit 1
-fi
-
-echo "Tagging images with tag: $tag"
-
-docker tag proxy-service budibase/proxy:$tag
-docker tag app-service budibase/apps:$tag
-docker tag worker-service budibase/worker:$tag
-
-docker push --all-tags budibase/apps
-docker push --all-tags budibase/worker
-docker push --all-tags budibase/proxy

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.45",
+  "version": "2.12.1",
   "npmClient": "yarn",
   "packages": ["packages/*", "packages/account-portal/packages/*"],
   "useNx": true,

@@ -55,10 +55,6 @@
     "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
     "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "build:specs": "lerna run --stream specs",
-    "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
-    "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
-    "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
-    "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",

@@ -1 +1 @@
-Subproject commit 9ab3f75289b77e92cb9b5ebdefcc07c1608532b5
+Subproject commit 1ca7329ddcb5b1e3a66fdbc6803579ff1270c586

@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
 
-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }
 
   async get(id: string) {

@@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
   if (isBuiltin(id)) {
     return builtinRoleToNumber(id)
   }
-  const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
+  const hierarchy = (await getUserRoleHierarchy(id, {
+    defaultPublic: true,
+  })) as RoleDoc[]
   for (let role of hierarchy) {
     if (isBuiltin(role?.inherits)) {
       return builtinRoleToNumber(role.inherits) + 1

@@ -192,12 +194,15 @@ export async function getRole(
 /**
  * Simple function to get all the roles based on the top level user role ID.
  */
-async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
+async function getAllUserRoles(
+  userRoleId?: string,
+  opts?: { defaultPublic?: boolean }
+): Promise<RoleDoc[]> {
   // admins have access to all roles
   if (userRoleId === BUILTIN_IDS.ADMIN) {
     return getAllRoles()
   }
-  let currentRole = await getRole(userRoleId)
+  let currentRole = await getRole(userRoleId, opts)
   let roles = currentRole ? [currentRole] : []
   let roleIds = [userRoleId]
   // get all the inherited roles

@@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
  * Returns an ordered array of the user's inherited role IDs, this can be used
  * to determine if a user can access something that requires a specific role.
  * @param userRoleId The user's role ID, this can be found in their access token.
+ * @param opts optional - if want to default to public use this.
  * @returns returns an ordered array of the roles, with the first being their
  * highest level of access and the last being the lowest level.
  */
-export async function getUserRoleHierarchy(userRoleId?: string) {
+export async function getUserRoleHierarchy(
+  userRoleId?: string,
+  opts?: { defaultPublic?: boolean }
+) {
   // special case, if they don't have a role then they are a public user
-  return getAllUserRoles(userRoleId)
+  return getAllUserRoles(userRoleId, opts)
 }
 
 // this function checks that the provided permissions are in an array format

@@ -25,12 +25,17 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
+  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
 
-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>

@@ -245,7 +250,8 @@ export class UserDB {
     }
 
     const change = dbUser ? 0 : 1 // no change if there is existing user
-    return UserDB.quotas.addUsers(change, async () => {
+    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
       await validateUniqueUser(email, tenantId)
 
       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)

@@ -307,6 +313,7 @@ export class UserDB {
 
     let usersToSave: any[] = []
     let newUsers: any[] = []
+    let newCreators: any[] = []
 
     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)

@@ -327,10 +334,16 @@ export class UserDB {
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
+      if (isCreator(newUser)) {
+        newCreators.push(newUser)
+      }
     }
 
     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(newUsers.length, async () => {
+    return UserDB.quotas.addUsers(
+      newUsers.length,
+      newCreators.length,
+      async () => {
       // create the promises array that will be called by bulkDocs
       newUsers.forEach((user: any) => {
         usersToSave.push(

@@ -379,7 +392,8 @@ export class UserDB {
         successful: saved,
         unsuccessful,
       }
-    })
+      }
+    )
   }
 
   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {

@@ -419,11 +433,12 @@ export class UserDB {
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+    const creatorsToDelete = usersToDelete.filter(isCreator)
 
-    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
+    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
 
     // Build Response
     // index users by id

@@ -472,7 +487,8 @@ export class UserDB {
 
     await db.remove(userId, dbUser._rev)
 
-    await UserDB.quotas.removeUsers(1)
+    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+    await UserDB.quotas.removeUsers(1, creatorsToDelete)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })

@@ -14,11 +14,11 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
-  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
+  ContextUser,
   DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"

@@ -0,0 +1,54 @@
+const _ = require('lodash/fp')
+const {structures} = require("../../../tests")
+
+jest.mock("../../../src/context")
+jest.mock("../../../src/db")
+
+const context = require("../../../src/context")
+const db = require("../../../src/db")
+
+const {getCreatorCount} = require('../../../src/users/users')
+
+describe("Users", () => {
+
+  let getGlobalDBMock
+  let getGlobalUserParamsMock
+  let paginationMock
+
+  beforeEach(() => {
+    jest.resetAllMocks()
+
+    getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
+    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
+    paginationMock = jest.spyOn(db, "pagination")
+  })
+
+  it("Retrieves the number of creators", async () => {
+    const getUsers = (offset, limit, creators = false) => {
+      const range = _.range(offset, limit)
+      const opts = creators ? {builder: {global: true}} : undefined
+      return range.map(() => structures.users.user(opts))
+    }
+    const page1Data = getUsers(0, 8)
+    const page2Data = getUsers(8, 12, true)
+    getGlobalDBMock.mockImplementation(() => ({
+      name : "fake-db",
+      allDocs: () => ({
+        rows: [...page1Data, ...page2Data]
+      })
+    }))
+    paginationMock.mockImplementationOnce(() => ({
+      data: page1Data,
+      hasNextPage: true,
+      nextPage: "1"
+    }))
+    paginationMock.mockImplementation(() => ({
+      data: page2Data,
+      hasNextPage: false,
+      nextPage: undefined
+    }))
+    const creatorsCount = await getCreatorCount()
+    expect(creatorsCount).toBe(4)
+    expect(paginationMock).toHaveBeenCalledTimes(2)
+  })
+})

@@ -123,6 +123,10 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
+    amounts: {
+      user: 10000,
+      creator: 0,
+    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,

@@ -131,6 +135,10 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
+    quantities: {
+      user: 0,
+      creator: 0,
+    },
     status: "active",
   }
 }

@@ -159,8 +159,10 @@
     {#if selectedImage.size}
       <div class="filesize">
         {#if selectedImage.size <= BYTES_IN_MB}
-          {`${selectedImage.size / BYTES_IN_KB} KB`}
-        {:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if}
+          {`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
+        {:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
+          1
+        )} MB`}{/if}
       </div>
     {/if}
     {#if !disabled}

@@ -203,8 +205,8 @@
       {#if file.size}
         <div class="filesize">
           {#if file.size <= BYTES_IN_MB}
-            {`${file.size / BYTES_IN_KB} KB`}
-          {:else}{`${file.size / BYTES_IN_MB} MB`}{/if}
+            {`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
+          {:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
         </div>
       {/if}
       {#if !disabled}

@@ -23,7 +23,7 @@
     try {
       return await API.uploadBuilderAttachment(data)
     } catch (error) {
-      notifications.error("Failed to upload attachment")
+      notifications.error(error.message || "Failed to upload attachment")
       return []
     }
   }

@@ -103,7 +103,6 @@ const fetchRowHandler = async action => {
 
 const deleteRowHandler = async action => {
   const { tableId, rowId: rowConfig, notificationOverride } = action.parameters
-
   if (tableId && rowConfig) {
     try {
       let requestConfig

@@ -129,9 +128,11 @@ const deleteRowHandler = async action => {
         requestConfig = [parsedRowConfig]
       } else if (Array.isArray(parsedRowConfig)) {
         requestConfig = parsedRowConfig
+      } else if (Number.isInteger(parsedRowConfig)) {
+        requestConfig = [String(parsedRowConfig)]
       }
 
-      if (!requestConfig.length) {
+      if (!requestConfig && !parsedRowConfig) {
         notificationStore.actions.warning("No valid rows were supplied")
         return false
       }

@@ -55,7 +55,7 @@
     try {
       return await API.uploadBuilderAttachment(data)
     } catch (error) {
-      $notifications.error("Failed to upload attachment")
+      $notifications.error(error.message || "Failed to upload attachment")
      return []
     }
   }

@@ -1 +1 @@
-Subproject commit 1001bb64d4e77c3977da354ca28d754a1688f718
+Subproject commit 0db27cad42b1ea92621a2005bde355a4b818d7bc

@@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
 
 COPY package.json .
 COPY dist/yarn.lock .
-RUN yarn install --production=true \
+RUN yarn install --production=true --network-timeout 1000000 \
     # Remove unneeded data from file system to reduce image size
     && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
     && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

@@ -18,7 +18,6 @@
     "test": "bash scripts/test.sh",
     "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
     "test:watch": "jest --watch",
-    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
     "run:docker": "node dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:up": "node scripts/dev/manage.js up",

@@ -1,3 +1,5 @@
+import { ValidFileExtensions } from "@budibase/shared-core"
+
 require("svelte/register")
 
 import { join } from "../../../utilities/centralPath"

@@ -11,34 +13,21 @@ import {
 } from "../../../utilities/fileSystem"
 import env from "../../../environment"
 import { DocumentType } from "../../../db/utils"
-import { context, objectStore, utils, configs } from "@budibase/backend-core"
+import {
+  context,
+  objectStore,
+  utils,
+  configs,
+  BadRequestError,
+} from "@budibase/backend-core"
 import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
-import { App, Ctx } from "@budibase/types"
+import { App, Ctx, ProcessAttachmentResponse, Upload } from "@budibase/types"
 
 const send = require("koa-send")
 
-async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
-  const response = await objectStore.upload({
-    bucket,
-    metadata,
-    filename: s3Key,
-    path: file.path,
-    type: file.type,
-  })
-
-  // don't store a URL, work this out on the way out as the URL could change
-  return {
-    size: file.size,
-    name: file.name,
-    url: objectStore.getAppFileUrl(s3Key),
-    extension: [...file.name.split(".")].pop(),
-    key: response.Key,
-  }
-}
-
 export const toggleBetaUiFeature = async function (ctx: Ctx) {
   const cookieName = `beta:${ctx.params.feature}`
 

@@ -72,23 +61,58 @@ export const serveBuilder = async function (ctx: Ctx) {
   await send(ctx, ctx.file, { root: builderPath })
 }
 
-export const uploadFile = async function (ctx: Ctx) {
+export const uploadFile = async function (
+  ctx: Ctx<{}, ProcessAttachmentResponse>
+) {
   const file = ctx.request?.files?.file
+  if (!file) {
+    throw new BadRequestError("No file provided")
+  }
+
   let files = file && Array.isArray(file) ? Array.from(file) : [file]
 
-  const uploads = files.map(async (file: any) => {
-    const fileExtension = [...file.name.split(".")].pop()
-    // filenames converted to UUIDs so they are unique
-    const processedFileName = `${uuid.v4()}.${fileExtension}`
-
-    return prepareUpload({
-      file,
-      s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`,
-      bucket: ObjectStoreBuckets.APPS,
-    })
-  })
-
-  ctx.body = await Promise.all(uploads)
+  ctx.body = await Promise.all(
+    files.map(async file => {
+      if (!file.name) {
+        throw new BadRequestError(
+          "Attempted to upload a file without a filename"
+        )
+      }
+
+      const extension = [...file.name.split(".")].pop()
+      if (!extension) {
+        throw new BadRequestError(
+          `File "${file.name}" has no extension, an extension is required to upload a file`
+        )
+      }
+
+      if (!env.SELF_HOSTED && !ValidFileExtensions.includes(extension)) {
+        throw new BadRequestError(
+          `File "${file.name}" has an invalid extension: "${extension}"`
+        )
+      }
+
+      // filenames converted to UUIDs so they are unique
+      const processedFileName = `${uuid.v4()}.${extension}`
+
+      const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
+
+      const response = await objectStore.upload({
+        bucket: ObjectStoreBuckets.APPS,
+        filename: s3Key,
+        path: file.path,
+        type: file.type,
+      })
+
+      return {
+        size: file.size,
+        name: file.name,
+        url: objectStore.getAppFileUrl(s3Key),
+        extension,
+        key: response.Key,
+      }
+    })
+  )
 }
 
 export const deleteObjects = async function (ctx: Ctx) {

@@ -0,0 +1,49 @@
+import * as setup from "./utilities"
+import { APIError } from "@budibase/types"
+
+describe("/api/applications/:appId/sync", () => {
+  let config = setup.getConfig()
+
+  afterAll(setup.afterAll)
+  beforeAll(async () => {
+    await config.init()
+  })
+
+  describe("/api/attachments/process", () => {
+    it("should accept an image file upload", async () => {
+      let resp = await config.api.attachment.process(
+        "1px.jpg",
+        Buffer.from([0])
+      )
+      expect(resp.length).toBe(1)
+
+      let upload = resp[0]
+      expect(upload.url.endsWith(".jpg")).toBe(true)
+      expect(upload.extension).toBe("jpg")
+      expect(upload.size).toBe(1)
+      expect(upload.name).toBe("1px.jpg")
+    })
+
+    it("should reject an upload with a malicious file extension", async () => {
+      await config.withEnv({ SELF_HOSTED: undefined }, async () => {
+        let resp = (await config.api.attachment.process(
+          "ohno.exe",
+          Buffer.from([0]),
+          { expectStatus: 400 }
+        )) as unknown as APIError
+        expect(resp.message).toContain("invalid extension")
+      })
+    })
+
+    it("should reject an upload with no file", async () => {
+      let resp = (await config.api.attachment.process(
+        undefined as any,
+        undefined as any,
+        {
+          expectStatus: 400,
+        }
+      )) as unknown as APIError
+      expect(resp.message).toContain("No file provided")
+    })
+  })
+})

@@ -5,11 +5,15 @@ describe("/static", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
   let app
+  let cleanupEnv
 
-  afterAll(setup.afterAll)
+  afterAll(() => {
+    setup.afterAll()
+    cleanupEnv()
+  })
 
   beforeAll(async () => {
-    config.modeSelf()
+    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
     app = await config.init()
   })
 

@@ -8,11 +8,15 @@ describe("/webhooks", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
   let webhook: Webhook
+  let cleanupEnv: () => void
 
-  afterAll(setup.afterAll)
+  afterAll(() => {
+    setup.afterAll()
+    cleanupEnv()
+  })
 
   const setupTest = async () => {
-    config.modeSelf()
+    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
     await config.init()
     const autoConfig = basicAutomation()
     autoConfig.definition.trigger.schema = {

@@ -35,13 +35,18 @@ import { FieldType, Table, TableSchema } from "@budibase/types"
 describe("Google Sheets Integration", () => {
   let integration: any,
     config = new TestConfiguration()
+  let cleanupEnv: () => void
 
   beforeAll(() => {
-    config.setGoogleAuth("test")
+    cleanupEnv = config.setEnv({
+      GOOGLE_CLIENT_ID: "test",
+      GOOGLE_CLIENT_SECRET: "test",
+    })
   })
 
   afterAll(async () => {
-    await config.end()
+    cleanupEnv()
+    config.end()
   })
 
   beforeEach(async () => {

@@ -58,6 +58,7 @@ import {
 } from "@budibase/types"
 
 import API from "./api"
+import { cloneDeep } from "lodash"
 
 type DefaultUserValues = {
   globalUserId: string

@@ -188,30 +189,38 @@ class TestConfiguration {
     }
   }
 
-  // MODES
-  setMultiTenancy = (value: boolean) => {
-    env._set("MULTI_TENANCY", value)
-    coreEnv._set("MULTI_TENANCY", value)
+  async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
+    let cleanup = this.setEnv(newEnvVars)
+    try {
+      await f()
+    } finally {
+      cleanup()
+    }
   }
 
-  setSelfHosted = (value: boolean) => {
-    env._set("SELF_HOSTED", value)
-    coreEnv._set("SELF_HOSTED", value)
-  }
-
-  setGoogleAuth = (value: string) => {
-    env._set("GOOGLE_CLIENT_ID", value)
-    env._set("GOOGLE_CLIENT_SECRET", value)
-    coreEnv._set("GOOGLE_CLIENT_ID", value)
-    coreEnv._set("GOOGLE_CLIENT_SECRET", value)
-  }
-
-  modeCloud = () => {
-    this.setSelfHosted(false)
-  }
-
-  modeSelf = () => {
-    this.setSelfHosted(true)
+  /*
+   * Sets the environment variables to the given values and returns a function
+   * that can be called to reset the environment variables to their original values.
+   */
+  setEnv(newEnvVars: Partial<typeof env>): () => void {
+    const oldEnv = cloneDeep(env)
+    const oldCoreEnv = cloneDeep(coreEnv)
+
+    let key: keyof typeof newEnvVars
+    for (key in newEnvVars) {
+      env._set(key, newEnvVars[key])
+      coreEnv._set(key, newEnvVars[key])
+    }
+
+    return () => {
+      for (const [key, value] of Object.entries(oldEnv)) {
+        env._set(key, value)
+      }
+
+      for (const [key, value] of Object.entries(oldCoreEnv)) {
+        coreEnv._set(key, value)
+      }
+    }
   }
 
   // UTILS

@@ -0,0 +1,35 @@
+import {
+  APIError,
+  Datasource,
+  ProcessAttachmentResponse,
+} from "@budibase/types"
+import TestConfiguration from "../TestConfiguration"
+import { TestAPI } from "./base"
+import fs from "fs"
+
+export class AttachmentAPI extends TestAPI {
+  constructor(config: TestConfiguration) {
+    super(config)
+  }
+
+  process = async (
+    name: string,
+    file: Buffer | fs.ReadStream | string,
+    { expectStatus } = { expectStatus: 200 }
+  ): Promise<ProcessAttachmentResponse> => {
+    const result = await this.request
+      .post(`/api/attachments/process`)
+      .attach("file", file, name)
+      .set(this.config.defaultHeaders())
+
+    if (result.statusCode !== expectStatus) {
+      throw new Error(
+        `Expected status ${expectStatus} but got ${
+          result.statusCode
+        }, body: ${JSON.stringify(result.body)}`
+      )
+    }
+
+    return result.body
+  }
+}

@@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
 import { LegacyViewAPI } from "./legacyView"
 import { ScreenAPI } from "./screen"
 import { ApplicationAPI } from "./application"
+import { AttachmentAPI } from "./attachment"
 
 export default class API {
   table: TableAPI

@@ -17,6 +18,7 @@ export default class API {
   datasource: DatasourceAPI
   screen: ScreenAPI
   application: ApplicationAPI
+  attachment: AttachmentAPI
 
   constructor(config: TestConfiguration) {
     this.table = new TableAPI(config)

@@ -27,5 +29,6 @@ export default class API {
     this.datasource = new DatasourceAPI(config)
     this.screen = new ScreenAPI(config)
     this.application = new ApplicationAPI(config)
+    this.attachment = new AttachmentAPI(config)
   }
 }

@@ -241,7 +241,7 @@ export async function outputProcessing<T extends Row[] | Row>(
         continue
       }
       row[property].forEach((attachment: RowAttachment) => {
-        attachment.url = objectStore.getAppFileUrl(attachment.key)
+        attachment.url ??= objectStore.getAppFileUrl(attachment.key)
       })
     }
   } else if (

@@ -96,3 +96,45 @@ export enum BuilderSocketEvent {
 export const SocketSessionTTL = 60
 export const ValidQueryNameRegex = /^[^()]*$/
 export const ValidColumnNameRegex = /^[_a-zA-Z0-9\s]*$/g
+export const ValidFileExtensions = [
+  "avif",
+  "css",
+  "csv",
+  "docx",
+  "drawio",
+  "editorconfig",
+  "edl",
+  "enc",
+  "export",
+  "geojson",
+  "gif",
+  "htm",
+  "html",
+  "ics",
+  "iqy",
+  "jfif",
+  "jpeg",
+  "jpg",
+  "json",
+  "log",
+  "md",
+  "mid",
+  "odt",
+  "pdf",
+  "png",
+  "ris",
+  "rtf",
+  "svg",
+  "tex",
+  "toml",
+  "twig",
+  "txt",
+  "url",
+  "wav",
+  "webp",
+  "xls",
+  "xlsx",
+  "xml",
+  "yaml",
+  "yml",
+]

@@ -0,0 +1,9 @@
+export interface Upload {
+  size: number
+  name: string
+  url: string
+  extension: string
+  key: string
+}
+
+export type ProcessAttachmentResponse = Upload[]

@@ -5,3 +5,4 @@ export * from "./view"
 export * from "./rows"
 export * from "./table"
 export * from "./permission"
+export * from "./attachment"

@@ -1,5 +1,8 @@
 export enum FeatureFlag {
   LICENSING = "LICENSING",
+  // Feature IDs in Posthog
+  PER_CREATOR_PER_USER_PRICE = "18873",
+  PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
 }
 
 export interface TenantFeatureFlags {

@@ -5,10 +5,17 @@ export interface Customer {
   currency: string | null | undefined
 }
 
+export interface SubscriptionItems {
+  user: number | undefined
+  creator: number | undefined
+}
+
 export interface Subscription {
   amount: number
+  amounts: SubscriptionItems | undefined
   currency: string
   quantity: number
+  quantities: SubscriptionItems | undefined
   duration: PriceDuration
   cancelAt: number | null | undefined
   currentPeriodStart: number

@@ -4,7 +4,9 @@ export enum PlanType {
   PRO = "pro",
   /** @deprecated */
   TEAM = "team",
+  /** @deprecated */
   PREMIUM = "premium",
+  PREMIUM_PLUS = "premium_plus",
   BUSINESS = "business",
   ENTERPRISE = "enterprise",
 }

@@ -26,10 +28,12 @@ export interface AvailablePrice {
   currency: string
   duration: PriceDuration
   priceId: string
+  type?: string
 }
 
 export enum PlanModel {
   PER_USER = "perUser",
+  PER_CREATOR_PER_USER = "per_creator_per_user",
   DAY_PASS = "dayPass",
 }
 

@@ -14,7 +14,7 @@ RUN yarn global add pm2
 
 COPY package.json .
 COPY dist/yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true --network-timeout 1000000
 # Remove unneeded data from file system to reduce image size
 RUN apk del .gyp \
     && yarn cache clean

@@ -20,7 +20,6 @@
     "run:docker": "node dist/index.js",
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
-    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
    "dev:stack:init": "node ./scripts/dev/manage.js init",
     "dev:builder": "npm run dev:stack:init && nodemon",
     "dev:built": "yarn run dev:stack:init && yarn run run:docker",