Merge branch 'master' into fix/show-formblock-description-in-view-mode

This commit is contained in:
Andrew Kingston 2023-11-01 09:38:25 +00:00 committed by GitHub
commit 2711bc8794
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
130 changed files with 2042 additions and 912 deletions

View File

@ -36,6 +36,7 @@ jobs:
- uses: actions/setup-node@v1 - uses: actions/setup-node@v1
with: with:
node-version: 18.x node-version: 18.x
cache: yarn
- run: yarn install --frozen-lockfile - run: yarn install --frozen-lockfile
- name: Update versions - name: Update versions
@ -63,14 +64,64 @@ jobs:
echo "Using tag $version" echo "Using tag $version"
echo "version=$version" >> "$GITHUB_OUTPUT" echo "version=$version" >> "$GITHUB_OUTPUT"
- name: Build/release Docker images - name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Docker login
run: | run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker
env: env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }} DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }} DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
- name: Build worker docker
uses: docker/build-push-action@v5
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: |
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./packages/worker/Dockerfile.v2
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/worker
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
- name: Build server docker
uses: docker/build-push-action@v5
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: |
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./packages/server/Dockerfile.v2
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/apps
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
- name: Build proxy docker
uses: docker/build-push-action@v5
with:
context: ./hosting/proxy
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./hosting/proxy/Dockerfile
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/proxy
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
release-helm-chart: release-helm-chart:
needs: [release-images] needs: [release-images]

View File

@ -67,7 +67,7 @@ jobs:
push: true push: true
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }} tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile file: ./hosting/single/Dockerfile.v2
- name: Tag and release Budibase Azure App Service docker image - name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
with: with:
@ -76,4 +76,4 @@ jobs:
platforms: linux/amd64 platforms: linux/amd64
build-args: TARGETBUILD=aas build-args: TARGETBUILD=aas
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }} tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile file: ./hosting/single/Dockerfile.v2

View File

@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:
- [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/) - [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
</p>
<br /><br />
<br /><br /><br />
## 🏁 Get started ## 🏁 Get started
Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean. Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.

View File

@ -1,18 +0,0 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag proxy-service budibase/proxy:$tag
docker tag app-service budibase/apps:$tag
docker tag worker-service budibase/worker:$tag
docker push --all-tags budibase/apps
docker push --all-tags budibase/worker
docker push --all-tags budibase/proxy

View File

@ -1,5 +1,5 @@
{ {
"version": "2.11.45", "version": "2.12.1",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*"

View File

@ -54,10 +54,6 @@
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"", "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint", "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"build:specs": "lerna run --stream specs", "build:specs": "lerna run --stream specs",
"build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild", "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild", "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -", "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",

View File

@ -3,6 +3,7 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(), deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(), deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(), createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(), listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => { getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}` return `http://s3.example.com/${params.Bucket}/${params.Key}`

View File

@ -21,7 +21,7 @@
"test:watch": "jest --watchAll" "test:watch": "jest --watchAll"
}, },
"dependencies": { "dependencies": {
"@budibase/nano": "10.1.2", "@budibase/nano": "10.1.3",
"@budibase/pouchdb-replication-stream": "1.2.10", "@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/shared-core": "0.0.0", "@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0", "@budibase/types": "0.0.0",

View File

@ -119,8 +119,8 @@ export class Writethrough {
this.writeRateMs = writeRateMs this.writeRateMs = writeRateMs
} }
async put(doc: any) { async put(doc: any, writeRateMs: number = this.writeRateMs) {
return put(this.db, doc, this.writeRateMs) return put(this.db, doc, writeRateMs)
} }
async get(id: string) { async get(id: string) {

View File

@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [
] as const ] as const
export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}

View File

@ -6,6 +6,7 @@ import {
ViewName, ViewName,
} from "../constants" } from "../constants"
import { getProdAppID } from "./conversions" import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
/** /**
* If creating DB allDocs/query params with only a single top level ID this can be used, this * If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions"
export function getDocParams( export function getDocParams(
docType: string, docType: string,
docId?: string | null, docId?: string | null,
otherProps: any = {} otherProps: Partial<DatabaseQueryOpts> = {}
) { ): DatabaseQueryOpts {
if (docId == null) { if (docId == null) {
docId = "" docId = ""
} }
@ -45,8 +46,8 @@ export function getDocParams(
export function getRowParams( export function getRowParams(
tableId?: string | null, tableId?: string | null,
rowId?: string | null, rowId?: string | null,
otherProps = {} otherProps: Partial<DatabaseQueryOpts> = {}
) { ): DatabaseQueryOpts {
if (tableId == null) { if (tableId == null) {
return getDocParams(DocumentType.ROW, null, otherProps) return getDocParams(DocumentType.ROW, null, otherProps)
} }
@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => {
/** /**
* Gets parameters for retrieving workspaces. * Gets parameters for retrieving workspaces.
*/ */
export function getWorkspaceParams(id = "", otherProps = {}) { export function getWorkspaceParams(
id = "",
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return { return {
...otherProps, ...otherProps,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`, startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
/** /**
* Gets parameters for retrieving users. * Gets parameters for retrieving users.
*/ */
export function getGlobalUserParams(globalId: any, otherProps: any = {}) { export function getGlobalUserParams(
globalId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (!globalId) { if (!globalId) {
globalId = "" globalId = ""
} }
@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
/** /**
* Gets parameters for retrieving users, this is a utility function for the getDocParams function. * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
*/ */
export function getUserMetadataParams(userId?: string | null, otherProps = {}) { export function getUserMetadataParams(
userId?: string | null,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return getRowParams(InternalTable.USER_METADATA, userId, otherProps) return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
} }
export function getUsersByAppParams(appId: any, otherProps: any = {}) { export function getUsersByAppParams(
appId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
const prodAppId = getProdAppID(appId) const prodAppId = getProdAppID(appId)
return { return {
...otherProps, ...otherProps,

View File

@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
if (isBuiltin(id)) { if (isBuiltin(id)) {
return builtinRoleToNumber(id) return builtinRoleToNumber(id)
} }
const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[] const hierarchy = (await getUserRoleHierarchy(id, {
defaultPublic: true,
})) as RoleDoc[]
for (let role of hierarchy) { for (let role of hierarchy) {
if (isBuiltin(role?.inherits)) { if (isBuiltin(role?.inherits)) {
return builtinRoleToNumber(role.inherits) + 1 return builtinRoleToNumber(role.inherits) + 1
@ -192,12 +194,15 @@ export async function getRole(
/** /**
* Simple function to get all the roles based on the top level user role ID. * Simple function to get all the roles based on the top level user role ID.
*/ */
async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> { async function getAllUserRoles(
userRoleId?: string,
opts?: { defaultPublic?: boolean }
): Promise<RoleDoc[]> {
// admins have access to all roles // admins have access to all roles
if (userRoleId === BUILTIN_IDS.ADMIN) { if (userRoleId === BUILTIN_IDS.ADMIN) {
return getAllRoles() return getAllRoles()
} }
let currentRole = await getRole(userRoleId) let currentRole = await getRole(userRoleId, opts)
let roles = currentRole ? [currentRole] : [] let roles = currentRole ? [currentRole] : []
let roleIds = [userRoleId] let roleIds = [userRoleId]
// get all the inherited roles // get all the inherited roles
@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
* Returns an ordered array of the user's inherited role IDs, this can be used * Returns an ordered array of the user's inherited role IDs, this can be used
* to determine if a user can access something that requires a specific role. * to determine if a user can access something that requires a specific role.
* @param userRoleId The user's role ID, this can be found in their access token. * @param userRoleId The user's role ID, this can be found in their access token.
* @param opts optional - if want to default to public use this.
* @returns returns an ordered array of the roles, with the first being their * @returns returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level. * highest level of access and the last being the lowest level.
*/ */
export async function getUserRoleHierarchy(userRoleId?: string) { export async function getUserRoleHierarchy(
userRoleId?: string,
opts?: { defaultPublic?: boolean }
) {
// special case, if they don't have a role then they are a public user // special case, if they don't have a role then they are a public user
return getAllUserRoles(userRoleId) return getAllUserRoles(userRoleId, opts)
} }
// this function checks that the provided permissions are in an array format // this function checks that the provided permissions are in an array format

View File

@ -25,12 +25,17 @@ import {
import { import {
getAccountHolderFromUserIds, getAccountHolderFromUserIds,
isAdmin, isAdmin,
isCreator,
validateUniqueUser, validateUniqueUser,
} from "./utils" } from "./utils"
import { searchExistingEmails } from "./lookup" import { searchExistingEmails } from "./lookup"
import { hash } from "../utils" import { hash } from "../utils"
type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any> type QuotaUpdateFn = (
change: number,
creatorsChange: number,
cb?: () => Promise<any>
) => Promise<any>
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any> type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
type FeatureFn = () => Promise<Boolean> type FeatureFn = () => Promise<Boolean>
type GroupGetFn = (ids: string[]) => Promise<UserGroup[]> type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@ -160,13 +165,9 @@ export class UserDB {
} }
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) { static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
const params: any = {
include_docs: true,
limit: opts.limit || 50,
}
let response: User[] = await usersCore.searchGlobalUsersByAppAccess( let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
opts.appId, opts.appId,
params { limit: opts.limit || 50 }
) )
return response return response
} }
@ -245,7 +246,8 @@ export class UserDB {
} }
const change = dbUser ? 0 : 1 // no change if there is existing user const change = dbUser ? 0 : 1 // no change if there is existing user
return UserDB.quotas.addUsers(change, async () => { const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
return UserDB.quotas.addUsers(change, creatorsChange, async () => {
await validateUniqueUser(email, tenantId) await validateUniqueUser(email, tenantId)
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser) let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@ -307,6 +309,7 @@ export class UserDB {
let usersToSave: any[] = [] let usersToSave: any[] = []
let newUsers: any[] = [] let newUsers: any[] = []
let newCreators: any[] = []
const emails = newUsersRequested.map((user: User) => user.email) const emails = newUsersRequested.map((user: User) => user.email)
const existingEmails = await searchExistingEmails(emails) const existingEmails = await searchExistingEmails(emails)
@ -327,10 +330,16 @@ export class UserDB {
} }
newUser.userGroups = groups newUser.userGroups = groups
newUsers.push(newUser) newUsers.push(newUser)
if (isCreator(newUser)) {
newCreators.push(newUser)
}
} }
const account = await accountSdk.getAccountByTenantId(tenantId) const account = await accountSdk.getAccountByTenantId(tenantId)
return UserDB.quotas.addUsers(newUsers.length, async () => { return UserDB.quotas.addUsers(
newUsers.length,
newCreators.length,
async () => {
// create the promises array that will be called by bulkDocs // create the promises array that will be called by bulkDocs
newUsers.forEach((user: any) => { newUsers.forEach((user: any) => {
usersToSave.push( usersToSave.push(
@ -379,7 +388,8 @@ export class UserDB {
successful: saved, successful: saved,
unsuccessful, unsuccessful,
} }
}) }
)
} }
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> { static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@ -419,11 +429,12 @@ export class UserDB {
_deleted: true, _deleted: true,
})) }))
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete) const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
const creatorsToDelete = usersToDelete.filter(isCreator)
await UserDB.quotas.removeUsers(toDelete.length)
for (let user of usersToDelete) { for (let user of usersToDelete) {
await bulkDeleteProcessing(user) await bulkDeleteProcessing(user)
} }
await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
// Build Response // Build Response
// index users by id // index users by id
@ -472,7 +483,8 @@ export class UserDB {
await db.remove(userId, dbUser._rev) await db.remove(userId, dbUser._rev)
await UserDB.quotas.removeUsers(1) const creatorsToDelete = isCreator(dbUser) ? 1 : 0
await UserDB.quotas.removeUsers(1, creatorsToDelete)
await eventHelpers.handleDeleteEvents(dbUser) await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId) await cache.user.invalidateUser(userId)
await sessions.invalidateSessions(userId, { reason: "deletion" }) await sessions.invalidateSessions(userId, { reason: "deletion" })

View File

@ -14,12 +14,13 @@ import {
} from "../db" } from "../db"
import { import {
BulkDocsResponse, BulkDocsResponse,
ContextUser,
SearchQuery, SearchQuery,
SearchQueryOperators, SearchQueryOperators,
SearchUsersRequest, SearchUsersRequest,
User, User,
ContextUser,
DatabaseQueryOpts, DatabaseQueryOpts,
CouchFindOptions,
} from "@budibase/types" } from "@budibase/types"
import { getGlobalDB } from "../context" import { getGlobalDB } from "../context"
import * as context from "../context" import * as context from "../context"
@ -140,7 +141,7 @@ export const getGlobalUserByEmail = async (
export const searchGlobalUsersByApp = async ( export const searchGlobalUsersByApp = async (
appId: any, appId: any,
opts: any, opts: DatabaseQueryOpts,
getOpts?: GetOpts getOpts?: GetOpts
) => { ) => {
if (typeof appId !== "string") { if (typeof appId !== "string") {
@ -166,7 +167,10 @@ export const searchGlobalUsersByApp = async (
Return any user who potentially has access to the application Return any user who potentially has access to the application
Admins, developers and app users with the explicitly role. Admins, developers and app users with the explicitly role.
*/ */
export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => { export const searchGlobalUsersByAppAccess = async (
appId: any,
opts?: { limit?: number }
) => {
const roleSelector = `roles.${appId}` const roleSelector = `roles.${appId}`
let orQuery: any[] = [ let orQuery: any[] = [
@ -187,7 +191,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
orQuery.push(roleCheck) orQuery.push(roleCheck)
} }
let searchOptions = { let searchOptions: CouchFindOptions = {
selector: { selector: {
$or: orQuery, $or: orQuery,
_id: { _id: {
@ -198,7 +202,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
} }
const resp = await directCouchFind(context.getGlobalDBName(), searchOptions) const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
return resp?.rows return resp.rows
} }
export const getGlobalUserByAppPage = (appId: string, user: User) => { export const getGlobalUserByAppPage = (appId: string, user: User) => {
@ -245,7 +249,8 @@ export const paginatedUsers = async ({
limit, limit,
}: SearchUsersRequest = {}) => { }: SearchUsersRequest = {}) => {
const db = getGlobalDB() const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1 const pageSize = limit ?? PAGE_LIMIT
const pageLimit = pageSize + 1
// get one extra document, to have the next page // get one extra document, to have the next page
const opts: DatabaseQueryOpts = { const opts: DatabaseQueryOpts = {
include_docs: true, include_docs: true,
@ -272,7 +277,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts)) const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc) userList = response.rows.map((row: any) => row.doc)
} }
return pagination(userList, pageLimit, { return pagination(userList, pageSize, {
paginate: true, paginate: true,
property, property,
getKey, getKey,

View File

@ -0,0 +1,54 @@
const _ = require('lodash/fp')
const {structures} = require("../../../tests")
jest.mock("../../../src/context")
jest.mock("../../../src/db")
const context = require("../../../src/context")
const db = require("../../../src/db")
const {getCreatorCount} = require('../../../src/users/users')
describe("Users", () => {
let getGlobalDBMock
let getGlobalUserParamsMock
let paginationMock
beforeEach(() => {
jest.resetAllMocks()
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
paginationMock = jest.spyOn(db, "pagination")
})
it("Retrieves the number of creators", async () => {
const getUsers = (offset, limit, creators = false) => {
const range = _.range(offset, limit)
const opts = creators ? {builder: {global: true}} : undefined
return range.map(() => structures.users.user(opts))
}
const page1Data = getUsers(0, 8)
const page2Data = getUsers(8, 12, true)
getGlobalDBMock.mockImplementation(() => ({
name : "fake-db",
allDocs: () => ({
rows: [...page1Data, ...page2Data]
})
}))
paginationMock.mockImplementationOnce(() => ({
data: page1Data,
hasNextPage: true,
nextPage: "1"
}))
paginationMock.mockImplementation(() => ({
data: page2Data,
hasNextPage: false,
nextPage: undefined
}))
const creatorsCount = await getCreatorCount()
expect(creatorsCount).toBe(4)
expect(paginationMock).toHaveBeenCalledTimes(2)
})
})

View File

@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z") export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000 export const MOCK_DATE_TIMESTAMP = 1577836800000

View File

@ -123,6 +123,10 @@ export function customer(): Customer {
export function subscription(): Subscription { export function subscription(): Subscription {
return { return {
amount: 10000, amount: 10000,
amounts: {
user: 10000,
creator: 0,
},
cancelAt: undefined, cancelAt: undefined,
currency: "usd", currency: "usd",
currentPeriodEnd: 0, currentPeriodEnd: 0,
@ -131,6 +135,10 @@ export function subscription(): Subscription {
duration: PriceDuration.MONTHLY, duration: PriceDuration.MONTHLY,
pastDueAt: undefined, pastDueAt: undefined,
quantity: 0, quantity: 0,
quantities: {
user: 0,
creator: 0,
},
status: "active", status: "active",
} }
} }

View File

@ -159,8 +159,10 @@
{#if selectedImage.size} {#if selectedImage.size}
<div class="filesize"> <div class="filesize">
{#if selectedImage.size <= BYTES_IN_MB} {#if selectedImage.size <= BYTES_IN_MB}
{`${selectedImage.size / BYTES_IN_KB} KB`} {`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
{:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if} {:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
1
)} MB`}{/if}
</div> </div>
{/if} {/if}
{#if !disabled} {#if !disabled}
@ -203,8 +205,8 @@
{#if file.size} {#if file.size}
<div class="filesize"> <div class="filesize">
{#if file.size <= BYTES_IN_MB} {#if file.size <= BYTES_IN_MB}
{`${file.size / BYTES_IN_KB} KB`} {`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
{:else}{`${file.size / BYTES_IN_MB} MB`}{/if} {:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
</div> </div>
{/if} {/if}
{#if !disabled} {#if !disabled}

View File

@ -580,7 +580,7 @@ export const getFrontendStore = () => {
let table = validTables.find(table => { let table = validTables.find(table => {
return ( return (
table.sourceId !== BUDIBASE_INTERNAL_DB_ID && table.sourceId !== BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL table.sourceType === DB_TYPE_INTERNAL
) )
}) })
if (table) { if (table) {
@ -591,7 +591,7 @@ export const getFrontendStore = () => {
table = validTables.find(table => { table = validTables.find(table => {
return ( return (
table.sourceId === BUDIBASE_INTERNAL_DB_ID && table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL table.sourceType === DB_TYPE_INTERNAL
) )
}) })
if (table) { if (table) {
@ -599,7 +599,7 @@ export const getFrontendStore = () => {
} }
// Finally try an external table // Finally try an external table
return validTables.find(table => table.type === DB_TYPE_EXTERNAL) return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL)
}, },
enrichEmptySettings: (component, opts) => { enrichEmptySettings: (component, opts) => {
if (!component?._component) { if (!component?._component) {

View File

@ -16,7 +16,6 @@
$: linkedTable = $tables.list.find(table => table._id === linkedTableId) $: linkedTable = $tables.list.find(table => table._id === linkedTableId)
$: schema = linkedTable?.schema $: schema = linkedTable?.schema
$: table = $tables.list.find(table => table._id === tableId) $: table = $tables.list.find(table => table._id === tableId)
$: type = table?.type
$: fetchData(tableId, rowId) $: fetchData(tableId, rowId)
$: { $: {
let rowLabel = row?.[table?.primaryDisplay] let rowLabel = row?.[table?.primaryDisplay]
@ -41,5 +40,5 @@
</script> </script>
{#if row && row._id === rowId} {#if row && row._id === rowId}
<Table {title} {schema} {data} {type} /> <Table {title} {schema} {data} />
{/if} {/if}

View File

@ -16,6 +16,7 @@
import GridRelationshipButton from "components/backend/DataTable/buttons/grid/GridRelationshipButton.svelte" import GridRelationshipButton from "components/backend/DataTable/buttons/grid/GridRelationshipButton.svelte"
import GridEditColumnModal from "components/backend/DataTable/modals/grid/GridEditColumnModal.svelte" import GridEditColumnModal from "components/backend/DataTable/modals/grid/GridEditColumnModal.svelte"
import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte" import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend"
const userSchemaOverrides = { const userSchemaOverrides = {
firstName: { displayName: "First name", disabled: true }, firstName: { displayName: "First name", disabled: true },
@ -27,7 +28,7 @@
$: id = $tables.selected?._id $: id = $tables.selected?._id
$: isUsersTable = id === TableNames.USERS $: isUsersTable = id === TableNames.USERS
$: isInternal = $tables.selected?.type !== "external" $: isInternal = $tables.selected?.sourceType !== DB_TYPE_EXTERNAL
$: gridDatasource = { $: gridDatasource = {
type: "table", type: "table",
tableId: id, tableId: id,
@ -46,10 +47,7 @@
tables.replaceTable(id, e.detail) tables.replaceTable(id, e.detail)
// We need to refresh datasources when an external table changes. // We need to refresh datasources when an external table changes.
// Type "external" may exist - sometimes type is "table" and sometimes it if (e.detail?.sourceType === DB_TYPE_EXTERNAL) {
// is "external" - it has different meanings in different endpoints.
// If we check both these then we hopefully catch all external tables.
if (e.detail?.type === "external" || e.detail?.sql) {
await datasources.fetch() await datasources.fetch()
} }
} }

View File

@ -17,7 +17,6 @@
let hideAutocolumns = true let hideAutocolumns = true
let data = [] let data = []
let loading = false let loading = false
let type = "internal"
$: name = view.name $: name = view.name
$: schema = view.schema $: schema = view.schema
@ -66,7 +65,6 @@
tableId={view.tableId} tableId={view.tableId}
{data} {data}
{loading} {loading}
{type}
rowCount={10} rowCount={10}
allowEditing={false} allowEditing={false}
bind:hideAutocolumns bind:hideAutocolumns

View File

@ -10,6 +10,6 @@
<ImportButton <ImportButton
{disabled} {disabled}
tableId={$datasource?.tableId} tableId={$datasource?.tableId}
tableType={$definition?.type} tableType={$definition?.sourceType}
on:importrows={rows.actions.refreshData} on:importrows={rows.actions.refreshData}
/> />

View File

@ -26,6 +26,7 @@
ALLOWABLE_NUMBER_TYPES, ALLOWABLE_NUMBER_TYPES,
SWITCHABLE_TYPES, SWITCHABLE_TYPES,
PrettyRelationshipDefinitions, PrettyRelationshipDefinitions,
DB_TYPE_EXTERNAL,
} from "constants/backend" } from "constants/backend"
import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils" import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils"
import ConfirmDialog from "components/common/ConfirmDialog.svelte" import ConfirmDialog from "components/common/ConfirmDialog.svelte"
@ -254,10 +255,11 @@
!uneditable && !uneditable &&
editableColumn?.type !== AUTO_TYPE && editableColumn?.type !== AUTO_TYPE &&
!editableColumn.autocolumn !editableColumn.autocolumn
$: external = table.type === "external" $: externalTable = table.sourceType === DB_TYPE_EXTERNAL
// in the case of internal tables the sourceId will just be undefined // in the case of internal tables the sourceId will just be undefined
$: tableOptions = $tables.list.filter( $: tableOptions = $tables.list.filter(
opt => opt.type === table.type && table.sourceId === opt.sourceId opt =>
opt.sourceType === table.sourceType && table.sourceId === opt.sourceId
) )
$: typeEnabled = $: typeEnabled =
!originalName || !originalName ||
@ -409,7 +411,7 @@
editableColumn.type === FieldType.BB_REFERENCE && editableColumn.type === FieldType.BB_REFERENCE &&
editableColumn.subtype === FieldSubtype.USERS editableColumn.subtype === FieldSubtype.USERS
if (!external) { if (!externalTable) {
return [ return [
FIELDS.STRING, FIELDS.STRING,
FIELDS.BARCODEQR, FIELDS.BARCODEQR,
@ -441,7 +443,7 @@
isUsers ? FIELDS.USERS : FIELDS.USER, isUsers ? FIELDS.USERS : FIELDS.USER,
] ]
// no-sql or a spreadsheet // no-sql or a spreadsheet
if (!external || table.sql) { if (!externalTable || table.sql) {
fields = [...fields, FIELDS.LINK, FIELDS.ARRAY] fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
} }
return fields return fields
@ -486,7 +488,7 @@
}) })
} }
const newError = {} const newError = {}
if (!external && fieldInfo.name?.startsWith("_")) { if (!externalTable && fieldInfo.name?.startsWith("_")) {
newError.name = `Column name cannot start with an underscore.` newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) { } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
newError.name = `Illegal character; must be alpha-numeric.` newError.name = `Illegal character; must be alpha-numeric.`
@ -498,7 +500,7 @@
newError.name = `Column name already in use.` newError.name = `Column name already in use.`
} }
if (fieldInfo.type == "auto" && !fieldInfo.subtype) { if (fieldInfo.type === "auto" && !fieldInfo.subtype) {
newError.subtype = `Auto Column requires a type` newError.subtype = `Auto Column requires a type`
} }

View File

@ -1,6 +1,6 @@
<script> <script>
import { Select, Toggle, Multiselect } from "@budibase/bbui" import { Select, Toggle, Multiselect } from "@budibase/bbui"
import { FIELDS } from "constants/backend" import { DB_TYPE_INTERNAL, FIELDS } from "constants/backend"
import { API } from "api" import { API } from "api"
import { parseFile } from "./utils" import { parseFile } from "./utils"
@ -169,7 +169,7 @@
</div> </div>
{/each} {/each}
</div> </div>
{#if tableType === "internal"} {#if tableType === DB_TYPE_INTERNAL}
<br /> <br />
<Toggle <Toggle
bind:value={updateExistingRows} bind:value={updateExistingRows}

View File

@ -8,6 +8,7 @@
import { import {
BUDIBASE_INTERNAL_DB_ID, BUDIBASE_INTERNAL_DB_ID,
BUDIBASE_DATASOURCE_TYPE, BUDIBASE_DATASOURCE_TYPE,
DB_TYPE_INTERNAL,
} from "constants/backend" } from "constants/backend"
$: tableNames = $tables.list.map(table => table.name) $: tableNames = $tables.list.map(table => table.name)
@ -55,8 +56,9 @@
name, name,
schema: { ...schema }, schema: { ...schema },
rows, rows,
type: "internal", type: "table",
sourceId: targetDatasourceId, sourceId: targetDatasourceId,
sourceType: DB_TYPE_INTERNAL,
} }
// Only set primary display if defined // Only set primary display if defined

View File

@ -13,6 +13,7 @@
notifications, notifications,
} from "@budibase/bbui" } from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte" import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend"
export let table export let table
@ -27,8 +28,8 @@
let willBeDeleted let willBeDeleted
let deleteTableName let deleteTableName
$: external = table?.type === "external" $: externalTable = table?.sourceType === DB_TYPE_EXTERNAL
$: allowDeletion = !external || table?.created $: allowDeletion = !externalTable || table?.created
function showDeleteModal() { function showDeleteModal() {
templateScreens = $store.screens.filter( templateScreens = $store.screens.filter(
@ -48,7 +49,7 @@
for (let screen of templateScreens) { for (let screen of templateScreens) {
await store.actions.screens.delete(screen) await store.actions.screens.delete(screen)
} }
if (table.type === "external") { if (table.sourceType === DB_TYPE_EXTERNAL) {
await datasources.fetch() await datasources.fetch()
} }
notifications.success("Table deleted") notifications.success("Table deleted")
@ -91,7 +92,7 @@
<div slot="control" class="icon"> <div slot="control" class="icon">
<Icon s hoverable name="MoreSmallList" /> <Icon s hoverable name="MoreSmallList" />
</div> </div>
{#if !external} {#if !externalTable}
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem> <MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{/if} {/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem> <MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>

View File

@ -23,7 +23,7 @@
try { try {
return await API.uploadBuilderAttachment(data) return await API.uploadBuilderAttachment(data)
} catch (error) { } catch (error) {
notifications.error("Failed to upload attachment") notifications.error(error.message || "Failed to upload attachment")
return [] return []
} }
} }

View File

@ -39,7 +39,15 @@
allowCreator allowCreator
) => { ) => {
if (allowedRoles?.length) { if (allowedRoles?.length) {
return roles.filter(role => allowedRoles.includes(role._id)) const filteredRoles = roles.filter(role =>
allowedRoles.includes(role._id)
)
return [
...filteredRoles,
...(allowedRoles.includes(Constants.Roles.CREATOR)
? [{ _id: Constants.Roles.CREATOR, name: "Creator", enabled: false }]
: []),
]
} }
let newRoles = [...roles] let newRoles = [...roles]
@ -129,8 +137,9 @@
getOptionColour={getColor} getOptionColour={getColor}
getOptionIcon={getIcon} getOptionIcon={getIcon}
isOptionEnabled={option => isOptionEnabled={option =>
option._id !== Constants.Roles.CREATOR || (option._id !== Constants.Roles.CREATOR ||
$licensing.perAppBuildersEnabled} $licensing.perAppBuildersEnabled) &&
option.enabled !== false}
{placeholder} {placeholder}
{error} {error}
/> />

View File

@ -23,7 +23,7 @@
</script> </script>
<div class="table"> <div class="table">
<Table {schema} data={rowsCopy} type="external" allowEditing={false} /> <Table {schema} data={rowsCopy} allowEditing={false} />
</div> </div>
<style> <style>

View File

@ -516,6 +516,13 @@
} }
return null return null
} }
const parseRole = user => {
if (user.isAdminOrGlobalBuilder) {
return Constants.Roles.CREATOR
}
return user.role
}
</script> </script>
<svelte:window on:keydown={handleKeyDown} /> <svelte:window on:keydown={handleKeyDown} />
@ -725,7 +732,7 @@
<RoleSelect <RoleSelect
footer={getRoleFooter(user)} footer={getRoleFooter(user)}
placeholder={false} placeholder={false}
value={user.role} value={parseRole(user)}
allowRemove={user.role && !user.group} allowRemove={user.role && !user.group}
allowPublic={false} allowPublic={false}
allowCreator={true} allowCreator={true}
@ -744,7 +751,7 @@
autoWidth autoWidth
align="right" align="right"
allowedRoles={user.isAdminOrGlobalBuilder allowedRoles={user.isAdminOrGlobalBuilder
? [Constants.Roles.ADMIN] ? [Constants.Roles.CREATOR]
: null} : null}
/> />
</div> </div>

View File

@ -2,6 +2,7 @@
import { ModalContent, Body, Input, notifications } from "@budibase/bbui" import { ModalContent, Body, Input, notifications } from "@budibase/bbui"
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { DB_TYPE_EXTERNAL } from "constants/backend"
export let datasource export let datasource
@ -16,9 +17,10 @@
function buildDefaultTable(tableName, datasourceId) { function buildDefaultTable(tableName, datasourceId) {
return { return {
name: tableName, name: tableName,
type: "external", type: "table",
primary: ["id"], primary: ["id"],
sourceId: datasourceId, sourceId: datasourceId,
sourceType: DB_TYPE_EXTERNAL,
schema: { schema: {
id: { id: {
autocolumn: true, autocolumn: true,

View File

@ -5,7 +5,7 @@
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { onMount } from "svelte" import { onMount } from "svelte"
import { BUDIBASE_INTERNAL_DB_ID } from "constants/backend" import { BUDIBASE_INTERNAL_DB_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { TableNames } from "constants" import { TableNames } from "constants"
import { store } from "builderStore" import { store } from "builderStore"
@ -14,7 +14,7 @@
$: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID) $: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID)
$: internalTablesBySourceId = $tables.list.filter( $: internalTablesBySourceId = $tables.list.filter(
table => table =>
table.type !== "external" && table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === BUDIBASE_INTERNAL_DB_ID && table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table._id !== TableNames.USERS table._id !== TableNames.USERS
) )

View File

@ -4,7 +4,7 @@
import ICONS from "components/backend/DatasourceNavigator/icons" import ICONS from "components/backend/DatasourceNavigator/icons"
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { DEFAULT_BB_DATASOURCE_ID } from "constants/backend" import { DEFAULT_BB_DATASOURCE_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { onMount } from "svelte" import { onMount } from "svelte"
import { store } from "builderStore" import { store } from "builderStore"
@ -13,7 +13,8 @@
$: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID) $: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID)
$: internalTablesBySourceId = $tables.list.filter( $: internalTablesBySourceId = $tables.list.filter(
table => table =>
table.type !== "external" && table.sourceId === DEFAULT_BB_DATASOURCE_ID table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
) )
onMount(() => { onMount(() => {

View File

@ -3,7 +3,6 @@
Heading, Heading,
Body, Body,
Button, Button,
ButtonGroup,
Table, Table,
Layout, Layout,
Modal, Modal,
@ -46,6 +45,10 @@
datasource: { datasource: {
type: "user", type: "user",
}, },
options: {
paginate: true,
limit: 10,
},
}) })
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@ -65,10 +68,12 @@
{ column: "role", component: RoleTableRenderer }, { column: "role", component: RoleTableRenderer },
] ]
let userData = [] let userData = []
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
$: isOwner = $auth.accountPortalAccess && $admin.cloud $: isOwner = $auth.accountPortalAccess && $admin.cloud
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled $: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
$: debouncedUpdateFetch(searchEmail) $: debouncedUpdateFetch(searchEmail)
$: schema = { $: schema = {
email: { email: {
@ -88,16 +93,6 @@
width: "1fr", width: "1fr",
}, },
} }
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
$: pendingSchema = getPendingSchema(schema) $: pendingSchema = getPendingSchema(schema)
$: userData = [] $: userData = []
$: inviteUsersResponse = { successful: [], unsuccessful: [] } $: inviteUsersResponse = { successful: [], unsuccessful: [] }
@ -121,9 +116,15 @@
} }
}) })
} }
let invitesLoaded = false
let pendingInvites = [] const getPendingSchema = tblSchema => {
let parsedInvites = [] if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
const invitesToSchema = invites => { const invitesToSchema = invites => {
return invites.map(invite => { return invites.map(invite => {
@ -143,8 +144,10 @@
const updateFetch = email => { const updateFetch = email => {
fetch.update({ fetch.update({
query: { query: {
string: {
email, email,
}, },
},
}) })
} }
const debouncedUpdateFetch = Utils.debounce(updateFetch, 250) const debouncedUpdateFetch = Utils.debounce(updateFetch, 250)
@ -296,7 +299,7 @@
{/if} {/if}
<div class="controls"> <div class="controls">
{#if !readonly} {#if !readonly}
<ButtonGroup> <div class="buttons">
<Button <Button
disabled={readonly} disabled={readonly}
on:click={$licensing.userLimitReached on:click={$licensing.userLimitReached
@ -315,7 +318,7 @@
> >
Import Import
</Button> </Button>
</ButtonGroup> </div>
{:else} {:else}
<ScimBanner /> <ScimBanner />
{/if} {/if}
@ -390,12 +393,15 @@
</Modal> </Modal>
<style> <style>
.buttons {
display: flex;
gap: 10px;
}
.pagination { .pagination {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
justify-content: flex-end; justify-content: flex-end;
} }
.controls { .controls {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
@ -403,7 +409,6 @@
align-items: center; align-items: center;
gap: var(--spacing-xl); gap: var(--spacing-xl);
} }
.controls-right { .controls-right {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
@ -411,7 +416,6 @@
align-items: center; align-items: center;
gap: var(--spacing-xl); gap: var(--spacing-xl);
} }
.controls-right :global(.spectrum-Search) { .controls-right :global(.spectrum-Search) {
width: 200px; width: 200px;
} }

View File

@ -103,7 +103,6 @@ const fetchRowHandler = async action => {
const deleteRowHandler = async action => { const deleteRowHandler = async action => {
const { tableId, rowId: rowConfig, notificationOverride } = action.parameters const { tableId, rowId: rowConfig, notificationOverride } = action.parameters
if (tableId && rowConfig) { if (tableId && rowConfig) {
try { try {
let requestConfig let requestConfig
@ -129,9 +128,11 @@ const deleteRowHandler = async action => {
requestConfig = [parsedRowConfig] requestConfig = [parsedRowConfig]
} else if (Array.isArray(parsedRowConfig)) { } else if (Array.isArray(parsedRowConfig)) {
requestConfig = parsedRowConfig requestConfig = parsedRowConfig
} else if (Number.isInteger(parsedRowConfig)) {
requestConfig = [String(parsedRowConfig)]
} }
if (!requestConfig.length) { if (!requestConfig && !parsedRowConfig) {
notificationStore.actions.warning("No valid rows were supplied") notificationStore.actions.warning("No valid rows were supplied")
return false return false
} }

View File

@ -140,4 +140,13 @@ export const buildTableEndpoints = API => ({
}, },
}) })
}, },
migrateColumn: async ({ tableId, oldColumn, newColumn }) => {
return await API.post({
url: `/api/tables/${tableId}/migrate`,
body: {
oldColumn,
newColumn,
},
})
},
}) })

View File

@ -55,7 +55,7 @@
try { try {
return await API.uploadBuilderAttachment(data) return await API.uploadBuilderAttachment(data)
} catch (error) { } catch (error) {
$notifications.error("Failed to upload attachment") $notifications.error(error.message || "Failed to upload attachment")
return [] return []
} }
} }

View File

@ -1,11 +1,20 @@
<script> <script>
import { getContext, onMount, tick } from "svelte" import { getContext, onMount, tick } from "svelte"
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core" import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui" import {
Icon,
Popover,
Menu,
MenuItem,
clickOutside,
Modal,
} from "@budibase/bbui"
import GridCell from "./GridCell.svelte" import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils" import { getColumnIcon } from "../lib/utils"
import MigrationModal from "../controls/MigrationModal.svelte"
import { debounce } from "../../../utils/utils" import { debounce } from "../../../utils/utils"
import { FieldType, FormulaTypes } from "@budibase/types" import { FieldType, FormulaTypes } from "@budibase/types"
import { TableNames } from "../../../constants"
export let column export let column
export let idx export let idx
@ -45,6 +54,7 @@
let editIsOpen = false let editIsOpen = false
let timeout let timeout
let popover let popover
let migrationModal
let searchValue let searchValue
let input let input
@ -189,6 +199,11 @@
}) })
} }
const openMigrationModal = () => {
migrationModal.show()
open = false
}
const startSearching = async () => { const startSearching = async () => {
$focusedCellId = null $focusedCellId = null
searchValue = "" searchValue = ""
@ -224,6 +239,10 @@
onMount(() => subscribe("close-edit-column", cancelEdit)) onMount(() => subscribe("close-edit-column", cancelEdit))
</script> </script>
<Modal bind:this={migrationModal}>
<MigrationModal {column} />
</Modal>
<div <div
class="header-cell" class="header-cell"
class:open class:open
@ -363,6 +382,11 @@
> >
Hide column Hide column
</MenuItem> </MenuItem>
{#if $config.canEditColumns && column.schema.type === "link" && column.schema.tableId === TableNames.USERS}
<MenuItem icon="User" on:click={openMigrationModal}>
Migrate to user column
</MenuItem>
{/if}
</Menu> </Menu>
{/if} {/if}
</Popover> </Popover>

View File

@ -0,0 +1,73 @@
<script>
import {
ModalContent,
notifications,
Input,
InlineAlert,
} from "@budibase/bbui"
import { getContext } from "svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldSubtype, FieldType, RelationshipType } from "@budibase/types"
const { API, definition, rows } = getContext("grid")
export let column
let newColumnName = `${column.schema.name} migrated`
$: error = checkNewColumnName(newColumnName)
const checkNewColumnName = newColumnName => {
if (newColumnName === "") {
return "Column name can't be empty."
}
if (newColumnName in $definition.schema) {
return "New column name can't be the same as an existing column name."
}
if (newColumnName.match(ValidColumnNameRegex) === null) {
return "Illegal character; must be alpha-numeric."
}
}
const migrateUserColumn = async () => {
let subtype = FieldSubtype.USERS
if (column.schema.relationshipType === RelationshipType.ONE_TO_MANY) {
subtype = FieldSubtype.USER
}
try {
await API.migrateColumn({
tableId: $definition._id,
oldColumn: column.schema,
newColumn: {
name: newColumnName,
type: FieldType.BB_REFERENCE,
subtype,
},
})
notifications.success("Column migrated")
} catch (e) {
notifications.error(`Failed to migrate: ${e.message}`)
}
await rows.actions.refreshData()
}
</script>
<ModalContent
title="Migrate column"
confirmText="Continue"
cancelText="Cancel"
onConfirm={migrateUserColumn}
disabled={error !== undefined}
size="M"
>
This operation will kick off a migration of the column "{column.schema.name}"
to a new column, with the name provided - this operation may take a moment to
complete.
<InlineAlert
type="error"
header="Are you sure?"
message="This will leave bindings which utilised the user relationship column in a state where they will need to be updated to use the new column instead."
/>
<Input bind:value={newColumnName} label="New column name" {error} />
</ModalContent>

@ -1 +1 @@
Subproject commit 5ed0ee2aca9d754d80cd46bae412b24621afa47e Subproject commit 3820c0c93a3e448e10a60a9feb5396844b537ca8

View File

@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
COPY package.json . COPY package.json .
COPY dist/yarn.lock . COPY dist/yarn.lock .
RUN yarn install --production=true \ RUN yarn install --production=true --network-timeout 1000000 \
# Remove unneeded data from file system to reduce image size # Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \ && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

View File

@ -70,6 +70,13 @@ module AwsMock {
Contents: {}, Contents: {},
}) })
) )
// @ts-ignore
this.getObject = jest.fn(
response({
Body: "",
})
)
} }
aws.DynamoDB = { DocumentClient } aws.DynamoDB = { DocumentClient }

View File

@ -18,7 +18,6 @@
"test": "bash scripts/test.sh", "test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit", "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch", "test:watch": "jest --watch",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js", "run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js", "run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up", "dev:stack:up": "node scripts/dev/manage.js up",

View File

@ -2,7 +2,7 @@ version: "3.8"
services: services:
db: db:
container_name: postgres container_name: postgres
image: postgres:15 image: postgres:15-bullseye
restart: unless-stopped restart: unless-stopped
environment: environment:
POSTGRES_USER: root POSTGRES_USER: root

View File

@ -32,11 +32,8 @@ import {
tenancy, tenancy,
users, users,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants" import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests" import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities" import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock } from "../../utilities/redis" import { doesUserHaveLock } from "../../utilities/redis"

View File

@ -12,7 +12,6 @@ import {
CreateDatasourceResponse, CreateDatasourceResponse,
Datasource, Datasource,
DatasourcePlus, DatasourcePlus,
ExternalTable,
FetchDatasourceInfoRequest, FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse, FetchDatasourceInfoResponse,
IntegrationBase, IntegrationBase,
@ -59,7 +58,7 @@ async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus const connector = (await getConnector(datasource)) as DatasourcePlus
return await connector.buildSchema( return await connector.buildSchema(
datasource._id!, datasource._id!,
datasource.entities! as Record<string, ExternalTable> datasource.entities! as Record<string, Table>
) )
} }

View File

@ -1,12 +1,12 @@
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { APP_PREFIX, DocumentType } from "../../../db/utils" import { APP_PREFIX, DocumentType } from "../../../db/utils"
export async function addRev( export async function addRev(
body: { _id?: string; _rev?: string }, body: { _id?: string; _rev?: string },
tableId?: string tableId?: string
) { ) {
if (!body._id || (tableId && isExternalTable(tableId))) { if (!body._id || (tableId && isExternalTableID(tableId))) {
return body return body
} }
let id = body._id let id = body._id

View File

@ -1,7 +1,7 @@
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { import {
Ctx, Ctx,
UserCtx, UserCtx,
@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
export * as views from "./views" export * as views from "./views"
function pickApi(tableId: any) { function pickApi(tableId: any) {
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
return external return external
} }
return internal return internal
@ -227,7 +227,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
// external tables are hard to validate currently // external tables are hard to validate currently
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
ctx.body = { valid: true, errors: {} } ctx.body = { valid: true, errors: {} }
} else { } else {
ctx.body = await sdk.rows.utils.validate({ ctx.body = await sdk.rows.utils.validate({

View File

@ -1,3 +1,5 @@
import { ValidFileExtensions } from "@budibase/shared-core"
require("svelte/register") require("svelte/register")
import { join } from "../../../utilities/centralPath" import { join } from "../../../utilities/centralPath"
@ -11,34 +13,21 @@ import {
} from "../../../utilities/fileSystem" } from "../../../utilities/fileSystem"
import env from "../../../environment" import env from "../../../environment"
import { DocumentType } from "../../../db/utils" import { DocumentType } from "../../../db/utils"
import { context, objectStore, utils, configs } from "@budibase/backend-core" import {
context,
objectStore,
utils,
configs,
BadRequestError,
} from "@budibase/backend-core"
import AWS from "aws-sdk" import AWS from "aws-sdk"
import fs from "fs" import fs from "fs"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
import { App, Ctx } from "@budibase/types" import { App, Ctx, ProcessAttachmentResponse, Upload } from "@budibase/types"
const send = require("koa-send") const send = require("koa-send")
async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
const response = await objectStore.upload({
bucket,
metadata,
filename: s3Key,
path: file.path,
type: file.type,
})
// don't store a URL, work this out on the way out as the URL could change
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
extension: [...file.name.split(".")].pop(),
key: response.Key,
}
}
export const toggleBetaUiFeature = async function (ctx: Ctx) { export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}` const cookieName = `beta:${ctx.params.feature}`
@ -72,23 +61,58 @@ export const serveBuilder = async function (ctx: Ctx) {
await send(ctx, ctx.file, { root: builderPath }) await send(ctx, ctx.file, { root: builderPath })
} }
export const uploadFile = async function (ctx: Ctx) { export const uploadFile = async function (
ctx: Ctx<{}, ProcessAttachmentResponse>
) {
const file = ctx.request?.files?.file const file = ctx.request?.files?.file
if (!file) {
throw new BadRequestError("No file provided")
}
let files = file && Array.isArray(file) ? Array.from(file) : [file] let files = file && Array.isArray(file) ? Array.from(file) : [file]
const uploads = files.map(async (file: any) => { ctx.body = await Promise.all(
const fileExtension = [...file.name.split(".")].pop() files.map(async file => {
if (!file.name) {
throw new BadRequestError(
"Attempted to upload a file without a filename"
)
}
const extension = [...file.name.split(".")].pop()
if (!extension) {
throw new BadRequestError(
`File "${file.name}" has no extension, an extension is required to upload a file`
)
}
if (!env.SELF_HOSTED && !ValidFileExtensions.includes(extension)) {
throw new BadRequestError(
`File "${file.name}" has an invalid extension: "${extension}"`
)
}
// filenames converted to UUIDs so they are unique // filenames converted to UUIDs so they are unique
const processedFileName = `${uuid.v4()}.${fileExtension}` const processedFileName = `${uuid.v4()}.${extension}`
return prepareUpload({ const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
file,
s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`, const response = await objectStore.upload({
bucket: ObjectStoreBuckets.APPS, bucket: ObjectStoreBuckets.APPS,
}) filename: s3Key,
path: file.path,
type: file.type,
}) })
ctx.body = await Promise.all(uploads) return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key,
}
})
)
} }
export const deleteObjects = async function (ctx: Ctx) { export const deleteObjects = async function (ctx: Ctx) {

View File

@ -5,18 +5,27 @@ import {
isSchema, isSchema,
validate as validateSchema, validate as validateSchema,
} from "../../../utilities/schema" } from "../../../utilities/schema"
import { isExternalTable, isSQL } from "../../../integrations/utils" import {
isExternalTable,
isExternalTableID,
isSQL,
} from "../../../integrations/utils"
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
DocumentType,
FetchTablesResponse, FetchTablesResponse,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableResponse, TableResponse,
TableSourceType,
UserCtx, UserCtx,
Row, SEPARATOR,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
@ -24,12 +33,10 @@ import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash" import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) { if (table && isExternalTable(table)) {
tableId = table._id
}
if (table && table.type === "external") {
return external return external
} else if (tableId && isExternalTable(tableId)) { }
if (tableId && isExternalTableID(tableId)) {
return external return external
} }
return internal return internal
@ -46,8 +53,8 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
if (entities) { if (entities) {
return Object.values(entities).map<Table>((entity: Table) => ({ return Object.values(entities).map<Table>((entity: Table) => ({
...entity, ...entity,
type: "external", sourceType: TableSourceType.EXTERNAL,
sourceId: datasource._id, sourceId: datasource._id!,
sql: isSQL(datasource), sql: isSQL(datasource),
})) }))
} else { } else {
@ -158,3 +165,19 @@ export async function validateExistingTableImport(ctx: UserCtx) {
ctx.status = 422 ctx.status = 422
} }
} }
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
const { oldColumn, newColumn } = ctx.request.body
let tableId = ctx.params.tableId as string
const table = await sdk.tables.getTable(tableId)
let result = await sdk.tables.migrate(table, oldColumn, newColumn)
for (let table of result.tablesUpdated) {
builderSocket?.emitTableUpdate(ctx, table, {
includeOriginator: true,
})
}
ctx.status = 200
ctx.body = { message: `Column ${oldColumn.name} migrated.` }
}

View File

@ -7,6 +7,7 @@ import {
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableSourceType,
UserCtx, UserCtx,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
@ -16,10 +17,11 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
let tableToSave: Table & { let tableToSave: Table & {
_rename?: RenameColumn _rename?: RenameColumn
} = { } = {
type: "table",
_id: generateTableID(), _id: generateTableID(),
views: {},
...rest, ...rest,
type: "table",
sourceType: TableSourceType.INTERNAL,
views: {},
} }
const renaming = tableToSave._rename const renaming = tableToSave._rename
delete tableToSave._rename delete tableToSave._rename

View File

@ -11,128 +11,24 @@ const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router() const router: Router = new Router()
router router
/**
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
* @apiName Get an enriched row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This API is only useful when dealing with rows that have relationships.
* Normally when a row is a returned from the API relationships will only have the structure
* `{ primaryDisplay: "name", _id: ... }` but this call will return the full related rows
* for each relationship instead.
*
* @apiParam {string} rowId The ID of the row which is to be retrieved and enriched.
*
* @apiSuccess {object} row The response body will be the enriched row.
*/
.get( .get(
"/api/:sourceId/:rowId/enrich", "/api/:sourceId/:rowId/enrich",
paramSubResource("sourceId", "rowId"), paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetchEnrichedRow rowController.fetchEnrichedRow
) )
/**
* @api {get} /api/:sourceId/rows Get all rows in a table
* @apiName Get all rows in a table
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This is a deprecated endpoint that should not be used anymore, instead use the search endpoint.
* This endpoint gets all of the rows within the specified table - it is not heavily used
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
* will simply stop.
*
* @apiParam {string} sourceId The ID of the table to retrieve all rows within.
*
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
*/
.get( .get(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetch rowController.fetch
) )
/**
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
* @apiName Retrieve a single row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
* a row by anything other than its _id field, use the search endpoint.
*
* @apiParam {string} sourceId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve.
*
* @apiSuccess {object} body The response body will be the row that was found.
*/
.get( .get(
"/api/:sourceId/rows/:rowId", "/api/:sourceId/rows/:rowId",
paramSubResource("sourceId", "rowId"), paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.find rowController.find
) )
/**
* @api {post} /api/:sourceId/search Search for rows in a table
* @apiName Search for rows in a table
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This is the primary method of accessing rows in Budibase, the data provider
* and data UI in the builder are built atop this. All filtering, sorting and pagination is
* handled through this, for internal and external (datasource plus, e.g. SQL) tables.
*
* @apiParam {string} sourceId The ID of the table to retrieve rows from.
*
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false.
* @apiParam (Body) {object} [query] This contains a set of filters which should be applied, if none
* specified then the request will be unfiltered. An example with all of the possible query
* options has been supplied below.
* @apiParam (Body) {number} [limit] This sets a limit for the number of rows that will be returned,
* this will be implemented at the database level if supported for performance reasons. This
* is useful when paginating to set exactly how many rows per page.
* @apiParam (Body) {string} [bookmark] If pagination is enabled then a bookmark will be returned
* with each successful search request, this should be supplied back to get the next page.
* @apiParam (Body) {object} [sort] If sort is desired this should contain the name of the column to
* sort on.
* @apiParam (Body) {string} [sortOrder] If sort is enabled then this can be either "descending" or
* "ascending" as required.
* @apiParam (Body) {string} [sortType] If sort is enabled then you must specify the type of search
* being used, either "string" or "number". This is only used for internal tables.
*
* @apiParamExample {json} Example:
* {
* "tableId": "ta_70260ff0b85c467ca74364aefc46f26d",
* "query": {
* "string": {},
* "fuzzy": {},
* "range": {
* "columnName": {
* "high": 20,
* "low": 10,
* }
* },
* "equal": {
* "columnName": "someValue"
* },
* "notEqual": {},
* "empty": {},
* "notEmpty": {},
* "oneOf": {
* "columnName": ["value"]
* }
* },
* "limit": 10,
* "sort": "name",
* "sortOrder": "descending",
* "sortType": "string",
* "paginate": true
* }
*
* @apiSuccess {object[]} rows An array of rows that was found based on the supplied parameters.
* @apiSuccess {boolean} hasNextPage If pagination was enabled then this specifies whether or
* not there is another page after this request.
* @apiSuccess {string} bookmark The bookmark to be sent with the next request to get the next
* page.
*/
.post( .post(
"/api/:sourceId/search", "/api/:sourceId/search",
internalSearchValidator(), internalSearchValidator(),
@ -148,30 +44,6 @@ router
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search rowController.search
) )
/**
* @api {post} /api/:sourceId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API will create a new row based on the supplied body. If the
* body includes an "_id" field then it will update an existing row if the field
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} sourceId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
* is the rows new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post( .post(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -179,14 +51,6 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.save rowController.save
) )
/**
* @api {patch} /api/:sourceId/rows Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint is identical to the row creation endpoint but instead it will
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch( .patch(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -194,52 +58,12 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.patch rowController.patch
) )
/**
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row
* @apiName Validate inputs for a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription When attempting to save a row you may want to check if the row is valid
* given the table schema, this will iterate through all the constraints on the table and
* check if the request body is valid.
*
* @apiParam {string} sourceId The ID of the table the row is to be validated for.
*
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints.
*
* @apiSuccess {boolean} valid If inputs provided are acceptable within the table schema this
* will be true, if it is not then then errors property will be populated.
* @apiSuccess {object} [errors] A key value map of information about fields on the input
* which do not match the table schema. The key name will be the column names that have breached
* the schema.
*/
.post( .post(
"/api/:sourceId/rows/validate", "/api/:sourceId/rows/validate",
paramResource("sourceId"), paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.validate rowController.validate
) )
/**
* @api {delete} /api/:sourceId/rows Delete rows
* @apiName Delete rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
* of the deleted rows, if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/
.delete( .delete(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -247,20 +71,6 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.destroy rowController.destroy
) )
/**
* @api {post} /api/:sourceId/rows/exportRows Export Rows
* @apiName Export rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API can export a number of provided rows
*
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
*
* @apiSuccess {object[]|object}
*/
.post( .post(
"/api/:sourceId/rows/exportRows", "/api/:sourceId/rows/exportRows",
paramResource("sourceId"), paramResource("sourceId"),

View File

@ -9,99 +9,13 @@ const { BUILDER, PermissionLevel, PermissionType } = permissions
const router: Router = new Router() const router: Router = new Router()
router router
/**
* @api {get} /api/tables Fetch all tables
* @apiName Fetch all tables
* @apiGroup tables
* @apiPermission table read access
* @apiDescription This endpoint retrieves all of the tables which have been created in
* an app. This includes all of the external and internal tables; to tell the difference
* between these look for the "type" property on each table, either being "internal" or "external".
*
* @apiSuccess {object[]} body The response body will be the list of tables that was found - as
* this does not take any parameters the only error scenario is no access.
*/
.get("/api/tables", authorized(BUILDER), tableController.fetch) .get("/api/tables", authorized(BUILDER), tableController.fetch)
/**
* @api {get} /api/tables/:id Fetch a single table
* @apiName Fetch a single table
* @apiGroup tables
* @apiPermission table read access
* @apiDescription Retrieves a single table this could be be internal or external based on
* the provided table ID.
*
* @apiParam {string} id The ID of the table which is to be retrieved.
*
* @apiSuccess {object[]} body The response body will be the table that was found.
*/
.get( .get(
"/api/tables/:tableId", "/api/tables/:tableId",
paramResource("tableId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.READ, { schema: true }), authorized(PermissionType.TABLE, PermissionLevel.READ, { schema: true }),
tableController.find tableController.find
) )
/**
* @api {post} /api/tables Save a table
* @apiName Save a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription Create or update a table with this endpoint, this will function for both internal
* external tables.
*
* @apiParam (Body) {string} [_id] If updating an existing table then the ID of the table must be specified.
* @apiParam (Body) {string} [_rev] If updating an existing internal table then the revision must also be specified.
* @apiParam (Body) {string} type] This should either be "internal" or "external" depending on the table type -
* this will default to internal.
* @apiParam (Body) {string} [sourceId] If creating an external table then this should be set to the datasource ID. If
* building an internal table this does not need to be set, although it will be returned as "bb_internal".
* @apiParam (Body) {string} name The name of the table, this will be used in the UI. To rename the table simply
* supply the table structure to this endpoint with the name changed.
* @apiParam (Body) {object} schema A key value object which has all of the columns in the table as the keys in this
* object. For each column a "type" and "constraints" must be specified, with some types requiring further information.
* More information about the schema structure can be found in the Typescript definitions.
* @apiParam (Body) {string} [primaryDisplay] The name of the column which should be used when displaying rows
* from this table as relationships.
* @apiParam (Body) {object[]} [indexes] Specifies the search indexes - this is deprecated behaviour with the introduction
* of lucene indexes. This functionality is only available for internal tables.
* @apiParam (Body) {object} [_rename] If a column is to be renamed then the "old" column name should be set in this
* structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field
* lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix
* the rows in the table. This functionality is only available for internal tables.
* @apiParam (Body) {object[]} [rows] When creating a table using a compatible data source, an array of objects to be imported into the new table can be provided.
*
* @apiParamExample {json} Example:
* {
* "_id": "ta_05541307fa0f4044abee071ca2a82119",
* "_rev": "10-0fbe4e78f69b255d79f1017e2eeef807",
* "type": "internal",
* "views": {},
* "name": "tableName",
* "schema": {
* "column": {
* "type": "string",
* "constraints": {
* "type": "string",
* "length": {
* "maximum": null
* },
* "presence": false
* },
* "name": "column"
* },
* },
* "primaryDisplay": "column",
* "indexes": [],
* "sourceId": "bb_internal",
* "_rename": {
* "old": "columnName",
* "updated": "newColumnName",
* },
* "rows": []
* }
*
* @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
* saved to the database.
*/
.post( .post(
"/api/tables", "/api/tables",
// allows control over updating a table // allows control over updating a table
@ -125,41 +39,12 @@ router
authorized(BUILDER), authorized(BUILDER),
tableController.validateExistingTableImport tableController.validateExistingTableImport
) )
/**
* @api {post} /api/tables/:tableId/:revId Delete a table
* @apiName Delete a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will delete a table and all of its associated data, for this reason it is
* quite dangerous - it will work for internal and external tables.
*
* @apiParam {string} tableId The ID of the table which is to be deleted.
* @apiParam {string} [revId] If deleting an internal table then the revision must also be supplied (_rev), for
* external tables this can simply be set to anything, e.g. "external".
*
* @apiSuccess {string} message A message stating that the table was deleted successfully.
*/
.delete( .delete(
"/api/tables/:tableId/:revId", "/api/tables/:tableId/:revId",
paramResource("tableId"), paramResource("tableId"),
authorized(BUILDER), authorized(BUILDER),
tableController.destroy tableController.destroy
) )
/**
* @api {post} /api/tables/:tableId/:revId Import CSV to existing table
* @apiName Import CSV to existing table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will import data to existing tables, internal or external. It is used in combination
* with the CSV validation endpoint. Take the output of the CSV validation endpoint and pass it to this endpoint to
* import the data; please note this will only import fields that already exist on the table/match the type.
*
* @apiParam {string} tableId The ID of the table which the data should be imported to.
*
* @apiParam (Body) {object[]} rows An array of objects representing the rows to be imported, key-value pairs not matching the table schema will be ignored.
*
* @apiSuccess {string} message A message stating that the data was imported successfully.
*/
.post( .post(
"/api/tables/:tableId/import", "/api/tables/:tableId/import",
paramResource("tableId"), paramResource("tableId"),
@ -167,4 +52,11 @@ router
tableController.bulkImport tableController.bulkImport
) )
.post(
"/api/tables/:tableId/migrate",
paramResource("tableId"),
authorized(BUILDER),
tableController.migrate
)
export default router export default router

View File

@ -7,7 +7,7 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"entities": [ "entities": [
{ {
"_id": "ta_users", "_id": "ta_users",
"_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44", "_rev": "1-73b7912e6cbdd3d696febc60f3715844",
"createdAt": "2020-01-01T00:00:00.000Z", "createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users", "name": "Users",
"primaryDisplay": "email", "primaryDisplay": "email",
@ -21,7 +21,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": true, "presence": true,
"type": "string", "type": "string",
}, },
"fieldName": "email",
"name": "email", "name": "email",
"type": "string", "type": "string",
}, },
@ -30,7 +29,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "firstName",
"name": "firstName", "name": "firstName",
"type": "string", "type": "string",
}, },
@ -39,7 +37,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "lastName",
"name": "lastName", "name": "lastName",
"type": "string", "type": "string",
}, },
@ -54,7 +51,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "roleId",
"name": "roleId", "name": "roleId",
"type": "options", "type": "options",
}, },
@ -67,11 +63,12 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "status",
"name": "status", "name": "status",
"type": "options", "type": "options",
}, },
}, },
"sourceId": "bb_internal",
"sourceType": "internal",
"type": "table", "type": "table",
"updatedAt": "2020-01-01T00:00:00.000Z", "updatedAt": "2020-01-01T00:00:00.000Z",
"views": {}, "views": {},

View File

@ -0,0 +1,49 @@
import * as setup from "./utilities"
import { APIError } from "@budibase/types"

// NOTE(review): the outer label previously read "/api/applications/:appId/sync",
// which was a copy-paste from another test file — these tests exercise the
// attachment processing endpoint only.
describe("/api/attachments", () => {
  const config = setup.getConfig()

  afterAll(setup.afterAll)

  beforeAll(async () => {
    await config.init()
  })

  describe("/api/attachments/process", () => {
    it("should accept an image file upload", async () => {
      const resp = await config.api.attachment.process(
        "1px.jpg",
        Buffer.from([0])
      )
      expect(resp.length).toBe(1)

      const upload = resp[0]
      expect(upload.url.endsWith(".jpg")).toBe(true)
      expect(upload.extension).toBe("jpg")
      expect(upload.size).toBe(1)
      expect(upload.name).toBe("1px.jpg")
    })

    it("should reject an upload with a malicious file extension", async () => {
      // Extension validation only applies in cloud mode, so unset SELF_HOSTED.
      await config.withEnv({ SELF_HOSTED: undefined }, async () => {
        const resp = (await config.api.attachment.process(
          "ohno.exe",
          Buffer.from([0]),
          { expectStatus: 400 }
        )) as unknown as APIError
        expect(resp.message).toContain("invalid extension")
      })
    })

    it("should reject an upload with no file", async () => {
      const resp = (await config.api.attachment.process(
        undefined as any,
        undefined as any,
        {
          expectStatus: 400,
        }
      )) as unknown as APIError
      expect(resp.message).toContain("No file provided")
    })
  })
})

View File

@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => { describe("/backups", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
@ -12,16 +14,17 @@ describe("/backups", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeEach(async () => {
tk.reset()
await config.init() await config.init()
}) })
describe("exportAppDump", () => { describe("/api/backups/export", () => {
it("should be able to export app", async () => { it("should be able to export app", async () => {
const res = await request const { body, headers } = await config.api.backup.exportBasicBackup(
.post(`/api/backups/export?appId=${config.getAppId()}`) config.getAppId()!
.set(config.defaultHeaders()) )
.expect(200) expect(body instanceof Buffer).toBe(true)
expect(res.headers["content-type"]).toEqual("application/gzip") expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1) expect(events.app.exported).toBeCalledTimes(1)
}) })
@ -36,11 +39,11 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => { it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
const res = await request const { headers } = await config.api.backup.exportBasicBackup(
.post(`/api/backups/export?appId=${config.getAppId()}`) config.getAppId()!
.set(config.defaultHeaders()) )
expect(res.headers["content-disposition"]).toEqual( expect(headers["content-disposition"]).toEqual(
`attachment; filename="${ `attachment; filename="${
config.getApp()!.name config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"` }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@ -48,6 +51,21 @@ describe("/backups", () => {
}) })
}) })
describe("/api/backups/import", () => {
it("should be able to import an app", async () => {
const appId = config.getAppId()!
const automation = await config.createAutomation()
await config.createAutomationLog(automation, appId)
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
})
})
describe("calculateBackupStats", () => { describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => { it("should be able to calculate the backup statistics", async () => {
await config.createAutomation() await config.createAutomation()

View File

@ -10,6 +10,7 @@ import {
FieldSchema, FieldSchema,
FieldType, FieldType,
FieldTypeSubtypes, FieldTypeSubtypes,
INTERNAL_TABLE_SOURCE_ID,
MonthlyQuotaName, MonthlyQuotaName,
PermissionLevel, PermissionLevel,
QuotaUsageType, QuotaUsageType,
@ -21,6 +22,7 @@ import {
SortType, SortType,
StaticQuotaName, StaticQuotaName,
Table, Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
expectAnyExternalColsAttributes, expectAnyExternalColsAttributes,
@ -65,6 +67,8 @@ describe.each([
type: "table", type: "table",
primary: ["id"], primary: ["id"],
primaryDisplay: "name", primaryDisplay: "name",
sourceType: TableSourceType.INTERNAL,
sourceId: INTERNAL_TABLE_SOURCE_ID,
schema: { schema: {
id: { id: {
type: FieldType.AUTO, type: FieldType.AUTO,
@ -134,9 +138,22 @@ describe.each([
} }
: undefined : undefined
async function createTable(
cfg: Omit<SaveTableRequest, "sourceId" | "sourceType">,
opts?: { skipReassigning: boolean }
) {
let table
if (dsProvider) {
table = await config.createExternalTable(cfg, opts)
} else {
table = await config.createTable(cfg, opts)
}
return table
}
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
const table = await config.createTable(tableConfig) let table = await createTable(tableConfig)
tableId = table._id! tableId = table._id!
}) })
@ -165,7 +182,7 @@ describe.each([
const queryUsage = await getQueryUsage() const queryUsage = await getQueryUsage()
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
const newTable = await config.createTable( const newTable = await createTable(
{ {
...tableConfig, ...tableConfig,
name: "TestTableAuto", name: "TestTableAuto",
@ -242,7 +259,7 @@ describe.each([
}) })
it("should list all rows for given tableId", async () => { it("should list all rows for given tableId", async () => {
const table = await config.createTable(generateTableConfig(), { const table = await createTable(generateTableConfig(), {
skipReassigning: true, skipReassigning: true,
}) })
const tableId = table._id! const tableId = table._id!
@ -323,7 +340,7 @@ describe.each([
inclusion: ["Alpha", "Beta", "Gamma"], inclusion: ["Alpha", "Beta", "Gamma"],
}, },
} }
const table = await config.createTable({ const table = await createTable({
name: "TestTable2", name: "TestTable2",
type: "table", type: "table",
schema: { schema: {
@ -438,7 +455,8 @@ describe.each([
describe("view save", () => { describe("view save", () => {
it("views have extra data trimmed", async () => { it("views have extra data trimmed", async () => {
const table = await config.createTable({ const table = await createTable({
type: "table",
name: "orders", name: "orders",
primary: ["OrderID"], primary: ["OrderID"],
schema: { schema: {
@ -494,7 +512,7 @@ describe.each([
describe("patch", () => { describe("patch", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should update only the fields that are supplied", async () => { it("should update only the fields that are supplied", async () => {
@ -548,7 +566,7 @@ describe.each([
describe("destroy", () => { describe("destroy", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to delete a row", async () => { it("should be able to delete a row", async () => {
@ -566,7 +584,7 @@ describe.each([
describe("validate", () => { describe("validate", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should return no errors on valid row", async () => { it("should return no errors on valid row", async () => {
@ -603,7 +621,7 @@ describe.each([
describe("bulkDelete", () => { describe("bulkDelete", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to delete a bulk set of rows", async () => { it("should be able to delete a bulk set of rows", async () => {
@ -687,7 +705,7 @@ describe.each([
describe("fetchView", () => { describe("fetchView", () => {
beforeEach(async () => { beforeEach(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to fetch tables contents via 'view'", async () => { it("should be able to fetch tables contents via 'view'", async () => {
@ -735,7 +753,7 @@ describe.each([
describe("fetchEnrichedRows", () => { describe("fetchEnrichedRows", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow enriching some linked rows", async () => { it("should allow enriching some linked rows", async () => {
@ -808,7 +826,7 @@ describe.each([
describe("attachments", () => { describe("attachments", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow enriching attachment rows", async () => { it("should allow enriching attachment rows", async () => {
@ -839,7 +857,7 @@ describe.each([
describe("exportData", () => { describe("exportData", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow exporting all columns", async () => { it("should allow exporting all columns", async () => {
@ -880,6 +898,8 @@ describe.each([
async function userTable(): Promise<Table> { async function userTable(): Promise<Table> {
return { return {
name: `users_${generator.word()}`, name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -925,7 +945,7 @@ describe.each([
describe("create", () => { describe("create", () => {
it("should persist a new row with only the provided view fields", async () => { it("should persist a new row with only the provided view fields", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
name: { visible: true }, name: { visible: true },
@ -960,7 +980,7 @@ describe.each([
describe("patch", () => { describe("patch", () => {
it("should update only the view fields for a row", async () => { it("should update only the view fields for a row", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1001,7 +1021,7 @@ describe.each([
describe("destroy", () => { describe("destroy", () => {
it("should be able to delete a row", async () => { it("should be able to delete a row", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1025,7 +1045,7 @@ describe.each([
}) })
it("should be able to delete multiple rows", async () => { it("should be able to delete multiple rows", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1062,6 +1082,8 @@ describe.each([
async function userTable(): Promise<Table> { async function userTable(): Promise<Table> {
return { return {
name: `users_${generator.word()}`, name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -1088,7 +1110,7 @@ describe.each([
} }
it("returns empty rows from view when no schema is passed", async () => { it("returns empty rows from view when no schema is passed", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const rows = await Promise.all( const rows = await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, { tableId: table._id }) config.api.row.save(table._id!, { tableId: table._id })
@ -1119,7 +1141,7 @@ describe.each([
}) })
it("searching respects the view filters", async () => { it("searching respects the view filters", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
await Promise.all( await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
@ -1243,7 +1265,7 @@ describe.each([
describe("sorting", () => { describe("sorting", () => {
beforeAll(async () => { beforeAll(async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const users = [ const users = [
{ name: "Alice", age: 25 }, { name: "Alice", age: 25 },
{ name: "Bob", age: 30 }, { name: "Bob", age: 30 },
@ -1310,7 +1332,7 @@ describe.each([
}) })
it("when schema is defined, defined columns and row attributes are returned", async () => { it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const rows = await Promise.all( const rows = await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, { config.api.row.save(table._id!, {
@ -1341,7 +1363,7 @@ describe.each([
}) })
it("views without data can be returned", async () => { it("views without data can be returned", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
@ -1350,7 +1372,7 @@ describe.each([
}) })
it("respects the limit parameter", async () => { it("respects the limit parameter", async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow())) await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const limit = generator.integer({ min: 1, max: 8 }) const limit = generator.integer({ min: 1, max: 8 })
@ -1365,7 +1387,7 @@ describe.each([
}) })
it("can handle pagination", async () => { it("can handle pagination", async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow())) await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
@ -1443,7 +1465,7 @@ describe.each([
let tableId: string let tableId: string
beforeAll(async () => { beforeAll(async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all( await Promise.all(
Array.from({ length: 10 }, () => config.createRow()) Array.from({ length: 10 }, () => config.createRow())
) )
@ -1521,13 +1543,13 @@ describe.each([
let o2mTable: Table let o2mTable: Table
let m2mTable: Table let m2mTable: Table
beforeAll(async () => { beforeAll(async () => {
o2mTable = await config.createTable( o2mTable = await createTable(
{ ...generateTableConfig(), name: "o2m" }, { ...generateTableConfig(), name: "o2m" },
{ {
skipReassigning: true, skipReassigning: true,
} }
) )
m2mTable = await config.createTable( m2mTable = await createTable(
{ ...generateTableConfig(), name: "m2m" }, { ...generateTableConfig(), name: "m2m" },
{ {
skipReassigning: true, skipReassigning: true,
@ -1597,9 +1619,9 @@ describe.each([
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
if (config.datasource) { if (config.datasource) {
tableConfig.sourceId = config.datasource._id tableConfig.sourceId = config.datasource._id!
if (config.datasource.plus) { if (config.datasource.plus) {
tableConfig.type = "external" tableConfig.sourceType = TableSourceType.EXTERNAL
} }
} }
const table = await config.api.table.create({ const table = await config.api.table.create({

View File

@ -5,11 +5,15 @@ describe("/static", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let app let app
let cleanupEnv
afterAll(setup.afterAll) afterAll(() => {
setup.afterAll()
cleanupEnv()
})
beforeAll(async () => { beforeAll(async () => {
config.modeSelf() cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
app = await config.init() app = await config.init()
}) })

View File

@ -1,16 +1,24 @@
import { events, context } from "@budibase/backend-core" import { context, events } from "@budibase/backend-core"
import { import {
FieldType,
SaveTableRequest,
RelationshipType,
Table,
ViewCalculation,
AutoFieldSubTypes, AutoFieldSubTypes,
FieldSubtype,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalTable,
RelationshipType,
Row,
SaveTableRequest,
Table,
TableSourceType,
User,
ViewCalculation,
} from "@budibase/types" } from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities" import * as setup from "./utilities"
const { basicTable } = setup.structures
import sdk from "../../../sdk" import sdk from "../../../sdk"
import uuid from "uuid"
const { basicTable } = setup.structures
describe("/tables", () => { describe("/tables", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -239,7 +247,8 @@ describe("/tables", () => {
.expect(200) .expect(200)
const fetchedTable = res.body[0] const fetchedTable = res.body[0]
expect(fetchedTable.name).toEqual(testTable.name) expect(fetchedTable.name).toEqual(testTable.name)
expect(fetchedTable.type).toEqual("internal") expect(fetchedTable.type).toEqual("table")
expect(fetchedTable.sourceType).toEqual("internal")
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -417,4 +426,281 @@ describe("/tables", () => {
}) })
}) })
}) })
describe("migrate", () => {
let users: User[]
beforeAll(async () => {
users = await Promise.all([
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
])
})
it("should successfully migrate a one-to-many user relationship to a user column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const rows = await Promise.all(
users.map(u =>
config.api.row.save(table._id!, { "user relationship": [u] })
)
)
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const migratedRows = await config.api.row.fetch(table._id!)
rows.sort((a, b) => a._id!.localeCompare(b._id!))
migratedRows.sort((a, b) => a._id!.localeCompare(b._id!))
for (const [i, row] of rows.entries()) {
const migratedRow = migratedRows[i]
expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined()
expect(row["user relationship"][0]._id).toEqual(
migratedRow["user column"][0]._id
)
}
})
it("should successfully migrate a many-to-many user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[1], users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[1]._id, users[2]._id])
)
})
it("should successfully migrate a many-to-one user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
users[2]._id,
])
})
describe("unhappy paths", () => {
let table: Table
beforeAll(async () => {
table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
num: {
type: FieldType.NUMBER,
name: "num",
constraints: {
type: "number",
presence: false,
},
},
},
})
})
it("should fail if the new column name is blank", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is a reserved name", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "_id",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is the same as an existing column", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "num",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the old column name isn't a column in the table", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: {
name: "not a column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
newColumn: {
name: "new column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
})
})
}) })

View File

@ -3,10 +3,12 @@ import {
CreateViewRequest, CreateViewRequest,
FieldSchema, FieldSchema,
FieldType, FieldType,
INTERNAL_TABLE_SOURCE_ID,
SearchQueryOperators, SearchQueryOperators,
SortOrder, SortOrder,
SortType, SortType,
Table, Table,
TableSourceType,
UIFieldMetadata, UIFieldMetadata,
UpdateViewRequest, UpdateViewRequest,
ViewV2, ViewV2,
@ -18,6 +20,8 @@ function priceTable(): Table {
return { return {
name: "table", name: "table",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
Price: { Price: {
type: FieldType.NUMBER, type: FieldType.NUMBER,
@ -54,10 +58,10 @@ describe.each([
}, },
}) })
return config.createTable({ return config.createExternalTable({
...priceTable(), ...priceTable(),
sourceId: datasource._id, sourceId: datasource._id,
type: "external", sourceType: TableSourceType.EXTERNAL,
}) })
}, },
], ],

View File

@ -8,11 +8,15 @@ describe("/webhooks", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let webhook: Webhook let webhook: Webhook
let cleanupEnv: () => void
afterAll(setup.afterAll) afterAll(() => {
setup.afterAll()
cleanupEnv()
})
const setupTest = async () => { const setupTest = async () => {
config.modeSelf() cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
await config.init() await config.init()
const autoConfig = basicAutomation() const autoConfig = basicAutomation()
autoConfig.definition.trigger.schema = { autoConfig.definition.trigger.schema = {

View File

@ -1,5 +1,11 @@
import { objectStore, roles, constants } from "@budibase/backend-core" import { constants, objectStore, roles } from "@budibase/backend-core"
import { FieldType as FieldTypes } from "@budibase/types" import {
FieldType as FieldTypes,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
} from "@budibase/types"
export { export {
FieldType as FieldTypes, FieldType as FieldTypes,
RelationshipType, RelationshipType,
@ -70,9 +76,11 @@ export enum SortDirection {
DESCENDING = "DESCENDING", DESCENDING = "DESCENDING",
} }
export const USERS_TABLE_SCHEMA = { export const USERS_TABLE_SCHEMA: Table = {
_id: "ta_users", _id: "ta_users",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
views: {}, views: {},
name: "Users", name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out // TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
@ -87,12 +95,10 @@ export const USERS_TABLE_SCHEMA = {
}, },
presence: true, presence: true,
}, },
fieldName: "email",
name: "email", name: "email",
}, },
firstName: { firstName: {
name: "firstName", name: "firstName",
fieldName: "firstName",
type: FieldTypes.STRING, type: FieldTypes.STRING,
constraints: { constraints: {
type: FieldTypes.STRING, type: FieldTypes.STRING,
@ -101,7 +107,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
lastName: { lastName: {
name: "lastName", name: "lastName",
fieldName: "lastName",
type: FieldTypes.STRING, type: FieldTypes.STRING,
constraints: { constraints: {
type: FieldTypes.STRING, type: FieldTypes.STRING,
@ -109,7 +114,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
}, },
roleId: { roleId: {
fieldName: "roleId",
name: "roleId", name: "roleId",
type: FieldTypes.OPTIONS, type: FieldTypes.OPTIONS,
constraints: { constraints: {
@ -119,7 +123,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
}, },
status: { status: {
fieldName: "status",
name: "status", name: "status",
type: FieldTypes.OPTIONS, type: FieldTypes.OPTIONS,
constraints: { constraints: {
@ -169,3 +172,8 @@ export enum AutomationErrors {
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5 export const MAX_AUTOMATION_RECURRING_ERRORS = 5
export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber" export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

View File

@ -1,4 +1,12 @@
import { FieldTypes, AutoFieldSubTypes } from "../../constants" import {
AutoFieldSubTypes,
FieldTypes,
DEFAULT_BB_DATASOURCE_ID,
DEFAULT_INVENTORY_TABLE_ID,
DEFAULT_EMPLOYEE_TABLE_ID,
DEFAULT_EXPENSES_TABLE_ID,
DEFAULT_JOBS_TABLE_ID,
} from "../../constants"
import { importToRows } from "../../api/controllers/table/utils" import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument" import LinkDocument from "../linkedRows/LinkDocument"
@ -8,19 +16,14 @@ import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport" import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { import {
Table,
Row,
RelationshipType,
FieldType, FieldType,
RelationshipType,
Row,
Table,
TableSchema, TableSchema,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
const defaultDatasource = { const defaultDatasource = {
_id: DEFAULT_BB_DATASOURCE_ID, _id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE, type: dbCore.BUDIBASE_DATASOURCE_TYPE,
@ -89,9 +92,10 @@ const AUTO_COLUMNS: TableSchema = {
export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = { export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
_id: DEFAULT_INVENTORY_TABLE_ID, _id: DEFAULT_INVENTORY_TABLE_ID,
type: "internal", type: "table",
views: {}, views: {},
sourceId: DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Item Name", primaryDisplay: "Item Name",
name: "Inventory", name: "Inventory",
schema: { schema: {
@ -198,10 +202,11 @@ export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = { export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = {
_id: DEFAULT_EMPLOYEE_TABLE_ID, _id: DEFAULT_EMPLOYEE_TABLE_ID,
type: "internal", type: "table",
views: {}, views: {},
name: "Employees", name: "Employees",
sourceId: DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "First Name", primaryDisplay: "First Name",
schema: { schema: {
"First Name": { "First Name": {
@ -346,9 +351,10 @@ export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = {
export const DEFAULT_JOBS_TABLE_SCHEMA: Table = { export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
_id: DEFAULT_JOBS_TABLE_ID, _id: DEFAULT_JOBS_TABLE_ID,
type: "internal", type: "table",
name: "Jobs", name: "Jobs",
sourceId: DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Job ID", primaryDisplay: "Job ID",
schema: { schema: {
"Job ID": { "Job ID": {
@ -503,10 +509,11 @@ export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = { export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
_id: DEFAULT_EXPENSES_TABLE_ID, _id: DEFAULT_EXPENSES_TABLE_ID,
type: "internal", type: "table",
views: {}, views: {},
name: "Expenses", name: "Expenses",
sourceId: DEFAULT_BB_DATASOURCE_ID, sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Expense ID", primaryDisplay: "Expense ID",
schema: { schema: {
"Expense ID": { "Expense ID": {

View File

@ -2,7 +2,12 @@ import { ViewName, getQueryIndex, isRelationshipColumn } from "../utils"
import { FieldTypes } from "../../constants" import { FieldTypes } from "../../constants"
import { createLinkView } from "../views/staticViews" import { createLinkView } from "../views/staticViews"
import { context, logging } from "@budibase/backend-core" import { context, logging } from "@budibase/backend-core"
import { LinkDocument, LinkDocumentValue, Table } from "@budibase/types" import {
DatabaseQueryOpts,
LinkDocument,
LinkDocumentValue,
Table,
} from "@budibase/types"
export { createLinkView } from "../views/staticViews" export { createLinkView } from "../views/staticViews"
@ -36,13 +41,13 @@ export async function getLinkDocuments(args: {
}): Promise<LinkDocumentValue[] | LinkDocument[]> { }): Promise<LinkDocumentValue[] | LinkDocument[]> {
const { tableId, rowId, fieldName, includeDocs } = args const { tableId, rowId, fieldName, includeDocs } = args
const db = context.getAppDB() const db = context.getAppDB()
let params: any let params: DatabaseQueryOpts
if (rowId) { if (rowId) {
params = { key: [tableId, rowId] } params = { key: [tableId, rowId] }
} }
// only table is known // only table is known
else { else {
params = { startKey: [tableId], endKey: [tableId, {}] } params = { startkey: [tableId], endkey: [tableId, {}] }
} }
if (includeDocs) { if (includeDocs) {
params.include_docs = true params.include_docs = true

View File

@ -5,6 +5,7 @@ import {
FieldSchema, FieldSchema,
RelationshipFieldMetadata, RelationshipFieldMetadata,
VirtualDocumentType, VirtualDocumentType,
INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types" } from "@budibase/types"
import { FieldTypes } from "../constants" import { FieldTypes } from "../constants"
export { DocumentType, VirtualDocumentType } from "@budibase/types" export { DocumentType, VirtualDocumentType } from "@budibase/types"
@ -18,7 +19,7 @@ export const enum AppStatus {
} }
export const BudibaseInternalDB = { export const BudibaseInternalDB = {
_id: "bb_internal", _id: INTERNAL_TABLE_SOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE, type: dbCore.BUDIBASE_DATASOURCE_TYPE,
name: "Budibase DB", name: "Budibase DB",
source: "BUDIBASE", source: "BUDIBASE",

View File

@ -75,7 +75,6 @@ const environment = {
}, },
isTest: coreEnv.isTest, isTest: coreEnv.isTest,
isJest: coreEnv.isJest, isJest: coreEnv.isJest,
isDev: coreEnv.isDev, isDev: coreEnv.isDev,
isProd: () => { isProd: () => {
return !coreEnv.isDev() return !coreEnv.isDev()

View File

@ -1,6 +1,4 @@
import fetch from "node-fetch" import fetch from "node-fetch"
// @ts-ignore
fetch.mockSearch()
import { import {
generateMakeRequest, generateMakeRequest,
MakeRequestResponse, MakeRequestResponse,
@ -13,12 +11,15 @@ import {
RelationshipType, RelationshipType,
Row, Row,
Table, Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import _ from "lodash" import _ from "lodash"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core" import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils" import { databaseTestProviders } from "../integrations/tests/utils"
import { Client } from "pg" import { Client } from "pg"
// @ts-ignore
fetch.mockSearch()
const config = setup.getConfig()! const config = setup.getConfig()!
@ -52,7 +53,7 @@ describe("postgres integrations", () => {
async function createAuxTable(prefix: string) { async function createAuxTable(prefix: string) {
return await config.createTable({ return await config.createTable({
name: `${prefix}_${generator.word({ length: 6 })}`, name: `${prefix}_${generator.word({ length: 6 })}`,
type: "external", type: "table",
primary: ["id"], primary: ["id"],
primaryDisplay: "title", primaryDisplay: "title",
schema: { schema: {
@ -67,6 +68,7 @@ describe("postgres integrations", () => {
}, },
}, },
sourceId: postgresDatasource._id, sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
}) })
} }
@ -88,7 +90,7 @@ describe("postgres integrations", () => {
primaryPostgresTable = await config.createTable({ primaryPostgresTable = await config.createTable({
name: `p_${generator.word({ length: 6 })}`, name: `p_${generator.word({ length: 6 })}`,
type: "external", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
id: { id: {
@ -143,6 +145,7 @@ describe("postgres integrations", () => {
}, },
}, },
sourceId: postgresDatasource._id, sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
}) })
}) })
@ -249,7 +252,7 @@ describe("postgres integrations", () => {
async function createDefaultPgTable() { async function createDefaultPgTable() {
return await config.createTable({ return await config.createTable({
name: generator.word({ length: 10 }), name: generator.word({ length: 10 }),
type: "external", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
id: { id: {
@ -259,6 +262,7 @@ describe("postgres integrations", () => {
}, },
}, },
sourceId: postgresDatasource._id, sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
}) })
} }

View File

@ -10,11 +10,12 @@ import {
QueryJson, QueryJson,
QueryType, QueryType,
Row, Row,
Schema,
SearchFilters, SearchFilters,
SortJson, SortJson,
ExternalTable, Table,
TableRequest, TableRequest,
Schema, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { OAuth2Client } from "google-auth-library" import { OAuth2Client } from "google-auth-library"
import { import {
@ -262,11 +263,13 @@ class GoogleSheetsIntegration implements DatasourcePlus {
id?: string id?: string
) { ) {
// base table // base table
const table: ExternalTable = { const table: Table = {
type: "table",
name: title, name: title,
primary: [GOOGLE_SHEETS_PRIMARY_KEY], primary: [GOOGLE_SHEETS_PRIMARY_KEY],
schema: {}, schema: {},
sourceId: datasourceId, sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
} }
if (id) { if (id) {
table._id = id table._id = id
@ -283,7 +286,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async buildSchema( async buildSchema(
datasourceId: string, datasourceId: string,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
// not fully configured yet // not fully configured yet
if (!this.config.auth) { if (!this.config.auth) {
@ -291,7 +294,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
} }
await this.connect() await this.connect()
const sheets = this.client.sheetsByIndex const sheets = this.client.sheetsByIndex
const tables: Record<string, ExternalTable> = {} const tables: Record<string, Table> = {}
let errors: Record<string, string> = {} let errors: Record<string, string> = {}
await utils.parallelForeach( await utils.parallelForeach(
sheets, sheets,

View File

@ -2,7 +2,7 @@ import {
DatasourceFieldType, DatasourceFieldType,
Integration, Integration,
Operation, Operation,
ExternalTable, Table,
TableSchema, TableSchema,
QueryJson, QueryJson,
QueryType, QueryType,
@ -12,6 +12,7 @@ import {
ConnectionInfo, ConnectionInfo,
SourceName, SourceName,
Schema, Schema,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
getSqlQuery, getSqlQuery,
@ -380,7 +381,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
*/ */
async buildSchema( async buildSchema(
datasourceId: string, datasourceId: string,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
await this.connect() await this.connect()
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL) let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
@ -394,7 +395,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
.map((record: any) => record.TABLE_NAME) .map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1) .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, ExternalTable> = {} const tables: Record<string, Table> = {}
for (let tableName of tableNames) { for (let tableName of tableNames) {
// get the column definition (type) // get the column definition (type)
const definition = await this.runSQL( const definition = await this.runSQL(
@ -439,7 +440,9 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
} }
tables[tableName] = { tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName), _id: buildExternalTableId(datasourceId, tableName),
type: "table",
sourceId: datasourceId, sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
primary: primaryKeys, primary: primaryKeys,
name: tableName, name: tableName,
schema, schema,

View File

@ -4,13 +4,14 @@ import {
QueryType, QueryType,
QueryJson, QueryJson,
SqlQuery, SqlQuery,
ExternalTable, Table,
TableSchema, TableSchema,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
ConnectionInfo, ConnectionInfo,
SourceName, SourceName,
Schema, Schema,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
getSqlQuery, getSqlQuery,
@ -278,9 +279,9 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async buildSchema( async buildSchema(
datasourceId: string, datasourceId: string,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
const tables: { [key: string]: ExternalTable } = {} const tables: { [key: string]: Table } = {}
await this.connect() await this.connect()
try { try {
@ -317,8 +318,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
} }
if (!tables[tableName]) { if (!tables[tableName]) {
tables[tableName] = { tables[tableName] = {
type: "table",
_id: buildExternalTableId(datasourceId, tableName), _id: buildExternalTableId(datasourceId, tableName),
sourceId: datasourceId, sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
primary: primaryKeys, primary: primaryKeys,
name: tableName, name: tableName,
schema, schema,

View File

@ -5,11 +5,12 @@ import {
QueryJson, QueryJson,
QueryType, QueryType,
SqlQuery, SqlQuery,
ExternalTable, Table,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
ConnectionInfo, ConnectionInfo,
Schema, Schema,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
buildExternalTableId, buildExternalTableId,
@ -263,25 +264,27 @@ class OracleIntegration extends Sql implements DatasourcePlus {
*/ */
async buildSchema( async buildSchema(
datasourceId: string, datasourceId: string,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({ const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL, sql: this.COLUMNS_SQL,
}) })
const oracleTables = this.mapColumns(columnsResponse) const oracleTables = this.mapColumns(columnsResponse)
const tables: { [key: string]: ExternalTable } = {} const tables: { [key: string]: Table } = {}
// iterate each table // iterate each table
Object.values(oracleTables).forEach(oracleTable => { Object.values(oracleTables).forEach(oracleTable => {
let table = tables[oracleTable.name] let table = tables[oracleTable.name]
if (!table) { if (!table) {
table = { table = {
type: "table",
_id: buildExternalTableId(datasourceId, oracleTable.name), _id: buildExternalTableId(datasourceId, oracleTable.name),
primary: [], primary: [],
name: oracleTable.name, name: oracleTable.name,
schema: {}, schema: {},
sourceId: datasourceId, sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
} }
tables[oracleTable.name] = table tables[oracleTable.name] = table
} }

View File

@ -5,12 +5,13 @@ import {
QueryType, QueryType,
QueryJson, QueryJson,
SqlQuery, SqlQuery,
ExternalTable, Table,
DatasourcePlus, DatasourcePlus,
DatasourceFeature, DatasourceFeature,
ConnectionInfo, ConnectionInfo,
SourceName, SourceName,
Schema, Schema,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
getSqlQuery, getSqlQuery,
@ -273,7 +274,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
*/ */
async buildSchema( async buildSchema(
datasourceId: string, datasourceId: string,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
let tableKeys: { [key: string]: string[] } = {} let tableKeys: { [key: string]: string[] } = {}
await this.openConnection() await this.openConnection()
@ -300,7 +301,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const columnsResponse: { rows: PostgresColumn[] } = const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL) await this.client.query(this.COLUMNS_SQL)
const tables: { [key: string]: ExternalTable } = {} const tables: { [key: string]: Table } = {}
for (let column of columnsResponse.rows) { for (let column of columnsResponse.rows) {
const tableName: string = column.table_name const tableName: string = column.table_name
@ -309,11 +310,13 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
// table key doesn't exist yet // table key doesn't exist yet
if (!tables[tableName] || !tables[tableName].schema) { if (!tables[tableName] || !tables[tableName].schema) {
tables[tableName] = { tables[tableName] = {
type: "table",
_id: buildExternalTableId(datasourceId, tableName), _id: buildExternalTableId(datasourceId, tableName),
primary: tableKeys[tableName] || [], primary: tableKeys[tableName] || [],
name: tableName, name: tableName,
schema: {}, schema: {},
sourceId: datasourceId, sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
} }
} }

View File

@ -30,18 +30,24 @@ GoogleSpreadsheet.mockImplementation(() => mockGoogleIntegration)
import { structures } from "@budibase/backend-core/tests" import { structures } from "@budibase/backend-core/tests"
import TestConfiguration from "../../tests/utilities/TestConfiguration" import TestConfiguration from "../../tests/utilities/TestConfiguration"
import GoogleSheetsIntegration from "../googlesheets" import GoogleSheetsIntegration from "../googlesheets"
import { FieldType, Table, TableSchema } from "@budibase/types" import { FieldType, Table, TableSchema, TableSourceType } from "@budibase/types"
import { generateDatasourceID } from "../../db/utils"
describe("Google Sheets Integration", () => { describe("Google Sheets Integration", () => {
let integration: any, let integration: any,
config = new TestConfiguration() config = new TestConfiguration()
let cleanupEnv: () => void
beforeAll(() => { beforeAll(() => {
config.setGoogleAuth("test") cleanupEnv = config.setEnv({
GOOGLE_CLIENT_ID: "test",
GOOGLE_CLIENT_SECRET: "test",
})
}) })
afterAll(async () => { afterAll(async () => {
await config.end() cleanupEnv()
config.end()
}) })
beforeEach(async () => { beforeEach(async () => {
@ -60,7 +66,10 @@ describe("Google Sheets Integration", () => {
function createBasicTable(name: string, columns: string[]): Table { function createBasicTable(name: string, columns: string[]): Table {
return { return {
type: "table",
name, name,
sourceId: generateDatasourceID(),
sourceType: TableSourceType.EXTERNAL,
schema: { schema: {
...columns.reduce((p, c) => { ...columns.reduce((p, c) => {
p[c] = { p[c] = {

View File

@ -1,11 +1,17 @@
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import env from "../../../environment"
let container: StartedTestContainer | undefined let container: StartedTestContainer | undefined
const isMac = process.platform === "darwin"
export async function getDsConfig(): Promise<Datasource> { export async function getDsConfig(): Promise<Datasource> {
try {
if (!container) { if (!container) {
container = await new GenericContainer("postgres") // postgres 15-bullseye safer bet on Linux
const version = isMac ? undefined : "15-bullseye"
container = await new GenericContainer("postgres", version)
.withExposedPorts(5432) .withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password") .withEnv("POSTGRES_PASSWORD", "password")
.withWaitStrategy( .withWaitStrategy(
@ -15,7 +21,6 @@ export async function getDsConfig(): Promise<Datasource> {
) )
.start() .start()
} }
const host = container.getContainerIpAddress() const host = container.getContainerIpAddress()
const port = container.getMappedPort(5432) const port = container.getMappedPort(5432)
@ -35,6 +40,9 @@ export async function getDsConfig(): Promise<Datasource> {
ca: false, ca: false,
}, },
} }
} catch (err) {
throw new Error("**UNABLE TO CREATE TO POSTGRES CONTAINER**")
}
} }
export async function stopContainer() { export async function stopContainer() {

View File

@ -4,10 +4,14 @@ import {
SearchFilters, SearchFilters,
Datasource, Datasource,
FieldType, FieldType,
ExternalTable, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils" import { DocumentType, SEPARATOR } from "../db/utils"
import { InvalidColumns, NoEmptyFilterStrings } from "../constants" import {
InvalidColumns,
NoEmptyFilterStrings,
DEFAULT_BB_DATASOURCE_ID,
} from "../constants"
import { helpers } from "@budibase/shared-core" import { helpers } from "@budibase/shared-core"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@ -83,10 +87,29 @@ export enum SqlClient {
ORACLE = "oracledb", ORACLE = "oracledb",
} }
export function isExternalTable(tableId: string) { export function isExternalTableID(tableId: string) {
return tableId.includes(DocumentType.DATASOURCE) return tableId.includes(DocumentType.DATASOURCE)
} }
export function isInternalTableID(tableId: string) {
return !isExternalTableID(tableId)
}
export function isExternalTable(table: Table) {
if (
table?.sourceId &&
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
) {
return true
} else if (table?.sourceType === TableSourceType.EXTERNAL) {
return true
} else if (table?._id && isExternalTableID(table._id)) {
return true
}
return false
}
export function buildExternalTableId(datasourceId: string, tableName: string) { export function buildExternalTableId(datasourceId: string, tableName: string) {
// encode spaces // encode spaces
if (tableName.includes(" ")) { if (tableName.includes(" ")) {
@ -297,9 +320,9 @@ function copyExistingPropsOver(
* @param entities The old list of tables, if there was any to look for definitions in. * @param entities The old list of tables, if there was any to look for definitions in.
*/ */
export function finaliseExternalTables( export function finaliseExternalTables(
tables: Record<string, ExternalTable>, tables: Record<string, Table>,
entities: Record<string, ExternalTable> entities: Record<string, Table>
): Record<string, ExternalTable> { ): Record<string, Table> {
let finalTables: Record<string, Table> = {} let finalTables: Record<string, Table> = {}
const tableIds = Object.values(tables).map(table => table._id!) const tableIds = Object.values(tables).map(table => table._id!)
for (let [name, table] of Object.entries(tables)) { for (let [name, table] of Object.entries(tables)) {
@ -312,7 +335,7 @@ export function finaliseExternalTables(
} }
export function checkExternalTables( export function checkExternalTables(
tables: Record<string, ExternalTable> tables: Record<string, Table>
): Record<string, string> { ): Record<string, string> {
const invalidColumns = Object.values(InvalidColumns) as string[] const invalidColumns = Object.values(InvalidColumns) as string[]
const errors: Record<string, string> = {} const errors: Record<string, string> = {}

View File

@ -1,5 +1,12 @@
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { BBRequest, FieldType, Row, Table } from "@budibase/types" import {
BBRequest,
FieldType,
Row,
Table,
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
} from "@budibase/types"
import * as utils from "../../db/utils" import * as utils from "../../db/utils"
import trimViewRowInfoMiddleware from "../trimViewRowInfo" import trimViewRowInfoMiddleware from "../trimViewRowInfo"
@ -73,6 +80,8 @@ describe("trimViewRowInfo middleware", () => {
const table: Table = { const table: Table = {
_id: tableId, _id: tableId,
name: generator.word(), name: generator.word(),
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table", type: "table",
schema: { schema: {
name: { name: {

View File

@ -1,7 +1,6 @@
import { Ctx, Row } from "@budibase/types" import { Ctx, Row } from "@budibase/types"
import * as utils from "../db/utils" import * as utils from "../db/utils"
import sdk from "../sdk" import sdk from "../sdk"
import { db } from "@budibase/backend-core"
import { Next } from "koa" import { Next } from "koa"
import { getTableId } from "../api/controllers/row/utils" import { getTableId } from "../api/controllers/row/utils"

View File

@ -11,10 +11,11 @@ import {
isOIDCConfig, isOIDCConfig,
isSettingsConfig, isSettingsConfig,
ConfigType, ConfigType,
DatabaseQueryOpts,
} from "@budibase/types" } from "@budibase/types"
import env from "./../../../../environment" import env from "./../../../../environment"
export const getConfigParams = () => { export function getConfigParams(): DatabaseQueryOpts {
return { return {
include_docs: true, include_docs: true,
startkey: `${DocumentType.CONFIG}${SEPARATOR}`, startkey: `${DocumentType.CONFIG}${SEPARATOR}`,

View File

@ -26,7 +26,6 @@ export interface DBDumpOpts {
export interface ExportOpts extends DBDumpOpts { export interface ExportOpts extends DBDumpOpts {
tar?: boolean tar?: boolean
excludeRows?: boolean excludeRows?: boolean
excludeLogs?: boolean
encryptPassword?: string encryptPassword?: string
} }
@ -83,14 +82,15 @@ export async function exportDB(
}) })
} }
function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) { function defineFilter(excludeRows?: boolean) {
const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX] const ids = [
USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX,
AUTOMATION_LOG_PREFIX,
]
if (excludeRows) { if (excludeRows) {
ids.push(TABLE_ROW_PREFIX) ids.push(TABLE_ROW_PREFIX)
} }
if (excludeLogs) {
ids.push(AUTOMATION_LOG_PREFIX)
}
return (doc: any) => return (doc: any) =>
!ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr) !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
} }
@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
fs.writeFileSync(join(tmpPath, path), contents) fs.writeFileSync(join(tmpPath, path), contents)
} }
} }
// get all of the files // get all the files
else { else {
tmpPath = await objectStore.retrieveDirectory( tmpPath = await objectStore.retrieveDirectory(
ObjectStoreBuckets.APPS, ObjectStoreBuckets.APPS,
@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
// enforce an export of app DB to the tmp path // enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE) const dbPath = join(tmpPath, DB_EXPORT_FILE)
await exportDB(appId, { await exportDB(appId, {
filter: defineFilter(config?.excludeRows, config?.excludeLogs), filter: defineFilter(config?.excludeRows),
exportPath: dbPath, exportPath: dbPath,
}) })
@ -191,7 +191,6 @@ export async function streamExportApp({
}) { }) {
const tmpPath = await exportApp(appId, { const tmpPath = await exportApp(appId, {
excludeRows, excludeRows,
excludeLogs: true,
tar: true, tar: true,
encryptPassword, encryptPassword,
}) })

View File

@ -0,0 +1,5 @@
import * as links from "./links"
export default {
...links,
}

View File

@ -0,0 +1,39 @@
import { context } from "@budibase/backend-core"
import { isTableId } from "@budibase/backend-core/src/docIds"
import {
DatabaseQueryOpts,
LinkDocument,
LinkDocumentValue,
} from "@budibase/types"
import { ViewName, getQueryIndex } from "../../../../src/db/utils"
export async function fetch(tableId: string): Promise<LinkDocumentValue[]> {
if (!isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`)
}
const db = context.getAppDB()
const params: DatabaseQueryOpts = {
startkey: [tableId],
endkey: [tableId, {}],
}
const linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
return linkRows.map(row => row.value as LinkDocumentValue)
}
export async function fetchWithDocument(
tableId: string
): Promise<LinkDocument[]> {
if (!isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`)
}
const db = context.getAppDB()
const params: DatabaseQueryOpts = {
startkey: [tableId],
endkey: [tableId, {}],
include_docs: true,
}
const linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
return linkRows.map(row => row.doc as LinkDocument)
}

View File

@ -1,4 +1,4 @@
import { IncludeRelationship, Operation, Row } from "@budibase/types" import { IncludeRelationship, Operation } from "@budibase/types"
import { handleRequest } from "../../../api/controllers/row/external" import { handleRequest } from "../../../api/controllers/row/external"
import { breakRowIdField } from "../../../integrations/utils" import { breakRowIdField } from "../../../integrations/utils"

View File

@ -1,5 +1,5 @@
import { SearchFilters, SearchParams, Row } from "@budibase/types" import { Row, SearchFilters, SearchParams } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal" import * as internal from "./search/internal"
import * as external from "./search/external" import * as external from "./search/external"
import { Format } from "../../../api/controllers/view/exporters" import { Format } from "../../../api/controllers/view/exporters"
@ -12,7 +12,7 @@ export interface ViewParams {
} }
function pickApi(tableId: any) { function pickApi(tableId: any) {
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
return external return external
} }
return internal return internal
@ -49,6 +49,10 @@ export async function fetch(tableId: string): Promise<Row[]> {
return pickApi(tableId).fetch(tableId) return pickApi(tableId).fetch(tableId)
} }
export async function fetchRaw(tableId: string): Promise<Row[]> {
return pickApi(tableId).fetchRaw(tableId)
}
export async function fetchView( export async function fetchView(
tableId: string, tableId: string,
viewName: string, viewName: string,

View File

@ -186,6 +186,12 @@ export async function fetch(tableId: string): Promise<Row[]> {
}) })
} }
export async function fetchRaw(tableId: string): Promise<Row[]> {
return await handleRequest<Operation.READ>(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}
export async function fetchView(viewName: string) { export async function fetchView(viewName: string) {
// there are no views in external datasources, shouldn't ever be called // there are no views in external datasources, shouldn't ever be called
// for now just fetch // for now just fetch

View File

@ -140,14 +140,13 @@ export async function exportRows(
} }
export async function fetch(tableId: string): Promise<Row[]> { export async function fetch(tableId: string): Promise<Row[]> {
const db = context.getAppDB()
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
const rows = await getRawTableData(db, tableId) const rows = await fetchRaw(tableId)
return await outputProcessing(table, rows) return await outputProcessing(table, rows)
} }
async function getRawTableData(db: Database, tableId: string) { export async function fetchRaw(tableId: string): Promise<Row[]> {
const db = context.getAppDB()
let rows let rows
if (tableId === InternalTables.USER_METADATA) { if (tableId === InternalTables.USER_METADATA) {
rows = await sdk.users.fetchMetadata() rows = await sdk.users.fetchMetadata()
@ -182,7 +181,7 @@ export async function fetchView(
}) })
} else { } else {
const tableId = viewInfo.meta.tableId const tableId = viewInfo.meta.tableId
const data = await getRawTableData(db, tableId) const data = await fetchRaw(tableId)
response = await inMemoryViews.runView( response = await inMemoryViews.runView(
viewInfo, viewInfo,
calculation as string, calculation as string,
@ -198,11 +197,7 @@ export async function fetchView(
try { try {
table = await sdk.tables.getTable(viewInfo.meta.tableId) table = await sdk.tables.getTable(viewInfo.meta.tableId)
} catch (err) { } catch (err) {
/* istanbul ignore next */ throw new Error("Unable to retrieve view table.")
table = {
name: "",
schema: {},
}
} }
rows = await outputProcessing(table, response.rows) rows = await outputProcessing(table, response.rows)
} }

View File

@ -7,6 +7,7 @@ import {
SourceName, SourceName,
Table, Table,
SearchParams, SearchParams,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
@ -15,6 +16,7 @@ import {
expectAnyExternalColsAttributes, expectAnyExternalColsAttributes,
generator, generator,
} from "@budibase/backend-core/tests" } from "@budibase/backend-core/tests"
import datasource from "../../../../../api/routes/datasource"
jest.unmock("mysql2/promise") jest.unmock("mysql2/promise")
@ -23,36 +25,7 @@ jest.setTimeout(30000)
describe.skip("external", () => { describe.skip("external", () => {
const config = new TestConfiguration() const config = new TestConfiguration()
let externalDatasource: Datasource let externalDatasource: Datasource, tableData: Table
const tableData: Table = {
name: generator.word(),
type: "external",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
}
beforeAll(async () => { beforeAll(async () => {
const container = await new GenericContainer("mysql") const container = await new GenericContainer("mysql")
@ -84,12 +57,43 @@ describe.skip("external", () => {
}, },
}, },
}) })
tableData = {
name: generator.word(),
type: "table",
primary: ["id"],
sourceId: externalDatasource._id!,
sourceType: TableSourceType.EXTERNAL,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
}
}) })
describe("search", () => { describe("search", () => {
const rows: Row[] = [] const rows: Row[] = []
beforeAll(async () => { beforeAll(async () => {
const table = await config.createTable({ const table = await config.createExternalTable({
...tableData, ...tableData,
sourceId: externalDatasource._id, sourceId: externalDatasource._id,
}) })

View File

@ -1,4 +1,11 @@
import { FieldType, Row, Table, SearchParams } from "@budibase/types" import {
FieldType,
Row,
Table,
SearchParams,
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
} from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../internal" import { search } from "../internal"
import { import {
@ -12,6 +19,8 @@ describe("internal", () => {
const tableData: Table = { const tableData: Table = {
name: generator.word(), name: generator.word(),
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
name: { name: {
name: "name", name: "name",

View File

@ -3,14 +3,19 @@ import { db as dbCore } from "@budibase/backend-core"
import { import {
FieldType, FieldType,
FieldTypeSubtypes, FieldTypeSubtypes,
Table, INTERNAL_TABLE_SOURCE_ID,
SearchParams, SearchParams,
Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
const tableId = "ta_a" const tableId = "ta_a"
const tableWithUserCol: Table = { const tableWithUserCol: Table = {
type: "table",
_id: tableId, _id: tableId,
name: "table", name: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
user: { user: {
name: "user", name: "user",
@ -21,8 +26,11 @@ const tableWithUserCol: Table = {
} }
const tableWithUsersCol: Table = { const tableWithUsersCol: Table = {
type: "table",
_id: tableId, _id: tableId,
name: "table", name: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
user: { user: {
name: "user", name: "user",

View File

@ -35,10 +35,10 @@ export async function save(
opts?: { tableId?: string; renaming?: RenameColumn } opts?: { tableId?: string; renaming?: RenameColumn }
) { ) {
let tableToSave: TableRequest = { let tableToSave: TableRequest = {
...update,
type: "table", type: "table",
_id: buildExternalTableId(datasourceId, update.name), _id: buildExternalTableId(datasourceId, update.name),
sourceId: datasourceId, sourceId: datasourceId,
...update,
} }
const tableId = opts?.tableId || update._id const tableId = opts?.tableId || update._id

View File

@ -6,6 +6,7 @@ import {
RelationshipFieldMetadata, RelationshipFieldMetadata,
RelationshipType, RelationshipType,
Table, Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { FieldTypes } from "../../../../constants" import { FieldTypes } from "../../../../constants"
import { import {
@ -76,12 +77,16 @@ export function generateManyLinkSchema(
const primary = table.name + table.primary[0] const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0] const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable) const jcTblName = generateJunctionTableName(column, table, relatedTable)
const datasourceId = datasource._id!
// first create the new table // first create the new table
const junctionTable = { const junctionTable: Table = {
_id: buildExternalTableId(datasource._id!, jcTblName), type: "table",
_id: buildExternalTableId(datasourceId, jcTblName),
name: jcTblName, name: jcTblName,
primary: [primary, relatedPrimary], primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary], constrained: [primary, relatedPrimary],
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: { schema: {
[primary]: foreignKeyStructure(primary, { [primary]: foreignKeyStructure(primary, {
toTable: table.name, toTable: table.name,

View File

@ -1,30 +1,47 @@
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { import { getMultiIDParams, getTableParams } from "../../../db/utils"
BudibaseInternalDB,
getMultiIDParams,
getTableParams,
} from "../../../db/utils"
import { import {
breakExternalTableId, breakExternalTableId,
isExternalTable, isExternalTableID,
isSQL, isSQL,
} from "../../../integrations/utils" } from "../../../integrations/utils"
import { import {
AllDocsResponse,
Database, Database,
INTERNAL_TABLE_SOURCE_ID,
Table, Table,
TableResponse, TableResponse,
TableSourceType,
TableViewsResponse, TableViewsResponse,
} from "@budibase/types" } from "@budibase/types"
import datasources from "../datasources" import datasources from "../datasources"
import sdk from "../../../sdk" import sdk from "../../../sdk"
function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] { export function processTable(table: Table): Table {
return docs.rows.map((tableDoc: any) => ({ if (table._id && isExternalTableID(table._id)) {
...tableDoc.doc, return {
type: "internal", ...table,
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id, type: "table",
})) sourceType: TableSourceType.EXTERNAL,
}
} else {
return {
...table,
type: "table",
sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
}
}
}
export function processTables(tables: Table[]): Table[] {
return tables.map(table => processTable(table))
}
function processEntities(tables: Record<string, Table>) {
for (let key of Object.keys(tables)) {
tables[key] = processTable(tables[key])
}
return tables
} }
export async function getAllInternalTables(db?: Database): Promise<Table[]> { export async function getAllInternalTables(db?: Database): Promise<Table[]> {
@ -36,7 +53,7 @@ export async function getAllInternalTables(db?: Database): Promise<Table[]> {
include_docs: true, include_docs: true,
}) })
) )
return processInternalTables(internalTables) return processTables(internalTables.rows.map(row => row.doc!))
} }
async function getAllExternalTables(): Promise<Table[]> { async function getAllExternalTables(): Promise<Table[]> {
@ -48,7 +65,7 @@ async function getAllExternalTables(): Promise<Table[]> {
final = final.concat(Object.values(entities)) final = final.concat(Object.values(entities))
} }
} }
return final return processTables(final)
} }
export async function getExternalTable( export async function getExternalTable(
@ -56,19 +73,21 @@ export async function getExternalTable(
tableName: string tableName: string
): Promise<Table> { ): Promise<Table> {
const entities = await getExternalTablesInDatasource(datasourceId) const entities = await getExternalTablesInDatasource(datasourceId)
return entities[tableName] return processTable(entities[tableName])
} }
export async function getTable(tableId: string): Promise<Table> { export async function getTable(tableId: string): Promise<Table> {
const db = context.getAppDB() const db = context.getAppDB()
if (isExternalTable(tableId)) { let output: Table
if (isExternalTableID(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId) let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await datasources.get(datasourceId!) const datasource = await datasources.get(datasourceId!)
const table = await getExternalTable(datasourceId!, tableName!) const table = await getExternalTable(datasourceId!, tableName!)
return { ...table, sql: isSQL(datasource) } output = { ...table, sql: isSQL(datasource) }
} else { } else {
return db.get(tableId) output = await db.get<Table>(tableId)
} }
return processTable(output)
} }
export async function getAllTables() { export async function getAllTables() {
@ -76,7 +95,7 @@ export async function getAllTables() {
getAllInternalTables(), getAllInternalTables(),
getAllExternalTables(), getAllExternalTables(),
]) ])
return [...internal, ...external] return processTables([...internal, ...external])
} }
export async function getExternalTablesInDatasource( export async function getExternalTablesInDatasource(
@ -86,12 +105,14 @@ export async function getExternalTablesInDatasource(
if (!datasource || !datasource.entities) { if (!datasource || !datasource.entities) {
throw new Error("Datasource is not configured fully.") throw new Error("Datasource is not configured fully.")
} }
return datasource.entities return processEntities(datasource.entities)
} }
export async function getTables(tableIds: string[]): Promise<Table[]> { export async function getTables(tableIds: string[]): Promise<Table[]> {
const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)), const externalTableIds = tableIds.filter(tableId =>
internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId)) isExternalTableID(tableId)
),
internalTableIds = tableIds.filter(tableId => !isExternalTableID(tableId))
let tables: Table[] = [] let tables: Table[] = []
if (externalTableIds.length) { if (externalTableIds.length) {
const externalTables = await getAllExternalTables() const externalTables = await getAllExternalTables()
@ -106,9 +127,9 @@ export async function getTables(tableIds: string[]): Promise<Table[]> {
const internalTableDocs = await db.allDocs<Table[]>( const internalTableDocs = await db.allDocs<Table[]>(
getMultiIDParams(internalTableIds) getMultiIDParams(internalTableIds)
) )
tables = tables.concat(processInternalTables(internalTableDocs)) tables = tables.concat(internalTableDocs.rows.map(row => row.doc!))
} }
return tables return processTables(tables)
} }
export function enrichViewSchemas(table: Table): TableResponse { export function enrichViewSchemas(table: Table): TableResponse {

View File

@ -2,10 +2,12 @@ import { populateExternalTableSchemas } from "./validation"
import * as getters from "./getters" import * as getters from "./getters"
import * as updates from "./update" import * as updates from "./update"
import * as utils from "./utils" import * as utils from "./utils"
import { migrate } from "./migration"
export default { export default {
populateExternalTableSchemas, populateExternalTableSchemas,
...updates, ...updates,
...getters, ...getters,
...utils, ...utils,
migrate,
} }

View File

@ -0,0 +1,194 @@
import { BadRequestError, context, db as dbCore } from "@budibase/backend-core"
import {
BBReferenceFieldMetadata,
FieldSchema,
FieldSubtype,
InternalTable,
isBBReferenceField,
isRelationshipField,
LinkDocument,
RelationshipFieldMetadata,
RelationshipType,
Row,
Table,
} from "@budibase/types"
import sdk from "../../../sdk"
import { isExternalTableID } from "../../../integrations/utils"
import { EventType, updateLinks } from "../../../db/linkedRows"
import { cloneDeep } from "lodash"
import { isInternalColumnName } from "@budibase/backend-core/src/db"
export interface MigrationResult {
tablesUpdated: Table[]
}
/**
 * Migrates a user-relationship column on an internal table to a BB-reference
 * (user) column with the same data.
 *
 * The new column is first added to the table schema and saved; the data
 * migration then runs, and on ANY failure the schema change is rolled back so
 * the table is not left with a half-migrated column.
 *
 * @param table the table being migrated (internal tables only)
 * @param oldColumn the existing user-relationship column to migrate from
 * @param newColumn the new user BB-reference column to migrate to
 * @returns the tables updated by the migration
 * @throws BadRequestError when the new column name is invalid/taken, or the
 *   old/new column pair is not a supported migration
 */
export async function migrate(
  table: Table,
  oldColumn: FieldSchema,
  newColumn: FieldSchema
): Promise<MigrationResult> {
  if (newColumn.name in table.schema) {
    throw new BadRequestError(`Column "${newColumn.name}" already exists`)
  }
  if (newColumn.name === "") {
    throw new BadRequestError(`Column name cannot be empty`)
  }
  if (isInternalColumnName(newColumn.name)) {
    throw new BadRequestError(`Column name cannot be a reserved column name`)
  }

  table.schema[newColumn.name] = newColumn
  table = await sdk.tables.saveTable(table)

  try {
    // Construct the migrator inside the try block: getColumnMigrator performs
    // validation and can throw, and at this point the new column has already
    // been persisted — a validation failure must also trigger the rollback
    // below, otherwise a dangling column is left on the saved table.
    const migrator = getColumnMigrator(table, oldColumn, newColumn)
    return await migrator.doMigration()
  } catch (e) {
    // If the migration fails then we need to roll back the table schema
    // change.
    delete table.schema[newColumn.name]
    await sdk.tables.saveTable(table)
    throw e
  }
}
// Contract for a single column-migration strategy; doMigration performs the
// full data copy and schema cleanup for one table/column pair.
interface ColumnMigrator {
  doMigration(): Promise<MigrationResult>
}
/**
 * Selects the migrator implementation for the given old/new column pair,
 * validating every precondition of the migration first.
 *
 * Only migrations from a user-relationship column (link to the user metadata
 * table) on an internal table to a user BB-reference column are supported:
 * one-to-many relationships map to a single-user column, many-to-many and
 * many-to-one relationships map to a multi-user column.
 *
 * @throws BadRequestError when any precondition is not met
 */
function getColumnMigrator(
  table: Table,
  oldColumn: FieldSchema,
  newColumn: FieldSchema
): ColumnMigrator {
  // For now, we're only supporting migrations of user relationships to user
  // columns in internal tables. In the future, we may want to support other
  // migrations but for now return an error if we aren't migrating a user
  // relationship.
  if (isExternalTableID(table._id!)) {
    throw new BadRequestError("External tables cannot be migrated")
  }

  if (!(oldColumn.name in table.schema)) {
    throw new BadRequestError(`Column "${oldColumn.name}" does not exist`)
  }

  if (!isBBReferenceField(newColumn)) {
    throw new BadRequestError(`Column "${newColumn.name}" is not a user column`)
  }
  // Use the FieldSubtype enum for consistency with the relationship-type
  // checks below, rather than raw string literals.
  if (
    newColumn.subtype !== FieldSubtype.USER &&
    newColumn.subtype !== FieldSubtype.USERS
  ) {
    throw new BadRequestError(`Column "${newColumn.name}" is not a user column`)
  }

  if (!isRelationshipField(oldColumn)) {
    throw new BadRequestError(
      `Column "${oldColumn.name}" is not a user relationship`
    )
  }
  // The relationship must point at the user metadata table to qualify as a
  // "user relationship".
  if (oldColumn.tableId !== InternalTable.USER_METADATA) {
    throw new BadRequestError(
      `Column "${oldColumn.name}" is not a user relationship`
    )
  }

  if (oldColumn.relationshipType === RelationshipType.ONE_TO_MANY) {
    // One-to-many: each row links to at most one user -> single-user column.
    if (newColumn.subtype !== FieldSubtype.USER) {
      throw new BadRequestError(
        `Column "${oldColumn.name}" is a one-to-many column but "${newColumn.name}" is not a single user column`
      )
    }
    return new SingleUserColumnMigrator(table, oldColumn, newColumn)
  }
  if (
    oldColumn.relationshipType === RelationshipType.MANY_TO_MANY ||
    oldColumn.relationshipType === RelationshipType.MANY_TO_ONE
  ) {
    // Many-to-*: rows can link to several users -> multi-user column.
    if (newColumn.subtype !== FieldSubtype.USERS) {
      throw new BadRequestError(
        `Column "${oldColumn.name}" is a ${oldColumn.relationshipType} column but "${newColumn.name}" is not a multi user column`
      )
    }
    return new MultiUserColumnMigrator(table, oldColumn, newColumn)
  }

  throw new BadRequestError(`Unknown migration type`)
}
// Shared driver for user-column migrations. Walks every link between the
// migrated table's relationship column and the user metadata table, lets the
// subclass write the corresponding value into each row's new column, then
// removes the old column and repairs link docs. Subclasses only implement
// updateRow; the orchestration and statement ordering below are load-bearing.
abstract class UserColumnMigrator implements ColumnMigrator {
  constructor(
    protected table: Table,
    protected oldColumn: RelationshipFieldMetadata,
    protected newColumn: BBReferenceFieldMetadata
  ) {}
  // Write the user reference carried by `link` into `row`'s new column.
  abstract updateRow(row: Row, link: LinkDocument): void
  async doMigration(): Promise<MigrationResult> {
    // Snapshot the pre-migration schema so updateLinks can diff old vs new.
    let oldTable = cloneDeep(this.table)
    let rows = await sdk.rows.fetchRaw(this.table._id!)
    // Index rows by _id for O(1) lookup while iterating links.
    let rowsById = rows.reduce((acc, row) => {
      acc[row._id!] = row
      return acc
    }, {} as Record<string, Row>)
    let links = await sdk.links.fetchWithDocument(this.table._id!)
    for (let link of links) {
      // Only process links where doc1 is this table's migrated column and
      // doc2 is a user metadata row. NOTE(review): links with the table on
      // the doc2 side are skipped — presumably link docs are normalised with
      // this table as doc1; confirm against the link-doc writer.
      if (
        link.doc1.tableId !== this.table._id ||
        link.doc1.fieldName !== this.oldColumn.name ||
        link.doc2.tableId !== InternalTable.USER_METADATA
      ) {
        continue
      }
      let row = rowsById[link.doc1.rowId]
      if (!row) {
        // This can happen if the row has been deleted but the link hasn't,
        // which was a state that was found during the initial testing of this
        // feature. Not sure exactly what can cause it, but best to be safe.
        continue
      }
      this.updateRow(row, link)
    }
    // Persist all fetched rows in one write (including untouched ones).
    let db = context.getAppDB()
    await db.bulkDocs(rows)
    // Drop the old relationship column, save, then clean up the now-orphaned
    // link documents via the table-updated event.
    delete this.table.schema[this.oldColumn.name]
    this.table = await sdk.tables.saveTable(this.table)
    await updateLinks({
      eventType: EventType.TABLE_UPDATED,
      table: this.table,
      oldTable,
    })
    // Re-fetch the related (user metadata) table so callers see its state too.
    let otherTable = await sdk.tables.getTable(this.oldColumn.tableId)
    return {
      tablesUpdated: [this.table, otherTable],
    }
  }
}
// Migrator for one-to-many user relationships: each row links to at most one
// user, so the new column holds a single global user ID.
class SingleUserColumnMigrator extends UserColumnMigrator {
  updateRow(row: Row, link: LinkDocument): void {
    // The link's doc2 row is the user metadata doc; convert its ID to the
    // global user ID expected by BB-reference columns.
    const globalUserId = dbCore.getGlobalIDFromUserMetadataID(link.doc2.rowId)
    row[this.newColumn.name] = globalUserId
  }
}
// Migrator for many-to-many / many-to-one user relationships: rows can link to
// several users, so the new column accumulates an array of global user IDs.
class MultiUserColumnMigrator extends UserColumnMigrator {
  updateRow(row: Row, link: LinkDocument): void {
    const globalUserId = dbCore.getGlobalIDFromUserMetadataID(link.doc2.rowId)
    // Lazily initialise the array on first link for this row, then append.
    let users = row[this.newColumn.name]
    if (!users) {
      users = []
      row[this.newColumn.name] = users
    }
    users.push(globalUserId)
  }
}

View File

@ -1,4 +1,10 @@
import { FieldType, Table, ViewV2 } from "@budibase/types" import {
FieldType,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import sdk from "../../.." import sdk from "../../.."
@ -13,6 +19,8 @@ describe("table sdk", () => {
_id: generator.guid(), _id: generator.guid(),
name: "TestTable", name: "TestTable",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
name: { name: {
type: FieldType.STRING, type: FieldType.STRING,

View File

@ -1,73 +1,92 @@
import { populateExternalTableSchemas } from "../validation" import { populateExternalTableSchemas } from "../validation"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { AutoReason, Datasource, Table } from "@budibase/types" import {
AutoReason,
Datasource,
FieldType,
RelationshipType,
SourceName,
Table,
TableSourceType,
} from "@budibase/types"
import { isEqual } from "lodash" import { isEqual } from "lodash"
import { generateDatasourceID } from "../../../../db/utils"
const SCHEMA = { const datasourceId = generateDatasourceID()
const SCHEMA: Datasource = {
source: SourceName.POSTGRES,
type: "datasource",
_id: datasourceId,
entities: { entities: {
client: { client: {
type: "table",
_id: "tableA", _id: "tableA",
name: "client", name: "client",
primary: ["idC"], primary: ["idC"],
primaryDisplay: "Name", primaryDisplay: "Name",
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: { schema: {
idC: { idC: {
autocolumn: true, autocolumn: true,
externalType: "int unsigned", externalType: "int unsigned",
name: "idC", name: "idC",
type: "number", type: FieldType.NUMBER,
}, },
Name: { Name: {
autocolumn: false, autocolumn: false,
externalType: "varchar(255)", externalType: "varchar(255)",
name: "Name", name: "Name",
type: "string", type: FieldType.STRING,
}, },
project: { project: {
fieldName: "idC", fieldName: "idC",
foreignKey: "idC", foreignKey: "idC",
main: true, main: true,
name: "project", name: "project",
relationshipType: "many-to-one", relationshipType: RelationshipType.MANY_TO_ONE,
tableId: "tableB", tableId: "tableB",
type: "link", type: FieldType.LINK,
}, },
}, },
}, },
project: { project: {
type: "table",
_id: "tableB", _id: "tableB",
name: "project", name: "project",
primary: ["idP"], primary: ["idP"],
primaryDisplay: "Name", primaryDisplay: "Name",
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: { schema: {
idC: { idC: {
externalType: "int unsigned", externalType: "int unsigned",
name: "idC", name: "idC",
type: "number", type: FieldType.NUMBER,
}, },
idP: { idP: {
autocolumn: true, autocolumn: true,
externalType: "int unsigned", externalType: "int unsigned",
name: "idProject", name: "idProject",
type: "number", type: FieldType.NUMBER,
}, },
Name: { Name: {
autocolumn: false, autocolumn: false,
externalType: "varchar(255)", externalType: "varchar(255)",
name: "Name", name: "Name",
type: "string", type: FieldType.STRING,
}, },
client: { client: {
fieldName: "idC", fieldName: "idC",
foreignKey: "idC", foreignKey: "idC",
name: "client", name: "client",
relationshipType: "one-to-many", relationshipType: RelationshipType.ONE_TO_MANY,
tableId: "tableA", tableId: "tableA",
type: "link", type: FieldType.LINK,
}, },
}, },
sql: true, sql: true,
type: "table",
}, },
}, },
} }
@ -95,12 +114,12 @@ describe("validation and update of external table schemas", () => {
function noOtherTableChanges(response: any) { function noOtherTableChanges(response: any) {
checkOtherColumns( checkOtherColumns(
response.entities!.client!, response.entities!.client!,
SCHEMA.entities.client as Table, SCHEMA.entities!.client,
OTHER_CLIENT_COLS OTHER_CLIENT_COLS
) )
checkOtherColumns( checkOtherColumns(
response.entities!.project!, response.entities!.project!,
SCHEMA.entities.project as Table, SCHEMA.entities!.project,
OTHER_PROJECT_COLS OTHER_PROJECT_COLS
) )
} }

View File

@ -1,23 +1,30 @@
import { Table, RenameColumn } from "@budibase/types" import { Table, RenameColumn } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import sdk from "../../index" import sdk from "../../index"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { isExternal } from "./utils" import { isExternal } from "./utils"
import { DocumentInsertResponse } from "@budibase/nano"
import * as external from "./external" import * as external from "./external"
import * as internal from "./internal" import * as internal from "./internal"
import { cloneDeep } from "lodash"
export * as external from "./external" export * as external from "./external"
export * as internal from "./internal" export * as internal from "./internal"
export async function saveTable(table: Table) { export async function saveTable(table: Table): Promise<Table> {
const db = context.getAppDB() const db = context.getAppDB()
if (isExternalTable(table._id!)) { let resp: DocumentInsertResponse
if (isExternalTableID(table._id!)) {
const datasource = await sdk.datasources.get(table.sourceId!) const datasource = await sdk.datasources.get(table.sourceId!)
datasource.entities![table.name] = table datasource.entities![table.name] = table
await db.put(datasource) resp = await db.put(datasource)
} else { } else {
await db.put(table) resp = await db.put(table)
} }
let tableClone = cloneDeep(table)
tableClone._rev = resp.rev
return tableClone
} }
export async function update(table: Table, renaming?: RenameColumn) { export async function update(table: Table, renaming?: RenameColumn) {

View File

@ -1,10 +1,10 @@
import { Table } from "@budibase/types" import { Table, TableSourceType } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean { export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.type === "external") { if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
return true return true
} else if (opts.tableId && isExternalTable(opts.tableId)) { } else if (opts.tableId && isExternalTableID(opts.tableId)) {
return true return true
} }
return false return false

View File

@ -4,13 +4,13 @@ import { cloneDeep } from "lodash"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as utils from "../../../db/utils" import * as utils from "../../../db/utils"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
function pickApi(tableId: any) { function pickApi(tableId: any) {
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
return external return external
} }
return internal return internal

View File

@ -2,8 +2,10 @@ import _ from "lodash"
import { import {
FieldSchema, FieldSchema,
FieldType, FieldType,
INTERNAL_TABLE_SOURCE_ID,
Table, Table,
TableSchema, TableSchema,
TableSourceType,
ViewV2, ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
@ -14,6 +16,8 @@ describe("table sdk", () => {
_id: generator.guid(), _id: generator.guid(),
name: "TestTable", name: "TestTable",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
name: { name: {
type: FieldType.STRING, type: FieldType.STRING,

Some files were not shown because too many files have changed in this diff Show More