Merge branch 'master' into BUDI-7580/account_portal_submodule

Adria Navarro 2023-11-02 13:35:17 +01:00
commit 8c90b9ddfb
117 changed files with 1837 additions and 517 deletions

View File

@ -11,10 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
env:
PAYLOAD_VERSION: ${{ github.sha }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"PAYLOAD_VERSION": "${{ github.sha }}",
"REF_NAME": "${{ github.ref_name}}"
}

View File

@ -165,17 +165,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
REF_NAME: ${{ github.ref_name}}
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"PAYLOAD_VERSION": "${{ github.ref_name }}",
"REF_NAME": "${{ github.ref_name}}"
}

View File

@ -66,14 +66,21 @@ jobs:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=$BUDIBASE_VERSION
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}

View File

@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

View File

@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 18
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx

View File

@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
# Add monthly cron job to renew certbot certificate

View File

@ -1,5 +1,5 @@
{
"version": "2.12.1",
"version": "2.12.4",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@ -26,7 +26,7 @@
"scripts": {
"preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "lerna run build --stream",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"build:sdk": "lerna run --stream build:sdk",

View File

@ -3,6 +3,7 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`

View File

@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [
] as const
export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
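
For context, a minimal usage sketch of the new helper (the import path and row shape are assumptions for illustration, not part of this change):

import { isInternalColumnName } from "@budibase/backend-core"

// Drop internal bookkeeping columns (_id, _rev, type, createdAt, ...) from a row
// before handing it to a caller.
function stripInternalColumns(row: Record<string, any>): Record<string, any> {
  const cleaned: Record<string, any> = {}
  for (const [key, value] of Object.entries(row)) {
    if (!isInternalColumnName(key)) {
      cleaned[key] = value
    }
  }
  return cleaned
}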

View File

@ -6,6 +6,7 @@ import {
ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
/**
* If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions"
export function getDocParams(
docType: string,
docId?: string | null,
otherProps: any = {}
) {
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (docId == null) {
docId = ""
}
@ -45,8 +46,8 @@ export function getDocParams(
export function getRowParams(
tableId?: string | null,
rowId?: string | null,
otherProps = {}
) {
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (tableId == null) {
return getDocParams(DocumentType.ROW, null, otherProps)
}
@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => {
/**
* Gets parameters for retrieving workspaces.
*/
export function getWorkspaceParams(id = "", otherProps = {}) {
export function getWorkspaceParams(
id = "",
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return {
...otherProps,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
/**
* Gets parameters for retrieving users.
*/
export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
export function getGlobalUserParams(
globalId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (!globalId) {
globalId = ""
}
@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
/**
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
*/
export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
export function getUserMetadataParams(
userId?: string | null,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
}
export function getUsersByAppParams(appId: any, otherProps: any = {}) {
export function getUsersByAppParams(
appId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
const prodAppId = getProdAppID(appId)
return {
...otherProps,

View File

@ -30,6 +30,7 @@ export * as timers from "./timers"
export { default as env } from "./environment"
export * as blacklist from "./blacklist"
export * as docUpdates from "./docUpdates"
export * from "./utils/Duration"
export { SearchParams } from "./db"
// Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal

View File

@ -36,7 +36,7 @@ class InMemoryQueue {
* @param opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
constructor(name: string, opts = null) {
constructor(name: string, opts?: any) {
this._name = name
this._opts = opts
this._messages = []

View File

@ -2,11 +2,18 @@ import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import BullQueue, { QueueOptions } from "bull"
import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
import * as Redis from "ioredis"
const CLEANUP_PERIOD_MS = 60 * 1000
// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let cleanupInterval: NodeJS.Timeout
@ -21,7 +28,14 @@ export function createQueue<T>(
opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
const queueConfig: QueueOptions = {
redis: redisProtocolUrl! || (redisOpts as Redis.RedisOptions),
settings: {
maxStalledCount: 0,
lockDuration: QUEUE_LOCK_MS,
lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
},
}
let queue: any
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)

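The untyped queue config is replaced with a typed Bull QueueOptions object plus explicit stalled-job and lock settings. As a standalone sketch with placeholder connection details (Budibase actually derives these from getRedisOptions()), the resulting options look roughly like this:

import BullQueue, { QueueOptions } from "bull"

const opts: QueueOptions = {
  redis: { host: "localhost", port: 6379 }, // placeholder connection
  settings: {
    maxStalledCount: 0,            // a job whose lock is lost is failed, not silently re-run
    lockDuration: 5 * 60 * 1000,   // QUEUE_LOCK_MS: hold the job lock for 5 minutes
    lockRenewTime: 30 * 1000,      // QUEUE_LOCK_RENEW_INTERNAL_MS: renew every 30 seconds
  },
}
const queue = new BullQueue("example-queue", opts)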
View File

@ -165,13 +165,9 @@ export class UserDB {
}
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
const params: any = {
include_docs: true,
limit: opts.limit || 50,
}
let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
opts.appId,
params
{ limit: opts.limit || 50 }
)
return response
}

View File

@ -20,6 +20,7 @@ import {
User,
ContextUser,
DatabaseQueryOpts,
CouchFindOptions,
} from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context"
@ -140,7 +141,7 @@ export const getGlobalUserByEmail = async (
export const searchGlobalUsersByApp = async (
appId: any,
opts: any,
opts: DatabaseQueryOpts,
getOpts?: GetOpts
) => {
if (typeof appId !== "string") {
@ -166,7 +167,10 @@ export const searchGlobalUsersByApp = async (
Return any user who potentially has access to the application
Admins, developers and app users with the explicitly role.
*/
export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
export const searchGlobalUsersByAppAccess = async (
appId: any,
opts?: { limit?: number }
) => {
const roleSelector = `roles.${appId}`
let orQuery: any[] = [
@ -187,7 +191,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
orQuery.push(roleCheck)
}
let searchOptions = {
let searchOptions: CouchFindOptions = {
selector: {
$or: orQuery,
_id: {
@ -198,7 +202,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
}
const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
return resp?.rows
return resp.rows
}
export const getGlobalUserByAppPage = (appId: string, user: User) => {
@ -245,7 +249,8 @@ export const paginatedUsers = async ({
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
const pageSize = limit ?? PAGE_LIMIT
const pageLimit = pageSize + 1
// get one extra document, to have the next page
const opts: DatabaseQueryOpts = {
include_docs: true,
@ -272,7 +277,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, pageLimit, {
return pagination(userList, pageSize, {
paginate: true,
property,
getKey,

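The pagination fix above fetches one document more than the requested page size, uses the extra document only to detect whether another page exists, and then passes the real page size to pagination(). A generic sketch of that pattern (names here are illustrative, not the backend-core helper):

// Fetch pageSize + 1 docs, then trim the extra one off the returned page.
function paginate<T>(docs: T[], pageSize: number) {
  const hasNextPage = docs.length > pageSize
  const data = hasNextPage ? docs.slice(0, pageSize) : docs
  return { data, hasNextPage }
}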
View File

@ -0,0 +1,49 @@
export enum DurationType {
MILLISECONDS = "milliseconds",
SECONDS = "seconds",
MINUTES = "minutes",
HOURS = "hours",
DAYS = "days",
}
const conversion: Record<DurationType, number> = {
milliseconds: 1,
seconds: 1000,
minutes: 60 * 1000,
hours: 60 * 60 * 1000,
days: 24 * 60 * 60 * 1000,
}
export class Duration {
static convert(from: DurationType, to: DurationType, duration: number) {
const milliseconds = duration * conversion[from]
return milliseconds / conversion[to]
}
static from(from: DurationType, duration: number) {
return {
to: (to: DurationType) => {
return Duration.convert(from, to, duration)
},
toMs: () => {
return Duration.convert(from, DurationType.MILLISECONDS, duration)
},
}
}
static fromSeconds(duration: number) {
return Duration.from(DurationType.SECONDS, duration)
}
static fromMinutes(duration: number) {
return Duration.from(DurationType.MINUTES, duration)
}
static fromHours(duration: number) {
return Duration.from(DurationType.HOURS, duration)
}
static fromDays(duration: number) {
return Duration.from(DurationType.DAYS, duration)
}
}
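
A quick sketch of the fluent API this utility adds (mirroring the unit tests below; values shown are just examples):

const lockMs = Duration.fromMinutes(5).toMs()                             // 300000
const days = Duration.from(DurationType.HOURS, 48).to(DurationType.DAYS) // 2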

View File

@ -1,3 +1,4 @@
export * from "./hashing"
export * from "./utils"
export * from "./stringUtils"
export * from "./Duration"

View File

@ -0,0 +1,19 @@
import { Duration, DurationType } from "../Duration"
describe("duration", () => {
it("should convert minutes to milliseconds", () => {
expect(Duration.fromMinutes(5).toMs()).toBe(300000)
})
it("should convert seconds to milliseconds", () => {
expect(Duration.fromSeconds(30).toMs()).toBe(30000)
})
it("should convert days to milliseconds", () => {
expect(Duration.fromDays(1).toMs()).toBe(86400000)
})
it("should convert minutes to days", () => {
expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
})
})

View File

@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

View File

@ -5,4 +5,4 @@ package-lock.json
release/
dist/
routify
.routify/
.routify/

View File

@ -580,7 +580,7 @@ export const getFrontendStore = () => {
let table = validTables.find(table => {
return (
table.sourceId !== BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL
table.sourceType === DB_TYPE_INTERNAL
)
})
if (table) {
@ -591,7 +591,7 @@ export const getFrontendStore = () => {
table = validTables.find(table => {
return (
table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL
table.sourceType === DB_TYPE_INTERNAL
)
})
if (table) {
@ -599,7 +599,7 @@ export const getFrontendStore = () => {
}
// Finally try an external table
return validTables.find(table => table.type === DB_TYPE_EXTERNAL)
return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL)
},
enrichEmptySettings: (component, opts) => {
if (!component?._component) {

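This is the first of many client-side changes in this merge that switch checks from table.type to table.sourceType. Assuming DB_TYPE_INTERNAL and DB_TYPE_EXTERNAL resolve to the "internal"/"external" strings seen elsewhere in this diff, the distinction is roughly:

// Sketch only: after this change `type` stays "table" for every table,
// while `sourceType` records where the table actually lives.
const internalTable = {
  type: "table",
  sourceId: "bb_internal",          // Budibase internal DB
  sourceType: "internal",           // DB_TYPE_INTERNAL
}
const externalTable = {
  type: "table",
  sourceId: "datasource_plus_abc",  // hypothetical datasource _id
  sourceType: "external",           // DB_TYPE_EXTERNAL
}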
View File

@ -16,7 +16,6 @@
$: linkedTable = $tables.list.find(table => table._id === linkedTableId)
$: schema = linkedTable?.schema
$: table = $tables.list.find(table => table._id === tableId)
$: type = table?.type
$: fetchData(tableId, rowId)
$: {
let rowLabel = row?.[table?.primaryDisplay]
@ -41,5 +40,5 @@
</script>
{#if row && row._id === rowId}
<Table {title} {schema} {data} {type} />
<Table {title} {schema} {data} />
{/if}

View File

@ -16,6 +16,7 @@
import GridRelationshipButton from "components/backend/DataTable/buttons/grid/GridRelationshipButton.svelte"
import GridEditColumnModal from "components/backend/DataTable/modals/grid/GridEditColumnModal.svelte"
import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend"
const userSchemaOverrides = {
firstName: { displayName: "First name", disabled: true },
@ -27,7 +28,7 @@
$: id = $tables.selected?._id
$: isUsersTable = id === TableNames.USERS
$: isInternal = $tables.selected?.type !== "external"
$: isInternal = $tables.selected?.sourceType !== DB_TYPE_EXTERNAL
$: gridDatasource = {
type: "table",
tableId: id,
@ -46,10 +47,7 @@
tables.replaceTable(id, e.detail)
// We need to refresh datasources when an external table changes.
// Type "external" may exist - sometimes type is "table" and sometimes it
// is "external" - it has different meanings in different endpoints.
// If we check both these then we hopefully catch all external tables.
if (e.detail?.type === "external" || e.detail?.sql) {
if (e.detail?.sourceType === DB_TYPE_EXTERNAL) {
await datasources.fetch()
}
}

View File

@ -17,7 +17,6 @@
let hideAutocolumns = true
let data = []
let loading = false
let type = "internal"
$: name = view.name
$: schema = view.schema
@ -66,7 +65,6 @@
tableId={view.tableId}
{data}
{loading}
{type}
rowCount={10}
allowEditing={false}
bind:hideAutocolumns

View File

@ -10,6 +10,6 @@
<ImportButton
{disabled}
tableId={$datasource?.tableId}
tableType={$definition?.type}
tableType={$definition?.sourceType}
on:importrows={rows.actions.refreshData}
/>

View File

@ -26,6 +26,7 @@
ALLOWABLE_NUMBER_TYPES,
SWITCHABLE_TYPES,
PrettyRelationshipDefinitions,
DB_TYPE_EXTERNAL,
} from "constants/backend"
import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
@ -254,10 +255,11 @@
!uneditable &&
editableColumn?.type !== AUTO_TYPE &&
!editableColumn.autocolumn
$: external = table.type === "external"
$: externalTable = table.sourceType === DB_TYPE_EXTERNAL
// in the case of internal tables the sourceId will just be undefined
$: tableOptions = $tables.list.filter(
opt => opt.type === table.type && table.sourceId === opt.sourceId
opt =>
opt.sourceType === table.sourceType && table.sourceId === opt.sourceId
)
$: typeEnabled =
!originalName ||
@ -409,7 +411,7 @@
editableColumn.type === FieldType.BB_REFERENCE &&
editableColumn.subtype === FieldSubtype.USERS
if (!external) {
if (!externalTable) {
return [
FIELDS.STRING,
FIELDS.BARCODEQR,
@ -441,7 +443,7 @@
isUsers ? FIELDS.USERS : FIELDS.USER,
]
// no-sql or a spreadsheet
if (!external || table.sql) {
if (!externalTable || table.sql) {
fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
}
return fields
@ -486,7 +488,7 @@
})
}
const newError = {}
if (!external && fieldInfo.name?.startsWith("_")) {
if (!externalTable && fieldInfo.name?.startsWith("_")) {
newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
newError.name = `Illegal character; must be alpha-numeric.`
@ -498,7 +500,7 @@
newError.name = `Column name already in use.`
}
if (fieldInfo.type == "auto" && !fieldInfo.subtype) {
if (fieldInfo.type === "auto" && !fieldInfo.subtype) {
newError.subtype = `Auto Column requires a type`
}

View File

@ -1,6 +1,6 @@
<script>
import { Select, Toggle, Multiselect } from "@budibase/bbui"
import { FIELDS } from "constants/backend"
import { DB_TYPE_INTERNAL, FIELDS } from "constants/backend"
import { API } from "api"
import { parseFile } from "./utils"
@ -169,7 +169,7 @@
</div>
{/each}
</div>
{#if tableType === "internal"}
{#if tableType === DB_TYPE_INTERNAL}
<br />
<Toggle
bind:value={updateExistingRows}

View File

@ -8,6 +8,7 @@
import {
BUDIBASE_INTERNAL_DB_ID,
BUDIBASE_DATASOURCE_TYPE,
DB_TYPE_INTERNAL,
} from "constants/backend"
$: tableNames = $tables.list.map(table => table.name)
@ -55,8 +56,9 @@
name,
schema: { ...schema },
rows,
type: "internal",
type: "table",
sourceId: targetDatasourceId,
sourceType: DB_TYPE_INTERNAL,
}
// Only set primary display if defined

View File

@ -13,6 +13,7 @@
notifications,
} from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { DB_TYPE_EXTERNAL } from "constants/backend"
export let table
@ -27,8 +28,8 @@
let willBeDeleted
let deleteTableName
$: external = table?.type === "external"
$: allowDeletion = !external || table?.created
$: externalTable = table?.sourceType === DB_TYPE_EXTERNAL
$: allowDeletion = !externalTable || table?.created
function showDeleteModal() {
templateScreens = $store.screens.filter(
@ -48,7 +49,7 @@
for (let screen of templateScreens) {
await store.actions.screens.delete(screen)
}
if (table.type === "external") {
if (table.sourceType === DB_TYPE_EXTERNAL) {
await datasources.fetch()
}
notifications.success("Table deleted")
@ -91,7 +92,7 @@
<div slot="control" class="icon">
<Icon s hoverable name="MoreSmallList" />
</div>
{#if !external}
{#if !externalTable}
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>

View File

@ -23,7 +23,7 @@
</script>
<div class="table">
<Table {schema} data={rowsCopy} type="external" allowEditing={false} />
<Table {schema} data={rowsCopy} allowEditing={false} />
</div>
<style>

View File

@ -2,6 +2,7 @@
import { ModalContent, Body, Input, notifications } from "@budibase/bbui"
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"
import { DB_TYPE_EXTERNAL } from "constants/backend"
export let datasource
@ -16,9 +17,10 @@
function buildDefaultTable(tableName, datasourceId) {
return {
name: tableName,
type: "external",
type: "table",
primary: ["id"],
sourceId: datasourceId,
sourceType: DB_TYPE_EXTERNAL,
schema: {
id: {
autocolumn: true,

View File

@ -5,7 +5,7 @@
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"
import { onMount } from "svelte"
import { BUDIBASE_INTERNAL_DB_ID } from "constants/backend"
import { BUDIBASE_INTERNAL_DB_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { TableNames } from "constants"
import { store } from "builderStore"
@ -14,7 +14,7 @@
$: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID)
$: internalTablesBySourceId = $tables.list.filter(
table =>
table.type !== "external" &&
table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table._id !== TableNames.USERS
)

View File

@ -4,7 +4,7 @@
import ICONS from "components/backend/DatasourceNavigator/icons"
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"
import { DEFAULT_BB_DATASOURCE_ID } from "constants/backend"
import { DEFAULT_BB_DATASOURCE_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { onMount } from "svelte"
import { store } from "builderStore"
@ -13,7 +13,8 @@
$: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID)
$: internalTablesBySourceId = $tables.list.filter(
table =>
table.type !== "external" && table.sourceId === DEFAULT_BB_DATASOURCE_ID
table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
)
onMount(() => {

View File

@ -3,7 +3,6 @@
Heading,
Body,
Button,
ButtonGroup,
Table,
Layout,
Modal,
@ -46,6 +45,10 @@
datasource: {
type: "user",
},
options: {
paginate: true,
limit: 10,
},
})
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@ -65,10 +68,12 @@
{ column: "role", component: RoleTableRenderer },
]
let userData = []
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
$: isOwner = $auth.accountPortalAccess && $admin.cloud
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
$: debouncedUpdateFetch(searchEmail)
$: schema = {
email: {
@ -88,16 +93,6 @@
width: "1fr",
},
}
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
$: pendingSchema = getPendingSchema(schema)
$: userData = []
$: inviteUsersResponse = { successful: [], unsuccessful: [] }
@ -121,9 +116,15 @@
}
})
}
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
const invitesToSchema = invites => {
return invites.map(invite => {
@ -143,7 +144,9 @@
const updateFetch = email => {
fetch.update({
query: {
email,
string: {
email,
},
},
})
}
@ -296,7 +299,7 @@
{/if}
<div class="controls">
{#if !readonly}
<ButtonGroup>
<div class="buttons">
<Button
disabled={readonly}
on:click={$licensing.userLimitReached
@ -315,7 +318,7 @@
>
Import
</Button>
</ButtonGroup>
</div>
{:else}
<ScimBanner />
{/if}
@ -390,12 +393,15 @@
</Modal>
<style>
.buttons {
display: flex;
gap: 10px;
}
.pagination {
display: flex;
flex-direction: row;
justify-content: flex-end;
}
.controls {
display: flex;
flex-direction: row;
@ -403,7 +409,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right {
display: flex;
flex-direction: row;
@ -411,7 +416,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right :global(.spectrum-Search) {
width: 200px;
}

View File

@ -5467,17 +5467,17 @@
},
"settings": [
{
"type": "select",
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "radio",
"label": "Type",
"key": "actionType",
"options": ["Create", "Update", "View"],
"defaultValue": "Create"
},
{
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "text",
"label": "Title",
@ -5508,13 +5508,37 @@
},
{
"type": "text",
"label": "Empty text",
"label": "No rows found",
"key": "noRowsMessage",
"defaultValue": "We couldn't find a row to display",
"nested": true
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"section": true,
"name": "Buttons",
@ -5566,30 +5590,6 @@
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"tag": "style",
"type": "select",
@ -5924,4 +5924,4 @@
}
]
}
}
}

View File

@ -220,15 +220,11 @@
</BlockComponent>
{/if}
</BlockComponent>
{#if description}
<BlockComponent
type="text"
props={{ text: description }}
order={1}
/>
{/if}
</BlockComponent>
{/if}
{#if description}
<BlockComponent type="text" props={{ text: description }} order={1} />
{/if}
{#key fields}
<BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>
{#each fields as field, idx}

View File

@ -140,4 +140,13 @@ export const buildTableEndpoints = API => ({
},
})
},
migrateColumn: async ({ tableId, oldColumn, newColumn }) => {
return await API.post({
url: `/api/tables/${tableId}/migrate`,
body: {
oldColumn,
newColumn,
},
})
},
})
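
A hedged usage sketch of the new endpoint wrapper (ids and field values are placeholders; the real call is made from the grid's MigrationModal further down in this diff):

// Hypothetical invocation from inside an async function:
// migrate a user relationship column to a user-reference column.
await API.migrateColumn({
  tableId: "ta_123456",
  oldColumn: { name: "user relationship", type: "link" },
  newColumn: { name: "user column", type: "bb_reference", subtype: "user" },
})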

View File

@ -1,11 +1,20 @@
<script>
import { getContext, onMount, tick } from "svelte"
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
import {
Icon,
Popover,
Menu,
MenuItem,
clickOutside,
Modal,
} from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils"
import MigrationModal from "../controls/MigrationModal.svelte"
import { debounce } from "../../../utils/utils"
import { FieldType, FormulaTypes } from "@budibase/types"
import { TableNames } from "../../../constants"
export let column
export let idx
@ -45,6 +54,7 @@
let editIsOpen = false
let timeout
let popover
let migrationModal
let searchValue
let input
@ -189,6 +199,11 @@
})
}
const openMigrationModal = () => {
migrationModal.show()
open = false
}
const startSearching = async () => {
$focusedCellId = null
searchValue = ""
@ -224,6 +239,10 @@
onMount(() => subscribe("close-edit-column", cancelEdit))
</script>
<Modal bind:this={migrationModal}>
<MigrationModal {column} />
</Modal>
<div
class="header-cell"
class:open
@ -363,6 +382,11 @@
>
Hide column
</MenuItem>
{#if $config.canEditColumns && column.schema.type === "link" && column.schema.tableId === TableNames.USERS}
<MenuItem icon="User" on:click={openMigrationModal}>
Migrate to user column
</MenuItem>
{/if}
</Menu>
{/if}
</Popover>

View File

@ -0,0 +1,73 @@
<script>
import {
ModalContent,
notifications,
Input,
InlineAlert,
} from "@budibase/bbui"
import { getContext } from "svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldSubtype, FieldType, RelationshipType } from "@budibase/types"
const { API, definition, rows } = getContext("grid")
export let column
let newColumnName = `${column.schema.name} migrated`
$: error = checkNewColumnName(newColumnName)
const checkNewColumnName = newColumnName => {
if (newColumnName === "") {
return "Column name can't be empty."
}
if (newColumnName in $definition.schema) {
return "New column name can't be the same as an existing column name."
}
if (newColumnName.match(ValidColumnNameRegex) === null) {
return "Illegal character; must be alpha-numeric."
}
}
const migrateUserColumn = async () => {
let subtype = FieldSubtype.USERS
if (column.schema.relationshipType === RelationshipType.ONE_TO_MANY) {
subtype = FieldSubtype.USER
}
try {
await API.migrateColumn({
tableId: $definition._id,
oldColumn: column.schema,
newColumn: {
name: newColumnName,
type: FieldType.BB_REFERENCE,
subtype,
},
})
notifications.success("Column migrated")
} catch (e) {
notifications.error(`Failed to migrate: ${e.message}`)
}
await rows.actions.refreshData()
}
</script>
<ModalContent
title="Migrate column"
confirmText="Continue"
cancelText="Cancel"
onConfirm={migrateUserColumn}
disabled={error !== undefined}
size="M"
>
This operation will kick off a migration of the column "{column.schema.name}"
to a new column, with the name provided - this operation may take a moment to
complete.
<InlineAlert
type="error"
header="Are you sure?"
message="This will leave bindings which utilised the user relationship column in a state where they will need to be updated to use the new column instead."
/>
<Input bind:value={newColumnName} label="New column name" {error} />
</ModalContent>

View File

@ -70,6 +70,13 @@ module AwsMock {
Contents: {},
})
)
// @ts-ignore
this.getObject = jest.fn(
response({
Body: "",
})
)
}
aws.DynamoDB = { DocumentClient }

View File

@ -2,7 +2,7 @@ version: "3.8"
services:
db:
container_name: postgres
image: postgres:15
image: postgres:15-bullseye
restart: unless-stopped
environment:
POSTGRES_USER: root

View File

@ -32,11 +32,8 @@ import {
tenancy,
users,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import {
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock } from "../../utilities/redis"

View File

@ -12,7 +12,6 @@ import {
CreateDatasourceResponse,
Datasource,
DatasourcePlus,
ExternalTable,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
IntegrationBase,
@ -59,7 +58,7 @@ async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
return await connector.buildSchema(
datasource._id!,
datasource.entities! as Record<string, ExternalTable>
datasource.entities! as Record<string, Table>
)
}

View File

@ -1,12 +1,12 @@
import { context } from "@budibase/backend-core"
import { isExternalTable } from "../../../integrations/utils"
import { isExternalTableID } from "../../../integrations/utils"
import { APP_PREFIX, DocumentType } from "../../../db/utils"
export async function addRev(
body: { _id?: string; _rev?: string },
tableId?: string
) {
if (!body._id || (tableId && isExternalTable(tableId))) {
if (!body._id || (tableId && isExternalTableID(tableId))) {
return body
}
let id = body._id

View File

@ -1,7 +1,7 @@
import { quotas } from "@budibase/pro"
import * as internal from "./internal"
import * as external from "./external"
import { isExternalTable } from "../../../integrations/utils"
import { isExternalTableID } from "../../../integrations/utils"
import {
Ctx,
UserCtx,
@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
export * as views from "./views"
function pickApi(tableId: any) {
if (isExternalTable(tableId)) {
if (isExternalTableID(tableId)) {
return external
}
return internal
@ -227,7 +227,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const tableId = utils.getTableId(ctx)
// external tables are hard to validate currently
if (isExternalTable(tableId)) {
if (isExternalTableID(tableId)) {
ctx.body = { valid: true, errors: {} }
} else {
ctx.body = await sdk.rows.utils.validate({

View File

@ -5,18 +5,27 @@ import {
isSchema,
validate as validateSchema,
} from "../../../utilities/schema"
import { isExternalTable, isSQL } from "../../../integrations/utils"
import {
isExternalTable,
isExternalTableID,
isSQL,
} from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import {
BulkImportRequest,
BulkImportResponse,
DocumentType,
FetchTablesResponse,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest,
SaveTableResponse,
Table,
TableResponse,
TableSourceType,
UserCtx,
Row,
SEPARATOR,
} from "@budibase/types"
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
@ -24,12 +33,10 @@ import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
tableId = table._id
}
if (table && table.type === "external") {
if (table && isExternalTable(table)) {
return external
} else if (tableId && isExternalTable(tableId)) {
}
if (tableId && isExternalTableID(tableId)) {
return external
}
return internal
@ -46,8 +53,8 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
if (entities) {
return Object.values(entities).map<Table>((entity: Table) => ({
...entity,
type: "external",
sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
sourceId: datasource._id!,
sql: isSQL(datasource),
}))
} else {
@ -158,3 +165,19 @@ export async function validateExistingTableImport(ctx: UserCtx) {
ctx.status = 422
}
}
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
const { oldColumn, newColumn } = ctx.request.body
let tableId = ctx.params.tableId as string
const table = await sdk.tables.getTable(tableId)
let result = await sdk.tables.migrate(table, oldColumn, newColumn)
for (let table of result.tablesUpdated) {
builderSocket?.emitTableUpdate(ctx, table, {
includeOriginator: true,
})
}
ctx.status = 200
ctx.body = { message: `Column ${oldColumn.name} migrated.` }
}
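
Based on this handler and the builder client call above, the request and response bodies appear to take roughly the following shape (a sketch inferred from usage, not the exact MigrateRequest/MigrateResponse definitions):

// Assumed shapes, inferred from how the endpoint is called in this diff.
interface MigrateRequest {
  oldColumn: { name: string; type: string; [key: string]: any }
  newColumn: { name: string; type: string; subtype?: string; [key: string]: any }
}
interface MigrateResponse {
  message: string // e.g. "Column user relationship migrated."
}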

View File

@ -7,6 +7,7 @@ import {
SaveTableRequest,
SaveTableResponse,
Table,
TableSourceType,
UserCtx,
} from "@budibase/types"
import sdk from "../../../sdk"
@ -16,10 +17,11 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
let tableToSave: Table & {
_rename?: RenameColumn
} = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
type: "table",
sourceType: TableSourceType.INTERNAL,
views: {},
}
const renaming = tableToSave._rename
delete tableToSave._rename

View File

@ -52,4 +52,11 @@ router
tableController.bulkImport
)
.post(
"/api/tables/:tableId/migrate",
paramResource("tableId"),
authorized(BUILDER),
tableController.migrate
)
export default router

View File

@ -7,7 +7,7 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"entities": [
{
"_id": "ta_users",
"_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44",
"_rev": "1-73b7912e6cbdd3d696febc60f3715844",
"createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users",
"primaryDisplay": "email",
@ -21,7 +21,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": true,
"type": "string",
},
"fieldName": "email",
"name": "email",
"type": "string",
},
@ -30,7 +29,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false,
"type": "string",
},
"fieldName": "firstName",
"name": "firstName",
"type": "string",
},
@ -39,7 +37,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false,
"type": "string",
},
"fieldName": "lastName",
"name": "lastName",
"type": "string",
},
@ -54,7 +51,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false,
"type": "string",
},
"fieldName": "roleId",
"name": "roleId",
"type": "options",
},
@ -67,11 +63,12 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false,
"type": "string",
},
"fieldName": "status",
"name": "status",
"type": "options",
},
},
"sourceId": "bb_internal",
"sourceType": "internal",
"type": "table",
"updatedAt": "2020-01-01T00:00:00.000Z",
"views": {},

View File

@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -12,16 +14,17 @@ describe("/backups", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
tk.reset()
await config.init()
})
describe("exportAppDump", () => {
describe("/api/backups/export", () => {
it("should be able to export app", async () => {
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.headers["content-type"]).toEqual("application/gzip")
const { body, headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(body instanceof Buffer).toBe(true)
expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1)
})
@ -36,11 +39,11 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE)
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
const { headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(res.headers["content-disposition"]).toEqual(
expect(headers["content-disposition"]).toEqual(
`attachment; filename="${
config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@ -48,6 +51,21 @@ describe("/backups", () => {
})
})
describe("/api/backups/import", () => {
it("should be able to import an app", async () => {
const appId = config.getAppId()!
const automation = await config.createAutomation()
await config.createAutomationLog(automation, appId)
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
})
})
describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => {
await config.createAutomation()

View File

@ -10,6 +10,7 @@ import {
FieldSchema,
FieldType,
FieldTypeSubtypes,
INTERNAL_TABLE_SOURCE_ID,
MonthlyQuotaName,
PermissionLevel,
QuotaUsageType,
@ -21,6 +22,7 @@ import {
SortType,
StaticQuotaName,
Table,
TableSourceType,
} from "@budibase/types"
import {
expectAnyExternalColsAttributes,
@ -65,6 +67,8 @@ describe.each([
type: "table",
primary: ["id"],
primaryDisplay: "name",
sourceType: TableSourceType.INTERNAL,
sourceId: INTERNAL_TABLE_SOURCE_ID,
schema: {
id: {
type: FieldType.AUTO,
@ -134,9 +138,22 @@ describe.each([
}
: undefined
async function createTable(
cfg: Omit<SaveTableRequest, "sourceId" | "sourceType">,
opts?: { skipReassigning: boolean }
) {
let table
if (dsProvider) {
table = await config.createExternalTable(cfg, opts)
} else {
table = await config.createTable(cfg, opts)
}
return table
}
beforeAll(async () => {
const tableConfig = generateTableConfig()
const table = await config.createTable(tableConfig)
let table = await createTable(tableConfig)
tableId = table._id!
})
@ -165,7 +182,7 @@ describe.each([
const queryUsage = await getQueryUsage()
const tableConfig = generateTableConfig()
const newTable = await config.createTable(
const newTable = await createTable(
{
...tableConfig,
name: "TestTableAuto",
@ -242,7 +259,7 @@ describe.each([
})
it("should list all rows for given tableId", async () => {
const table = await config.createTable(generateTableConfig(), {
const table = await createTable(generateTableConfig(), {
skipReassigning: true,
})
const tableId = table._id!
@ -323,7 +340,7 @@ describe.each([
inclusion: ["Alpha", "Beta", "Gamma"],
},
}
const table = await config.createTable({
const table = await createTable({
name: "TestTable2",
type: "table",
schema: {
@ -438,7 +455,8 @@ describe.each([
describe("view save", () => {
it("views have extra data trimmed", async () => {
const table = await config.createTable({
const table = await createTable({
type: "table",
name: "orders",
primary: ["OrderID"],
schema: {
@ -494,7 +512,7 @@ describe.each([
describe("patch", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should update only the fields that are supplied", async () => {
@ -548,7 +566,7 @@ describe.each([
describe("destroy", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should be able to delete a row", async () => {
@ -566,7 +584,7 @@ describe.each([
describe("validate", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should return no errors on valid row", async () => {
@ -603,7 +621,7 @@ describe.each([
describe("bulkDelete", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should be able to delete a bulk set of rows", async () => {
@ -687,7 +705,7 @@ describe.each([
describe("fetchView", () => {
beforeEach(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should be able to fetch tables contents via 'view'", async () => {
@ -735,7 +753,7 @@ describe.each([
describe("fetchEnrichedRows", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should allow enriching some linked rows", async () => {
@ -808,7 +826,7 @@ describe.each([
describe("attachments", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should allow enriching attachment rows", async () => {
@ -839,7 +857,7 @@ describe.each([
describe("exportData", () => {
beforeAll(async () => {
const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig)
table = await createTable(tableConfig)
})
it("should allow exporting all columns", async () => {
@ -880,6 +898,8 @@ describe.each([
async function userTable(): Promise<Table> {
return {
name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table",
primary: ["id"],
schema: {
@ -925,7 +945,7 @@ describe.each([
describe("create", () => {
it("should persist a new row with only the provided view fields", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const view = await config.createView({
schema: {
name: { visible: true },
@ -960,7 +980,7 @@ describe.each([
describe("patch", () => {
it("should update only the view fields for a row", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const tableId = table._id!
const view = await config.createView({
schema: {
@ -1001,7 +1021,7 @@ describe.each([
describe("destroy", () => {
it("should be able to delete a row", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const tableId = table._id!
const view = await config.createView({
schema: {
@ -1025,7 +1045,7 @@ describe.each([
})
it("should be able to delete multiple rows", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const tableId = table._id!
const view = await config.createView({
schema: {
@ -1062,6 +1082,8 @@ describe.each([
async function userTable(): Promise<Table> {
return {
name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table",
primary: ["id"],
schema: {
@ -1088,7 +1110,7 @@ describe.each([
}
it("returns empty rows from view when no schema is passed", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, { tableId: table._id })
@ -1119,7 +1141,7 @@ describe.each([
})
it("searching respects the view filters", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
await Promise.all(
Array.from({ length: 10 }, () =>
@ -1243,7 +1265,7 @@ describe.each([
describe("sorting", () => {
beforeAll(async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
@ -1310,7 +1332,7 @@ describe.each([
})
it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {
@ -1341,7 +1363,7 @@ describe.each([
})
it("views without data can be returned", async () => {
const table = await config.createTable(await userTable())
const table = await createTable(await userTable())
const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id)
@ -1350,7 +1372,7 @@ describe.each([
})
it("respects the limit parameter", async () => {
await config.createTable(await userTable())
await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const limit = generator.integer({ min: 1, max: 8 })
@ -1365,7 +1387,7 @@ describe.each([
})
it("can handle pagination", async () => {
await config.createTable(await userTable())
await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const createViewResponse = await config.createView()
@ -1443,7 +1465,7 @@ describe.each([
let tableId: string
beforeAll(async () => {
await config.createTable(await userTable())
await createTable(await userTable())
await Promise.all(
Array.from({ length: 10 }, () => config.createRow())
)
@ -1521,13 +1543,13 @@ describe.each([
let o2mTable: Table
let m2mTable: Table
beforeAll(async () => {
o2mTable = await config.createTable(
o2mTable = await createTable(
{ ...generateTableConfig(), name: "o2m" },
{
skipReassigning: true,
}
)
m2mTable = await config.createTable(
m2mTable = await createTable(
{ ...generateTableConfig(), name: "m2m" },
{
skipReassigning: true,
@ -1597,9 +1619,9 @@ describe.each([
const tableConfig = generateTableConfig()
if (config.datasource) {
tableConfig.sourceId = config.datasource._id
tableConfig.sourceId = config.datasource._id!
if (config.datasource.plus) {
tableConfig.type = "external"
tableConfig.sourceType = TableSourceType.EXTERNAL
}
}
const table = await config.api.table.create({

View File

@ -1,16 +1,24 @@
import { events, context } from "@budibase/backend-core"
import { context, events } from "@budibase/backend-core"
import {
FieldType,
SaveTableRequest,
RelationshipType,
Table,
ViewCalculation,
AutoFieldSubTypes,
FieldSubtype,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalTable,
RelationshipType,
Row,
SaveTableRequest,
Table,
TableSourceType,
User,
ViewCalculation,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
const { basicTable } = setup.structures
import sdk from "../../../sdk"
import uuid from "uuid"
const { basicTable } = setup.structures
describe("/tables", () => {
let request = setup.getRequest()
@ -239,7 +247,8 @@ describe("/tables", () => {
.expect(200)
const fetchedTable = res.body[0]
expect(fetchedTable.name).toEqual(testTable.name)
expect(fetchedTable.type).toEqual("internal")
expect(fetchedTable.type).toEqual("table")
expect(fetchedTable.sourceType).toEqual("internal")
})
it("should apply authorization to endpoint", async () => {
@ -417,4 +426,281 @@ describe("/tables", () => {
})
})
})
describe("migrate", () => {
let users: User[]
beforeAll(async () => {
users = await Promise.all([
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
])
})
it("should successfully migrate a one-to-many user relationship to a user column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const rows = await Promise.all(
users.map(u =>
config.api.row.save(table._id!, { "user relationship": [u] })
)
)
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const migratedRows = await config.api.row.fetch(table._id!)
rows.sort((a, b) => a._id!.localeCompare(b._id!))
migratedRows.sort((a, b) => a._id!.localeCompare(b._id!))
for (const [i, row] of rows.entries()) {
const migratedRow = migratedRows[i]
expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined()
expect(row["user relationship"][0]._id).toEqual(
migratedRow["user column"][0]._id
)
}
})
it("should successfully migrate a many-to-many user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[1], users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[1]._id, users[2]._id])
)
})
it("should successfully migrate a many-to-one user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
users[2]._id,
])
})
describe("unhappy paths", () => {
let table: Table
beforeAll(async () => {
table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
num: {
type: FieldType.NUMBER,
name: "num",
constraints: {
type: "number",
presence: false,
},
},
},
})
})
it("should fail if the new column name is blank", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is a reserved name", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "_id",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is the same as an existing column", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "num",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the old column name isn't a column in the table", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: {
name: "not a column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
newColumn: {
name: "new column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
})
})
})

View File

@ -3,10 +3,12 @@ import {
CreateViewRequest,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
SearchQueryOperators,
SortOrder,
SortType,
Table,
TableSourceType,
UIFieldMetadata,
UpdateViewRequest,
ViewV2,
@ -18,6 +20,8 @@ function priceTable(): Table {
return {
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
Price: {
type: FieldType.NUMBER,
@ -54,10 +58,10 @@ describe.each([
},
})
return config.createTable({
return config.createExternalTable({
...priceTable(),
sourceId: datasource._id,
type: "external",
sourceType: TableSourceType.EXTERNAL,
})
},
],

View File

@ -1,5 +1,11 @@
import { objectStore, roles, constants } from "@budibase/backend-core"
import { FieldType as FieldTypes } from "@budibase/types"
import { constants, objectStore, roles } from "@budibase/backend-core"
import {
FieldType as FieldTypes,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
} from "@budibase/types"
export {
FieldType as FieldTypes,
RelationshipType,
@ -70,9 +76,11 @@ export enum SortDirection {
DESCENDING = "DESCENDING",
}
export const USERS_TABLE_SCHEMA = {
export const USERS_TABLE_SCHEMA: Table = {
_id: "ta_users",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
views: {},
name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
@ -87,12 +95,10 @@ export const USERS_TABLE_SCHEMA = {
},
presence: true,
},
fieldName: "email",
name: "email",
},
firstName: {
name: "firstName",
fieldName: "firstName",
type: FieldTypes.STRING,
constraints: {
type: FieldTypes.STRING,
@ -101,7 +107,6 @@ export const USERS_TABLE_SCHEMA = {
},
lastName: {
name: "lastName",
fieldName: "lastName",
type: FieldTypes.STRING,
constraints: {
type: FieldTypes.STRING,
@ -109,7 +114,6 @@ export const USERS_TABLE_SCHEMA = {
},
},
roleId: {
fieldName: "roleId",
name: "roleId",
type: FieldTypes.OPTIONS,
constraints: {
@ -119,7 +123,6 @@ export const USERS_TABLE_SCHEMA = {
},
},
status: {
fieldName: "status",
name: "status",
type: FieldTypes.OPTIONS,
constraints: {
@ -169,3 +172,8 @@ export enum AutomationErrors {
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5
export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

View File

@ -1,4 +1,12 @@
import { FieldTypes, AutoFieldSubTypes } from "../../constants"
import {
AutoFieldSubTypes,
FieldTypes,
DEFAULT_BB_DATASOURCE_ID,
DEFAULT_INVENTORY_TABLE_ID,
DEFAULT_EMPLOYEE_TABLE_ID,
DEFAULT_EXPENSES_TABLE_ID,
DEFAULT_JOBS_TABLE_ID,
} from "../../constants"
import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument"
@ -8,19 +16,14 @@ import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core"
import {
Table,
Row,
RelationshipType,
FieldType,
RelationshipType,
Row,
Table,
TableSchema,
TableSourceType,
} from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
const defaultDatasource = {
_id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE,
@ -89,9 +92,10 @@ const AUTO_COLUMNS: TableSchema = {
export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
_id: DEFAULT_INVENTORY_TABLE_ID,
type: "internal",
type: "table",
views: {},
sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Item Name",
name: "Inventory",
schema: {
@ -198,10 +202,11 @@ export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = {
export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = {
_id: DEFAULT_EMPLOYEE_TABLE_ID,
type: "internal",
type: "table",
views: {},
name: "Employees",
sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "First Name",
schema: {
"First Name": {
@ -346,9 +351,10 @@ export const DEFAULT_EMPLOYEE_TABLE_SCHEMA: Table = {
export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
_id: DEFAULT_JOBS_TABLE_ID,
type: "internal",
type: "table",
name: "Jobs",
sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Job ID",
schema: {
"Job ID": {
@ -503,10 +509,11 @@ export const DEFAULT_JOBS_TABLE_SCHEMA: Table = {
export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
_id: DEFAULT_EXPENSES_TABLE_ID,
type: "internal",
type: "table",
views: {},
name: "Expenses",
sourceId: DEFAULT_BB_DATASOURCE_ID,
sourceType: TableSourceType.INTERNAL,
primaryDisplay: "Expense ID",
schema: {
"Expense ID": {

View File

@ -2,7 +2,12 @@ import { ViewName, getQueryIndex, isRelationshipColumn } from "../utils"
import { FieldTypes } from "../../constants"
import { createLinkView } from "../views/staticViews"
import { context, logging } from "@budibase/backend-core"
import { LinkDocument, LinkDocumentValue, Table } from "@budibase/types"
import {
DatabaseQueryOpts,
LinkDocument,
LinkDocumentValue,
Table,
} from "@budibase/types"
export { createLinkView } from "../views/staticViews"
@ -36,13 +41,13 @@ export async function getLinkDocuments(args: {
}): Promise<LinkDocumentValue[] | LinkDocument[]> {
const { tableId, rowId, fieldName, includeDocs } = args
const db = context.getAppDB()
let params: any
let params: DatabaseQueryOpts
if (rowId) {
params = { key: [tableId, rowId] }
}
// only table is known
else {
params = { startKey: [tableId], endKey: [tableId, {}] }
params = { startkey: [tableId], endkey: [tableId, {}] }
}
if (includeDocs) {
params.include_docs = true
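The startkey/endkey rename above is a real bug fix: CouchDB query options are lower-case, so the previous startKey/endKey were effectively ignored and the view was scanned unbounded. A small illustrative example of the corrected options, assuming DatabaseQueryOpts from @budibase/types (the table id is hypothetical):

import { DatabaseQueryOpts } from "@budibase/types"

// all link documents for one table: keys range from [tableId] to [tableId, {}]
const params: DatabaseQueryOpts = {
  startkey: ["ta_example"], // lower-case keys, otherwise the bound is dropped
  endkey: ["ta_example", {}], // {} sorts after every row id, closing the range
  include_docs: true,
}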

View File

@ -5,6 +5,7 @@ import {
FieldSchema,
RelationshipFieldMetadata,
VirtualDocumentType,
INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types"
import { FieldTypes } from "../constants"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
@ -18,7 +19,7 @@ export const enum AppStatus {
}
export const BudibaseInternalDB = {
_id: "bb_internal",
_id: INTERNAL_TABLE_SOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE,
name: "Budibase DB",
source: "BUDIBASE",

View File

@ -75,7 +75,6 @@ const environment = {
},
isTest: coreEnv.isTest,
isJest: coreEnv.isJest,
isDev: coreEnv.isDev,
isProd: () => {
return !coreEnv.isDev()

View File

@ -1,6 +1,4 @@
import fetch from "node-fetch"
// @ts-ignore
fetch.mockSearch()
import {
generateMakeRequest,
MakeRequestResponse,
@ -13,12 +11,15 @@ import {
RelationshipType,
Row,
Table,
TableSourceType,
} from "@budibase/types"
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils"
import { Client } from "pg"
// @ts-ignore
fetch.mockSearch()
const config = setup.getConfig()!
@ -52,7 +53,7 @@ describe("postgres integrations", () => {
async function createAuxTable(prefix: string) {
return await config.createTable({
name: `${prefix}_${generator.word({ length: 6 })}`,
type: "external",
type: "table",
primary: ["id"],
primaryDisplay: "title",
schema: {
@ -67,6 +68,7 @@ describe("postgres integrations", () => {
},
},
sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
}
@ -88,7 +90,7 @@ describe("postgres integrations", () => {
primaryPostgresTable = await config.createTable({
name: `p_${generator.word({ length: 6 })}`,
type: "external",
type: "table",
primary: ["id"],
schema: {
id: {
@ -143,6 +145,7 @@ describe("postgres integrations", () => {
},
},
sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
@ -249,7 +252,7 @@ describe("postgres integrations", () => {
async function createDefaultPgTable() {
return await config.createTable({
name: generator.word({ length: 10 }),
type: "external",
type: "table",
primary: ["id"],
schema: {
id: {
@ -259,6 +262,7 @@ describe("postgres integrations", () => {
},
},
sourceId: postgresDatasource._id,
sourceType: TableSourceType.EXTERNAL,
})
}

View File

@ -10,11 +10,12 @@ import {
QueryJson,
QueryType,
Row,
Schema,
SearchFilters,
SortJson,
ExternalTable,
Table,
TableRequest,
Schema,
TableSourceType,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import {
@ -262,11 +263,13 @@ class GoogleSheetsIntegration implements DatasourcePlus {
id?: string
) {
// base table
const table: ExternalTable = {
const table: Table = {
type: "table",
name: title,
primary: [GOOGLE_SHEETS_PRIMARY_KEY],
schema: {},
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
}
if (id) {
table._id = id
@ -283,7 +286,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
entities: Record<string, Table>
): Promise<Schema> {
// not fully configured yet
if (!this.config.auth) {
@ -291,7 +294,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, ExternalTable> = {}
const tables: Record<string, Table> = {}
let errors: Record<string, string> = {}
await utils.parallelForeach(
sheets,

View File

@ -2,7 +2,7 @@ import {
DatasourceFieldType,
Integration,
Operation,
ExternalTable,
Table,
TableSchema,
QueryJson,
QueryType,
@ -12,6 +12,7 @@ import {
ConnectionInfo,
SourceName,
Schema,
TableSourceType,
} from "@budibase/types"
import {
getSqlQuery,
@ -380,7 +381,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
entities: Record<string, Table>
): Promise<Schema> {
await this.connect()
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
@ -394,7 +395,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, ExternalTable> = {}
const tables: Record<string, Table> = {}
for (let tableName of tableNames) {
// get the column definition (type)
const definition = await this.runSQL(
@ -439,7 +440,9 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
}
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
type: "table",
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
primary: primaryKeys,
name: tableName,
schema,

View File

@ -4,13 +4,14 @@ import {
QueryType,
QueryJson,
SqlQuery,
ExternalTable,
Table,
TableSchema,
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
SourceName,
Schema,
TableSourceType,
} from "@budibase/types"
import {
getSqlQuery,
@ -278,9 +279,9 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
entities: Record<string, Table>
): Promise<Schema> {
const tables: { [key: string]: ExternalTable } = {}
const tables: { [key: string]: Table } = {}
await this.connect()
try {
@ -317,8 +318,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
}
if (!tables[tableName]) {
tables[tableName] = {
type: "table",
_id: buildExternalTableId(datasourceId, tableName),
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
primary: primaryKeys,
name: tableName,
schema,

View File

@ -5,11 +5,12 @@ import {
QueryJson,
QueryType,
SqlQuery,
ExternalTable,
Table,
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
Schema,
TableSourceType,
} from "@budibase/types"
import {
buildExternalTableId,
@ -263,25 +264,27 @@ class OracleIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
entities: Record<string, Table>
): Promise<Schema> {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
const oracleTables = this.mapColumns(columnsResponse)
const tables: { [key: string]: ExternalTable } = {}
const tables: { [key: string]: Table } = {}
// iterate each table
Object.values(oracleTables).forEach(oracleTable => {
let table = tables[oracleTable.name]
if (!table) {
table = {
type: "table",
_id: buildExternalTableId(datasourceId, oracleTable.name),
primary: [],
name: oracleTable.name,
schema: {},
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
}
tables[oracleTable.name] = table
}

View File

@ -5,12 +5,13 @@ import {
QueryType,
QueryJson,
SqlQuery,
ExternalTable,
Table,
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
SourceName,
Schema,
TableSourceType,
} from "@budibase/types"
import {
getSqlQuery,
@ -273,7 +274,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
entities: Record<string, Table>
): Promise<Schema> {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
@ -300,7 +301,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
const tables: { [key: string]: ExternalTable } = {}
const tables: { [key: string]: Table } = {}
for (let column of columnsResponse.rows) {
const tableName: string = column.table_name
@ -309,11 +310,13 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
// table key doesn't exist yet
if (!tables[tableName] || !tables[tableName].schema) {
tables[tableName] = {
type: "table",
_id: buildExternalTableId(datasourceId, tableName),
primary: tableKeys[tableName] || [],
name: tableName,
schema: {},
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
}
}

View File

@ -30,7 +30,8 @@ GoogleSpreadsheet.mockImplementation(() => mockGoogleIntegration)
import { structures } from "@budibase/backend-core/tests"
import TestConfiguration from "../../tests/utilities/TestConfiguration"
import GoogleSheetsIntegration from "../googlesheets"
import { FieldType, Table, TableSchema } from "@budibase/types"
import { FieldType, Table, TableSchema, TableSourceType } from "@budibase/types"
import { generateDatasourceID } from "../../db/utils"
describe("Google Sheets Integration", () => {
let integration: any,
@ -65,7 +66,10 @@ describe("Google Sheets Integration", () => {
function createBasicTable(name: string, columns: string[]): Table {
return {
type: "table",
name,
sourceId: generateDatasourceID(),
sourceType: TableSourceType.EXTERNAL,
schema: {
...columns.reduce((p, c) => {
p[c] = {

View File

@ -1,39 +1,47 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import env from "../../../environment"
let container: StartedTestContainer | undefined
const isMac = process.platform === "darwin"
export async function getDsConfig(): Promise<Datasource> {
if (!container) {
container = await new GenericContainer("postgres")
.withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password")
.withWaitStrategy(
Wait.forLogMessage(
"PostgreSQL init process complete; ready for start up."
try {
if (!container) {
// postgres 15-bullseye safer bet on Linux
const version = isMac ? undefined : "15-bullseye"
container = await new GenericContainer("postgres", version)
.withExposedPorts(5432)
.withEnv("POSTGRES_PASSWORD", "password")
.withWaitStrategy(
Wait.forLogMessage(
"PostgreSQL init process complete; ready for start up."
)
)
)
.start()
}
.start()
}
const host = container.getContainerIpAddress()
const port = container.getMappedPort(5432)
const host = container.getContainerIpAddress()
const port = container.getMappedPort(5432)
return {
type: "datasource_plus",
source: SourceName.POSTGRES,
plus: true,
config: {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
ca: false,
},
return {
type: "datasource_plus",
source: SourceName.POSTGRES,
plus: true,
config: {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
ca: false,
},
}
} catch (err) {
    throw new Error("**UNABLE TO CREATE POSTGRES CONTAINER**")
}
}

View File

@ -4,10 +4,14 @@ import {
SearchFilters,
Datasource,
FieldType,
ExternalTable,
TableSourceType,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
import {
InvalidColumns,
NoEmptyFilterStrings,
DEFAULT_BB_DATASOURCE_ID,
} from "../constants"
import { helpers } from "@budibase/shared-core"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@ -83,10 +87,29 @@ export enum SqlClient {
ORACLE = "oracledb",
}
export function isExternalTable(tableId: string) {
export function isExternalTableID(tableId: string) {
return tableId.includes(DocumentType.DATASOURCE)
}
export function isInternalTableID(tableId: string) {
return !isExternalTableID(tableId)
}
export function isExternalTable(table: Table) {
if (
table?.sourceId &&
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
) {
return true
} else if (table?.sourceType === TableSourceType.EXTERNAL) {
return true
} else if (table?._id && isExternalTableID(table._id)) {
return true
}
return false
}
export function buildExternalTableId(datasourceId: string, tableName: string) {
// encode spaces
if (tableName.includes(" ")) {
@ -297,9 +320,9 @@ function copyExistingPropsOver(
* @param entities The old list of tables, if there was any to look for definitions in.
*/
export function finaliseExternalTables(
tables: Record<string, ExternalTable>,
entities: Record<string, ExternalTable>
): Record<string, ExternalTable> {
tables: Record<string, Table>,
entities: Record<string, Table>
): Record<string, Table> {
let finalTables: Record<string, Table> = {}
const tableIds = Object.values(tables).map(table => table._id!)
for (let [name, table] of Object.entries(tables)) {
@ -312,7 +335,7 @@ export function finaliseExternalTables(
}
export function checkExternalTables(
tables: Record<string, ExternalTable>
tables: Record<string, Table>
): Record<string, string> {
const invalidColumns = Object.values(InvalidColumns) as string[]
const errors: Record<string, string> = {}
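To keep the two helpers above straight: isExternalTableID stays a cheap string check on a table _id, while the new isExternalTable inspects a full Table document, falling back through sourceId, sourceType and finally _id. A hedged usage sketch (the import path is illustrative):

import { Table } from "@budibase/types"
import { isExternalTable, isExternalTableID } from "./integrations/utils" // illustrative path

declare const table: Table

if (isExternalTableID(table._id!)) {
  // the id itself carries a datasource prefix
}
if (isExternalTable(table)) {
  // sourceId / sourceType mark it external even when the id alone is ambiguous
}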

View File

@ -1,5 +1,12 @@
import { generator } from "@budibase/backend-core/tests"
import { BBRequest, FieldType, Row, Table } from "@budibase/types"
import {
BBRequest,
FieldType,
Row,
Table,
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
} from "@budibase/types"
import * as utils from "../../db/utils"
import trimViewRowInfoMiddleware from "../trimViewRowInfo"
@ -73,6 +80,8 @@ describe("trimViewRowInfo middleware", () => {
const table: Table = {
_id: tableId,
name: generator.word(),
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table",
schema: {
name: {

View File

@ -1,7 +1,6 @@
import { Ctx, Row } from "@budibase/types"
import * as utils from "../db/utils"
import sdk from "../sdk"
import { db } from "@budibase/backend-core"
import { Next } from "koa"
import { getTableId } from "../api/controllers/row/utils"

View File

@ -11,10 +11,11 @@ import {
isOIDCConfig,
isSettingsConfig,
ConfigType,
DatabaseQueryOpts,
} from "@budibase/types"
import env from "./../../../../environment"
export const getConfigParams = () => {
export function getConfigParams(): DatabaseQueryOpts {
return {
include_docs: true,
startkey: `${DocumentType.CONFIG}${SEPARATOR}`,

View File

@ -26,7 +26,6 @@ export interface DBDumpOpts {
export interface ExportOpts extends DBDumpOpts {
tar?: boolean
excludeRows?: boolean
excludeLogs?: boolean
encryptPassword?: string
}
@ -83,14 +82,15 @@ export async function exportDB(
})
}
function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
function defineFilter(excludeRows?: boolean) {
const ids = [
USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX,
AUTOMATION_LOG_PREFIX,
]
if (excludeRows) {
ids.push(TABLE_ROW_PREFIX)
}
if (excludeLogs) {
ids.push(AUTOMATION_LOG_PREFIX)
}
return (doc: any) =>
!ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
}
@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
fs.writeFileSync(join(tmpPath, path), contents)
}
}
// get all of the files
// get all the files
else {
tmpPath = await objectStore.retrieveDirectory(
ObjectStoreBuckets.APPS,
@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
// enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE)
await exportDB(appId, {
filter: defineFilter(config?.excludeRows, config?.excludeLogs),
filter: defineFilter(config?.excludeRows),
exportPath: dbPath,
})
@ -191,7 +191,6 @@ export async function streamExportApp({
}) {
const tmpPath = await exportApp(appId, {
excludeRows,
excludeLogs: true,
tar: true,
encryptPassword,
})

View File

@ -0,0 +1,5 @@
import * as links from "./links"
export default {
...links,
}

View File

@ -0,0 +1,39 @@
import { context } from "@budibase/backend-core"
import { isTableId } from "@budibase/backend-core/src/docIds"
import {
DatabaseQueryOpts,
LinkDocument,
LinkDocumentValue,
} from "@budibase/types"
import { ViewName, getQueryIndex } from "../../../../src/db/utils"
export async function fetch(tableId: string): Promise<LinkDocumentValue[]> {
if (!isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`)
}
const db = context.getAppDB()
const params: DatabaseQueryOpts = {
startkey: [tableId],
endkey: [tableId, {}],
}
const linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
return linkRows.map(row => row.value as LinkDocumentValue)
}
export async function fetchWithDocument(
tableId: string
): Promise<LinkDocument[]> {
if (!isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`)
}
const db = context.getAppDB()
const params: DatabaseQueryOpts = {
startkey: [tableId],
endkey: [tableId, {}],
include_docs: true,
}
const linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
return linkRows.map(row => row.doc as LinkDocument)
}
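The new links SDK module gives callers, notably the column migration further down in this diff, a way to read raw LinkDocuments for a table without going through row enrichment. A rough usage sketch (the sdk import path is illustrative):

import sdk from "./sdk" // illustrative path

async function listLinks(tableId: string) {
  // every link document touching this table, with the full docs included
  const links = await sdk.links.fetchWithDocument(tableId)
  for (const link of links) {
    // doc1 and doc2 each carry { tableId, rowId, fieldName } for one side of the link
    console.log(link.doc1.fieldName, "->", link.doc2.rowId)
  }
}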

View File

@ -1,4 +1,4 @@
import { IncludeRelationship, Operation, Row } from "@budibase/types"
import { IncludeRelationship, Operation } from "@budibase/types"
import { handleRequest } from "../../../api/controllers/row/external"
import { breakRowIdField } from "../../../integrations/utils"

View File

@ -1,5 +1,5 @@
import { SearchFilters, SearchParams, Row } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import { Row, SearchFilters, SearchParams } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal"
import * as external from "./search/external"
import { Format } from "../../../api/controllers/view/exporters"
@ -12,7 +12,7 @@ export interface ViewParams {
}
function pickApi(tableId: any) {
if (isExternalTable(tableId)) {
if (isExternalTableID(tableId)) {
return external
}
return internal
@ -49,6 +49,10 @@ export async function fetch(tableId: string): Promise<Row[]> {
return pickApi(tableId).fetch(tableId)
}
export async function fetchRaw(tableId: string): Promise<Row[]> {
return pickApi(tableId).fetchRaw(tableId)
}
export async function fetchView(
tableId: string,
viewName: string,

View File

@ -186,6 +186,12 @@ export async function fetch(tableId: string): Promise<Row[]> {
})
}
export async function fetchRaw(tableId: string): Promise<Row[]> {
return await handleRequest<Operation.READ>(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}
export async function fetchView(viewName: string) {
// there are no views in external datasources, shouldn't ever be called
// for now just fetch

View File

@ -140,14 +140,13 @@ export async function exportRows(
}
export async function fetch(tableId: string): Promise<Row[]> {
const db = context.getAppDB()
const table = await sdk.tables.getTable(tableId)
const rows = await getRawTableData(db, tableId)
const rows = await fetchRaw(tableId)
return await outputProcessing(table, rows)
}
async function getRawTableData(db: Database, tableId: string) {
export async function fetchRaw(tableId: string): Promise<Row[]> {
const db = context.getAppDB()
let rows
if (tableId === InternalTables.USER_METADATA) {
rows = await sdk.users.fetchMetadata()
@ -182,7 +181,7 @@ export async function fetchView(
})
} else {
const tableId = viewInfo.meta.tableId
const data = await getRawTableData(db, tableId)
const data = await fetchRaw(tableId)
response = await inMemoryViews.runView(
viewInfo,
calculation as string,
@ -198,11 +197,7 @@ export async function fetchView(
try {
table = await sdk.tables.getTable(viewInfo.meta.tableId)
} catch (err) {
/* istanbul ignore next */
table = {
name: "",
schema: {},
}
throw new Error("Unable to retrieve view table.")
}
rows = await outputProcessing(table, response.rows)
}

View File

@ -7,6 +7,7 @@ import {
SourceName,
Table,
SearchParams,
TableSourceType,
} from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
@ -15,6 +16,7 @@ import {
expectAnyExternalColsAttributes,
generator,
} from "@budibase/backend-core/tests"
import datasource from "../../../../../api/routes/datasource"
jest.unmock("mysql2/promise")
@ -23,36 +25,7 @@ jest.setTimeout(30000)
describe.skip("external", () => {
const config = new TestConfiguration()
let externalDatasource: Datasource
const tableData: Table = {
name: generator.word(),
type: "external",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
}
let externalDatasource: Datasource, tableData: Table
beforeAll(async () => {
const container = await new GenericContainer("mysql")
@ -84,12 +57,43 @@ describe.skip("external", () => {
},
},
})
tableData = {
name: generator.word(),
type: "table",
primary: ["id"],
sourceId: externalDatasource._id!,
sourceType: TableSourceType.EXTERNAL,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
}
})
describe("search", () => {
const rows: Row[] = []
beforeAll(async () => {
const table = await config.createTable({
const table = await config.createExternalTable({
...tableData,
sourceId: externalDatasource._id,
})

View File

@ -1,4 +1,11 @@
import { FieldType, Row, Table, SearchParams } from "@budibase/types"
import {
FieldType,
Row,
Table,
SearchParams,
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
} from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../internal"
import {
@ -12,6 +19,8 @@ describe("internal", () => {
const tableData: Table = {
name: generator.word(),
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
name: "name",

View File

@ -3,14 +3,19 @@ import { db as dbCore } from "@budibase/backend-core"
import {
FieldType,
FieldTypeSubtypes,
Table,
INTERNAL_TABLE_SOURCE_ID,
SearchParams,
Table,
TableSourceType,
} from "@budibase/types"
const tableId = "ta_a"
const tableWithUserCol: Table = {
type: "table",
_id: tableId,
name: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
user: {
name: "user",
@ -21,8 +26,11 @@ const tableWithUserCol: Table = {
}
const tableWithUsersCol: Table = {
type: "table",
_id: tableId,
name: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
user: {
name: "user",

View File

@ -35,10 +35,10 @@ export async function save(
opts?: { tableId?: string; renaming?: RenameColumn }
) {
let tableToSave: TableRequest = {
...update,
type: "table",
_id: buildExternalTableId(datasourceId, update.name),
sourceId: datasourceId,
...update,
}
const tableId = opts?.tableId || update._id

View File

@ -6,6 +6,7 @@ import {
RelationshipFieldMetadata,
RelationshipType,
Table,
TableSourceType,
} from "@budibase/types"
import { FieldTypes } from "../../../../constants"
import {
@ -76,12 +77,16 @@ export function generateManyLinkSchema(
const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable)
const datasourceId = datasource._id!
// first create the new table
const junctionTable = {
_id: buildExternalTableId(datasource._id!, jcTblName),
const junctionTable: Table = {
type: "table",
_id: buildExternalTableId(datasourceId, jcTblName),
name: jcTblName,
primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary],
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: {
[primary]: foreignKeyStructure(primary, {
toTable: table.name,

View File

@ -1,30 +1,47 @@
import { context } from "@budibase/backend-core"
import {
BudibaseInternalDB,
getMultiIDParams,
getTableParams,
} from "../../../db/utils"
import { getMultiIDParams, getTableParams } from "../../../db/utils"
import {
breakExternalTableId,
isExternalTable,
isExternalTableID,
isSQL,
} from "../../../integrations/utils"
import {
AllDocsResponse,
Database,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableResponse,
TableSourceType,
TableViewsResponse,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"
function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
return docs.rows.map((tableDoc: any) => ({
...tableDoc.doc,
type: "internal",
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
}))
export function processTable(table: Table): Table {
if (table._id && isExternalTableID(table._id)) {
return {
...table,
type: "table",
sourceType: TableSourceType.EXTERNAL,
}
} else {
return {
...table,
type: "table",
sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
}
}
}
export function processTables(tables: Table[]): Table[] {
return tables.map(table => processTable(table))
}
function processEntities(tables: Record<string, Table>) {
for (let key of Object.keys(tables)) {
tables[key] = processTable(tables[key])
}
return tables
}
export async function getAllInternalTables(db?: Database): Promise<Table[]> {
@ -36,7 +53,7 @@ export async function getAllInternalTables(db?: Database): Promise<Table[]> {
include_docs: true,
})
)
return processInternalTables(internalTables)
return processTables(internalTables.rows.map(row => row.doc!))
}
async function getAllExternalTables(): Promise<Table[]> {
@ -48,7 +65,7 @@ async function getAllExternalTables(): Promise<Table[]> {
final = final.concat(Object.values(entities))
}
}
return final
return processTables(final)
}
export async function getExternalTable(
@ -56,19 +73,21 @@ export async function getExternalTable(
tableName: string
): Promise<Table> {
const entities = await getExternalTablesInDatasource(datasourceId)
return entities[tableName]
return processTable(entities[tableName])
}
export async function getTable(tableId: string): Promise<Table> {
const db = context.getAppDB()
if (isExternalTable(tableId)) {
let output: Table
if (isExternalTableID(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await datasources.get(datasourceId!)
const table = await getExternalTable(datasourceId!, tableName!)
return { ...table, sql: isSQL(datasource) }
output = { ...table, sql: isSQL(datasource) }
} else {
return db.get(tableId)
output = await db.get<Table>(tableId)
}
return processTable(output)
}
export async function getAllTables() {
@ -76,7 +95,7 @@ export async function getAllTables() {
getAllInternalTables(),
getAllExternalTables(),
])
return [...internal, ...external]
return processTables([...internal, ...external])
}
export async function getExternalTablesInDatasource(
@ -86,12 +105,14 @@ export async function getExternalTablesInDatasource(
if (!datasource || !datasource.entities) {
throw new Error("Datasource is not configured fully.")
}
return datasource.entities
return processEntities(datasource.entities)
}
export async function getTables(tableIds: string[]): Promise<Table[]> {
const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
const externalTableIds = tableIds.filter(tableId =>
isExternalTableID(tableId)
),
internalTableIds = tableIds.filter(tableId => !isExternalTableID(tableId))
let tables: Table[] = []
if (externalTableIds.length) {
const externalTables = await getAllExternalTables()
@ -106,9 +127,9 @@ export async function getTables(tableIds: string[]): Promise<Table[]> {
const internalTableDocs = await db.allDocs<Table[]>(
getMultiIDParams(internalTableIds)
)
tables = tables.concat(processInternalTables(internalTableDocs))
tables = tables.concat(internalTableDocs.rows.map(row => row.doc!))
}
return tables
return processTables(tables)
}
export function enrichViewSchemas(table: Table): TableResponse {
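The processTable/processTables normalisation above is the heart of this refactor: any table read from CouchDB or from a datasource's entities map comes back with sourceId and sourceType filled in before it leaves the SDK. A hedged sketch of the invariant callers can then rely on:

import { INTERNAL_TABLE_SOURCE_ID, Table, TableSourceType } from "@budibase/types"

// after processTable(), both fields are guaranteed to be set
function describeSource(table: Table): string {
  return table.sourceType === TableSourceType.EXTERNAL
    ? `external table from datasource ${table.sourceId}`
    : `internal table in ${INTERNAL_TABLE_SOURCE_ID}` // "bb_internal", per the BudibaseInternalDB change above
}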

View File

@ -2,10 +2,12 @@ import { populateExternalTableSchemas } from "./validation"
import * as getters from "./getters"
import * as updates from "./update"
import * as utils from "./utils"
import { migrate } from "./migration"
export default {
populateExternalTableSchemas,
...updates,
...getters,
...utils,
migrate,
}

View File

@ -0,0 +1,194 @@
import { BadRequestError, context, db as dbCore } from "@budibase/backend-core"
import {
BBReferenceFieldMetadata,
FieldSchema,
FieldSubtype,
InternalTable,
isBBReferenceField,
isRelationshipField,
LinkDocument,
RelationshipFieldMetadata,
RelationshipType,
Row,
Table,
} from "@budibase/types"
import sdk from "../../../sdk"
import { isExternalTableID } from "../../../integrations/utils"
import { EventType, updateLinks } from "../../../db/linkedRows"
import { cloneDeep } from "lodash"
import { isInternalColumnName } from "@budibase/backend-core/src/db"
export interface MigrationResult {
tablesUpdated: Table[]
}
export async function migrate(
table: Table,
oldColumn: FieldSchema,
newColumn: FieldSchema
): Promise<MigrationResult> {
if (newColumn.name in table.schema) {
throw new BadRequestError(`Column "${newColumn.name}" already exists`)
}
if (newColumn.name === "") {
throw new BadRequestError(`Column name cannot be empty`)
}
if (isInternalColumnName(newColumn.name)) {
throw new BadRequestError(`Column name cannot be a reserved column name`)
}
table.schema[newColumn.name] = newColumn
table = await sdk.tables.saveTable(table)
let migrator = getColumnMigrator(table, oldColumn, newColumn)
try {
return await migrator.doMigration()
} catch (e) {
// If the migration fails then we need to roll back the table schema
// change.
delete table.schema[newColumn.name]
await sdk.tables.saveTable(table)
throw e
}
}
interface ColumnMigrator {
doMigration(): Promise<MigrationResult>
}
function getColumnMigrator(
table: Table,
oldColumn: FieldSchema,
newColumn: FieldSchema
): ColumnMigrator {
// For now, we're only supporting migrations of user relationships to user
// columns in internal tables. In the future, we may want to support other
// migrations but for now return an error if we aren't migrating a user
// relationship.
if (isExternalTableID(table._id!)) {
throw new BadRequestError("External tables cannot be migrated")
}
if (!(oldColumn.name in table.schema)) {
throw new BadRequestError(`Column "${oldColumn.name}" does not exist`)
}
if (!isBBReferenceField(newColumn)) {
throw new BadRequestError(`Column "${newColumn.name}" is not a user column`)
}
if (newColumn.subtype !== "user" && newColumn.subtype !== "users") {
throw new BadRequestError(`Column "${newColumn.name}" is not a user column`)
}
if (!isRelationshipField(oldColumn)) {
throw new BadRequestError(
`Column "${oldColumn.name}" is not a user relationship`
)
}
if (oldColumn.tableId !== InternalTable.USER_METADATA) {
throw new BadRequestError(
`Column "${oldColumn.name}" is not a user relationship`
)
}
if (oldColumn.relationshipType === RelationshipType.ONE_TO_MANY) {
if (newColumn.subtype !== FieldSubtype.USER) {
throw new BadRequestError(
`Column "${oldColumn.name}" is a one-to-many column but "${newColumn.name}" is not a single user column`
)
}
return new SingleUserColumnMigrator(table, oldColumn, newColumn)
}
if (
oldColumn.relationshipType === RelationshipType.MANY_TO_MANY ||
oldColumn.relationshipType === RelationshipType.MANY_TO_ONE
) {
if (newColumn.subtype !== FieldSubtype.USERS) {
throw new BadRequestError(
`Column "${oldColumn.name}" is a ${oldColumn.relationshipType} column but "${newColumn.name}" is not a multi user column`
)
}
return new MultiUserColumnMigrator(table, oldColumn, newColumn)
}
throw new BadRequestError(`Unknown migration type`)
}
abstract class UserColumnMigrator implements ColumnMigrator {
constructor(
protected table: Table,
protected oldColumn: RelationshipFieldMetadata,
protected newColumn: BBReferenceFieldMetadata
) {}
abstract updateRow(row: Row, link: LinkDocument): void
async doMigration(): Promise<MigrationResult> {
let oldTable = cloneDeep(this.table)
let rows = await sdk.rows.fetchRaw(this.table._id!)
let rowsById = rows.reduce((acc, row) => {
acc[row._id!] = row
return acc
}, {} as Record<string, Row>)
let links = await sdk.links.fetchWithDocument(this.table._id!)
for (let link of links) {
if (
link.doc1.tableId !== this.table._id ||
link.doc1.fieldName !== this.oldColumn.name ||
link.doc2.tableId !== InternalTable.USER_METADATA
) {
continue
}
let row = rowsById[link.doc1.rowId]
if (!row) {
// This can happen if the row has been deleted but the link hasn't,
// which was a state that was found during the initial testing of this
// feature. Not sure exactly what can cause it, but best to be safe.
continue
}
this.updateRow(row, link)
}
let db = context.getAppDB()
await db.bulkDocs(rows)
delete this.table.schema[this.oldColumn.name]
this.table = await sdk.tables.saveTable(this.table)
await updateLinks({
eventType: EventType.TABLE_UPDATED,
table: this.table,
oldTable,
})
let otherTable = await sdk.tables.getTable(this.oldColumn.tableId)
return {
tablesUpdated: [this.table, otherTable],
}
}
}
class SingleUserColumnMigrator extends UserColumnMigrator {
updateRow(row: Row, link: LinkDocument): void {
row[this.newColumn.name] = dbCore.getGlobalIDFromUserMetadataID(
link.doc2.rowId
)
}
}
class MultiUserColumnMigrator extends UserColumnMigrator {
updateRow(row: Row, link: LinkDocument): void {
if (!row[this.newColumn.name]) {
row[this.newColumn.name] = []
}
row[this.newColumn.name].push(
dbCore.getGlobalIDFromUserMetadataID(link.doc2.rowId)
)
}
}
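The guards at the top of getColumnMigrator mean this migration is only reachable for internal tables whose old column is a USER_METADATA relationship; anything else throws BadRequestError before any rows are touched. A hedged sketch of a direct SDK call, reusing the shapes defined in this file (the sdk import path is illustrative):

import { FieldSubtype, FieldType, Table } from "@budibase/types"
import sdk from "./sdk" // illustrative path

async function migrateUserRelationship(table: Table) {
  // oldColumn must already exist on table.schema and point at USER_METADATA
  const { tablesUpdated } = await sdk.tables.migrate(
    table,
    table.schema["user relationship"],
    {
      name: "user column",
      type: FieldType.BB_REFERENCE,
      subtype: FieldSubtype.USERS, // many-to-one / many-to-many relationships need the multi-user subtype
    }
  )
  return tablesUpdated
}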

View File

@ -1,4 +1,10 @@
import { FieldType, Table, ViewV2 } from "@budibase/types"
import {
FieldType,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import sdk from "../../.."
@ -13,6 +19,8 @@ describe("table sdk", () => {
_id: generator.guid(),
name: "TestTable",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
type: FieldType.STRING,

View File

@ -1,73 +1,92 @@
import { populateExternalTableSchemas } from "../validation"
import { cloneDeep } from "lodash/fp"
import { AutoReason, Datasource, Table } from "@budibase/types"
import {
AutoReason,
Datasource,
FieldType,
RelationshipType,
SourceName,
Table,
TableSourceType,
} from "@budibase/types"
import { isEqual } from "lodash"
import { generateDatasourceID } from "../../../../db/utils"
const SCHEMA = {
const datasourceId = generateDatasourceID()
const SCHEMA: Datasource = {
source: SourceName.POSTGRES,
type: "datasource",
_id: datasourceId,
entities: {
client: {
type: "table",
_id: "tableA",
name: "client",
primary: ["idC"],
primaryDisplay: "Name",
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: {
idC: {
autocolumn: true,
externalType: "int unsigned",
name: "idC",
type: "number",
type: FieldType.NUMBER,
},
Name: {
autocolumn: false,
externalType: "varchar(255)",
name: "Name",
type: "string",
type: FieldType.STRING,
},
project: {
fieldName: "idC",
foreignKey: "idC",
main: true,
name: "project",
relationshipType: "many-to-one",
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: "tableB",
type: "link",
type: FieldType.LINK,
},
},
},
project: {
type: "table",
_id: "tableB",
name: "project",
primary: ["idP"],
primaryDisplay: "Name",
sourceId: datasourceId,
sourceType: TableSourceType.EXTERNAL,
schema: {
idC: {
externalType: "int unsigned",
name: "idC",
type: "number",
type: FieldType.NUMBER,
},
idP: {
autocolumn: true,
externalType: "int unsigned",
name: "idProject",
type: "number",
type: FieldType.NUMBER,
},
Name: {
autocolumn: false,
externalType: "varchar(255)",
name: "Name",
type: "string",
type: FieldType.STRING,
},
client: {
fieldName: "idC",
foreignKey: "idC",
name: "client",
relationshipType: "one-to-many",
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: "tableA",
type: "link",
type: FieldType.LINK,
},
},
sql: true,
type: "table",
},
},
}
@ -95,12 +114,12 @@ describe("validation and update of external table schemas", () => {
function noOtherTableChanges(response: any) {
checkOtherColumns(
response.entities!.client!,
SCHEMA.entities.client as Table,
SCHEMA.entities!.client,
OTHER_CLIENT_COLS
)
checkOtherColumns(
response.entities!.project!,
SCHEMA.entities.project as Table,
SCHEMA.entities!.project,
OTHER_PROJECT_COLS
)
}

View File

@ -1,23 +1,30 @@
import { Table, RenameColumn } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import { isExternalTableID } from "../../../integrations/utils"
import sdk from "../../index"
import { context } from "@budibase/backend-core"
import { isExternal } from "./utils"
import { DocumentInsertResponse } from "@budibase/nano"
import * as external from "./external"
import * as internal from "./internal"
import { cloneDeep } from "lodash"
export * as external from "./external"
export * as internal from "./internal"
export async function saveTable(table: Table) {
export async function saveTable(table: Table): Promise<Table> {
const db = context.getAppDB()
if (isExternalTable(table._id!)) {
let resp: DocumentInsertResponse
if (isExternalTableID(table._id!)) {
const datasource = await sdk.datasources.get(table.sourceId!)
datasource.entities![table.name] = table
await db.put(datasource)
resp = await db.put(datasource)
} else {
await db.put(table)
resp = await db.put(table)
}
let tableClone = cloneDeep(table)
tableClone._rev = resp.rev
return tableClone
}
export async function update(table: Table, renaming?: RenameColumn) {

View File

@ -1,10 +1,10 @@
import { Table } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import { Table, TableSourceType } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.type === "external") {
if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
return true
} else if (opts.tableId && isExternalTable(opts.tableId)) {
} else if (opts.tableId && isExternalTableID(opts.tableId)) {
return true
}
return false

View File

@ -4,13 +4,13 @@ import { cloneDeep } from "lodash"
import sdk from "../../../sdk"
import * as utils from "../../../db/utils"
import { isExternalTable } from "../../../integrations/utils"
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal"
import * as external from "./external"
function pickApi(tableId: any) {
if (isExternalTable(tableId)) {
if (isExternalTableID(tableId)) {
return external
}
return internal

View File

@ -2,8 +2,10 @@ import _ from "lodash"
import {
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSchema,
TableSourceType,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
@ -14,6 +16,8 @@ describe("table sdk", () => {
_id: generator.guid(),
name: "TestTable",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
type: FieldType.STRING,

View File

@ -5,6 +5,7 @@ import { default as applications } from "./app/applications"
import { default as datasources } from "./app/datasources"
import { default as queries } from "./app/queries"
import { default as rows } from "./app/rows"
import { default as links } from "./app/links"
import { default as users } from "./users"
import { default as plugins } from "./plugins"
import * as views from "./app/views"
@ -22,6 +23,7 @@ const sdk = {
plugins,
views,
permissions,
links,
}
// default export for TS

View File

@ -39,12 +39,12 @@ describe("syncGlobalUsers", () => {
expect(metadata).toHaveLength(3)
expect(metadata).toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user1._id),
_id: db.generateUserMetadataID(user1._id!),
})
)
expect(metadata).toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user2._id),
_id: db.generateUserMetadataID(user2._id!),
})
)
})
@ -59,7 +59,7 @@ describe("syncGlobalUsers", () => {
expect(metadata).toHaveLength(1)
expect(metadata).not.toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user._id),
_id: db.generateUserMetadataID(user._id!),
})
)
})
@ -70,7 +70,7 @@ describe("syncGlobalUsers", () => {
const group = await proSdk.groups.save(structures.userGroups.userGroup())
const user1 = await config.createUser({ admin: false, builder: false })
const user2 = await config.createUser({ admin: false, builder: false })
await proSdk.groups.addUsers(group.id, [user1._id, user2._id])
await proSdk.groups.addUsers(group.id, [user1._id!, user2._id!])
await config.doInContext(config.appId, async () => {
await syncGlobalUsers()
@ -87,12 +87,12 @@ describe("syncGlobalUsers", () => {
expect(metadata).toHaveLength(3)
expect(metadata).toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user1._id),
_id: db.generateUserMetadataID(user1._id!),
})
)
expect(metadata).toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user2._id),
_id: db.generateUserMetadataID(user2._id!),
})
)
})
@ -109,7 +109,7 @@ describe("syncGlobalUsers", () => {
{ appId: config.prodAppId!, roleId: roles.BUILTIN_ROLE_IDS.BASIC },
],
})
await proSdk.groups.addUsers(group.id, [user1._id, user2._id])
await proSdk.groups.addUsers(group.id, [user1._id!, user2._id!])
await config.doInContext(config.appId, async () => {
await syncGlobalUsers()

View File

@ -2,37 +2,31 @@ import { generator, mocks, structures } from "@budibase/backend-core/tests"
// init the licensing mock
import * as pro from "@budibase/pro"
mocks.licenses.init(pro)
// use unlimited license by default
mocks.licenses.useUnlimited()
import { init as dbInit } from "../../db"
dbInit()
import env from "../../environment"
import {
basicTable,
basicRow,
basicRole,
basicAutomation,
basicDatasource,
basicQuery,
basicScreen,
basicLayout,
basicWebhook,
basicAutomationResults,
basicDatasource,
basicLayout,
basicQuery,
basicRole,
basicRow,
basicScreen,
basicTable,
basicWebhook,
} from "./structures"
import {
constants,
tenancy,
sessions,
auth,
cache,
constants,
context,
db as dbCore,
encryption,
auth,
roles,
env as coreEnv,
roles,
sessions,
tenancy,
} from "@budibase/backend-core"
import * as controllers from "./controllers"
import { cleanup } from "../../utilities/fileSystem"
@ -43,23 +37,33 @@ import supertest from "supertest"
import {
App,
AuthToken,
Automation,
CreateViewRequest,
Datasource,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
RelationshipFieldMetadata,
RelationshipType,
Row,
SearchFilters,
SourceName,
Table,
SearchFilters,
TableSourceType,
User,
UserRoles,
Automation,
View,
FieldType,
RelationshipType,
CreateViewRequest,
RelationshipFieldMetadata,
} from "@budibase/types"
import API from "./api"
import { cloneDeep } from "lodash"
mocks.licenses.init(pro)
// use unlimited license by default
mocks.licenses.useUnlimited()
dbInit()
type DefaultUserValues = {
globalUserId: string
email: string
@ -68,6 +72,11 @@ type DefaultUserValues = {
csrfToken: string
}
interface TableToBuild extends Omit<Table, "sourceId" | "sourceType"> {
sourceId?: string
sourceType?: TableSourceType
}
class TestConfiguration {
server: any
request: supertest.SuperTest<supertest.Test> | undefined
@ -263,7 +272,7 @@ class TestConfiguration {
} catch (err) {
existing = { email }
}
const user = {
const user: User = {
_id: id,
...existing,
roles: roles || {},
@ -303,7 +312,7 @@ class TestConfiguration {
admin?: boolean
roles?: UserRoles
} = {}
) {
): Promise<User> {
let { id, firstName, lastName, email, builder, admin, roles } = user
firstName = firstName || this.defaultUserValues.firstName
lastName = lastName || this.defaultUserValues.lastName
@ -323,10 +332,7 @@ class TestConfiguration {
roles,
})
await cache.user.invalidateUser(globalId)
return {
...resp,
globalId,
}
return resp
}
async createGroup(roleId: string = roles.BUILTIN_ROLE_IDS.BASIC) {
@ -549,10 +555,12 @@ class TestConfiguration {
// TABLE
async updateTable(
config?: Table,
config?: TableToBuild,
{ skipReassigning } = { skipReassigning: false }
): Promise<Table> {
config = config || basicTable()
config.sourceType = config.sourceType || TableSourceType.INTERNAL
config.sourceId = config.sourceId || INTERNAL_TABLE_SOURCE_ID
const response = await this._req(config, null, controllers.table.save)
if (!skipReassigning) {
this.table = response
@ -560,18 +568,32 @@ class TestConfiguration {
return response
}
async createTable(config?: Table, options = { skipReassigning: false }) {
async createTable(
config?: TableToBuild,
options = { skipReassigning: false }
) {
if (config != null && config._id) {
delete config._id
}
config = config || basicTable()
if (this.datasource && !config.sourceId) {
config.sourceId = this.datasource._id
if (this.datasource.plus) {
config.type = "external"
}
if (!config.sourceId) {
config.sourceId = INTERNAL_TABLE_SOURCE_ID
}
return this.updateTable(config, options)
}
async createExternalTable(
config?: TableToBuild,
options = { skipReassigning: false }
) {
if (config != null && config._id) {
delete config._id
}
config = config || basicTable()
if (this.datasource?._id) {
config.sourceId = this.datasource._id
config.sourceType = TableSourceType.EXTERNAL
}
return this.updateTable(config, options)
}
@ -583,12 +605,15 @@ class TestConfiguration {
async createLinkedTable(
relationshipType = RelationshipType.ONE_TO_MANY,
links: any = ["link"],
config?: Table
config?: TableToBuild
) {
if (!this.table) {
throw "Must have created a table first."
}
const tableConfig = config || basicTable()
if (!tableConfig.sourceId) {
tableConfig.sourceId = INTERNAL_TABLE_SOURCE_ID
}
tableConfig.primaryDisplay = "name"
for (let link of links) {
tableConfig.schema[link] = {
@ -600,15 +625,12 @@ class TestConfiguration {
} as RelationshipFieldMetadata
}
if (this.datasource && !tableConfig.sourceId) {
if (this.datasource?._id) {
tableConfig.sourceId = this.datasource._id
if (this.datasource.plus) {
tableConfig.type = "external"
}
tableConfig.sourceType = TableSourceType.EXTERNAL
}
const linkedTable = await this.createTable(tableConfig)
return linkedTable
return await this.createTable(tableConfig)
}
async createAttachmentTable() {
@ -783,8 +805,9 @@ class TestConfiguration {
// AUTOMATION LOG
async createAutomationLog(automation: Automation) {
return await context.doInAppContext(this.getProdAppId(), async () => {
async createAutomationLog(automation: Automation, appId?: string) {
appId = appId || this.getProdAppId()
return await context.doInAppContext(appId!, async () => {
return await pro.sdk.automations.logs.storeLog(
automation,
basicAutomationResults(automation._id!)

View File

@ -0,0 +1,45 @@
import {
CreateAppBackupResponse,
ImportAppBackupResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
export class BackupAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
exportBasicBackup = async (appId: string) => {
const result = await this.request
.post(`/api/backups/export?appId=${appId}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /application\/gzip/)
.expect(200)
return {
body: result.body as Buffer,
headers: result.headers,
}
}
createBackup = async (appId: string) => {
const result = await this.request
.post(`/api/apps/${appId}/backups`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as CreateAppBackupResponse
}
importBackup = async (
appId: string,
backupId: string
): Promise<ImportAppBackupResponse> => {
const result = await this.request
.post(`/api/apps/${appId}/backups/${backupId}/import`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as ImportAppBackupResponse
}
}
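A short sketch of how the new BackupAPI helper is meant to be driven from a test; config is a TestConfiguration as elsewhere in this diff, and the exact field names on CreateAppBackupResponse are assumed rather than confirmed here:

it("round-trips an app backup", async () => {
  const appId = config.getProdAppId()

  const backup = await config.api.backup.createBackup(appId)
  // the backup id field on CreateAppBackupResponse is assumed here
  await config.api.backup.importBackup(appId, (backup as any).backupId)

  // or stream a plain gzipped export of the app
  const exported = await config.api.backup.exportBasicBackup(appId)
  expect(exported.headers["content-type"]).toMatch(/gzip/)
})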

View File

@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
import { ApplicationAPI } from "./application"
import { BackupAPI } from "./backup"
import { AttachmentAPI } from "./attachment"
export default class API {
@ -18,6 +19,7 @@ export default class API {
datasource: DatasourceAPI
screen: ScreenAPI
application: ApplicationAPI
backup: BackupAPI
attachment: AttachmentAPI
constructor(config: TestConfiguration) {
@ -29,6 +31,7 @@ export default class API {
this.datasource = new DatasourceAPI(config)
this.screen = new ScreenAPI(config)
this.application = new ApplicationAPI(config)
this.backup = new BackupAPI(config)
this.attachment = new AttachmentAPI(config)
}
}

View File

@ -1,4 +1,10 @@
import { SaveTableRequest, SaveTableResponse, Table } from "@budibase/types"
import {
MigrateRequest,
MigrateResponse,
SaveTableRequest,
SaveTableResponse,
Table,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@ -42,4 +48,23 @@ export class TableAPI extends TestAPI {
.expect(expectStatus)
return res.body
}
migrate = async (
tableId: string,
data: MigrateRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<MigrateResponse> => {
const res = await this.request
.post(`/api/tables/${tableId}/migrate`)
.send(data)
.set(this.config.defaultHeaders())
if (res.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
res.status
} with body ${JSON.stringify(res.body)}`
)
}
return res.body
}
}

View File

@ -19,6 +19,8 @@ import {
FieldType,
SourceName,
Table,
INTERNAL_TABLE_SOURCE_ID,
TableSourceType,
} from "@budibase/types"
const { BUILTIN_ROLE_IDS } = roles
@ -26,6 +28,8 @@ export function basicTable(): Table {
return {
name: "TestTable",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
type: FieldType.STRING,

View File

@ -17,7 +17,7 @@ import {
processInputBBReferences,
processOutputBBReferences,
} from "./bbReferenceProcessor"
import { isExternalTable } from "../../integrations/utils"
import { isExternalTableID } from "../../integrations/utils"
export * from "./utils"
type AutoColumnProcessingOpts = {
@ -51,7 +51,7 @@ function getRemovedAttachmentKeys(
/**
* This will update any auto columns that are found on the row/table with the correct information based on
* time now and the current logged in user making the request.
* @param user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param userId The user to be used for an appId as well as the createdBy and createdAt fields.
* @param table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param row The row which is to be updated with information for the auto columns.
* @param opts specific options for function to carry out optional features.
@ -267,7 +267,7 @@ export async function outputProcessing<T extends Row[] | Row>(
)) as Row[]
}
// remove null properties to match internal API
if (isExternalTable(table._id!)) {
if (isExternalTableID(table._id!)) {
for (let row of enriched) {
for (let key of Object.keys(row)) {
if (row[key] === null) {

Some files were not shown because too many files have changed in this diff