commit a563cf3594
Merge master.
@@ -18,8 +18,7 @@ env:
   BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
   PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-  NX_BASE_BRANCH: origin/${{ github.base_ref }}
-  USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
+  NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
+  USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}

 jobs:
   lint:
@@ -231,7 +230,7 @@ jobs:
          cache: "yarn"
      - run: yarn --frozen-lockfile
      - name: Build packages
-       run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client --scope @budibase/backend-core
+       run: yarn build --scope @budibase/server --scope @budibase/worker
      - name: Run tests
        run: |
          cd qa-core
@@ -4,6 +4,8 @@ on:
     types: [created]
   pull_request_target:
     types: [opened,closed,synchronize]
+    branches:
+      - master

 jobs:
   CLAssistant:
@@ -33,4 +35,4 @@ jobs:
           #custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
           #custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
           #lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
-          #use-dco-flag: true - If you are using DCO instead of CLA
+          #use-dco-flag: true - If you are using DCO instead of CLA
@@ -0,0 +1,20 @@
+name: Deploy QA
+
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  trigger-deploy-to-qa-env:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: peter-evans/repository-dispatch@v2
+        env:
+          PAYLOAD_VERSION: ${{ github.sha }}
+          REF_NAME: ${{ github.ref_name}}
+        with:
+          repository: budibase/budibase-deploys
+          event-type: budicloud-qa-deploy
+          token: ${{ secrets.GH_ACCESS_TOKEN }}
@@ -123,6 +123,7 @@ jobs:
       - uses: passeidireto/trigger-external-workflow-action@main
         env:
           PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
+          REF_NAME: ${{ github.ref_name}}
         with:
           repository: budibase/budibase-deploys
           event: budicloud-qa-deploy
@@ -5,7 +5,7 @@ ENV COUCHDB_PASSWORD admin
 EXPOSE 5984

 RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
-    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | sudo apt-key add - && \
-    apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
+    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
+    apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
     apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \
@@ -4,7 +4,9 @@ version: "3"

 services:
   app-service:
-    build: ../packages/server
+    build:
+      context: ..
+      dockerfile: packages/server/Dockerfile.v2
     container_name: build-bbapps
     environment:
       SELF_HOSTED: 1
@@ -32,7 +34,9 @@ services:
     # - /some/path/to/plugins:/plugins

   worker-service:
-    build: ../packages/worker
+    build:
+      context: ..
+      dockerfile: packages/worker/Dockerfile.v2
     container_name: build-bbworker
     environment:
       SELF_HOSTED: 1
@@ -19,13 +19,15 @@ COPY packages/string-templates/package.json packages/string-templates/package.js

 COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
 RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
-RUN ./scripts/removeWorkspaceDependencies.sh
+RUN ./scripts/removeWorkspaceDependencies.sh packages/server/package.json
+RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json


 # We will never want to sync pro, but the script is still required
 RUN echo '' > scripts/syncProPackage.js
 RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
-RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
+RUN ./scripts/removeWorkspaceDependencies.sh package.json
+RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production

 # copy the actual code
 COPY packages/server/dist packages/server/dist
@@ -1,5 +1,5 @@
 {
-  "version": "2.11.39",
+  "version": "2.11.41",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
nx.json
@@ -3,14 +3,16 @@
     "default": {
       "runner": "nx-cloud",
       "options": {
-        "cacheableOperations": ["build", "test", "check:types"],
-        "accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ="
+        "cacheableOperations": ["build", "test", "check:types"]
       }
     }
   },
   "targetDefaults": {
     "build": {
-      "inputs": ["{workspaceRoot}/scripts/build.js"]
+      "inputs": [
+        "{workspaceRoot}/scripts/build.js",
+        "{workspaceRoot}/lerna.json"
+      ]
     }
   }
 }
@@ -46,7 +46,7 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
     "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
     "lint:eslint": "eslint packages qa-core --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
@@ -35,7 +35,7 @@
     "dotenv": "16.0.1",
     "ioredis": "5.3.2",
     "joi": "17.6.0",
-    "jsonwebtoken": "9.0.0",
+    "jsonwebtoken": "9.0.2",
     "koa-passport": "4.1.4",
     "koa-pino-logger": "4.0.0",
     "lodash": "4.17.21",
@@ -63,7 +63,7 @@
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
     "@types/jest": "29.5.5",
-    "@types/lodash": "4.14.180",
+    "@types/lodash": "4.14.200",
     "@types/node": "18.17.0",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.0",
@@ -164,14 +164,14 @@ export class UserDB {
     }
   }

-  static async getUsersByAppAccess(appId?: string) {
-    const opts: any = {
+  static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
+    const params: any = {
       include_docs: true,
-      limit: 50,
+      limit: opts.limit || 50,
     }
     let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
-      appId,
-      opts
+      opts.appId,
+      params
     )
     return response
   }
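Note: the hunk above changes `getUsersByAppAccess` from a positional `appId` argument to an options object. A minimal sketch of the new call shape (the `appId` and `limit` values here are hypothetical):

```js
// Before: UserDB.getUsersByAppAccess(appId)
// After: an options object, with an optional page-size override
const users = await UserDB.getUsersByAppAccess({
  appId: "app_dev_123", // hypothetical app ID
  limit: 100,           // overrides the default of 50
})
```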
@@ -19,6 +19,7 @@ import {
   SearchUsersRequest,
   User,
   ContextUser,
+  DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
 import * as context from "../context"
@@ -241,12 +242,14 @@ export const paginatedUsers = async ({
   bookmark,
   query,
   appId,
+  limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
+  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
   // get one extra document, to have the next page
-  const opts: any = {
+  const opts: DatabaseQueryOpts = {
     include_docs: true,
-    limit: PAGE_LIMIT + 1,
+    limit: pageLimit,
   }
   // add a startkey if the page was specified (anchor)
   if (bookmark) {
@@ -269,7 +272,7 @@ export const paginatedUsers = async ({
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, PAGE_LIMIT, {
+  return pagination(userList, pageLimit, {
     paginate: true,
     property,
     getKey,
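Note: `pageLimit` is `limit + 1` because the query deliberately over-fetches by one document — if the extra document comes back, a next page exists. A minimal sketch of the pattern, assuming a hypothetical helper rather than the actual `pagination` util:

```js
// Fetch limit + 1 rows; the presence of the extra row proves another page exists
const paginateRows = (rows, limit) => {
  const hasNextPage = rows.length > limit
  return {
    data: rows.slice(0, limit),
    hasNextPage,
    // the extra row's key doubles as the bookmark for the next page
    nextBookmark: hasNextPage ? rows[limit]._id : undefined,
  }
}
```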
@@ -3,13 +3,10 @@
   import { goto, params } from "@roxi/routify"
   import { Table, Heading, Layout } from "@budibase/bbui"
   import Spinner from "components/common/Spinner.svelte"
-  import {
-    TableNames,
-    UNEDITABLE_USER_FIELDS,
-    UNSORTABLE_TYPES,
-  } from "constants"
+  import { TableNames, UNEDITABLE_USER_FIELDS } from "constants"
   import RoleCell from "./cells/RoleCell.svelte"
   import { createEventDispatcher } from "svelte"
+  import { canBeSortColumn } from "@budibase/shared-core"

   export let schema = {}
   export let data = []
@@ -32,12 +29,10 @@
   $: isUsersTable = tableId === TableNames.USERS
   $: data && resetSelectedRows()
   $: {
-    UNSORTABLE_TYPES.forEach(type => {
-      Object.values(schema || {}).forEach(col => {
-        if (col.type === type) {
-          col.sortable = false
-        }
-      })
+    Object.values(schema || {}).forEach(col => {
+      if (!canBeSortColumn(col.type)) {
+        col.sortable = false
+      }
     })
   }
   $: {
@@ -1,5 +1,9 @@
 <script>
-  import { getContextProviderComponents } from "builderStore/dataBinding"
+  import {
+    getContextProviderComponents,
+    readableToRuntimeBinding,
+    runtimeToReadableBinding,
+  } from "builderStore/dataBinding"
   import {
     Button,
     Popover,
@@ -9,6 +13,11 @@
     Heading,
     Drawer,
     DrawerContent,
+    Icon,
+    Modal,
+    ModalContent,
+    CoreDropzone,
+    notifications,
   } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { store, currentAsset } from "builderStore"
@@ -22,6 +31,8 @@
   import BindingBuilder from "components/integration/QueryBindingBuilder.svelte"
   import IntegrationQueryEditor from "components/integration/index.svelte"
   import { makePropSafe as safe } from "@budibase/string-templates"
+  import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
+  import { API } from "api"

   export let value = {}
   export let otherSources
@@ -31,9 +42,13 @@

   const dispatch = createEventDispatcher()
   const arrayTypes = ["attachment", "array"]

   let anchorRight, dropdownRight
   let drawer
   let tmpQueryParams
+  let tmpCustomData
+  let customDataValid = true
+  let modal

   $: text = value?.label ?? "Choose an option"
   $: tables = $tablesStore.list.map(m => ({
@@ -125,6 +140,10 @@
       value: `{{ literal ${runtimeBinding} }}`,
     }
   })
+  $: custom = {
+    type: "custom",
+    label: "JSON / CSV",
+  }

   const handleSelected = selected => {
     dispatch("change", selected)
@@ -151,6 +170,11 @@
     drawer.show()
   }

+  const openCustomDrawer = () => {
+    tmpCustomData = runtimeToReadableBinding(bindings, value.data || "")
+    drawer.show()
+  }
+
   const getQueryValue = queries => {
     return queries.find(q => q._id === value._id) || value
   }
@@ -162,6 +186,35 @@
     })
     drawer.hide()
   }

+  const saveCustomData = () => {
+    handleSelected({
+      ...value,
+      data: readableToRuntimeBinding(bindings, tmpCustomData),
+    })
+    drawer.hide()
+  }
+
+  const promptForCSV = () => {
+    drawer.hide()
+    modal.show()
+  }
+
+  const handleCSV = async e => {
+    try {
+      const csv = await e.detail[0]?.text()
+      if (csv?.length) {
+        const js = await API.csvToJson(csv)
+        tmpCustomData = JSON.stringify(js)
+      }
+      modal.hide()
+      saveCustomData()
+    } catch (error) {
+      notifications.error("Failed to parse CSV")
+      modal.hide()
+      drawer.show()
+    }
+  }
 </script>

 <div class="container" bind:this={anchorRight}>
@@ -172,7 +225,9 @@
     on:click={dropdownRight.show}
   />
   {#if value?.type === "query"}
-    <i class="ri-settings-5-line" on:click={openQueryParamsDrawer} />
+    <div class="icon">
+      <Icon hoverable name="Settings" on:click={openQueryParamsDrawer} />
+    </div>
     <Drawer title={"Query Bindings"} bind:this={drawer}>
       <Button slot="buttons" cta on:click={saveQueryParams}>Save</Button>
       <DrawerContent slot="body">
@@ -198,6 +253,29 @@
       </DrawerContent>
     </Drawer>
   {/if}
+  {#if value?.type === "custom"}
+    <div class="icon">
+      <Icon hoverable name="Settings" on:click={openCustomDrawer} />
+    </div>
+    <Drawer title="Custom data" bind:this={drawer}>
+      <div slot="buttons" style="display:contents">
+        <Button primary on:click={promptForCSV}>Load CSV</Button>
+        <Button cta on:click={saveCustomData} disabled={!customDataValid}>
+          Save
+        </Button>
+      </div>
+      <div slot="description">Provide a JSON array to use as data</div>
+      <ClientBindingPanel
+        slot="body"
+        bind:valid={customDataValid}
+        value={tmpCustomData}
+        on:change={event => (tmpCustomData = event.detail)}
+        {bindings}
+        allowJS
+        allowHelpers
+      />
+    </Drawer>
+  {/if}
 </div>
 <Popover bind:this={dropdownRight} anchor={anchorRight}>
   <div class="dropdown">
@@ -285,20 +363,27 @@
       {/each}
     </ul>
   {/if}
-  {#if otherSources?.length}
-    <Divider />
-    <div class="title">
-      <Heading size="XS">Other</Heading>
-    </div>
-    <ul>
+  <Divider />
+  <div class="title">
+    <Heading size="XS">Other</Heading>
+  </div>
+  <ul>
+    <li on:click={() => handleSelected(custom)}>{custom.label}</li>
+    {#if otherSources?.length}
       {#each otherSources as source}
         <li on:click={() => handleSelected(source)}>{source.label}</li>
       {/each}
-    </ul>
-  {/if}
+    {/if}
+  </ul>
   </div>
 </Popover>

+<Modal bind:this={modal}>
+  <ModalContent title="Load CSV" showConfirmButton={false}>
+    <CoreDropzone compact extensions=".csv" on:change={handleCSV} />
+  </ModalContent>
+</Modal>
+
 <style>
   .container {
     display: flex;
@@ -340,16 +425,7 @@
     background-color: var(--spectrum-global-color-gray-200);
   }

-  i {
-    margin-left: 5px;
-    display: flex;
-    align-items: center;
-    transition: all 0.2s;
-  }
-
-  i:hover {
-    transform: scale(1.1);
-    font-weight: 600;
-    cursor: pointer;
+  .icon {
+    margin-left: 8px;
   }
 </style>
@@ -6,7 +6,7 @@
   } from "builderStore/dataBinding"
   import { currentAsset } from "builderStore"
   import { createEventDispatcher } from "svelte"
-  import { UNSORTABLE_TYPES } from "constants"
+  import { canBeSortColumn } from "@budibase/shared-core"

   export let componentInstance = {}
   export let value = ""
@@ -20,7 +20,7 @@

   const getSortableFields = schema => {
     return Object.entries(schema || {})
-      .filter(entry => !UNSORTABLE_TYPES.includes(entry[1].type))
+      .filter(entry => canBeSortColumn(entry[1].type))
       .map(entry => entry[0])
   }
@@ -34,8 +34,6 @@ export const UNEDITABLE_USER_FIELDS = [
   "lastName",
 ]

-export const UNSORTABLE_TYPES = ["formula", "attachment", "array", "link"]
-
 export const LAYOUT_NAMES = {
   MASTER: {
     PRIVATE: "layout_private_master",
@@ -114,8 +114,9 @@
     query: {
       appId: query || !filterByAppAccess ? null : prodAppId,
       email: query,
-      paginated: query || !filterByAppAccess ? null : false,
     },
+    limit: 50,
+    paginate: query || !filterByAppAccess ? null : false,
   })
   await usersFetch.refresh()
@@ -1,5 +1,5 @@
 <script>
-  import { isEmpty } from "lodash/fp"
+  import { helpers } from "@budibase/shared-core"
   import { Input, DetailSummary, notifications } from "@budibase/bbui"
   import { store } from "builderStore"
   import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
@@ -70,41 +70,43 @@
   }

   const shouldDisplay = (instance, setting) => {
-    // Parse dependant settings
-    if (setting.dependsOn) {
-      let dependantSetting = setting.dependsOn
-      let dependantValue = null
-      let invert = !!setting.dependsOn.invert
-      if (typeof setting.dependsOn === "object") {
-        dependantSetting = setting.dependsOn.setting
-        dependantValue = setting.dependsOn.value
-      }
-      if (!dependantSetting) {
-        return false
-      }
-
-      // If no specific value is depended upon, check if a value exists at all
-      // for the dependent setting
-      if (dependantValue == null) {
-        const currentValue = instance[dependantSetting]
-        if (currentValue === false) {
-          return false
-        }
-        if (currentValue === true) {
-          return true
-        }
-        return !isEmpty(currentValue)
-      }
-
-      // Otherwise check the value matches
-      if (invert) {
-        return instance[dependantSetting] !== dependantValue
-      } else {
-        return instance[dependantSetting] === dependantValue
-      }
-    }
-
-    return typeof setting.visible == "boolean" ? setting.visible : true
+    let dependsOn = setting.dependsOn
+    if (dependsOn && !Array.isArray(dependsOn)) {
+      dependsOn = [dependsOn]
+    }
+    if (!dependsOn?.length) {
+      return true
+    }
+
+    // Ensure all conditions are met
+    return dependsOn.every(condition => {
+      let dependantSetting = condition
+      let dependantValues = null
+      let invert = !!condition.invert
+      if (typeof condition === "object") {
+        dependantSetting = condition.setting
+        dependantValues = condition.value
+      }
+      if (!dependantSetting) {
+        return false
+      }
+
+      // Ensure values is an array
+      if (!Array.isArray(dependantValues)) {
+        dependantValues = [dependantValues]
+      }
+
+      // If inverting, we want to ensure that we don't have any matches.
+      // If not inverting, we want to ensure that we do have any matches.
+      const currentVal = helpers.deepGet(instance, dependantSetting)
+      const anyMatches = dependantValues.some(dependantVal => {
+        if (dependantVal == null) {
+          return currentVal != null && currentVal !== false && currentVal !== ""
+        }
+        return dependantVal === currentVal
+      })
+      return anyMatches !== invert
+    })
   }

   const canRenderControl = (instance, setting, isScreen) => {
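Note: after this refactor `dependsOn` accepts either a single condition or an array, condition values may themselves be arrays, and `invert` flips the match. An illustration of how the new `shouldDisplay` evaluates (the setting and component instances are hypothetical; `shouldDisplay` is private to this component):

```js
const setting = {
  key: "initialSortColumn",
  dependsOn: [
    "table", // shorthand: "table" just needs a truthy value
    { setting: "table.type", value: "custom", invert: true },
  ],
}

// Plain table datasource: both conditions pass
shouldDisplay({ table: { type: "table" } }, setting) // -> true

// Custom data: the inverted condition fails, hiding the setting
shouldDisplay({ table: { type: "custom" } }, setting) // -> false
```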
@@ -81,9 +81,9 @@ export function createDatasourcesStore() {
     }))
   }

-  const updateDatasource = response => {
+  const updateDatasource = (response, { ignoreErrors } = {}) => {
     const { datasource, errors } = response
-    if (errors && Object.keys(errors).length > 0) {
+    if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
       throw new TableImportError(errors)
     }
     replaceDatasource(datasource._id, datasource)
@@ -137,7 +137,7 @@ export function createDatasourcesStore() {
       fetchSchema: integration.plus,
     })

-    return updateDatasource(response)
+    return updateDatasource(response, { ignoreErrors: true })
   }

   const update = async ({ integration, datasource }) => {
@@ -5556,10 +5556,9 @@
       "width": 600,
       "height": 400
     },
-    "info": "Grid Blocks are only compatible with internal or SQL tables",
     "settings": [
       {
-        "type": "table",
+        "type": "dataSource",
         "label": "Data",
         "key": "table",
         "required": true
@@ -5568,18 +5567,35 @@
         "type": "columns/grid",
         "label": "Columns",
         "key": "columns",
-        "dependsOn": "table"
+        "dependsOn": [
+          "table",
+          {
+            "setting": "table.type",
+            "value": "custom",
+            "invert": true
+          }
+        ]
       },
       {
        "type": "filter",
        "label": "Filtering",
-       "key": "initialFilter"
+       "key": "initialFilter",
+       "dependsOn": {
+         "setting": "table.type",
+         "value": "custom",
+         "invert": true
+       }
      },
      {
        "type": "field/sortable",
        "label": "Sort column",
        "key": "initialSortColumn",
-       "placeholder": "Default"
+       "placeholder": "Default",
+       "dependsOn": {
+         "setting": "table.type",
+         "value": "custom",
+         "invert": true
+       }
      },
      {
        "type": "select",
@@ -5618,29 +5634,37 @@
            "label": "Clicked row",
            "key": "row"
          }
-       ]
+       ],
+       "dependsOn": {
+         "setting": "allowEditRows",
+         "value": false
+       }
      },
      {
        "type": "boolean",
        "label": "Add rows",
        "key": "allowAddRows",
-       "defaultValue": true
+       "defaultValue": true,
+       "dependsOn": {
+         "setting": "table.type",
+         "value": ["table", "viewV2"]
+       }
      },
      {
        "type": "boolean",
        "label": "Edit rows",
        "key": "allowEditRows",
-       "defaultValue": true
+       "defaultValue": true,
+       "dependsOn": {
+         "setting": "table.type",
+         "value": ["table", "viewV2"]
+       }
      },
      {
        "type": "boolean",
        "label": "Delete rows",
        "key": "allowDeleteRows",
-       "defaultValue": true
+       "defaultValue": true,
+       "dependsOn": {
+         "setting": "table.type",
+         "value": ["table", "viewV2"]
+       }
      },
      {
        "type": "boolean",
@@ -4,6 +4,7 @@
   import { getContext } from "svelte"
   import { Grid } from "@budibase/frontend-core"

+  // table is actually any datasource, but called table for legacy compatibility
   export let table
   export let allowAddRows = true
   export let allowEditRows = true
@@ -21,7 +22,6 @@

   $: columnWhitelist = columns?.map(col => col.name)
   $: schemaOverrides = getSchemaOverrides(columns)
-  $: handleRowClick = allowEditRows ? undefined : onRowClick

   const getSchemaOverrides = columns => {
     let overrides = {}
@@ -58,7 +58,7 @@
     showControls={false}
     notifySuccess={notificationStore.actions.success}
     notifyError={notificationStore.actions.error}
-    on:rowclick={e => handleRowClick?.({ row: e.detail })}
+    on:rowclick={e => onRowClick?.({ row: e.detail })}
   />
 </div>
@@ -2,8 +2,8 @@
   import { getContext } from "svelte"
   import { Table } from "@budibase/bbui"
   import SlotRenderer from "./SlotRenderer.svelte"
-  import { UnsortableTypes } from "../../../constants"
   import { onDestroy } from "svelte"
+  import { canBeSortColumn } from "@budibase/shared-core"

   export let dataProvider
   export let columns
@@ -102,7 +102,7 @@
       return
     }
     newSchema[columnName] = schema[columnName]
-    if (UnsortableTypes.includes(schema[columnName].type)) {
+    if (!canBeSortColumn(schema[columnName].type)) {
       newSchema[columnName].sortable = false
     }
@@ -1,13 +1,5 @@
-import { FieldType as FieldTypes } from "@budibase/types"
+export { FieldType as FieldTypes } from "@budibase/types"

-export const UnsortableTypes = [
-  FieldTypes.FORMULA,
-  FieldTypes.ATTACHMENT,
-  FieldTypes.ARRAY,
-  FieldTypes.LINK,
-]
-
 export const ActionTypes = {
   ValidateForm: "ValidateForm",
   UpdateFieldValue: "UpdateFieldValue",
@@ -34,7 +34,7 @@
     column.schema.autocolumn ||
     column.schema.disabled ||
     column.schema.type === "formula" ||
-    (!$config.canEditRows && row._id)
+    (!$config.canEditRows && !row._isNewRow)

   // Register this cell API if the row is focused
   $: {
@@ -1,6 +1,6 @@
 <script>
   import { getContext, onMount, tick } from "svelte"
-  import { canBeDisplayColumn } from "@budibase/shared-core"
+  import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
   import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
   import GridCell from "./GridCell.svelte"
   import { getColumnIcon } from "../lib/utils"
@@ -23,6 +23,7 @@
     columns,
     definition,
     datasource,
+    schema,
   } = getContext("grid")

   let anchor
@@ -119,16 +120,16 @@
     // Generate new name
     let newName = `${column.name} copy`
     let attempts = 2
-    while ($definition.schema[newName]) {
+    while ($schema[newName]) {
       newName = `${column.name} copy ${attempts++}`
     }

     // Save schema with new column
-    const existingColumnDefinition = $definition.schema[column.name]
+    const existingColumnDefinition = $schema[column.name]
     await datasource.actions.saveDefinition({
       ...$definition,
       schema: {
-        ...$definition.schema,
+        ...$schema,
         [newName]: {
           ...existingColumnDefinition,
           name: newName,
@@ -231,14 +232,16 @@
   <MenuItem
     icon="SortOrderUp"
     on:click={sortAscending}
-    disabled={column.name === $sort.column && $sort.order === "ascending"}
+    disabled={!canBeSortColumn(column.schema.type) ||
+      (column.name === $sort.column && $sort.order === "ascending")}
   >
     Sort {ascendingLabel}
   </MenuItem>
   <MenuItem
     icon="SortOrderDown"
     on:click={sortDescending}
-    disabled={column.name === $sort.column && $sort.order === "descending"}
+    disabled={!canBeSortColumn(column.schema.type) ||
+      (column.name === $sort.column && $sort.order === "descending")}
   >
     Sort {descendingLabel}
   </MenuItem>
@@ -1,6 +1,7 @@
 <script>
   import { getContext } from "svelte"
   import { ActionButton, Popover, Select } from "@budibase/bbui"
+  import { canBeSortColumn } from "@budibase/shared-core"

   const { sort, columns, stickyColumn } = getContext("grid")
@@ -19,7 +20,7 @@
         type: stickyColumn.schema?.type,
       })
     }
-    return [
+    options = [
       ...options,
       ...columns.map(col => ({
         label: col.label || col.name,
@@ -27,6 +28,7 @@
         type: col.schema?.type,
       })),
     ]
+    return options.filter(col => canBeSortColumn(col.type))
   }

   const getOrderOptions = (column, columnOptions) => {
@@ -141,7 +141,14 @@
       </div>
     </div>
   {/if}
-  {#if $loaded}
+  {#if $error}
+    <div class="grid-error">
+      <div class="grid-error-title">There was a problem loading your grid</div>
+      <div class="grid-error-subtitle">
+        {$error}
+      </div>
+    </div>
+  {:else if $loaded}
     <div class="grid-data-outer" use:clickOutside={ui.actions.blur}>
       <div class="grid-data-inner">
         <StickyColumn>
@@ -171,13 +178,6 @@
         </div>
       </div>
     </div>
-  {:else if $error}
-    <div class="grid-error">
-      <div class="grid-error-title">There was a problem loading your grid</div>
-      <div class="grid-error-subtitle">
-        {$error}
-      </div>
-    </div>
   {/if}
   {#if $loading && !$error}
     <div in:fade|local={{ duration: 130 }} class="grid-loading">
@@ -18,6 +18,7 @@
     contentLines,
     isDragging,
     dispatch,
+    rows,
   } = getContext("grid")

   $: rowSelected = !!$selectedRows[row._id]
@@ -31,7 +32,7 @@
   on:focus
   on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
   on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
-  on:click={() => dispatch("rowclick", row)}
+  on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))}
 >
   {#each $renderedColumns as column, columnIdx (column.name)}
     {@const cellId = `${row._id}-${column.name}`}
@@ -33,7 +33,7 @@

   let visible = false
   let isAdding = false
-  let newRow = {}
+  let newRow
   let offset = 0

   $: firstColumn = $stickyColumn || $renderedColumns[0]
|
|||
|
||||
// Create row
|
||||
const newRowIndex = offset ? undefined : 0
|
||||
const savedRow = await rows.actions.addRow(newRow, newRowIndex)
|
||||
let rowToCreate = { ...newRow }
|
||||
delete rowToCreate._isNewRow
|
||||
const savedRow = await rows.actions.addRow(rowToCreate, newRowIndex)
|
||||
if (savedRow) {
|
||||
// Reset state
|
||||
clear()
|
||||
|
@@ -109,7 +111,7 @@
     }

     // Update state and select initial cell
-    newRow = {}
+    newRow = { _isNewRow: true }
     visible = true
     $hoveredRowId = NewRowID
     if (firstColumn) {
@@ -74,7 +74,7 @@
   class="row"
   on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
   on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
-  on:click={() => dispatch("rowclick", row)}
+  on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))}
 >
   <GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
   {#if $stickyColumn}
@@ -1,6 +1,6 @@
 export const getColor = (idx, opacity = 0.3) => {
   if (idx == null || idx === -1) {
-    return null
+    idx = 0
   }
   return `hsla(${((idx + 1) * 222) % 360}, 90%, 75%, ${opacity})`
 }
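Note: `getColor` now always returns a colour — a `null` or `-1` index is coerced to `0` instead of yielding `null`. The 222° hue stride keeps consecutive indices visually distinct:

```js
// Hues produced by ((idx + 1) * 222) % 360 for the first few indices
const hues = [0, 1, 2, 3].map(idx => ((idx + 1) * 222) % 360)
// -> [222, 84, 306, 168]: successive hues land far apart on the wheel
```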
@@ -17,6 +17,7 @@
     focusedCellAPI,
     focusedRowId,
     notifications,
+    isDatasourcePlus,
   } = getContext("grid")

   $: style = makeStyle($menu)
|
@ -75,7 +76,7 @@
|
|||
</MenuItem>
|
||||
<MenuItem
|
||||
icon="Copy"
|
||||
disabled={isNewRow || !$focusedRow?._id}
|
||||
disabled={isNewRow || !$focusedRow?._id || !$isDatasourcePlus}
|
||||
on:click={() => copyToClipboard($focusedRow?._id)}
|
||||
on:click={menu.actions.close}
|
||||
>
|
||||
|
|
|
@@ -69,7 +69,7 @@ export const deriveStores = context => {
 }

 export const createActions = context => {
-  const { columns, stickyColumn, datasource, definition } = context
+  const { columns, stickyColumn, datasource, definition, schema } = context

   // Updates the datasources primary display column
   const changePrimaryDisplay = async column => {
@@ -101,7 +101,7 @@ export const createActions = context => {
     const $columns = get(columns)
     const $definition = get(definition)
     const $stickyColumn = get(stickyColumn)
-    const newSchema = cloneDeep($definition.schema)
+    let newSchema = cloneDeep(get(schema)) || {}

     // Build new updated datasource schema
     Object.keys(newSchema).forEach(column => {
@@ -142,26 +142,35 @@ export const createActions = context => {
 }

 export const initialise = context => {
-  const { definition, columns, stickyColumn, schema } = context
+  const { definition, columns, stickyColumn, enrichedSchema } = context

   // Merge new schema fields with existing schema in order to preserve widths
-  schema.subscribe($schema => {
-    if (!$schema) {
+  enrichedSchema.subscribe($enrichedSchema => {
+    if (!$enrichedSchema) {
       columns.set([])
       stickyColumn.set(null)
       return
     }
     const $definition = get(definition)
+    const $columns = get(columns)
+    const $stickyColumn = get(stickyColumn)
+
+    // Generate array of all columns to easily find pre-existing columns
+    let allColumns = $columns || []
+    if ($stickyColumn) {
+      allColumns.push($stickyColumn)
+    }

     // Find primary display
     let primaryDisplay
-    if ($definition.primaryDisplay && $schema[$definition.primaryDisplay]) {
-      primaryDisplay = $definition.primaryDisplay
+    const candidatePD = $definition.primaryDisplay || $stickyColumn?.name
+    if (candidatePD && $enrichedSchema[candidatePD]) {
+      primaryDisplay = candidatePD
     }

     // Get field list
     let fields = []
-    Object.keys($schema).forEach(field => {
+    Object.keys($enrichedSchema).forEach(field => {
       if (field !== primaryDisplay) {
         fields.push(field)
       }
@@ -170,14 +179,18 @@ export const initialise = context => {
     // Update columns, removing extraneous columns and adding missing ones
     columns.set(
       fields
-        .map(field => ({
-          name: field,
-          label: $schema[field].displayName || field,
-          schema: $schema[field],
-          width: $schema[field].width || DefaultColumnWidth,
-          visible: $schema[field].visible ?? true,
-          order: $schema[field].order,
-        }))
+        .map(field => {
+          const fieldSchema = $enrichedSchema[field]
+          const oldColumn = allColumns?.find(x => x.name === field)
+          return {
+            name: field,
+            label: fieldSchema.displayName || field,
+            schema: fieldSchema,
+            width: fieldSchema.width || oldColumn?.width || DefaultColumnWidth,
+            visible: fieldSchema.visible ?? true,
+            order: fieldSchema.order ?? oldColumn?.order,
+          }
+        })
         .sort((a, b) => {
           // Sort by order first
           const orderA = a.order
@@ -205,11 +218,13 @@ export const initialise = context => {
       stickyColumn.set(null)
       return
     }
+    const stickySchema = $enrichedSchema[primaryDisplay]
+    const oldStickyColumn = allColumns?.find(x => x.name === primaryDisplay)
     stickyColumn.set({
       name: primaryDisplay,
-      label: $schema[primaryDisplay].displayName || primaryDisplay,
-      schema: $schema[primaryDisplay],
-      width: $schema[primaryDisplay].width || DefaultColumnWidth,
+      label: stickySchema.displayName || primaryDisplay,
+      schema: stickySchema,
+      width: stickySchema.width || oldStickyColumn?.width || DefaultColumnWidth,
       visible: true,
       order: 0,
       left: GutterWidth,
@@ -37,9 +37,10 @@ export const deriveStores = context => {
     [props, hasNonAutoColumn],
     ([$props, $hasNonAutoColumn]) => {
       let config = { ...$props }
+      const type = $props.datasource?.type

       // Disable some features if we're editing a view
-      if ($props.datasource?.type === "viewV2") {
+      if (type === "viewV2") {
         config.canEditColumns = false
       }
@@ -48,6 +49,16 @@ export const deriveStores = context => {
         config.canAddRows = false
       }

+      // Disable features for non DS+
+      if (!["table", "viewV2"].includes(type)) {
+        config.canAddRows = false
+        config.canEditRows = false
+        config.canDeleteRows = false
+        config.canExpandRows = false
+        config.canSaveSchema = false
+        config.canEditColumns = false
+      }
+
       return config
     }
   )
@@ -1,4 +1,5 @@
 import { derived, get, writable } from "svelte/store"
+import { getDatasourceDefinition } from "../../../fetch"

 export const createStores = () => {
   const definition = writable(null)
@@ -9,21 +10,38 @@ export const createStores = () => {
 }

 export const deriveStores = context => {
-  const { definition, schemaOverrides, columnWhitelist } = context
+  const { definition, schemaOverrides, columnWhitelist, datasource } = context

-  const schema = derived(
-    [definition, schemaOverrides, columnWhitelist],
-    ([$definition, $schemaOverrides, $columnWhitelist]) => {
-      if (!$definition?.schema) {
-        return null
-      }
-      let newSchema = { ...$definition?.schema }
+  const schema = derived(definition, $definition => {
+    let schema = $definition?.schema
+    if (!schema) {
+      return null
+    }
+
+    // Ensure schema is configured as objects.
+    // Certain datasources like queries use primitives.
+    Object.keys(schema || {}).forEach(key => {
+      if (typeof schema[key] !== "object") {
+        schema[key] = { type: schema[key] }
+      }
+    })
+
+    return schema
+  })
+
+  const enrichedSchema = derived(
+    [schema, schemaOverrides, columnWhitelist],
+    ([$schema, $schemaOverrides, $columnWhitelist]) => {
+      if (!$schema) {
+        return null
+      }
+      let enrichedSchema = { ...$schema }

       // Apply schema overrides
       Object.keys($schemaOverrides || {}).forEach(field => {
-        if (newSchema[field]) {
-          newSchema[field] = {
-            ...newSchema[field],
+        if (enrichedSchema[field]) {
+          enrichedSchema[field] = {
+            ...enrichedSchema[field],
             ...$schemaOverrides[field],
           }
         }
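Note: the new `schema` store normalises primitive schema entries (as used by query datasources) into objects before `enrichedSchema` applies overrides and the whitelist. Illustration of that normalisation step, with a hypothetical sample schema:

```js
// A query datasource may declare its schema with primitives...
let schema = { name: "string", age: "number" }

// ...which the derived store rewrites into the object form the grid expects
Object.keys(schema).forEach(key => {
  if (typeof schema[key] !== "object") {
    schema[key] = { type: schema[key] }
  }
})
// schema is now { name: { type: "string" }, age: { type: "number" } }
```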
@@ -31,41 +49,64 @@ export const deriveStores = context => {

       // Apply whitelist if specified
       if ($columnWhitelist?.length) {
-        Object.keys(newSchema).forEach(key => {
+        Object.keys(enrichedSchema).forEach(key => {
           if (!$columnWhitelist.includes(key)) {
-            delete newSchema[key]
+            delete enrichedSchema[key]
           }
         })
       }

-      return newSchema
+      return enrichedSchema
     }
   )

+  const isDatasourcePlus = derived(datasource, $datasource => {
+    return ["table", "viewV2"].includes($datasource?.type)
+  })
+
   return {
     schema,
+    enrichedSchema,
+    isDatasourcePlus,
   }
 }

 export const createActions = context => {
-  const { datasource, definition, config, dispatch, table, viewV2 } = context
+  const {
+    API,
+    datasource,
+    definition,
+    config,
+    dispatch,
+    table,
+    viewV2,
+    nonPlus,
+  } = context

   // Gets the appropriate API for the configured datasource type
   const getAPI = () => {
     const $datasource = get(datasource)
-    switch ($datasource?.type) {
+    const type = $datasource?.type
+    if (!type) {
+      return null
+    }
+    switch (type) {
       case "table":
         return table
       case "viewV2":
         return viewV2
       default:
-        return null
+        return nonPlus
     }
   }

   // Refreshes the datasource definition
   const refreshDefinition = async () => {
-    return await getAPI()?.actions.refreshDefinition()
+    const def = await getDatasourceDefinition({
+      API,
+      datasource: get(datasource),
+    })
+    definition.set(def)
   }

   // Saves the datasource definition
@@ -113,6 +154,11 @@ export const createActions = context => {
     return getAPI()?.actions.canUseColumn(name)
   }

+  // Gets the default number of rows for a single page
+  const getFeatures = () => {
+    return getAPI()?.actions.getFeatures()
+  }
+
   return {
     datasource: {
       ...datasource,
|
|||
getRow,
|
||||
isDatasourceValid,
|
||||
canUseColumn,
|
||||
getFeatures,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -0,0 +1,124 @@
|
|||
import { get } from "svelte/store"
|
||||
|
||||
export const createActions = context => {
|
||||
const { columns, stickyColumn, table, viewV2 } = context
|
||||
|
||||
const saveDefinition = async () => {
|
||||
throw "This datasource does not support updating the definition"
|
||||
}
|
||||
|
||||
const saveRow = async () => {
|
||||
throw "This datasource does not support saving rows"
|
||||
}
|
||||
|
||||
const deleteRows = async () => {
|
||||
throw "This datasource does not support deleting rows"
|
||||
}
|
||||
|
||||
const getRow = () => {
|
||||
throw "This datasource does not support fetching individual rows"
|
||||
}
|
||||
|
||||
const isDatasourceValid = datasource => {
|
||||
// There are many different types and shapes of datasource, so we only
|
||||
// check that we aren't null
|
||||
return (
|
||||
!table.actions.isDatasourceValid(datasource) &&
|
||||
!viewV2.actions.isDatasourceValid(datasource) &&
|
||||
datasource?.type != null
|
||||
)
|
||||
}
|
||||
|
||||
const canUseColumn = name => {
|
||||
const $columns = get(columns)
|
||||
const $sticky = get(stickyColumn)
|
||||
return $columns.some(col => col.name === name) || $sticky?.name === name
|
||||
}
|
||||
|
||||
const getFeatures = () => {
|
||||
// We don't support any features
|
||||
return {}
|
||||
}
|
||||
|
||||
return {
|
||||
nonPlus: {
|
||||
actions: {
|
||||
saveDefinition,
|
||||
addRow: saveRow,
|
||||
updateRow: saveRow,
|
||||
deleteRows,
|
||||
getRow,
|
||||
isDatasourceValid,
|
||||
canUseColumn,
|
||||
getFeatures,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Small util to compare datasource definitions
|
||||
const isSameDatasource = (a, b) => {
|
||||
return JSON.stringify(a) === JSON.stringify(b)
|
||||
}
|
||||
|
||||
export const initialise = context => {
|
||||
const {
|
||||
datasource,
|
||||
sort,
|
||||
filter,
|
||||
nonPlus,
|
||||
initialFilter,
|
||||
initialSortColumn,
|
||||
initialSortOrder,
|
||||
fetch,
|
||||
} = context
|
||||
// Keep a list of subscriptions so that we can clear them when the datasource
|
||||
// config changes
|
||||
let unsubscribers = []
|
||||
|
||||
// Observe datasource changes and apply logic for view V2 datasources
|
||||
datasource.subscribe($datasource => {
|
||||
// Clear previous subscriptions
|
||||
unsubscribers?.forEach(unsubscribe => unsubscribe())
|
||||
unsubscribers = []
|
||||
if (!nonPlus.actions.isDatasourceValid($datasource)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Wipe state
|
||||
filter.set(get(initialFilter))
|
||||
sort.set({
|
||||
column: get(initialSortColumn),
|
||||
order: get(initialSortOrder) || "ascending",
|
||||
})
|
||||
|
||||
// Update fetch when filter changes
|
||||
unsubscribers.push(
|
||||
filter.subscribe($filter => {
|
||||
// Ensure we're updating the correct fetch
|
||||
const $fetch = get(fetch)
|
||||
if (!isSameDatasource($fetch?.options?.datasource, $datasource)) {
|
||||
return
|
||||
}
|
||||
$fetch.update({
|
||||
filter: $filter,
|
||||
})
|
||||
})
|
||||
)
|
||||
|
||||
// Update fetch when sorting changes
|
||||
unsubscribers.push(
|
||||
sort.subscribe($sort => {
|
||||
// Ensure we're updating the correct fetch
|
||||
const $fetch = get(fetch)
|
||||
if (!isSameDatasource($fetch?.options?.datasource, $datasource)) {
|
||||
return
|
||||
}
|
||||
$fetch.update({
|
||||
sortOrder: $sort.order || "ascending",
|
||||
sortColumn: $sort.column,
|
||||
})
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
|
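Note: `nonPlus` is a fallback implementation of the grid's datasource API for anything that is not a table or viewV2 — mutating actions throw, `getFeatures` reports nothing, and validity is defined by exclusion. A simplified sketch of how the `getAPI` switch in `datasource.js` above resolves to it:

```js
// Simplified resolution order used by the grid's datasource store
const getAPI = type => {
  switch (type) {
    case "table":
      return table // full DS+ feature set
    case "viewV2":
      return viewV2
    default:
      return nonPlus // read-only fallback for queries, custom data, etc.
  }
}
```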
@@ -1,13 +1,10 @@
 import { get } from "svelte/store"
+import TableFetch from "../../../../fetch/TableFetch"

 const SuppressErrors = true

 export const createActions = context => {
-  const { definition, API, datasource, columns, stickyColumn } = context
-
-  const refreshDefinition = async () => {
-    definition.set(await API.fetchTableDefinition(get(datasource).tableId))
-  }
+  const { API, datasource, columns, stickyColumn } = context

   const saveDefinition = async newDefinition => {
     await API.saveTable(newDefinition)
@@ -49,10 +46,13 @@ export const createActions = context => {
     return $columns.some(col => col.name === name) || $sticky?.name === name
   }

+  const getFeatures = () => {
+    return new TableFetch({ API }).determineFeatureFlags()
+  }
+
   return {
     table: {
       actions: {
-        refreshDefinition,
         saveDefinition,
         addRow: saveRow,
         updateRow: saveRow,
@@ -60,6 +60,7 @@ export const createActions = context => {
         getRow,
         isDatasourceValid,
         canUseColumn,
+        getFeatures,
       },
     },
   }
@@ -1,22 +1,10 @@
 import { get } from "svelte/store"
+import ViewV2Fetch from "../../../../fetch/ViewV2Fetch"

 const SuppressErrors = true

 export const createActions = context => {
-  const { definition, API, datasource, columns, stickyColumn } = context
-
-  const refreshDefinition = async () => {
-    const $datasource = get(datasource)
-    if (!$datasource) {
-      definition.set(null)
-      return
-    }
-    const table = await API.fetchTableDefinition($datasource.tableId)
-    const view = Object.values(table?.views || {}).find(
-      view => view.id === $datasource.id
-    )
-    definition.set(view)
-  }
+  const { API, datasource, columns, stickyColumn } = context

   const saveDefinition = async newDefinition => {
     await API.viewV2.update(newDefinition)
@@ -58,10 +46,13 @@ export const createActions = context => {
     )
   }

+  const getFeatures = () => {
+    return new ViewV2Fetch({ API }).determineFeatureFlags()
+  }
+
   return {
     viewV2: {
       actions: {
-        refreshDefinition,
         saveDefinition,
         addRow: saveRow,
         updateRow: saveRow,
@@ -69,6 +60,7 @@ export const createActions = context => {
         getRow,
         isDatasourceValid,
         canUseColumn,
+        getFeatures,
       },
     },
   }
@@ -15,9 +15,10 @@ import * as Config from "./config"
 import * as Sort from "./sort"
 import * as Filter from "./filter"
 import * as Notifications from "./notifications"
-import * as Table from "./table"
-import * as ViewV2 from "./viewV2"
 import * as Datasource from "./datasource"
+import * as Table from "./datasources/table"
+import * as ViewV2 from "./datasources/viewV2"
+import * as NonPlus from "./datasources/nonPlus"

 const DependencyOrderedStores = [
   Sort,
@@ -26,6 +27,7 @@ const DependencyOrderedStores = [
   Scroll,
   Table,
   ViewV2,
+  NonPlus,
   Datasource,
   Columns,
   Rows,
@@ -1,7 +1,8 @@
 import { writable, derived, get } from "svelte/store"
-import { fetchData } from "../../../fetch/fetchData"
+import { fetchData } from "../../../fetch"
 import { NewRowID, RowPageSize } from "../lib/constants"
 import { tick } from "svelte"
+import { Helpers } from "@budibase/bbui"

 export const createStores = () => {
   const rows = writable([])
@@ -76,11 +77,11 @@ export const createActions = context => {
     columns,
     rowChangeCache,
     inProgressChanges,
-    previousFocusedRowId,
     hasNextPage,
     error,
     notifications,
     fetch,
+    isDatasourcePlus,
   } = context
   const instanceLoaded = writable(false)
|
@ -93,12 +94,14 @@ export const createActions = context => {
|
|||
datasource.subscribe(async $datasource => {
|
||||
// Unsub from previous fetch if one exists
|
||||
unsubscribe?.()
|
||||
unsubscribe = null
|
||||
fetch.set(null)
|
||||
instanceLoaded.set(false)
|
||||
loading.set(true)
|
||||
|
||||
// Abandon if we don't have a valid datasource
|
||||
if (!datasource.actions.isDatasourceValid($datasource)) {
|
||||
error.set("Datasource is invalid")
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -108,6 +111,10 @@ export const createActions = context => {
|
|||
const $filter = get(filter)
|
||||
const $sort = get(sort)
|
||||
|
||||
// Determine how many rows to fetch per page
|
||||
const features = datasource.actions.getFeatures()
|
||||
const limit = features?.supportsPagination ? RowPageSize : null
|
||||
|
||||
// Create new fetch model
|
||||
const newFetch = fetchData({
|
||||
API,
|
||||
|
@@ -116,7 +123,7 @@ export const createActions = context => {
       filter: $filter,
       sortColumn: $sort.column,
       sortOrder: $sort.order,
-      limit: RowPageSize,
+      limit,
       paginate: true,
     },
   })
@@ -355,7 +362,7 @@ export const createActions = context => {

     // Update row
     const saved = await datasource.actions.updateRow({
-      ...row,
+      ...cleanRow(row),
       ...get(rowChangeCache)[rowId],
     })
@@ -411,8 +418,17 @@ export const createActions = context => {
     }
     let rowsToAppend = []
     let newRow
+    const $isDatasourcePlus = get(isDatasourcePlus)
     for (let i = 0; i < newRows.length; i++) {
       newRow = newRows[i]
+
+      // Ensure we have a unique _id.
+      // This means generating one for non DS+, overwriting any that may already
+      // exist as we cannot allow duplicates.
+      if (!$isDatasourcePlus) {
+        newRow._id = Helpers.uuid()
+      }
+
       if (!rowCacheMap[newRow._id]) {
         rowCacheMap[newRow._id] = true
         rowsToAppend.push(newRow)
@@ -449,15 +465,16 @@ export const createActions = context => {
     return get(rowLookupMap)[id] != null
   }

-  // Wipe the row change cache when changing row
-  previousFocusedRowId.subscribe(id => {
-    if (id && !get(inProgressChanges)[id]) {
-      rowChangeCache.update(state => {
-        delete state[id]
-        return state
-      })
-    }
-  })
+  // Cleans a row by removing any internal grid metadata from it.
+  // Call this before passing a row to any sort of external flow.
+  const cleanRow = row => {
+    let clone = { ...row }
+    delete clone.__idx
+    if (!get(isDatasourcePlus)) {
+      delete clone._id
+    }
+    return clone
+  }

   return {
     rows: {
@@ -474,7 +491,22 @@ export const createActions = context => {
       refreshRow,
       replaceRow,
       refreshData,
+      cleanRow,
      },
    },
  }
 }
+
+export const initialise = context => {
+  const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
+
+  // Wipe the row change cache when changing row
+  previousFocusedRowId.subscribe(id => {
+    if (id && !get(inProgressChanges)[id]) {
+      rowChangeCache.update(state => {
+        delete state[id]
+        return state
+      })
+    }
+  })
+}
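Note: `cleanRow` strips grid-internal metadata before rows leave the grid (e.g. via `on:rowclick`). For non DS+ datasources the `_id` is also dropped, since it was generated client-side with `Helpers.uuid()`. A worked example with a hypothetical row:

```js
// Internal grid row for a custom (non DS+) datasource
const row = { _id: "uuid-1234", __idx: 7, Value: "foo" } // hypothetical values

// cleanRow always removes __idx, and removes _id for non DS+ rows
rows.actions.cleanRow(row) // -> { Value: "foo" }
```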
@@ -17,7 +17,7 @@ export const createStores = context => {
 }

 export const initialise = context => {
-  const { sort, initialSortColumn, initialSortOrder, definition } = context
+  const { sort, initialSortColumn, initialSortOrder, schema } = context

   // Reset sort when initial sort props change
   initialSortColumn.subscribe(newSortColumn => {
@@ -28,15 +28,12 @@ export const initialise = context => {
   })

   // Derive if the current sort column exists in the schema
-  const sortColumnExists = derived(
-    [sort, definition],
-    ([$sort, $definition]) => {
-      if (!$sort?.column || !$definition) {
-        return true
-      }
-      return $definition.schema?.[$sort.column] != null
-    }
-  )
+  const sortColumnExists = derived([sort, schema], ([$sort, $schema]) => {
+    if (!$sort?.column || !$schema) {
+      return true
+    }
+    return $schema[$sort.column] != null
+  })

   // Clear sort state if our sort column does not exist
   sortColumnExists.subscribe(exists => {
@@ -0,0 +1,145 @@
+import DataFetch from "./DataFetch.js"
+
+export default class CustomFetch extends DataFetch {
+  // Gets the correct Budibase type for a JS value
+  getType(value) {
+    if (value == null) {
+      return "string"
+    }
+    const type = typeof value
+    if (type === "object") {
+      if (Array.isArray(value)) {
+        // Use our custom array type to render badges
+        return "array"
+      }
+      // Use JSON for objects to ensure they are stringified
+      return "json"
+    } else if (!isNaN(value)) {
+      return "number"
+    } else {
+      return "string"
+    }
+  }
+
+  // Parses the custom data into an array format
+  parseCustomData(data) {
+    if (!data) {
+      return []
+    }
+
+    // Happy path - already an array
+    if (Array.isArray(data)) {
+      return data
+    }
+
+    // For strings, try JSON then fall back to attempting a CSV
+    if (typeof data === "string") {
+      try {
+        const js = JSON.parse(data)
+        return Array.isArray(js) ? js : [js]
+      } catch (error) {
+        // Ignore
+      }
+
+      // Try splitting by newlines first
+      if (data.includes("\n")) {
+        return data.split("\n").map(x => x.trim())
+      }
+
+      // Split by commas next
+      return data.split(",").map(x => x.trim())
+    }
+
+    // Other cases we just assume it's a single object and wrap it
+    return [data]
+  }
+
+  // Enriches the custom data to ensure the structure and format is usable
+  enrichCustomData(data) {
+    if (!data?.length) {
+      return []
+    }
+
+    // Filter out any invalid values
+    data = data.filter(x => x != null && x !== "" && !Array.isArray(x))
+
+    // Ensure all values are packed into objects
+    return data.map(value => {
+      if (typeof value === "object") {
+        return value
+      }
+
+      // Try parsing strings
+      if (typeof value === "string") {
+        const split = value.split(",").map(x => x.trim())
+        let obj = {}
+        for (let i = 0; i < split.length; i++) {
+          const suffix = i === 0 ? "" : ` ${i + 1}`
+          const key = `Value${suffix}`
+          obj[key] = split[i]
+        }
+        return obj
+      }
+
+      // For anything else, wrap in an object
+      return { Value: value }
+    })
+  }
+
+  // Extracts and parses the custom data from the datasource definition
+  getCustomData(datasource) {
+    return this.enrichCustomData(this.parseCustomData(datasource?.data))
+  }
+
+  async getDefinition(datasource) {
+    // Try and work out the schema from the array provided
+    let schema = {}
+    const data = this.getCustomData(datasource)
+    if (!data?.length) {
+      return { schema }
+    }
+
+    // Go through every object and extract all valid keys
+    for (let datum of data) {
+      for (let key of Object.keys(datum)) {
+        if (key === "_id") {
+          continue
+        }
+        if (!schema[key]) {
+          let type = this.getType(datum[key])
+          let constraints = {}
+
+          // Determine whether we should render text columns as options instead
+          if (type === "string") {
+            const uniqueValues = [...new Set(data.map(x => x[key]))]
+            const uniqueness = uniqueValues.length / data.length
+            if (uniqueness <= 0.8 && uniqueValues.length > 1) {
+              type = "options"
+              constraints.inclusion = uniqueValues
+            }
+          }
+
+          // Generate options for array columns
+          else if (type === "array") {
+            constraints.inclusion = [...new Set(data.map(x => x[key]).flat())]
+          }
+
+          schema[key] = {
+            type,
+            constraints,
+          }
+        }
+      }
+    }
+    return { schema }
+  }
+
+  async getData() {
+    const { datasource } = this.options
+    return {
+      rows: this.getCustomData(datasource),
+      hasNextPage: false,
+      cursor: null,
+    }
+  }
+}
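Note: `CustomFetch` infers a schema from the data itself. In particular, a string column whose values repeat often enough — uniqueness ≤ 0.8 with more than one distinct value — is promoted to an `options` column. A worked example with hypothetical data:

```js
// 5 rows, 2 distinct values: uniqueness = 2 / 5 = 0.4 <= 0.8,
// so "Status" is rendered as an options column with inclusion constraints
const data = [
  { Status: "open" },
  { Status: "open" },
  { Status: "closed" },
  { Status: "open" },
  { Status: "closed" },
]
const uniqueValues = [...new Set(data.map(x => x.Status))] // ["open", "closed"]
const uniqueness = uniqueValues.length / data.length // 0.4
```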
@@ -8,6 +8,7 @@ import FieldFetch from "./FieldFetch.js"
 import JSONArrayFetch from "./JSONArrayFetch.js"
 import UserFetch from "./UserFetch.js"
 import GroupUserFetch from "./GroupUserFetch.js"
+import CustomFetch from "./CustomFetch.js"

 const DataFetchMap = {
   table: TableFetch,
@ -17,6 +18,7 @@ const DataFetchMap = {
|
|||
link: RelationshipFetch,
|
||||
user: UserFetch,
|
||||
groupUser: GroupUserFetch,
|
||||
custom: CustomFetch,
|
||||
|
||||
// Client specific datasource types
|
||||
provider: NestedProviderFetch,
|
||||
|
@ -24,7 +26,18 @@ const DataFetchMap = {
|
|||
jsonarray: JSONArrayFetch,
|
||||
}
|
||||
|
||||
// Constructs a new fetch model for a certain datasource
|
||||
export const fetchData = ({ API, datasource, options }) => {
|
||||
const Fetch = DataFetchMap[datasource?.type] || TableFetch
|
||||
return new Fetch({ API, datasource, ...options })
|
||||
}
|
||||
|
||||
// Fetches the definition of any type of datasource
|
||||
export const getDatasourceDefinition = async ({ API, datasource }) => {
|
||||
const handler = DataFetchMap[datasource?.type]
|
||||
if (!handler) {
|
||||
return null
|
||||
}
|
||||
const instance = new handler({ API })
|
||||
return await instance.getDefinition(datasource)
|
||||
}
|
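A minimal usage sketch, assuming the inline custom datasource shape handled by CustomFetch above:

const fetch = fetchData({
  API,
  datasource: { type: "custom", data: '[{ "name": "a" }, { "name": "b" }]' },
  options: {},
})
// definitions can also be resolved without building a full fetch model
const definition = await getDatasourceDefinition({
  API,
  datasource: { type: "custom", data: "[]" },
})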
@ -1,5 +1,5 @@
export { createAPIClient } from "./api"
export { fetchData } from "./fetch/fetchData"
export { fetchData } from "./fetch"
export { Utils } from "./utils"
export * as Constants from "./constants"
export * from "./stores"
@ -0,0 +1,79 @@
FROM node:18-slim

LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-update="scripts/watchtower-hooks/post-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-check="scripts/watchtower-hooks/post-check.sh"

WORKDIR /app

ENV PORT=4001
ENV COUCH_DB_URL=https://couchdb.budi.live:5984
ENV BUDIBASE_ENVIRONMENT=PRODUCTION
ENV SERVICE=app-service
ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
ENV ACCOUNT_PORTAL_URL=https://account.budibase.app
ENV TOP_LEVEL_PATH=/

# handle node-gyp
RUN apt-get update \
    && apt-get install -y --no-install-recommends g++ make python3 jq
RUN yarn global add pm2

# Install client for oracle datasource
RUN apt-get install -y unzip libaio1
COPY packages/server/scripts/integrations/oracle/ scripts/integrations/oracle/
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh

# Install postgres client for pg_dump utils
RUN apt update && apt upgrade -y \
    && apt install software-properties-common apt-transport-https curl gpg -y \
    && curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
    && apt update -y \
    && apt install postgresql-client-15 -y \
    && apt remove software-properties-common apt-transport-https curl gpg -y

WORKDIR /

COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh

WORKDIR /string-templates
COPY packages/string-templates/package.json package.json
RUN ../scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true
COPY packages/string-templates .

WORKDIR /app
COPY packages/server/package.json .
COPY packages/server/dist/yarn.lock .
RUN cd ../string-templates && yarn link && cd - && yarn link @budibase/string-templates

COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh package.json

RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true \
    # Remove unneeded data from file system to reduce image size
    && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python3 jq \
    && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

COPY packages/server/dist/ dist/
COPY packages/server/docker_run.sh .
COPY packages/server/builder/ builder/
COPY packages/server/client/ client/

EXPOSE 4001

# have to add node environment production after install
# due to this causing yarn to stop installing dev dependencies
# which are actually needed to get this environment up and running
ENV NODE_ENV=production
ENV CLUSTER_MODE=${CLUSTER_MODE}
ENV TOP_LEVEL_PATH=/app

CMD ["./docker_run.sh"]
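Note: the RUN --mount=type=cache instructions in this Dockerfile require BuildKit; it needs to be built with DOCKER_BUILDKIT=1 docker build (or docker buildx build), as the classic builder rejects the --mount flag.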
@ -122,7 +122,7 @@
    "@types/jest": "29.5.5",
    "@types/koa": "2.13.4",
    "@types/koa__router": "8.0.8",
    "@types/lodash": "4.14.180",
    "@types/lodash": "4.14.200",
    "@types/mssql": "8.1.2",
    "@types/node": "18.17.0",
    "@types/node-fetch": "2.6.4",
@ -4,7 +4,6 @@ import {
  getQueryParams,
  getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"

@ -325,11 +324,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {

  // Destroy the tables.
  for (const table of datasourceTableDocs) {
    await tableDestroy({
      params: {
        tableId: table._id,
      },
    })
    await sdk.tables.internal.destroy(table)
  }
}
@ -0,0 +1,36 @@
import {
  Datasource,
  Operation,
  QueryJson,
  RenameColumn,
  Table,
} from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query"

export async function makeTableRequest(
  datasource: Datasource,
  operation: Operation,
  table: Table,
  tables: Record<string, Table>,
  oldTable?: Table,
  renamed?: RenameColumn
) {
  const json: QueryJson = {
    endpoint: {
      datasourceId: datasource._id!,
      entityId: table._id!,
      operation,
    },
    meta: {
      tables,
    },
    table,
  }
  if (oldTable) {
    json.meta!.table = oldTable
  }
  if (renamed) {
    json.meta!.renamed = renamed
  }
  return makeExternalQuery(datasource, json)
}
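A minimal sketch of the call shape (the datasource, table and oldTable values are assumed to come from the datasource document, as in the save flow later in this commit):

import { Operation } from "@budibase/types"
import { makeTableRequest } from "./ExternalRequest"

// create a brand new table in the external datasource
await makeTableRequest(datasource, Operation.CREATE_TABLE, table, datasource.entities!)
// update an existing table, threading through the old definition and a rename
await makeTableRequest(
  datasource,
  Operation.UPDATE_TABLE,
  table,
  datasource.entities!,
  oldTable,
  { old: "name", updated: "title" } // hypothetical rename
)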
@ -1,108 +1,20 @@
import {
  breakExternalTableId,
  buildExternalTableId,
} from "../../../integrations/utils"
import {
  foreignKeyStructure,
  generateForeignKey,
  generateJunctionTableName,
  hasTypeChanged,
  setStaticSchemas,
} from "./utils"
import { FieldTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query"
import { breakExternalTableId } from "../../../integrations/utils"
import { handleRequest } from "../row/external"
import { context, events } from "@budibase/backend-core"
import { events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
  BulkImportRequest,
  BulkImportResponse,
  Datasource,
  FieldSchema,
  ManyToManyRelationshipFieldMetadata,
  ManyToOneRelationshipFieldMetadata,
  OneToManyRelationshipFieldMetadata,
  Operation,
  QueryJson,
  RelationshipFieldMetadata,
  RelationshipType,
  RenameColumn,
  SaveTableRequest,
  SaveTableResponse,
  Table,
  TableRequest,
  UserCtx,
  ViewV2,
} from "@budibase/types"
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"

const { cloneDeep } = require("lodash/fp")

async function makeTableRequest(
  datasource: Datasource,
  operation: Operation,
  table: Table,
  tables: Record<string, Table>,
  oldTable?: Table,
  renamed?: RenameColumn
) {
  const json: QueryJson = {
    endpoint: {
      datasourceId: datasource._id!,
      entityId: table._id!,
      operation,
    },
    meta: {
      tables,
    },
    table,
  }
  if (oldTable) {
    json.meta!.table = oldTable
  }
  if (renamed) {
    json.meta!.renamed = renamed
  }
  return makeExternalQuery(datasource, json)
}

function cleanupRelationships(
  table: Table,
  tables: Record<string, Table>,
  oldTable?: Table
) {
  const tableToIterate = oldTable ? oldTable : table
  // clean up relationships in couch table schemas
  for (let [key, schema] of Object.entries(tableToIterate.schema)) {
    if (
      schema.type === FieldTypes.LINK &&
      (!oldTable || table.schema[key] == null)
    ) {
      const schemaTableId = schema.tableId
      const relatedTable = Object.values(tables).find(
        table => table._id === schemaTableId
      )
      const foreignKey =
        schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
        schema.foreignKey
      if (!relatedTable || !foreignKey) {
        continue
      }
      for (let [relatedKey, relatedSchema] of Object.entries(
        relatedTable.schema
      )) {
        if (
          relatedSchema.type === FieldTypes.LINK &&
          relatedSchema.fieldName === foreignKey
        ) {
          delete relatedTable.schema[relatedKey]
        }
      }
    }
  }
}

function getDatasourceId(table: Table) {
  if (!table) {
    throw "No table supplied"

@ -113,247 +25,32 @@ function getDatasourceId(table: Table) {
  return breakExternalTableId(table._id).datasourceId
}

function otherRelationshipType(type?: string) {
  if (type === RelationshipType.MANY_TO_MANY) {
    return RelationshipType.MANY_TO_MANY
  }
  return type === RelationshipType.ONE_TO_MANY
    ? RelationshipType.MANY_TO_ONE
    : RelationshipType.ONE_TO_MANY
}

function generateManyLinkSchema(
  datasource: Datasource,
  column: ManyToManyRelationshipFieldMetadata,
  table: Table,
  relatedTable: Table
): Table {
  if (!table.primary || !relatedTable.primary) {
    throw new Error("Unable to generate many link schema, no primary keys")
  }
  const primary = table.name + table.primary[0]
  const relatedPrimary = relatedTable.name + relatedTable.primary[0]
  const jcTblName = generateJunctionTableName(column, table, relatedTable)
  // first create the new table
  const junctionTable = {
    _id: buildExternalTableId(datasource._id!, jcTblName),
    name: jcTblName,
    primary: [primary, relatedPrimary],
    constrained: [primary, relatedPrimary],
    schema: {
      [primary]: foreignKeyStructure(primary, {
        toTable: table.name,
        toKey: table.primary[0],
      }),
      [relatedPrimary]: foreignKeyStructure(relatedPrimary, {
        toTable: relatedTable.name,
        toKey: relatedTable.primary[0],
      }),
    },
  }
  column.through = junctionTable._id
  column.throughFrom = relatedPrimary
  column.throughTo = primary
  column.fieldName = relatedPrimary
  return junctionTable
}

function generateLinkSchema(
  column:
    | OneToManyRelationshipFieldMetadata
    | ManyToOneRelationshipFieldMetadata,
  table: Table,
  relatedTable: Table,
  type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) {
  if (!table.primary || !relatedTable.primary) {
    throw new Error("Unable to generate link schema, no primary keys")
  }
  const isOneSide = type === RelationshipType.ONE_TO_MANY
  const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
  // generate a foreign key
  const foreignKey = generateForeignKey(column, relatedTable)
  column.relationshipType = type
  column.foreignKey = isOneSide ? foreignKey : primary
  column.fieldName = isOneSide ? primary : foreignKey
  return foreignKey
}

function generateRelatedSchema(
  linkColumn: RelationshipFieldMetadata,
  table: Table,
  relatedTable: Table,
  columnName: string
) {
  // generate column for other table
  const relatedSchema = cloneDeep(linkColumn)
  const isMany2Many =
    linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
  // swap them from the main link
  if (!isMany2Many && linkColumn.foreignKey) {
    relatedSchema.fieldName = linkColumn.foreignKey
    relatedSchema.foreignKey = linkColumn.fieldName
  }
  // is many to many
  else if (isMany2Many) {
    // don't need to copy through, already got it
    relatedSchema.fieldName = linkColumn.throughTo
    relatedSchema.throughTo = linkColumn.throughFrom
    relatedSchema.throughFrom = linkColumn.throughTo
  }
  relatedSchema.relationshipType = otherRelationshipType(
    linkColumn.relationshipType
  )
  relatedSchema.tableId = relatedTable._id
  relatedSchema.name = columnName
  table.schema[columnName] = relatedSchema
}

function isRelationshipSetup(column: RelationshipFieldMetadata) {
  return (column as any).foreignKey || (column as any).through
}

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  const inputs = ctx.request.body
  const renamed = inputs?._rename
  const renaming = inputs?._rename
  // can't do this right now
  delete inputs.rows
  const datasourceId = getDatasourceId(ctx.request.body)!
  const tableId = ctx.request.body._id
  const datasourceId = getDatasourceId(ctx.request.body)
  // table doesn't exist already, note that it is created
  if (!inputs._id) {
    inputs.created = true
  }
  let tableToSave: TableRequest = {
    type: "table",
    _id: buildExternalTableId(datasourceId, inputs.name),
    sourceId: datasourceId,
    ...inputs,
  }

  let oldTable: Table | undefined
  if (ctx.request.body && ctx.request.body._id) {
    oldTable = await sdk.tables.getTable(ctx.request.body._id)
  }

  if (hasTypeChanged(tableToSave, oldTable)) {
    ctx.throw(400, "A column type has changed.")
  }

  for (let view in tableToSave.views) {
    const tableView = tableToSave.views[view]
    if (!tableView || !sdk.views.isV2(tableView)) continue

    tableToSave.views[view] = sdk.views.syncSchema(
      oldTable!.views![view] as ViewV2,
      tableToSave.schema,
      renamed
  try {
    const { datasource, table } = await sdk.tables.external.save(
      datasourceId!,
      inputs,
      { tableId, renaming }
    )
  }

  const db = context.getAppDB()
  const datasource = await sdk.datasources.get(datasourceId)
  if (!datasource.entities) {
    datasource.entities = {}
  }

  // GSheets is a specific case - only ever has a static primary key
  tableToSave = setStaticSchemas(datasource, tableToSave)

  const oldTables = cloneDeep(datasource.entities)
  const tables: Record<string, Table> = datasource.entities

  const extraTablesToUpdate = []

  // check if relations need setup
  for (let schema of Object.values(tableToSave.schema)) {
    if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
      continue
    }
    const schemaTableId = schema.tableId
    const relatedTable = Object.values(tables).find(
      table => table._id === schemaTableId
    )
    if (!relatedTable) {
      continue
    }
    const relatedColumnName = schema.fieldName!
    const relationType = schema.relationshipType
    if (relationType === RelationshipType.MANY_TO_MANY) {
      const junctionTable = generateManyLinkSchema(
        datasource,
        schema,
        tableToSave,
        relatedTable
      )
      if (tables[junctionTable.name]) {
        throw "Junction table already exists, cannot create another relationship."
      }
      tables[junctionTable.name] = junctionTable
      extraTablesToUpdate.push(junctionTable)
    builderSocket?.emitDatasourceUpdate(ctx, datasource)
    return table
  } catch (err: any) {
    if (err instanceof Error) {
      ctx.throw(400, err.message)
    } else {
      const fkTable =
        relationType === RelationshipType.ONE_TO_MANY
          ? tableToSave
          : relatedTable
      const foreignKey = generateLinkSchema(
        schema,
        tableToSave,
        relatedTable,
        relationType
      )
      fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
      if (fkTable.constrained == null) {
        fkTable.constrained = []
      }
      if (fkTable.constrained.indexOf(foreignKey) === -1) {
        fkTable.constrained.push(foreignKey)
      }
      // foreign key is in other table, need to save it to external
      if (fkTable._id !== tableToSave._id) {
        extraTablesToUpdate.push(fkTable)
      }
      ctx.throw(err.status || 500, err?.message || err)
    }
    generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
    schema.main = true
  }

  cleanupRelationships(tableToSave, tables, oldTable)

  const operation = oldTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
  await makeTableRequest(
    datasource,
    operation,
    tableToSave,
    tables,
    oldTable,
    renamed
  )
  // update any extra tables (like foreign keys in other tables)
  for (let extraTable of extraTablesToUpdate) {
    const oldExtraTable = oldTables[extraTable.name]
    let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
    await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
  }

  // make sure the constrained list, all still exist
  if (Array.isArray(tableToSave.constrained)) {
    tableToSave.constrained = tableToSave.constrained.filter(constraint =>
      Object.keys(tableToSave.schema).includes(constraint)
    )
  }

  // remove the rename prop
  delete tableToSave._rename
  // store it into couch now for budibase reference
  datasource.entities[tableToSave.name] = tableToSave
  await db.put(sdk.tables.populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await sdk.datasources.get(datasource._id!)
  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)

  return tableToSave
}

export async function destroy(ctx: UserCtx) {
@ -364,27 +61,20 @@ export async function destroy(ctx: UserCtx) {
    ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
  }
  const datasourceId = getDatasourceId(tableToDelete)

  const db = context.getAppDB()
  const datasource = await sdk.datasources.get(datasourceId!)
  const tables = datasource.entities

  const operation = Operation.DELETE_TABLE
  if (tables) {
    await makeTableRequest(datasource, operation, tableToDelete, tables)
    cleanupRelationships(tableToDelete, tables)
    delete tables[tableToDelete.name]
    datasource.entities = tables
  try {
    const { datasource, table } = await sdk.tables.external.destroy(
      datasourceId!,
      tableToDelete
    )
    builderSocket?.emitDatasourceUpdate(ctx, datasource)
    return table
  } catch (err: any) {
    if (err instanceof Error) {
      ctx.throw(400, err.message)
    } else {
      ctx.throw(err.status || 500, err.message || err)
    }
  }

  await db.put(sdk.tables.populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await sdk.datasources.get(datasource._id!)
  builderSocket?.emitDatasourceUpdate(ctx, updatedDatasource)

  return tableToDelete
}

export async function bulkImport(
@ -1,14 +1,5 @@
import { updateLinks, EventType } from "../../../db/linkedRows"
import { getRowParams, generateTableID } from "../../../db/utils"
import { FieldTypes } from "../../../constants"
import { TableSaveFunctions, hasTypeChanged, handleDataImport } from "./utils"
import { context } from "@budibase/backend-core"
import env from "../../../environment"
import {
  cleanupAttachments,
  fixAutoColumnSubType,
} from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
import { generateTableID } from "../../../db/utils"
import { handleDataImport } from "./utils"
import {
  BulkImportRequest,
  BulkImportResponse,

@ -17,195 +8,52 @@ import {
  SaveTableResponse,
  Table,
  UserCtx,
  ViewStatisticsSchema,
  ViewV2,
} from "@budibase/types"
import { quotas } from "@budibase/pro"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"

function checkAutoColumns(table: Table, oldTable?: Table) {
  if (!table.schema) {
    return table
  }
  for (let [key, schema] of Object.entries(table.schema)) {
    if (!schema.autocolumn || schema.subtype) {
      continue
    }
    const oldSchema = oldTable && oldTable.schema[key]
    if (oldSchema && oldSchema.subtype) {
      table.schema[key].subtype = oldSchema.subtype
    } else {
      table.schema[key] = fixAutoColumnSubType(schema)
    }
  }
  return table
}

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  const db = context.getAppDB()
  const { rows, ...rest } = ctx.request.body
  let tableToSave: Table & {
    _rename?: { old: string; updated: string } | undefined
    _rename?: RenameColumn
  } = {
    type: "table",
    _id: generateTableID(),
    views: {},
    ...rest,
  }
  const renaming = tableToSave._rename
  delete tableToSave._rename

  // if the table obj had an _id then it will have been retrieved
  let oldTable: Table | undefined
  if (ctx.request.body && ctx.request.body._id) {
    oldTable = await sdk.tables.getTable(ctx.request.body._id)
  }

  // check all types are correct
  if (hasTypeChanged(tableToSave, oldTable)) {
    ctx.throw(400, "A column type has changed.")
  }
  // check that subtypes have been maintained
  tableToSave = checkAutoColumns(tableToSave, oldTable)

  // saving a table is a complex operation, involving many different steps, this
  // has been broken out into a utility to make it more obvious/easier to manipulate
  const tableSaveFunctions = new TableSaveFunctions({
    user: ctx.user,
    oldTable,
    importRows: rows,
  })
  tableToSave = await tableSaveFunctions.before(tableToSave)

  // make sure that types don't change of a column, have to remove
  // the column if you want to change the type
  if (oldTable && oldTable.schema) {
    for (const propKey of Object.keys(tableToSave.schema)) {
      let oldColumn = oldTable.schema[propKey]
      if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
        oldTable.schema[propKey].type = FieldTypes.AUTO
      }
    }
  }

  // Don't rename if the name is the same
  let _rename: RenameColumn | undefined = tableToSave._rename
  /* istanbul ignore next */
  if (_rename && _rename.old === _rename.updated) {
    _rename = undefined
    delete tableToSave._rename
  }

  // rename row fields when table column is renamed
  /* istanbul ignore next */
  if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
    ctx.throw(400, "Cannot rename a linked column.")
  }

  tableToSave = await tableSaveFunctions.mid(tableToSave)

  // update schema of non-statistics views when new columns are added
  for (let view in tableToSave.views) {
    const tableView = tableToSave.views[view]
    if (!tableView) continue

    if (sdk.views.isV2(tableView)) {
      tableToSave.views[view] = sdk.views.syncSchema(
        oldTable!.views![view] as ViewV2,
        tableToSave.schema,
        _rename
      )
      continue
    }

    if (
      (tableView.schema as ViewStatisticsSchema).group ||
      tableView.schema.field
    )
      continue
    tableView.schema = tableToSave.schema
  }

  // update linked rows
  try {
    const linkResp: any = await updateLinks({
      eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
      table: tableToSave,
      oldTable: oldTable,
    const { table } = await sdk.tables.internal.save(tableToSave, {
      user: ctx.user,
      rowsToImport: rows,
      tableId: ctx.request.body._id,
      renaming: renaming,
    })
    if (linkResp != null && linkResp._rev) {
      tableToSave._rev = linkResp._rev

    return table
  } catch (err: any) {
    if (err instanceof Error) {
      ctx.throw(400, err.message)
    } else {
      ctx.throw(err.status || 500, err.message || err)
    }
  } catch (err) {
    ctx.throw(400, err as string)
  }

  // don't perform any updates until relationships have been
  // checked by the updateLinks function
  const updatedRows = tableSaveFunctions.getUpdatedRows()
  if (updatedRows && updatedRows.length !== 0) {
    await db.bulkDocs(updatedRows)
  }
  let result = await db.put(tableToSave)
  tableToSave._rev = result.rev
  const savedTable = cloneDeep(tableToSave)

  tableToSave = await tableSaveFunctions.after(tableToSave)
  // the table may be updated as part of the table save after functionality - need to write it
  if (!isEqual(savedTable, tableToSave)) {
    result = await db.put(tableToSave)
    tableToSave._rev = result.rev
  }
  // has to run after, make sure it has _id
  await runStaticFormulaChecks(tableToSave, { oldTable, deletion: false })
  return tableToSave
}

export async function destroy(ctx: any) {
  const db = context.getAppDB()
export async function destroy(ctx: UserCtx) {
  const tableToDelete = await sdk.tables.getTable(ctx.params.tableId)

  // Delete all rows for that table
  const rowsData = await db.allDocs(
    getRowParams(ctx.params.tableId, null, {
      include_docs: true,
    })
  )
  await db.bulkDocs(
    rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
  )
  await quotas.removeRows(rowsData.rows.length, {
    tableId: ctx.params.tableId,
  })

  // update linked rows
  await updateLinks({
    eventType: EventType.TABLE_DELETE,
    table: tableToDelete,
  })

  // don't remove the table itself until very end
  await db.remove(tableToDelete._id!, tableToDelete._rev)

  // remove table search index
  if (!env.isTest() || env.COUCH_DB_URL) {
    const currentIndexes = await db.getIndexes()
    const existingIndex = currentIndexes.indexes.find(
      (existing: any) => existing.name === `search:${ctx.params.tableId}`
    )
    if (existingIndex) {
      await db.deleteIndex(existingIndex)
  try {
    const { table } = await sdk.tables.internal.destroy(tableToDelete)
    return table
  } catch (err: any) {
    if (err instanceof Error) {
      ctx.throw(400, err.message)
    } else {
      ctx.throw(err.status || 500, err.message || err)
    }
  }

  // has to run after, make sure it has _id
  await runStaticFormulaChecks(tableToDelete, {
    deletion: true,
  })
  await cleanupAttachments(tableToDelete, {
    rows: rowsData.rows.map((row: any) => row.doc),
  })
  return tableToDelete
}

export async function bulkImport(

@ -213,6 +61,10 @@ export async function bulkImport(
) {
  const table = await sdk.tables.getTable(ctx.params.tableId)
  const { rows, identifierFields } = ctx.request.body
  await handleDataImport(ctx.user, table, rows, identifierFields)
  await handleDataImport(table, {
    importRows: rows,
    identifierFields,
    user: ctx.user,
  })
  return table
}
@ -26,9 +26,16 @@ import {
  Row,
  SourceName,
  Table,
  Database,
  RenameColumn,
  NumberFieldMetadata,
  FieldSchema,
  View,
  RelationshipFieldMetadata,
  FieldType,
} from "@budibase/types"

export async function clearColumns(table: any, columnNames: any) {
export async function clearColumns(table: Table, columnNames: string[]) {
  const db = context.getAppDB()
  const rows = await db.allDocs(
    getRowParams(table._id, null, {

@ -43,10 +50,13 @@ export async function clearColumns(table: any, columnNames: any) {
  )) as { id: string; _rev?: string }[]
}

export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
export async function checkForColumnUpdates(
  updatedTable: Table,
  oldTable?: Table,
  columnRename?: RenameColumn
) {
  const db = context.getAppDB()
  let updatedRows = []
  const rename = updatedTable._rename
  let deletedColumns: any = []
  if (oldTable && oldTable.schema && updatedTable.schema) {
    deletedColumns = Object.keys(oldTable.schema).filter(

@ -54,7 +64,7 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    )
  }
  // check for renaming of columns or deleted columns
  if (rename || deletedColumns.length !== 0) {
  if (columnRename || deletedColumns.length !== 0) {
    // Update all rows
    const rows = await db.allDocs(
      getRowParams(updatedTable._id, null, {

@ -64,9 +74,9 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    const rawRows = rows.rows.map(({ doc }: any) => doc)
    updatedRows = rawRows.map((row: any) => {
      row = cloneDeep(row)
      if (rename) {
        row[rename.updated] = row[rename.old]
        delete row[rename.old]
      if (columnRename) {
        row[columnRename.updated] = row[columnRename.old]
        delete row[columnRename.old]
      } else if (deletedColumns.length !== 0) {
        deletedColumns.forEach((colName: any) => delete row[colName])
      }

@ -76,14 +86,13 @@ export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
    // cleanup any attachments from object storage for deleted attachment columns
    await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
    // Update views
    await checkForViewUpdates(updatedTable, rename, deletedColumns)
    delete updatedTable._rename
    await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
  }
  return { rows: updatedRows, table: updatedTable }
}

// makes sure the passed in table isn't going to reset the auto ID
export function makeSureTableUpToDate(table: any, tableToSave: any) {
export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
  if (!table) {
    return tableToSave
  }

@ -99,16 +108,17 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
      column.subtype === AutoFieldSubTypes.AUTO_ID &&
      tableToSave.schema[field]
    ) {
      tableToSave.schema[field].lastID = column.lastID
      const tableCol = tableToSave.schema[field] as NumberFieldMetadata
      tableCol.lastID = column.lastID
    }
  }
  return tableToSave
}

export async function importToRows(
  data: any[],
  data: Row[],
  table: Table,
  user: ContextUser | null = null
  user?: ContextUser
) {
  let originalTable = table
  let finalData: any = []

@ -150,19 +160,20 @@ export async function importToRows(
}

export async function handleDataImport(
  user: ContextUser,
  table: Table,
  rows: Row[],
  identifierFields: Array<string> = []
  opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
) {
  const schema = table.schema
  const identifierFields = opts?.identifierFields || []
  const user = opts?.user
  const importRows = opts?.importRows

  if (!rows || !isRows(rows) || !isSchema(schema)) {
  if (!importRows || !isRows(importRows) || !isSchema(schema)) {
    return table
  }

  const db = context.getAppDB()
  const data = parse(rows, schema)
  const data = parse(importRows, schema)

  let finalData: any = await importToRows(data, table, user)


@ -200,7 +211,7 @@ export async function handleDataImport(
  return table
}

export async function handleSearchIndexes(table: any) {
export async function handleSearchIndexes(table: Table) {
  const db = context.getAppDB()
  // create relevant search indexes
  if (table.indexes && table.indexes.length > 0) {

@ -244,13 +255,13 @@ export async function handleSearchIndexes(table: any) {
  return table
}

export function checkStaticTables(table: any) {
export function checkStaticTables(table: Table) {
  // check user schema has all required elements
  if (table._id === InternalTables.USER_METADATA) {
    for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
      // check if the schema exists on the table to be created/updated
      if (table.schema[key] == null) {
        table.schema[key] = schema
        table.schema[key] = schema as FieldSchema
      }
    }
  }

@ -258,13 +269,21 @@ export function checkStaticTables(table: any) {
}

class TableSaveFunctions {
  db: any
  user: any
  oldTable: any
  importRows: any
  rows: any
  db: Database
  user?: ContextUser
  oldTable?: Table
  importRows?: Row[]
  rows: Row[]

  constructor({ user, oldTable, importRows }: any) {
  constructor({
    user,
    oldTable,
    importRows,
  }: {
    user?: ContextUser
    oldTable?: Table
    importRows?: Row[]
  }) {
    this.db = context.getAppDB()
    this.user = user
    this.oldTable = oldTable

@ -274,7 +293,7 @@ class TableSaveFunctions {
  }

  // before anything is done
  async before(table: any) {
  async before(table: Table) {
    if (this.oldTable) {
      table = makeSureTableUpToDate(this.oldTable, table)
    }

@ -283,16 +302,23 @@ class TableSaveFunctions {
  }

  // when confirmed valid
  async mid(table: any) {
    let response = await checkForColumnUpdates(this.oldTable, table)
  async mid(table: Table, columnRename?: RenameColumn) {
    let response = await checkForColumnUpdates(
      table,
      this.oldTable,
      columnRename
    )
    this.rows = this.rows.concat(response.rows)
    return table
  }

  // after saving
  async after(table: any) {
  async after(table: Table) {
    table = await handleSearchIndexes(table)
    table = await handleDataImport(this.user, table, this.importRows)
    table = await handleDataImport(table, {
      importRows: this.importRows,
      user: this.user,
    })
    return table
  }


@ -302,9 +328,9 @@ class TableSaveFunctions {
}

export async function checkForViewUpdates(
  table: any,
  rename: any,
  deletedColumns: any
  table: Table,
  deletedColumns: string[],
  columnRename?: RenameColumn
) {
  const views = await getViews()
  const tableViews = views.filter(view => view.meta.tableId === table._id)

@ -314,30 +340,30 @@ export async function checkForViewUpdates(
    let needsUpdated = false

    // First check for renames, otherwise check for deletions
    if (rename) {
    if (columnRename) {
      // Update calculation field if required
      if (view.meta.field === rename.old) {
        view.meta.field = rename.updated
      if (view.meta.field === columnRename.old) {
        view.meta.field = columnRename.updated
        needsUpdated = true
      }

      // Update group by field if required
      if (view.meta.groupBy === rename.old) {
        view.meta.groupBy = rename.updated
      if (view.meta.groupBy === columnRename.old) {
        view.meta.groupBy = columnRename.updated
        needsUpdated = true
      }

      // Update filters if required
      if (view.meta.filters) {
        view.meta.filters.forEach((filter: any) => {
          if (filter.key === rename.old) {
            filter.key = rename.updated
          if (filter.key === columnRename.old) {
            filter.key = columnRename.updated
            needsUpdated = true
          }
        })
      }
    } else if (deletedColumns) {
      deletedColumns.forEach((column: any) => {
      deletedColumns.forEach((column: string) => {
        // Remove calculation statement if required
        if (view.meta.field === column) {
          delete view.meta.field

@ -378,24 +404,29 @@ export async function checkForViewUpdates(
      if (!newViewTemplate.meta.schema) {
        newViewTemplate.meta.schema = table.schema
      }
      table.views[view.name] = newViewTemplate.meta
      if (table.views?.[view.name]) {
        table.views[view.name] = newViewTemplate.meta as View
      }
    }
  }
}

export function generateForeignKey(column: any, relatedTable: any) {
export function generateForeignKey(
  column: RelationshipFieldMetadata,
  relatedTable: Table
) {
  return `fk_${relatedTable.name}_${column.fieldName}`
}

export function generateJunctionTableName(
  column: any,
  table: any,
  relatedTable: any
  column: RelationshipFieldMetadata,
  table: Table,
  relatedTable: Table
) {
  return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}

export function foreignKeyStructure(keyName: any, meta?: any) {
export function foreignKeyStructure(keyName: string, meta?: any) {
  const structure: any = {
    type: FieldTypes.NUMBER,
    constraints: {},

@ -407,7 +438,7 @@ export function foreignKeyStructure(keyName: any, meta?: any) {
  return structure
}

export function areSwitchableTypes(type1: any, type2: any) {
export function areSwitchableTypes(type1: FieldType, type2: FieldType) {
  if (
    SwitchableTypes.indexOf(type1) === -1 &&
    SwitchableTypes.indexOf(type2) === -1
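For illustration only: given a hypothetical link column { name: "tracks", fieldName: "songId" } on a table named "album" relating to a table named "song", the naming helpers above yield:

// generateForeignKey(column, relatedTable) -> "fk_song_songId"
// generateJunctionTableName(column, table, relatedTable) -> "jt_album_song_tracks_songId"
// foreignKeyStructure("fk_song_songId") -> { type: FieldTypes.NUMBER, constraints: {}, ... }
//   (the meta handling elided by the hunk above attaches relationship info when provided)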
@ -12,14 +12,14 @@ describe("run misc tests", () => {
  })

  describe("/bbtel", () => {
    it("check if analytics enabled", async () => {
      const res = await request
        .get(`/api/bbtel`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(typeof res.body.enabled).toEqual("boolean")
    })
    it("check if analytics enabled", async () => {
      const res = await request
        .get(`/api/bbtel`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(typeof res.body.enabled).toEqual("boolean")
    })
  })

  describe("/health", () => {

@ -37,7 +37,6 @@ describe("run misc tests", () => {
      } else {
        expect(text.split(".").length).toEqual(3)
      }

    })
  })


@ -93,77 +92,79 @@ describe("run misc tests", () => {
          constraints: {
            type: "array",
            presence: {
              "allowEmpty": true
              allowEmpty: true,
            },
            inclusion: [
              "One",
              "Two",
              "Three",
            ]
            inclusion: ["One", "Two", "Three"],
          },
          name: "Sample Tags",
          sortable: false
          sortable: false,
        },
        g: {
          type: "options",
          constraints: {
            type: "string",
            presence: false,
            inclusion: [
              "Alpha",
              "Beta",
              "Gamma"
            ]
            inclusion: ["Alpha", "Beta", "Gamma"],
          },
          name: "Sample Opts"
        }
          name: "Sample Opts",
        },
      },
    })


    const importRows = [
      { a: "1", b: "2", c: "3", d: "4", f: "['One']", g: "Alpha" },
      { a: "5", b: "6", c: "7", d: "8", f: "[]", g: undefined },
      { a: "9", b: "10", c: "11", d: "12", f: "['Two','Four']", g: "" },
      { a: "13", b: "14", c: "15", d: "16", g: "Omega" },
    ]
    // Shift specific row tests to the row spec
    await tableUtils.handleDataImport(
      { userId: "test" },
      table,
      [
        { a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" },
        { a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined},
        { a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""},
        { a: '13', b: '14', c: '15', d: '16', g: "Omega"}
      ]
    )
    await tableUtils.handleDataImport(table, {
      importRows,
      user: { userId: "test" },
    })

    // 4 rows imported, the auto ID starts at 1
    // We expect the handleDataImport function to update the lastID
    expect(table.schema.e.lastID).toEqual(4);

    expect(table.schema.e.lastID).toEqual(4)

    // Array/Multi - should have added a new value to the inclusion.
    expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']);
    expect(table.schema.f.constraints.inclusion).toEqual([
      "Four",
      "One",
      "Three",
      "Two",
    ])

    // Options - should have a new value in the inclusion
    expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']);
    expect(table.schema.g.constraints.inclusion).toEqual([
      "Alpha",
      "Beta",
      "Gamma",
      "Omega",
    ])

    const rows = await config.getRows()
    expect(rows.length).toEqual(4);
    expect(rows.length).toEqual(4)

    const rowOne = rows.find(row => row.e === 1)
    expect(rowOne.a).toEqual("1")
    expect(rowOne.f).toEqual(['One'])
    expect(rowOne.g).toEqual('Alpha')
    expect(rowOne.f).toEqual(["One"])
    expect(rowOne.g).toEqual("Alpha")

    const rowTwo = rows.find(row => row.e === 2)
    expect(rowTwo.a).toEqual("5")
    expect(rowTwo.f).toEqual([])
    expect(rowTwo.g).toEqual(undefined)


    const rowThree = rows.find(row => row.e === 3)
    expect(rowThree.a).toEqual("9")
    expect(rowThree.f).toEqual(['Two','Four'])
    expect(rowThree.f).toEqual(["Two", "Four"])
    expect(rowThree.g).toEqual(null)

    const rowFour = rows.find(row => row.e === 4)
    expect(rowFour.a).toEqual("13")
    expect(rowFour.f).toEqual(undefined)
    expect(rowFour.g).toEqual('Omega')
    expect(rowFour.g).toEqual("Omega")
    })
  })
})
@ -0,0 +1,196 @@
import {
  Operation,
  RelationshipType,
  RenameColumn,
  Table,
  TableRequest,
  ViewV2,
} from "@budibase/types"
import { context } from "@budibase/backend-core"
import { buildExternalTableId } from "../../../../integrations/utils"
import {
  foreignKeyStructure,
  hasTypeChanged,
  setStaticSchemas,
} from "../../../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import { FieldTypes } from "../../../../constants"
import { makeTableRequest } from "../../../../api/controllers/table/ExternalRequest"
import {
  isRelationshipSetup,
  cleanupRelationships,
  generateLinkSchema,
  generateManyLinkSchema,
  generateRelatedSchema,
} from "./utils"

import { getTable } from "../getters"
import { populateExternalTableSchemas } from "../validation"
import datasourceSdk from "../../datasources"
import * as viewSdk from "../../views"

export async function save(
  datasourceId: string,
  update: Table,
  opts?: { tableId?: string; renaming?: RenameColumn }
) {
  let tableToSave: TableRequest = {
    type: "table",
    _id: buildExternalTableId(datasourceId, update.name),
    sourceId: datasourceId,
    ...update,
  }

  const tableId = opts?.tableId || update._id
  let oldTable: Table | undefined
  if (tableId) {
    oldTable = await getTable(tableId)
  }

  if (hasTypeChanged(tableToSave, oldTable)) {
    throw new Error("A column type has changed.")
  }

  for (let view in tableToSave.views) {
    const tableView = tableToSave.views[view]
    if (!tableView || !viewSdk.isV2(tableView)) continue

    tableToSave.views[view] = viewSdk.syncSchema(
      oldTable!.views![view] as ViewV2,
      tableToSave.schema,
      opts?.renaming
    )
  }

  const db = context.getAppDB()
  const datasource = await datasourceSdk.get(datasourceId)
  if (!datasource.entities) {
    datasource.entities = {}
  }

  // GSheets is a specific case - only ever has a static primary key
  tableToSave = setStaticSchemas(datasource, tableToSave)

  const oldTables = cloneDeep(datasource.entities)
  const tables: Record<string, Table> = datasource.entities

  const extraTablesToUpdate = []

  // check if relations need setup
  for (let schema of Object.values(tableToSave.schema)) {
    if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
      continue
    }
    const schemaTableId = schema.tableId
    const relatedTable = Object.values(tables).find(
      table => table._id === schemaTableId
    )
    if (!relatedTable) {
      continue
    }
    const relatedColumnName = schema.fieldName!
    const relationType = schema.relationshipType
    if (relationType === RelationshipType.MANY_TO_MANY) {
      const junctionTable = generateManyLinkSchema(
        datasource,
        schema,
        tableToSave,
        relatedTable
      )
      if (tables[junctionTable.name]) {
        throw new Error(
          "Junction table already exists, cannot create another relationship."
        )
      }
      tables[junctionTable.name] = junctionTable
      extraTablesToUpdate.push(junctionTable)
    } else {
      const fkTable =
        relationType === RelationshipType.ONE_TO_MANY
          ? tableToSave
          : relatedTable
      const foreignKey = generateLinkSchema(
        schema,
        tableToSave,
        relatedTable,
        relationType
      )
      if (fkTable.schema[foreignKey] != null) {
        throw new Error(
          `Unable to generate foreign key - column ${foreignKey} already in use.`
        )
      }
      fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
      if (fkTable.constrained == null) {
        fkTable.constrained = []
      }
      if (fkTable.constrained.indexOf(foreignKey) === -1) {
        fkTable.constrained.push(foreignKey)
      }
      // foreign key is in other table, need to save it to external
      if (fkTable._id !== tableToSave._id) {
        extraTablesToUpdate.push(fkTable)
      }
    }
    generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
    schema.main = true
  }

  cleanupRelationships(tableToSave, tables, oldTable)

  const operation = tableId ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
  await makeTableRequest(
    datasource,
    operation,
    tableToSave,
    tables,
    oldTable,
    opts?.renaming
  )
  // update any extra tables (like foreign keys in other tables)
  for (let extraTable of extraTablesToUpdate) {
    const oldExtraTable = oldTables[extraTable.name]
    let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
    await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
  }

  // make sure the constrained list, all still exist
  if (Array.isArray(tableToSave.constrained)) {
    tableToSave.constrained = tableToSave.constrained.filter(constraint =>
      Object.keys(tableToSave.schema).includes(constraint)
    )
  }

  // remove the rename prop
  delete tableToSave._rename
  // store it into couch now for budibase reference
  datasource.entities[tableToSave.name] = tableToSave
  await db.put(populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await datasourceSdk.get(datasource._id!)

  return { datasource: updatedDatasource, table: tableToSave }
}

export async function destroy(datasourceId: string, table: Table) {
  const db = context.getAppDB()
  const datasource = await datasourceSdk.get(datasourceId)
  const tables = datasource.entities

  const operation = Operation.DELETE_TABLE
  if (tables) {
    await makeTableRequest(datasource, operation, table, tables)
    cleanupRelationships(table, tables)
    delete tables[table.name]
    datasource.entities = tables
  }

  await db.put(populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
  // that the datasource definition changed
  const updatedDatasource = await datasourceSdk.get(datasource._id!)
  return { datasource: updatedDatasource, table }
}
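A minimal sketch of the call shape, matching how the rewritten external controller earlier in this commit consumes these functions (the IDs are placeholders):

const { datasource, table } = await sdk.tables.external.save(
  "datasource_abc", // placeholder datasource ID
  tableUpdate, // the Table body from the request
  { tableId: tableUpdate._id, renaming: { old: "name", updated: "title" } }
)
// destroy is symmetric and also returns the refreshed datasource
await sdk.tables.external.destroy("datasource_abc", table)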
@ -0,0 +1,161 @@
|
|||
import {
|
||||
Datasource,
|
||||
ManyToManyRelationshipFieldMetadata,
|
||||
ManyToOneRelationshipFieldMetadata,
|
||||
OneToManyRelationshipFieldMetadata,
|
||||
RelationshipFieldMetadata,
|
||||
RelationshipType,
|
||||
Table,
|
||||
} from "@budibase/types"
|
||||
import { FieldTypes } from "../../../../constants"
|
||||
import {
|
||||
foreignKeyStructure,
|
||||
generateForeignKey,
|
||||
generateJunctionTableName,
|
||||
} from "../../../../api/controllers/table/utils"
|
||||
import { buildExternalTableId } from "../../../../integrations/utils"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
|
||||
export function cleanupRelationships(
|
||||
table: Table,
|
||||
tables: Record<string, Table>,
|
||||
oldTable?: Table
|
||||
) {
|
||||
const tableToIterate = oldTable ? oldTable : table
|
||||
// clean up relationships in couch table schemas
|
||||
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
|
||||
if (
|
||||
schema.type === FieldTypes.LINK &&
|
||||
(!oldTable || table.schema[key] == null)
|
||||
) {
|
||||
const schemaTableId = schema.tableId
|
||||
const relatedTable = Object.values(tables).find(
|
||||
table => table._id === schemaTableId
|
||||
)
|
||||
const foreignKey =
|
||||
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
|
||||
schema.foreignKey
|
||||
if (!relatedTable || !foreignKey) {
|
||||
continue
|
||||
}
|
||||
for (let [relatedKey, relatedSchema] of Object.entries(
|
||||
relatedTable.schema
|
||||
)) {
|
||||
if (
|
||||
relatedSchema.type === FieldTypes.LINK &&
|
||||
relatedSchema.fieldName === foreignKey
|
||||
) {
|
||||
delete relatedTable.schema[relatedKey]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function otherRelationshipType(type: RelationshipType) {
|
||||
if (type === RelationshipType.MANY_TO_MANY) {
|
||||
return RelationshipType.MANY_TO_MANY
|
||||
}
|
||||
return type === RelationshipType.ONE_TO_MANY
|
||||
? RelationshipType.MANY_TO_ONE
|
||||
: RelationshipType.ONE_TO_MANY
|
||||
}
|
||||
|
||||
export function generateManyLinkSchema(
|
||||
datasource: Datasource,
|
||||
column: ManyToManyRelationshipFieldMetadata,
|
||||
table: Table,
|
||||
relatedTable: Table
|
||||
): Table {
|
||||
if (!table.primary || !relatedTable.primary) {
|
||||
const noPrimaryName = !table.primary ? table.name : relatedTable.name
|
||||
throw new Error(
|
||||
`Unable to generate many link schema, "${noPrimaryName}" does not have a primary key`
|
||||
)
|
||||
}
|
||||
const primary = table.name + table.primary[0]
|
||||
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
|
||||
const jcTblName = generateJunctionTableName(column, table, relatedTable)
|
||||
// first create the new table
|
||||
const junctionTable = {
|
||||
_id: buildExternalTableId(datasource._id!, jcTblName),
|
||||
name: jcTblName,
|
||||
primary: [primary, relatedPrimary],
|
||||
constrained: [primary, relatedPrimary],
|
||||
schema: {
|
||||
[primary]: foreignKeyStructure(primary, {
|
||||
toTable: table.name,
|
||||
toKey: table.primary[0],
|
||||
}),
|
||||
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
|
||||
toTable: relatedTable.name,
|
||||
toKey: relatedTable.primary[0],
|
||||
}),
|
||||
},
|
||||
}
|
||||
column.through = junctionTable._id
|
||||
column.throughFrom = relatedPrimary
|
||||
column.throughTo = primary
|
||||
column.fieldName = relatedPrimary
|
||||
return junctionTable
|
||||
}
|
||||
|
||||
export function generateLinkSchema(
|
||||
column:
|
||||
| OneToManyRelationshipFieldMetadata
|
||||
| ManyToOneRelationshipFieldMetadata,
|
||||
table: Table,
|
||||
relatedTable: Table,
|
||||
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
|
||||
) {
|
||||
if (!table.primary || !relatedTable.primary) {
|
||||
throw new Error("Unable to generate link schema, no primary keys")
|
||||
}
|
||||
const isOneSide = type === RelationshipType.ONE_TO_MANY
|
||||
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
|
||||
// generate a foreign key
|
||||
const foreignKey = generateForeignKey(column, relatedTable)
|
||||
column.relationshipType = type
|
||||
column.foreignKey = isOneSide ? foreignKey : primary
|
||||
column.fieldName = isOneSide ? primary : foreignKey
|
||||
return foreignKey
|
||||
}
|
||||
|
||||
export function generateRelatedSchema(
  linkColumn: RelationshipFieldMetadata,
  table: Table,
  relatedTable: Table,
  columnName: string
) {
  // generate column for other table
  let relatedSchema: RelationshipFieldMetadata
  const isMany2Many =
    linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
  // swap them from the main link
  if (!isMany2Many && linkColumn.foreignKey) {
    relatedSchema = cloneDeep(linkColumn) as
      | OneToManyRelationshipFieldMetadata
      | ManyToOneRelationshipFieldMetadata
    relatedSchema.fieldName = linkColumn.foreignKey
    relatedSchema.foreignKey = linkColumn.fieldName
  }
  // is many to many
  else {
    const manyToManyCol = linkColumn as ManyToManyRelationshipFieldMetadata
    relatedSchema = cloneDeep(linkColumn) as ManyToManyRelationshipFieldMetadata
    // don't need to copy through, already got it
    relatedSchema.fieldName = manyToManyCol.throughTo!
    relatedSchema.throughTo = manyToManyCol.throughFrom
    relatedSchema.throughFrom = manyToManyCol.throughTo
  }
  relatedSchema.relationshipType = otherRelationshipType(
    linkColumn.relationshipType
  )
  relatedSchema.tableId = relatedTable._id!
  relatedSchema.name = columnName
  table.schema[columnName] = relatedSchema
}

export function isRelationshipSetup(column: RelationshipFieldMetadata) {
  return (column as any).foreignKey || (column as any).through
}

@@ -0,0 +1,124 @@
import { context } from "@budibase/backend-core"
import {
  BudibaseInternalDB,
  getMultiIDParams,
  getTableParams,
} from "../../../db/utils"
import {
  breakExternalTableId,
  isExternalTable,
  isSQL,
} from "../../../integrations/utils"
import {
  AllDocsResponse,
  Database,
  Table,
  TableResponse,
  TableViewsResponse,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"

function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
  return docs.rows.map((tableDoc: any) => ({
    ...tableDoc.doc,
    type: "internal",
    sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
  }))
}

export async function getAllInternalTables(db?: Database): Promise<Table[]> {
  if (!db) {
    db = context.getAppDB()
  }
  const internalTables = await db.allDocs<Table[]>(
    getTableParams(null, {
      include_docs: true,
    })
  )
  return processInternalTables(internalTables)
}

async function getAllExternalTables(): Promise<Table[]> {
  const datasources = await sdk.datasources.fetch({ enriched: true })
  const allEntities = datasources.map(datasource => datasource.entities)
  let final: Table[] = []
  for (let entities of allEntities) {
    if (entities) {
      final = final.concat(Object.values(entities))
    }
  }
  return final
}

export async function getExternalTable(
  datasourceId: string,
  tableName: string
): Promise<Table> {
  const entities = await getExternalTablesInDatasource(datasourceId)
  return entities[tableName]
}

export async function getTable(tableId: string): Promise<Table> {
  const db = context.getAppDB()
  if (isExternalTable(tableId)) {
    let { datasourceId, tableName } = breakExternalTableId(tableId)
    const datasource = await datasources.get(datasourceId!)
    const table = await getExternalTable(datasourceId!, tableName!)
    return { ...table, sql: isSQL(datasource) }
  } else {
    return db.get(tableId)
  }
}

export async function getAllTables() {
  const [internal, external] = await Promise.all([
    getAllInternalTables(),
    getAllExternalTables(),
  ])
  return [...internal, ...external]
}

export async function getExternalTablesInDatasource(
  datasourceId: string
): Promise<Record<string, Table>> {
  const datasource = await datasources.get(datasourceId, { enriched: true })
  if (!datasource || !datasource.entities) {
    throw new Error("Datasource is not configured fully.")
  }
  return datasource.entities
}

export async function getTables(tableIds: string[]): Promise<Table[]> {
  const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
    internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
  let tables: Table[] = []
  if (externalTableIds.length) {
    const externalTables = await getAllExternalTables()
    tables = tables.concat(
      externalTables.filter(
        table => externalTableIds.indexOf(table._id!) !== -1
      )
    )
  }
  if (internalTableIds.length) {
    const db = context.getAppDB()
    const internalTableDocs = await db.allDocs<Table[]>(
      getMultiIDParams(internalTableIds)
    )
    tables = tables.concat(processInternalTables(internalTableDocs))
  }
  return tables
}

export function enrichViewSchemas(table: Table): TableResponse {
  return {
    ...table,
    views: Object.values(table.views ?? [])
      .map(v => sdk.views.enrichSchema(v, table.schema))
      .reduce((p, v) => {
        p[v.name!] = v
        return p
      }, {} as TableViewsResponse),
  }
}

@@ -1,167 +1,11 @@
import { context } from "@budibase/backend-core"
import {
  BudibaseInternalDB,
  getMultiIDParams,
  getTableParams,
} from "../../../db/utils"
import {
  breakExternalTableId,
  isExternalTable,
  isSQL,
} from "../../../integrations/utils"
import {
  AllDocsResponse,
  Database,
  FieldSchema,
  Table,
  TableResponse,
  TableViewsResponse,
} from "@budibase/types"
import datasources from "../datasources"
import { populateExternalTableSchemas } from "./validation"
import sdk from "../../../sdk"
import { migrate } from "./migration"
import { DocumentInsertResponse } from "@budibase/nano"
import { cloneDeep } from "lodash"

function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
  return docs.rows.map((tableDoc: any) => ({
    ...tableDoc.doc,
    type: "internal",
    sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
  }))
}

async function getAllInternalTables(db?: Database): Promise<Table[]> {
  if (!db) {
    db = context.getAppDB()
  }
  const internalTableDocs = await db.allDocs<Table[]>(
    getTableParams(null, {
      include_docs: true,
    })
  )
  return processInternalTables(internalTableDocs)
}

async function getAllExternalTables(): Promise<Table[]> {
  const datasources = await sdk.datasources.fetch({ enriched: true })
  const allEntities = datasources.map(datasource => datasource.entities)
  let final: Table[] = []
  for (let entities of allEntities) {
    if (entities) {
      final = final.concat(Object.values(entities))
    }
  }
  return final
}

async function getAllTables() {
  const [internal, external] = await Promise.all([
    getAllInternalTables(),
    getAllExternalTables(),
  ])
  return [...internal, external]
}

async function getTables(tableIds: string[]): Promise<Table[]> {
  const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
    internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
  let tables: Table[] = []
  if (externalTableIds.length) {
    const externalTables = await getAllExternalTables()
    tables = tables.concat(
      externalTables.filter(
        table => externalTableIds.indexOf(table._id!) !== -1
      )
    )
  }
  if (internalTableIds.length) {
    const db = context.getAppDB()
    const internalTableDocs = await db.allDocs<Table[]>(
      getMultiIDParams(internalTableIds)
    )
    tables = tables.concat(processInternalTables(internalTableDocs))
  }
  return tables
}

async function getExternalTablesInDatasource(
  datasourceId: string
): Promise<Record<string, Table>> {
  const datasource = await datasources.get(datasourceId, { enriched: true })
  if (!datasource || !datasource.entities) {
    throw "Datasource is not configured fully."
  }
  return datasource.entities
}

async function getExternalTable(
  datasourceId: string,
  tableName: string
): Promise<Table> {
  const entities = await getExternalTablesInDatasource(datasourceId)
  return entities[tableName]
}

async function getTable(tableId: string): Promise<Table> {
  const db = context.getAppDB()
  if (isExternalTable(tableId)) {
    let { datasourceId, tableName } = breakExternalTableId(tableId)
    const datasource = await datasources.get(datasourceId!)
    const table = await getExternalTable(datasourceId!, tableName!)
    return { ...table, sql: isSQL(datasource) }
  } else {
    return db.get<Table>(tableId)
  }
}

function enrichViewSchemas(table: Table): TableResponse {
  return {
    ...table,
    views: Object.values(table.views ?? [])
      .map(v => sdk.views.enrichSchema(v, table.schema))
      .reduce((p, v) => {
        p[v.name] = v
        return p
      }, {} as TableViewsResponse),
  }
}

async function saveTable(table: Table): Promise<Table> {
  const db = context.getAppDB()
  let resp: DocumentInsertResponse
  if (isExternalTable(table._id!)) {
    const datasource = await sdk.datasources.get(table.sourceId!)
    datasource.entities![table.name] = table
    resp = await db.put(datasource)
  } else {
    resp = await db.put(table)
  }

  let tableClone = cloneDeep(table)
  tableClone._rev = resp.rev
  return tableClone
}

async function addColumn(table: Table, newColumn: FieldSchema): Promise<Table> {
  if (newColumn.name in table.schema) {
    throw `Column "${newColumn.name}" already exists on table "${table.name}"`
  }
  table.schema[newColumn.name] = newColumn
  return await saveTable(table)
}
import * as getters from "./getters"
import * as updates from "./update"
import * as utils from "./utils"

export default {
  getAllInternalTables,
  getExternalTablesInDatasource,
  getExternalTable,
  getTable,
  getAllTables,
  getTables,
  populateExternalTableSchemas,
  enrichViewSchemas,
  saveTable,
  addColumn,
  migrate,
  ...updates,
  ...getters,
  ...utils,
}

@@ -0,0 +1,172 @@
import {
  RenameColumn,
  Table,
  ViewStatisticsSchema,
  ViewV2,
  Row,
  ContextUser,
} from "@budibase/types"
import {
  hasTypeChanged,
  TableSaveFunctions,
} from "../../../../api/controllers/table/utils"
import { FieldTypes } from "../../../../constants"
import { EventType, updateLinks } from "../../../../db/linkedRows"
import { cloneDeep } from "lodash/fp"
import isEqual from "lodash/isEqual"
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
import { context } from "@budibase/backend-core"
import { getTable } from "../getters"
import { checkAutoColumns } from "./utils"
import * as viewsSdk from "../../views"
import sdk from "../../../index"
import { getRowParams } from "../../../../db/utils"
import { quotas } from "@budibase/pro"
import env from "../../../../environment"
import { cleanupAttachments } from "../../../../utilities/rowProcessor"

export async function save(
  table: Table,
  opts?: {
    user?: ContextUser
    tableId?: string
    rowsToImport?: Row[]
    renaming?: RenameColumn
  }
) {
  const db = context.getAppDB()

  // if the table obj had an _id then it will have been retrieved
  let oldTable: Table | undefined
  if (opts?.tableId) {
    oldTable = await getTable(opts.tableId)
  }

  // check all types are correct
  if (hasTypeChanged(table, oldTable)) {
    throw new Error("A column type has changed.")
  }
  // check that subtypes have been maintained
  table = checkAutoColumns(table, oldTable)

  // saving a table is a complex operation, involving many different steps, this
  // has been broken out into a utility to make it more obvious/easier to manipulate
  const tableSaveFunctions = new TableSaveFunctions({
    user: opts?.user,
    oldTable,
    importRows: opts?.rowsToImport,
  })
  table = await tableSaveFunctions.before(table)

  let renaming = opts?.renaming
  if (renaming && renaming.old === renaming.updated) {
    renaming = undefined
  }

  // rename row fields when table column is renamed
  if (renaming && table.schema[renaming.updated].type === FieldTypes.LINK) {
    throw new Error("Cannot rename a linked column.")
  }

  table = await tableSaveFunctions.mid(table, renaming)

  // update schema of non-statistics views when new columns are added
  for (let view in table.views) {
    const tableView = table.views[view]
    if (!tableView) continue

    if (viewsSdk.isV2(tableView)) {
      table.views[view] = viewsSdk.syncSchema(
        oldTable!.views![view] as ViewV2,
        table.schema,
        renaming
      )
      continue
    }

    if (
      (tableView.schema as ViewStatisticsSchema).group ||
      tableView.schema.field
    )
      continue
    tableView.schema = table.schema
  }

  // update linked rows
  const linkResp: any = await updateLinks({
    eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
    table: table,
    oldTable: oldTable,
  })
  if (linkResp != null && linkResp._rev) {
    table._rev = linkResp._rev
  }

  // don't perform any updates until relationships have been
  // checked by the updateLinks function
  const updatedRows = tableSaveFunctions.getUpdatedRows()
  if (updatedRows && updatedRows.length !== 0) {
    await db.bulkDocs(updatedRows)
  }
  let result = await db.put(table)
  table._rev = result.rev
  const savedTable = cloneDeep(table)

  table = await tableSaveFunctions.after(table)
  // the table may be updated as part of the table save after functionality - need to write it
  if (!isEqual(savedTable, table)) {
    result = await db.put(table)
    table._rev = result.rev
  }
  // has to run after, make sure it has _id
  await runStaticFormulaChecks(table, { oldTable, deletion: false })
  return { table }
}

export async function destroy(table: Table) {
  const db = context.getAppDB()
  const tableId = table._id!

  // Delete all rows for that table
  const rowsData = await db.allDocs(
    getRowParams(tableId, null, {
      include_docs: true,
    })
  )
  await db.bulkDocs(
    rowsData.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
  )
  await quotas.removeRows(rowsData.rows.length, {
    tableId,
  })

  // update linked rows
  await updateLinks({
    eventType: EventType.TABLE_DELETE,
    table: table,
  })

  // don't remove the table itself until very end
  await db.remove(tableId, table._rev)

  // remove table search index
  if (!env.isTest() || env.COUCH_DB_URL) {
    const currentIndexes = await db.getIndexes()
    const existingIndex = currentIndexes.indexes.find(
      (existing: any) => existing.name === `search:${tableId}`
    )
    if (existingIndex) {
      await db.deleteIndex(existingIndex)
    }
  }

  // has to run after, make sure it has _id
  await runStaticFormulaChecks(table, {
    deletion: true,
  })
  await cleanupAttachments(table, {
    rows: rowsData.rows.map((row: any) => row.doc),
  })

  return { table }
}

@@ -0,0 +1,20 @@
import { Table } from "@budibase/types"
import { fixAutoColumnSubType } from "../../../../utilities/rowProcessor"

export function checkAutoColumns(table: Table, oldTable?: Table) {
  if (!table.schema) {
    return table
  }
  for (let [key, schema] of Object.entries(table.schema)) {
    if (!schema.autocolumn || schema.subtype) {
      continue
    }
    const oldSchema = oldTable && oldTable.schema[key]
    if (oldSchema && oldSchema.subtype) {
      table.schema[key].subtype = oldSchema.subtype
    } else {
      table.schema[key] = fixAutoColumnSubType(schema)
    }
  }
  return table
}

@@ -0,0 +1,31 @@
import { Table, RenameColumn } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import sdk from "../../index"
import { context } from "@budibase/backend-core"
import { isExternal } from "./utils"

import * as external from "./external"
import * as internal from "./internal"
export * as external from "./external"
export * as internal from "./internal"

export async function saveTable(table: Table) {
  const db = context.getAppDB()
  if (isExternalTable(table._id!)) {
    const datasource = await sdk.datasources.get(table.sourceId!)
    datasource.entities![table.name] = table
    await db.put(datasource)
  } else {
    await db.put(table)
  }
}

export async function update(table: Table, renaming?: RenameColumn) {
  const tableId = table._id
  if (isExternal({ table })) {
    const datasourceId = table.sourceId!
    await external.save(datasourceId, table, { tableId, renaming })
  } else {
    await internal.save(table, { tableId, renaming })
  }
}

@@ -0,0 +1,11 @@
import { Table } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"

export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
  if (opts.table && opts.table.type === "external") {
    return true
  } else if (opts.tableId && isExternalTable(opts.tableId)) {
    return true
  }
  return false
}

@@ -59,11 +59,10 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
  const existingView = Object.values(views).find(
    v => isV2(v) && v.id === view.id
  )
  if (!existingView) {
  if (!existingView || !existingView.name) {
    throw new HTTPError(`View ${view.id} not found in table ${tableId}`, 404)
  }

  console.log("set to", view)
  delete views[existingView.name]
  views[view.name] = view
  await db.put(ds)

@@ -51,11 +51,10 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
  const existingView = Object.values(table.views).find(
    v => isV2(v) && v.id === view.id
  )
  if (!existingView) {
  if (!existingView || !existingView.name) {
    throw new HTTPError(`View ${view.id} not found in table ${tableId}`, 404)
  }

  console.log("set to", view)
  delete table.views[existingView.name]
  table.views[view.name] = view
  await db.put(table)

@@ -20,6 +20,30 @@ const allowDisplayColumnByType: Record<FieldType, boolean> = {
  [FieldType.BB_REFERENCE]: false,
}

const allowSortColumnByType: Record<FieldType, boolean> = {
  [FieldType.STRING]: true,
  [FieldType.LONGFORM]: true,
  [FieldType.OPTIONS]: true,
  [FieldType.NUMBER]: true,
  [FieldType.DATETIME]: true,
  [FieldType.AUTO]: true,
  [FieldType.INTERNAL]: true,
  [FieldType.BARCODEQR]: true,
  [FieldType.BIGINT]: true,
  [FieldType.BOOLEAN]: true,
  [FieldType.JSON]: true,

  [FieldType.FORMULA]: false,
  [FieldType.ATTACHMENT]: false,
  [FieldType.ARRAY]: false,
  [FieldType.LINK]: false,
  [FieldType.BB_REFERENCE]: false,
}

export function canBeDisplayColumn(type: FieldType): boolean {
  return !!allowDisplayColumnByType[type]
}

export function canBeSortColumn(type: FieldType): boolean {
  return !!allowSortColumnByType[type]
}

@@ -28,7 +28,7 @@
    "@budibase/handlebars-helpers": "^0.11.9",
    "dayjs": "^1.10.8",
    "handlebars": "^4.7.6",
    "lodash": "^4.17.20",
    "lodash": "4.17.21",
    "vm2": "^3.9.19"
  },
  "devDependencies": {

@@ -55,6 +55,7 @@ export interface SearchUsersRequest {
  bookmark?: string
  query?: SearchQuery
  appId?: string
  limit?: number
  paginate?: boolean
}

@@ -2,7 +2,7 @@ import { SearchFilter, SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table"

export interface View {
  name: string
  name?: string
  tableId: string
  field?: string
  filters: ViewFilter[]

@@ -0,0 +1,53 @@
FROM node:18-alpine

LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-update="scripts/watchtower-hooks/post-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-check="scripts/watchtower-hooks/post-check.sh"


# handle node-gyp
RUN apk add --no-cache --virtual .gyp python3 make g++ jq
RUN yarn global add pm2

WORKDIR /

COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh


WORKDIR /string-templates
COPY packages/string-templates/package.json package.json
RUN ../scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true
COPY packages/string-templates .


WORKDIR /app
COPY packages/worker/package.json .
COPY packages/worker/dist/yarn.lock .
RUN cd ../string-templates && yarn link && cd - && yarn link @budibase/string-templates

RUN ../scripts/removeWorkspaceDependencies.sh package.json

RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true
# Remove unneeded data from file system to reduce image size
RUN apk del .gyp \
  && yarn cache clean

COPY packages/worker/dist/ dist/
COPY packages/worker/docker_run.sh .

EXPOSE 4001

# have to add node environment production after install
# due to this causing yarn to stop installing dev dependencies
# which are actually needed to get this environment up and running
ENV NODE_ENV=production
ENV CLUSTER_MODE=${CLUSTER_MODE}
ENV SERVICE=worker-service
ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
ENV ACCOUNT_PORTAL_URL=https://account.budibase.app

CMD ["./docker_run.sh"]

@@ -48,6 +48,7 @@
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
    "dd-trace": "3.13.2",
    "dotenv": "8.6.0",
    "global-agent": "3.0.0",

@@ -61,6 +62,7 @@
    "koa-session": "5.13.1",
    "koa-static": "5.0.0",
    "koa-useragent": "^4.1.0",
    "lodash": "4.17.21",
    "node-fetch": "2.6.7",
    "nodemailer": "6.7.2",
    "passport-google-oauth": "2.0.0",

@@ -77,14 +79,13 @@
    "@types/jsonwebtoken": "8.5.1",
    "@types/koa": "2.13.4",
    "@types/koa__router": "8.0.8",
    "@types/lodash": "^4.14.191",
    "@types/lodash": "4.14.200",
    "@types/node": "18.17.0",
    "@types/node-fetch": "2.6.4",
    "@types/server-destroy": "1.0.1",
    "@types/supertest": "2.0.12",
    "@types/uuid": "8.3.4",
    "jest": "29.6.2",
    "lodash": "4.17.21",
    "nodemon": "2.0.15",
    "rimraf": "3.0.2",
    "supertest": "6.2.2",

@@ -189,7 +189,10 @@ export const destroy = async (ctx: any) => {

export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
  const body = ctx.request.body
  const users = await userSdk.db.getUsersByAppAccess(body?.appId)
  const users = await userSdk.db.getUsersByAppAccess({
    appId: body.appId,
    limit: body.limit,
  })

  ctx.body = { data: users }
}

@@ -569,9 +569,13 @@ describe("/api/global/users", () => {
        {
          query: { equal: { firstName: user.firstName } },
        },
        501
        { status: 501 }
      )
    })

    it("should throw an error if public query performed", async () => {
      await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
    })
  })

  describe("DELETE /api/global/users/:userId", () => {

@@ -72,7 +72,8 @@ router
  )

  .get("/api/global/users", auth.builderOrAdmin, controller.fetch)
  .post("/api/global/users/search", auth.builderOrAdmin, controller.search)
  // search can be used by any user now, to retrieve users for user column
  .post("/api/global/users/search", controller.search)
  .delete("/api/global/users/:id", auth.adminOnly, controller.destroy)
  .get(
    "/api/global/users/count/:appId",

@@ -134,13 +134,19 @@ export class UserAPI extends TestAPI {
      .expect(status ? status : 200)
  }

  searchUsers = ({ query }: { query?: SearchQuery }, status = 200) => {
    return this.request
  searchUsers = (
    { query }: { query?: SearchQuery },
    opts?: { status?: number; noHeaders?: boolean }
  ) => {
    const req = this.request
      .post("/api/global/users/search")
      .set(this.config.defaultHeaders())
      .send({ query })
      .expect("Content-Type", /json/)
      .expect(status ? status : 200)
      .expect(opts?.status ? opts.status : 200)
    if (!opts?.noHeaders) {
      req.set(this.config.defaultHeaders())
    }
    return req
  }

  getUser = (userId: string, opts?: TestAPIOpts) => {

@@ -1,31 +1,16 @@
#!/bin/bash
#!/bin/sh

packages_to_remove=(
  @budibase/backend-core
  @budibase/bbui
  @budibase/builder
  @budibase/cli
  @budibase/client
  @budibase/frontend-core
  @budibase/pro
  @budibase/sdk
  @budibase/server
  @budibase/shared-core
  # We cannot remove string-templates yet because it cannot be bundled by esbuild as a dependency
  @budibase/string-templates
  @budibase/types
  @budibase/worker
)
packages_to_remove="@budibase/backend-core @budibase/bbui @budibase/builder @budibase/cli @budibase/client @budibase/frontend-core @budibase/pro @budibase/sdk @budibase/server @budibase/shared-core @budibase/string-templates @budibase/types @budibase/worker"


root_package_json=$(cat "package.json")
package_json_path="$1"
package_json=$(cat "$package_json_path")

process_package() {
  local pkg="$1"
  local package_json=$(cat "$pkg/package.json")
  local has_changes=false
  pkg_path="$1"
  package_json=$(cat "$pkg_path")
  has_changes=false

  for package_name in "${packages_to_remove[@]}"; do
  for package_name in $packages_to_remove; do
    if echo "$package_json" | jq -e --arg package_name "$package_name" '.dependencies | has($package_name)' > /dev/null; then
      package_json=$(echo "$package_json" | jq "del(.dependencies[\"$package_name\"])")
      has_changes=true

@@ -33,20 +18,11 @@ process_package() {
  done

  if [ "$has_changes" = true ]; then
    echo "$package_json" > "$1/package.json"
    echo "$package_json" > "$pkg_path"
  fi
}

process_package "$package_json_path"

for pkg in $(echo "$root_package_json" | jq -r '.workspaces.packages[]' ); do
  if [[ "$pkg" == *"*"* ]]; then
    # Use find to iterate through immediate subdirectories
    find "$pkg" -maxdepth 1 -type d -print | while read -r workspace_package; do
      process_package "$workspace_package"
    done
  else
    process_package "$pkg"
  fi
done

echo "$root_package_json" | jq "del(.resolutions)" > "package.json"
package_json=$(cat "$package_json_path")
echo "$package_json" | jq "del(.resolutions)" > "$1"

yarn.lock

@@ -5595,15 +5595,10 @@
  resolved "https://registry.yarnpkg.com/@types/linkify-it/-/linkify-it-3.0.3.tgz#15a0712296c5041733c79efe233ba17ae5a7587b"
  integrity sha512-pTjcqY9E4nOI55Wgpz7eiI8+LzdYnw3qxXCfHyBDdPbYvbyLgWLJGh8EdPvqawwMK1Uo1794AUkkR38Fr0g+2g==

"@types/lodash@4.14.180":
  version "4.14.180"
  resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.180.tgz#4ab7c9ddfc92ec4a887886483bc14c79fb380670"
  integrity sha512-XOKXa1KIxtNXgASAnwj7cnttJxS4fksBRywK/9LzRV5YxrF80BXZIGeQSuoESQ/VkUj30Ae0+YcuHc15wJCB2g==

"@types/lodash@^4.14.191":
  version "4.14.192"
  resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.192.tgz#5790406361a2852d332d41635d927f1600811285"
  integrity sha512-km+Vyn3BYm5ytMO13k9KTp27O75rbQ0NFw+U//g+PX7VZyjCioXaRFisqSIJRECljcTv73G3i6BpglNGHgUQ5A==
"@types/lodash@4.14.200":
  version "4.14.200"
  resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.200.tgz#435b6035c7eba9cdf1e039af8212c9e9281e7149"
  integrity sha512-YI/M/4HRImtNf3pJgbF+W6FrXovqj+T+/HpENLTooK9PnkacBsDpeP3IpHab40CClUfhNmdM2WTNP2sa2dni5Q==

"@types/long@^4.0.0", "@types/long@^4.0.1":
  version "4.0.2"

@@ -13835,10 +13830,10 @@ jsonschema@1.4.0:
  resolved "https://registry.yarnpkg.com/jsonschema/-/jsonschema-1.4.0.tgz#1afa34c4bc22190d8e42271ec17ac8b3404f87b2"
  integrity sha512-/YgW6pRMr6M7C+4o8kS+B/2myEpHCrxO4PEWnqJNBFMjn7EWXqlQ4tGwL6xTHeRplwuZmcAncdvfOad1nT2yMw==

jsonwebtoken@8.5.1:
  version "8.5.1"
  resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d"
  integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==
jsonwebtoken@9.0.2:
  version "9.0.2"
  resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
  integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==
  dependencies:
    jws "^3.2.2"
    lodash.includes "^4.3.0"

@@ -13849,9 +13844,9 @@ jsonwebtoken@8.5.1:
    lodash.isstring "^4.0.1"
    lodash.once "^4.0.0"
    ms "^2.1.1"
    semver "^5.6.0"
    semver "^7.5.4"

jsonwebtoken@9.0.0, jsonwebtoken@^9.0.0:
jsonwebtoken@^9.0.0:
  version "9.0.0"
  resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz#d0faf9ba1cc3a56255fe49c0961a67e520c1926d"
  integrity sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==

@@ -14799,7 +14794,7 @@ lodash.xor@^4.5.0:
  resolved "https://registry.yarnpkg.com/lodash.xor/-/lodash.xor-4.5.0.tgz#4d48ed7e98095b0632582ba714d3ff8ae8fb1db6"
  integrity sha512-sVN2zimthq7aZ5sPGXnSz32rZPuqcparVW50chJQe+mzTYV+IsxSsl/2gnkWWE2Of7K3myBQBqtLKOUEHJKRsQ==

lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0:
lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0:
  version "4.17.21"
  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
  integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==