Merge branch 'master' into master

Commit f1dde44384
@@ -17,6 +17,6 @@ jobs:
 token: ${{ secrets.GH_ACCESS_TOKEN }}
 client-payload: |-
 {
-"PAYLOAD_VERSION": "${{ github.sha }}",
+"VERSION": "${{ github.sha }}",
 "REF_NAME": "${{ github.ref_name}}"
 }

@@ -169,10 +169,10 @@ jobs:
 - uses: peter-evans/repository-dispatch@v2
 with:
 repository: budibase/budibase-deploys
-event: budicloud-qa-deploy
-github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
+event-type: budicloud-qa-deploy
+token: ${{ secrets.GH_ACCESS_TOKEN }}
 client-payload: |-
 {
-"PAYLOAD_VERSION": "${{ github.ref_name }}",
+"VERSION": "${{ github.ref_name }}",
 "REF_NAME": "${{ github.ref_name}}"
 }

@@ -66,7 +66,7 @@ jobs:
 context: .
 push: true
 platforms: linux/amd64,linux/arm64
-build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION
+build-args: BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
 tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
 file: ./hosting/single/Dockerfile.v2
 env:
@@ -79,7 +79,7 @@ jobs:
 platforms: linux/amd64
 build-args: |
 TARGETBUILD=aas
-BUDIBASE_VERSION=$BUDIBASE_VERSION
+BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
 tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
 file: ./hosting/single/Dockerfile.v2
 env:

@@ -120,7 +120,7 @@ VOLUME /data
 ARG BUDIBASE_VERSION
 # Ensuring the version argument is sent
 RUN test -n "$BUDIBASE_VERSION"
-ENV BUDIBASE_VERSION $BUDIBASE_VERSION
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION

 HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"


@@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio
 chown -R couchdb:couchdb ${DATA_DIR}/couch
 redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
 /bbcouch-runner.sh &
-minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
+/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
 # Add monthly cron job to renew certbot certificate

@@ -1,5 +1,5 @@
 {
-"version": "2.12.5",
+"version": "2.12.11",
 "npmClient": "yarn",
 "packages": [
 "packages/*"

@@ -30,7 +30,6 @@ export * as timers from "./timers"
 export { default as env } from "./environment"
 export * as blacklist from "./blacklist"
 export * as docUpdates from "./docUpdates"
-export * from "./utils/Duration"
 export { SearchParams } from "./db"
 // Add context to tenancy for backwards compatibility
 // only do this for external usages to prevent internal

@@ -18,8 +18,12 @@ export const ObjectStoreBuckets = {
 }

 const bbTmp = join(tmpdir(), ".budibase")
-if (!fs.existsSync(bbTmp)) {
+try {
 fs.mkdirSync(bbTmp)
+} catch (e: any) {
+if (e.code !== "EEXIST") {
+throw e
+}
 }

 export function budibaseTempDir() {
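For reference, a minimal standalone sketch of the idempotent-mkdir pattern used in the hunk above; the ensureDir helper name is illustrative and not part of the change:

import fs from "fs"
import { join } from "path"
import { tmpdir } from "os"

// Create a directory if it doesn't already exist, tolerating the race where
// another process creates it first (EEXIST), as the updated code does above.
function ensureDir(dir: string): string {
  try {
    fs.mkdirSync(dir)
  } catch (e: any) {
    if (e.code !== "EEXIST") {
      throw e
    }
  }
  return dir
}

const bbTmp = ensureDir(join(tmpdir(), ".budibase"))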
@@ -36,7 +36,7 @@ class InMemoryQueue {
 * @param opts This is not used by the in memory queue as there is no real use
 * case when in memory, but is the same API as Bull
 */
-constructor(name: string, opts?: any) {
+constructor(name: string, opts = null) {
 this._name = name
 this._opts = opts
 this._messages = []

@@ -2,18 +2,11 @@ import env from "../environment"
 import { getRedisOptions } from "../redis/utils"
 import { JobQueue } from "./constants"
 import InMemoryQueue from "./inMemoryQueue"
-import BullQueue, { QueueOptions } from "bull"
+import BullQueue from "bull"
 import { addListeners, StalledFn } from "./listeners"
-import { Duration } from "../utils"
 import * as timers from "../timers"
-import * as Redis from "ioredis"

-// the queue lock is held for 5 minutes
-const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
-// queue lock is refreshed every 30 seconds
-const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
-// cleanup the queue every 60 seconds
-const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
+const CLEANUP_PERIOD_MS = 60 * 1000
 let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
 let cleanupInterval: NodeJS.Timeout

@@ -28,14 +21,7 @@ export function createQueue<T>(
 opts: { removeStalledCb?: StalledFn } = {}
 ): BullQueue.Queue<T> {
 const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
-const queueConfig: QueueOptions = {
-redis: redisProtocolUrl! || (redisOpts as Redis.RedisOptions),
-settings: {
-maxStalledCount: 0,
-lockDuration: QUEUE_LOCK_MS,
-lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
-},
-}
+const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
 let queue: any
 if (!env.isTest()) {
 queue = new BullQueue(jobQueue, queueConfig)
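For context, Bull accepts either a Redis connection URL string or an options object as its second argument, which is why queueConfig above can take either shape; a minimal sketch with illustrative connection details:

import BullQueue from "bull"

// Form 1: a Redis protocol URL (host, port and password are made up)
const byUrl = new BullQueue("example-jobs", "redis://:secret@localhost:6379")

// Form 2: an options object carrying the Redis settings
const byOptions = new BullQueue("example-jobs", {
  redis: { host: "localhost", port: 6379, password: "secret" },
})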
@@ -1,49 +0,0 @@
-export enum DurationType {
-MILLISECONDS = "milliseconds",
-SECONDS = "seconds",
-MINUTES = "minutes",
-HOURS = "hours",
-DAYS = "days",
-}
-
-const conversion: Record<DurationType, number> = {
-milliseconds: 1,
-seconds: 1000,
-minutes: 60 * 1000,
-hours: 60 * 60 * 1000,
-days: 24 * 60 * 60 * 1000,
-}
-
-export class Duration {
-static convert(from: DurationType, to: DurationType, duration: number) {
-const milliseconds = duration * conversion[from]
-return milliseconds / conversion[to]
-}
-
-static from(from: DurationType, duration: number) {
-return {
-to: (to: DurationType) => {
-return Duration.convert(from, to, duration)
-},
-toMs: () => {
-return Duration.convert(from, DurationType.MILLISECONDS, duration)
-},
-}
-}
-
-static fromSeconds(duration: number) {
-return Duration.from(DurationType.SECONDS, duration)
-}
-
-static fromMinutes(duration: number) {
-return Duration.from(DurationType.MINUTES, duration)
-}
-
-static fromHours(duration: number) {
-return Duration.from(DurationType.HOURS, duration)
-}
-
-static fromDays(duration: number) {
-return Duration.from(DurationType.DAYS, duration)
-}
-}
@@ -1,4 +1,3 @@
 export * from "./hashing"
 export * from "./utils"
 export * from "./stringUtils"
-export * from "./Duration"

@@ -1,19 +0,0 @@
-import { Duration, DurationType } from "../Duration"
-
-describe("duration", () => {
-it("should convert minutes to milliseconds", () => {
-expect(Duration.fromMinutes(5).toMs()).toBe(300000)
-})
-
-it("should convert seconds to milliseconds", () => {
-expect(Duration.fromSeconds(30).toMs()).toBe(30000)
-})
-
-it("should convert days to milliseconds", () => {
-expect(Duration.fromDays(1).toMs()).toBe(86400000)
-})
-
-it("should convert minutes to days", () => {
-expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
-})
-})

@@ -30,15 +30,15 @@
 part2: PrettyRelationshipDefinitions.MANY,
 },
 [RelationshipType.MANY_TO_ONE]: {
-part1: PrettyRelationshipDefinitions.ONE,
-part2: PrettyRelationshipDefinitions.MANY,
+part1: PrettyRelationshipDefinitions.MANY,
+part2: PrettyRelationshipDefinitions.ONE,
 },
 }
 let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
 let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)

-let relationshipPart1 = PrettyRelationshipDefinitions.MANY
-let relationshipPart2 = PrettyRelationshipDefinitions.ONE
+let relationshipPart1 = PrettyRelationshipDefinitions.ONE
+let relationshipPart2 = PrettyRelationshipDefinitions.MANY

 let originalFromColumnName = toRelationship.name,
 originalToColumnName = fromRelationship.name

@@ -1,5 +1,5 @@
 import { Checkbox, Select, RadioGroup, Stepper, Input } from "@budibase/bbui"
-import DataSourceSelect from "./controls/DataSourceSelect.svelte"
+import DataSourceSelect from "./controls/DataSourceSelect/DataSourceSelect.svelte"
 import S3DataSourceSelect from "./controls/S3DataSourceSelect.svelte"
 import DataProviderSelect from "./controls/DataProviderSelect.svelte"
 import ButtonActionEditor from "./controls/ButtonActionEditor/ButtonActionEditor.svelte"
@@ -0,0 +1,55 @@
+<script>
+import { Divider, Heading } from "@budibase/bbui"
+
+export let dividerState
+export let heading
+export let dataSet
+export let value
+export let onSelect
+</script>
+
+{#if dividerState}
+<Divider />
+{/if}
+{#if heading}
+<div class="title">
+<Heading size="XS">{heading}</Heading>
+</div>
+{/if}
+<ul class="spectrum-Menu" role="listbox">
+{#each dataSet as data}
+<li
+class="spectrum-Menu-item"
+class:is-selected={value?.label === data.label &&
+value?.type === data.type}
+role="option"
+aria-selected="true"
+tabindex="0"
+on:click={() => onSelect(data)}
+>
+<span class="spectrum-Menu-itemLabel">
+{data.label}
+</span>
+<svg
+class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
+focusable="false"
+aria-hidden="true"
+>
+<use xlink:href="#spectrum-css-icon-Checkmark100" />
+</svg>
+</li>
+{/each}
+</ul>
+
+<style>
+.title {
+padding: 0 var(--spacing-m) var(--spacing-s) var(--spacing-m);
+}
+
+ul {
+list-style: none;
+padding-left: 0px;
+margin: 0px;
+width: 100%;
+}
+</style>
@@ -7,10 +7,8 @@
 import {
 Button,
 Popover,
-Divider,
 Select,
 Layout,
-Heading,
 Drawer,
 DrawerContent,
 Icon,
@@ -32,6 +30,7 @@
 import IntegrationQueryEditor from "components/integration/index.svelte"
 import { makePropSafe as safe } from "@budibase/string-templates"
 import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
+import DataSourceCategory from "components/design/settings/controls/DataSourceSelect/DataSourceCategory.svelte"
 import { API } from "api"

 export let value = {}
@@ -279,102 +278,81 @@
 </div>
 <Popover bind:this={dropdownRight} anchor={anchorRight}>
 <div class="dropdown">
-<div class="title">
-<Heading size="XS">Tables</Heading>
-</div>
-<ul>
-{#each tables as table}
-<li on:click={() => handleSelected(table)}>{table.label}</li>
-{/each}
-</ul>
+<DataSourceCategory
+heading="Tables"
+dataSet={tables}
+{value}
+onSelect={handleSelected}
+/>
 {#if views?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">Views</Heading>
-</div>
-<ul>
-{#each views as view}
-<li on:click={() => handleSelected(view)}>{view.label}</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="Views"
+dataSet={views}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
 {#if queries?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">Queries</Heading>
-</div>
-<ul>
-{#each queries as query}
-<li
-class:selected={value === query}
-on:click={() => handleSelected(query)}
->
-{query.label}
-</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="Queries"
+dataSet={queries}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
 {#if links?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">Relationships</Heading>
-</div>
-<ul>
-{#each links as link}
-<li on:click={() => handleSelected(link)}>{link.label}</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="Links"
+dataSet={links}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
 {#if fields?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">Fields</Heading>
-</div>
-<ul>
-{#each fields as field}
-<li on:click={() => handleSelected(field)}>{field.label}</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="Fields"
+dataSet={fields}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
 {#if jsonArrays?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">JSON Arrays</Heading>
-</div>
-<ul>
-{#each jsonArrays as field}
-<li on:click={() => handleSelected(field)}>{field.label}</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="JSON Arrays"
+dataSet={jsonArrays}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
 {#if showDataProviders && dataProviders?.length}
-<Divider />
-<div class="title">
-<Heading size="XS">Data Providers</Heading>
-</div>
-<ul>
-{#each dataProviders as provider}
-<li
-class:selected={value === provider}
-on:click={() => handleSelected(provider)}
->
-{provider.label}
-</li>
-{/each}
-</ul>
+<DataSourceCategory
+dividerState={true}
+heading="Data Providers"
+dataSet={dataProviders}
+{value}
+onSelect={handleSelected}
+/>
+{/if}
+<DataSourceCategory
+dividerState={true}
+heading="Other"
+dataSet={[custom]}
+{value}
+onSelect={handleSelected}
+/>
+{#if otherSources?.length}
+<DataSourceCategory
+dividerState={false}
+dataSet={otherSources}
+{value}
+onSelect={handleSelected}
+/>
 {/if}
-<Divider />
-<div class="title">
-<Heading size="XS">Other</Heading>
-</div>
-<ul>
-<li on:click={() => handleSelected(custom)}>{custom.label}</li>
-{#if otherSources?.length}
-{#each otherSources as source}
-<li on:click={() => handleSelected(source)}>{source.label}</li>
-{/each}
-{/if}
-</ul>
 </div>
 </Popover>

@@ -398,31 +376,6 @@
 .dropdown {
 padding: var(--spacing-m) 0;
 z-index: 99999999;
-overflow-y: scroll;
-}
-.title {
-padding: 0 var(--spacing-m) var(--spacing-s) var(--spacing-m);
-}
-
-ul {
-list-style: none;
-padding-left: 0px;
-margin: 0px;
-}
-
-li {
-cursor: pointer;
-margin: 0px;
-padding: var(--spacing-s) var(--spacing-m);
-font-size: var(--font-size-m);
-}
-
-.selected {
-color: var(--spectrum-global-color-blue-600);
-}
-
-li:hover {
-background-color: var(--spectrum-global-color-gray-200);
-}
 }

 .icon {
@@ -1,5 +1,5 @@
 <script>
-import DataSourceSelect from "./DataSourceSelect.svelte"
+import DataSourceSelect from "./DataSourceSelect/DataSourceSelect.svelte"

 const otherSources = [{ name: "Custom", label: "Custom" }]
 </script>

@@ -1,27 +1,10 @@
-<script context="module">
-// We can create a module level cache for all relationship cells to avoid
-// having to fetch the table definition one time for each cell
-let primaryDisplayCache = {}
-
-const getPrimaryDisplayForTableId = async (API, tableId) => {
-if (primaryDisplayCache[tableId]) {
-return primaryDisplayCache[tableId]
-}
-const definition = await API.fetchTableDefinition(tableId)
-const primaryDisplay =
-definition?.primaryDisplay || definition?.schema?.[0]?.name
-primaryDisplayCache[tableId] = primaryDisplay
-return primaryDisplay
-}
-</script>
-
 <script>
 import { getColor } from "../lib/utils"
 import { onMount, getContext } from "svelte"
 import { Icon, Input, ProgressCircle, clickOutside } from "@budibase/bbui"
 import { debounce } from "../../../utils/utils"

-const { API, dispatch } = getContext("grid")
+const { API, dispatch, cache } = getContext("grid")

 export let value
 export let api
@@ -147,7 +130,9 @@
 // Find the primary display for the related table
 if (!primaryDisplay) {
 searching = true
-primaryDisplay = await getPrimaryDisplayForTableId(API, schema.tableId)
+primaryDisplay = await cache.actions.getPrimaryDisplayForTableId(
+schema.tableId
+)
 }

 // Show initial list of results
@@ -195,7 +180,7 @@
 const toggleRow = async row => {
 if (value?.some(x => x._id === row._id)) {
 // If the row is already included, remove it and update the candidate
-// row to be the the same position if possible
+// row to be the same position if possible
 if (oneRowOnly) {
 await onChange([])
 } else {
@@ -260,31 +245,29 @@
 class:wrap={editable || contentLines > 1}
 on:wheel={e => (focused ? e.stopPropagation() : null)}
 >
-{#if Array.isArray(value) && value.length}
-{#each value as relationship}
+{#each value || [] as relationship}
 {#if relationship[primaryDisplay] || relationship.primaryDisplay}
 <div class="badge">
 <span
 on:click={editable
 ? () => showRelationship(relationship._id)
 : null}
 >
 {readable(
 relationship[primaryDisplay] || relationship.primaryDisplay
 )}
 </span>
 {#if editable}
 <Icon
 name="Close"
 size="XS"
 hoverable
 on:click={() => toggleRow(relationship)}
 />
 {/if}
 </div>
 {/if}
 {/each}
-{/if}
 {#if editable}
 <div class="add" on:click={open}>
 <Icon name="Add" size="S" />
@@ -320,7 +303,7 @@
 <div class="searching">
 <ProgressCircle size="S" />
 </div>
-{:else if Array.isArray(searchResults) && searchResults.length}
+{:else if searchResults?.length}
 <div class="results">
 {#each searchResults as row, idx}
 <div
@@ -0,0 +1,47 @@
+export const createActions = context => {
+const { API } = context
+
+// Cache for the primary display columns of different tables.
+// If we ever need to cache table definitions for other purposes then we can
+// expand this to be a more generic cache.
+let primaryDisplayCache = {}
+
+const resetPrimaryDisplayCache = () => {
+primaryDisplayCache = {}
+}
+
+const getPrimaryDisplayForTableId = async tableId => {
+// If we've never encountered this tableId before then store a promise that
+// resolves to the primary display so that subsequent invocations before the
+// promise completes can reuse this promise
+if (!primaryDisplayCache[tableId]) {
+primaryDisplayCache[tableId] = new Promise(resolve => {
+API.fetchTableDefinition(tableId).then(def => {
+const display = def?.primaryDisplay || def?.schema?.[0]?.name
+primaryDisplayCache[tableId] = display
+resolve(display)
+})
+})
+}
+
+// We await the result so that we account for both promises and primitives
+return await primaryDisplayCache[tableId]
+}
+
+return {
+cache: {
+actions: {
+getPrimaryDisplayForTableId,
+resetPrimaryDisplayCache,
+},
+},
+}
+}
+
+export const initialise = context => {
+const { datasource, cache } = context
+
+// Wipe the caches whenever the datasource changes to ensure we aren't
+// storing any stale information
+datasource.subscribe(cache.actions.resetPrimaryDisplayCache)
+}
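For illustration, a minimal sketch of why caching the promise itself deduplicates concurrent lookups; the fakeFetch helper below is invented as a stand-in for the real table-definition call:

const cache: Record<string, Promise<string> | string> = {}

// Pretend async lookup, standing in for API.fetchTableDefinition (illustrative)
const fakeFetch = (id: string): Promise<string> =>
  new Promise(resolve => setTimeout(() => resolve(`display-for-${id}`), 50))

async function getPrimaryDisplay(id: string): Promise<string> {
  if (!cache[id]) {
    // Store the in-flight promise so concurrent callers share one lookup
    cache[id] = fakeFetch(id).then(display => {
      cache[id] = display
      return display
    })
  }
  // Awaiting handles both the pending promise and the resolved primitive
  return await cache[id]
}

// Two concurrent calls trigger only one fakeFetch
Promise.all([getPrimaryDisplay("t1"), getPrimaryDisplay("t1")]).then(result =>
  console.log(result)
)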
@@ -160,11 +160,6 @@ export const createActions = context => {
 return getAPI()?.actions.canUseColumn(name)
 }

-// Gets the default number of rows for a single page
-const getFeatures = () => {
-return getAPI()?.actions.getFeatures()
-}
-
 return {
 datasource: {
 ...datasource,
@@ -177,7 +172,6 @@ export const createActions = context => {
 getRow,
 isDatasourceValid,
 canUseColumn,
-getFeatures,
 },
 },
 }

@@ -35,11 +35,6 @@ export const createActions = context => {
 return $columns.some(col => col.name === name) || $sticky?.name === name
 }

-const getFeatures = () => {
-// We don't support any features
-return {}
-}
-
 return {
 nonPlus: {
 actions: {
@@ -50,7 +45,6 @@ export const createActions = context => {
 getRow,
 isDatasourceValid,
 canUseColumn,
-getFeatures,
 },
 },
 }

@@ -1,5 +1,4 @@
 import { get } from "svelte/store"
-import TableFetch from "../../../../fetch/TableFetch"

 const SuppressErrors = true

@@ -46,10 +45,6 @@ export const createActions = context => {
 return $columns.some(col => col.name === name) || $sticky?.name === name
 }

-const getFeatures = () => {
-return new TableFetch({ API }).determineFeatureFlags()
-}
-
 return {
 table: {
 actions: {
@@ -60,7 +55,6 @@ export const createActions = context => {
 getRow,
 isDatasourceValid,
 canUseColumn,
-getFeatures,
 },
 },
 }

@@ -1,5 +1,4 @@
 import { get } from "svelte/store"
-import ViewV2Fetch from "../../../../fetch/ViewV2Fetch"

 const SuppressErrors = true

@@ -46,10 +45,6 @@ export const createActions = context => {
 )
 }

-const getFeatures = () => {
-return new ViewV2Fetch({ API }).determineFeatureFlags()
-}
-
 return {
 viewV2: {
 actions: {
@@ -60,7 +55,6 @@ export const createActions = context => {
 getRow,
 isDatasourceValid,
 canUseColumn,
-getFeatures,
 },
 },
 }
@@ -19,6 +19,7 @@ import * as Datasource from "./datasource"
 import * as Table from "./datasources/table"
 import * as ViewV2 from "./datasources/viewV2"
 import * as NonPlus from "./datasources/nonPlus"
+import * as Cache from "./cache"

 const DependencyOrderedStores = [
 Sort,
@@ -42,6 +43,7 @@ const DependencyOrderedStores = [
 Clipboard,
 Config,
 Notifications,
+Cache,
 ]

 export const attachStores = context => {

@@ -114,10 +114,6 @@ export const createActions = context => {
 const $allFilters = get(allFilters)
 const $sort = get(sort)

-// Determine how many rows to fetch per page
-const features = datasource.actions.getFeatures()
-const limit = features?.supportsPagination ? RowPageSize : null
-
 // Create new fetch model
 const newFetch = fetchData({
 API,
@@ -126,8 +122,12 @@
 filter: $allFilters,
 sortColumn: $sort.column,
 sortOrder: $sort.order,
-limit,
+limit: RowPageSize,
 paginate: true,
+
+// Disable client side limiting, so that for queries and custom data
+// sources we don't impose fake row limits. We want all the data.
+clientSideLimiting: false,
 },
 })

@@ -43,6 +43,11 @@ export default class DataFetch {

 // Pagination config
 paginate: true,
+
+// Client side feature customisation
+clientSideSearching: true,
+clientSideSorting: true,
+clientSideLimiting: true,
 }

 // State of the fetch
@@ -208,24 +213,32 @@
 * Fetches some filtered, sorted and paginated data
 */
 async getPage() {
-const { sortColumn, sortOrder, sortType, limit } = this.options
+const {
+sortColumn,
+sortOrder,
+sortType,
+limit,
+clientSideSearching,
+clientSideSorting,
+clientSideLimiting,
+} = this.options
 const { query } = get(this.store)

 // Get the actual data
 let { rows, info, hasNextPage, cursor, error } = await this.getData()

 // If we don't support searching, do a client search
-if (!this.features.supportsSearch) {
+if (!this.features.supportsSearch && clientSideSearching) {
 rows = runLuceneQuery(rows, query)
 }

 // If we don't support sorting, do a client-side sort
-if (!this.features.supportsSort) {
+if (!this.features.supportsSort && clientSideSorting) {
 rows = luceneSort(rows, sortColumn, sortOrder, sortType)
 }

 // If we don't support pagination, do a client-side limit
-if (!this.features.supportsPagination) {
+if (!this.features.supportsPagination && clientSideLimiting) {
 rows = luceneLimit(rows, limit)
 }

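To illustrate the new flags, a hypothetical caller could opt out of the client-side fallbacks individually; the datasource value and exact call shape below are invented for the example, only the option names come from the hunks above:

const fetch = fetchData({
  API,
  datasource: { type: "custom", id: "example" }, // illustrative datasource
  options: {
    limit: 100,
    paginate: true,
    // When the backing datasource cannot search/sort/paginate server-side,
    // these flags decide whether DataFetch falls back to doing it in the browser.
    clientSideSearching: true,
    clientSideSorting: true,
    clientSideLimiting: false, // keep every row, as the grid's row store now does
  },
})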
@@ -49,7 +49,12 @@ describe.each([
 let table: Table
 let tableId: string

-afterAll(setup.afterAll)
+afterAll(async () => {
+if (dsProvider) {
+await dsProvider.stopContainer()
+}
+setup.afterAll()
+})

 beforeAll(async () => {
 await config.init()
@@ -521,20 +526,17 @@
 const rowUsage = await getRowUsage()
 const queryUsage = await getQueryUsage()

-const res = await config.api.row.patch(table._id!, {
+const row = await config.api.row.patch(table._id!, {
 _id: existing._id!,
 _rev: existing._rev!,
 tableId: table._id!,
 name: "Updated Name",
 })

-expect((res as any).res.statusMessage).toEqual(
-`${table.name} updated successfully.`
-)
-expect(res.body.name).toEqual("Updated Name")
-expect(res.body.description).toEqual(existing.description)
+expect(row.name).toEqual("Updated Name")
+expect(row.description).toEqual(existing.description)

-const savedRow = await loadRow(res.body._id, table._id!)
+const savedRow = await loadRow(row._id!, table._id!)

 expect(savedRow.body.description).toEqual(existing.description)
 expect(savedRow.body.name).toEqual("Updated Name")

@@ -492,6 +492,67 @@ describe("/tables", () => {
 }
 })

+it("should succeed when the row is created from the other side of the relationship", async () => {
+// We found a bug just after releasing this feature where if the row was created from the
+// users table, not the table linking to it, the migration would succeed but lose the data.
+// This happened because the order of the documents in the link was reversed.
+const table = await config.api.table.create({
+name: "table",
+type: "table",
+sourceId: INTERNAL_TABLE_SOURCE_ID,
+sourceType: TableSourceType.INTERNAL,
+schema: {
+"user relationship": {
+type: FieldType.LINK,
+fieldName: "test",
+name: "user relationship",
+constraints: {
+type: "array",
+presence: false,
+},
+relationshipType: RelationshipType.MANY_TO_ONE,
+tableId: InternalTable.USER_METADATA,
+},
+},
+})
+
+let testRow = await config.api.row.save(table._id!, {})
+
+await Promise.all(
+users.map(u =>
+config.api.row.patch(InternalTable.USER_METADATA, {
+tableId: InternalTable.USER_METADATA,
+_rev: u._rev!,
+_id: u._id!,
+test: [testRow],
+})
+)
+)
+
+await config.api.table.migrate(table._id!, {
+oldColumn: table.schema["user relationship"],
+newColumn: {
+name: "user column",
+type: FieldType.BB_REFERENCE,
+subtype: FieldSubtype.USERS,
+},
+})
+
+const migratedTable = await config.api.table.get(table._id!)
+expect(migratedTable.schema["user column"]).toBeDefined()
+expect(migratedTable.schema["user relationship"]).not.toBeDefined()
+
+const resp = await config.api.row.get(table._id!, testRow._id!)
+const migratedRow = resp.body as Row
+
+expect(migratedRow["user column"]).toBeDefined()
+expect(migratedRow["user relationship"]).not.toBeDefined()
+expect(migratedRow["user column"]).toHaveLength(3)
+expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual(
+expect.arrayContaining(users.map(u => u._id))
+)
+})
+
 it("should successfully migrate a many-to-many user relationship to a users column", async () => {
 const table = await config.api.table.create({
 name: "table",
@@ -7,6 +7,7 @@ import {
 isBBReferenceField,
 isRelationshipField,
 LinkDocument,
+LinkInfo,
 RelationshipFieldMetadata,
 RelationshipType,
 Row,
@@ -125,7 +126,23 @@ abstract class UserColumnMigrator implements ColumnMigrator {
 protected newColumn: BBReferenceFieldMetadata
 ) {}

-abstract updateRow(row: Row, link: LinkDocument): void
+abstract updateRow(row: Row, linkInfo: LinkInfo): void
+
+pickUserTableLinkSide(link: LinkDocument): LinkInfo {
+if (link.doc1.tableId === InternalTable.USER_METADATA) {
+return link.doc1
+} else {
+return link.doc2
+}
+}
+
+pickOtherTableLinkSide(link: LinkDocument): LinkInfo {
+if (link.doc1.tableId === InternalTable.USER_METADATA) {
+return link.doc2
+} else {
+return link.doc1
+}
+}

 async doMigration(): Promise<MigrationResult> {
 let oldTable = cloneDeep(this.table)
@@ -137,15 +154,17 @@ abstract class UserColumnMigrator implements ColumnMigrator {

 let links = await sdk.links.fetchWithDocument(this.table._id!)
 for (let link of links) {
+const userSide = this.pickUserTableLinkSide(link)
+const otherSide = this.pickOtherTableLinkSide(link)
 if (
-link.doc1.tableId !== this.table._id ||
-link.doc1.fieldName !== this.oldColumn.name ||
-link.doc2.tableId !== InternalTable.USER_METADATA
+otherSide.tableId !== this.table._id ||
+otherSide.fieldName !== this.oldColumn.name ||
+userSide.tableId !== InternalTable.USER_METADATA
 ) {
 continue
 }

-let row = rowsById[link.doc1.rowId]
+let row = rowsById[otherSide.rowId]
 if (!row) {
 // This can happen if the row has been deleted but the link hasn't,
 // which was a state that was found during the initial testing of this
@@ -153,7 +172,7 @@ abstract class UserColumnMigrator implements ColumnMigrator {
 continue
 }

-this.updateRow(row, link)
+this.updateRow(row, userSide)
 }

 let db = context.getAppDB()
@@ -175,20 +194,20 @@ abstract class UserColumnMigrator implements ColumnMigrator {
 }

 class SingleUserColumnMigrator extends UserColumnMigrator {
-updateRow(row: Row, link: LinkDocument): void {
+updateRow(row: Row, linkInfo: LinkInfo): void {
 row[this.newColumn.name] = dbCore.getGlobalIDFromUserMetadataID(
-link.doc2.rowId
+linkInfo.rowId
 )
 }
 }

 class MultiUserColumnMigrator extends UserColumnMigrator {
-updateRow(row: Row, link: LinkDocument): void {
+updateRow(row: Row, linkInfo: LinkInfo): void {
 if (!row[this.newColumn.name]) {
 row[this.newColumn.name] = []
 }
 row[this.newColumn.name].push(
-dbCore.getGlobalIDFromUserMetadataID(link.doc2.rowId)
+dbCore.getGlobalIDFromUserMetadataID(linkInfo.rowId)
 )
 }
 }
@@ -55,7 +55,13 @@ export class RowAPI extends TestAPI {
 .send(row)
 .set(this.config.defaultHeaders())
 .expect("Content-Type", /json/)
-.expect(expectStatus)
+if (resp.status !== expectStatus) {
+throw new Error(
+`Expected status ${expectStatus} but got ${
+resp.status
+}, body: ${JSON.stringify(resp.body)}`
+)
+}
 return resp.body as Row
 }

@@ -77,13 +83,20 @@ export class RowAPI extends TestAPI {
 sourceId: string,
 row: PatchRowRequest,
 { expectStatus } = { expectStatus: 200 }
-) => {
-return this.request
+): Promise<Row> => {
+let resp = await this.request
 .patch(`/api/${sourceId}/rows`)
 .send(row)
 .set(this.config.defaultHeaders())
 .expect("Content-Type", /json/)
-.expect(expectStatus)
+if (resp.status !== expectStatus) {
+throw new Error(
+`Expected status ${expectStatus} but got ${
+resp.status
+}, body: ${JSON.stringify(resp.body)}`
+)
+}
+return resp.body as Row
 }

 delete = async (
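The same status check can be read as a small reusable helper; this is an illustrative sketch, and the checkStatus name is ours rather than part of the change:

// Assert a supertest-style response status, including the body in the error
// message, mirroring the manual check that replaces .expect(expectStatus) above.
function checkStatus(resp: { status: number; body: any }, expectStatus: number) {
  if (resp.status !== expectStatus) {
    throw new Error(
      `Expected status ${expectStatus} but got ${resp.status}, body: ${JSON.stringify(
        resp.body
      )}`
    )
  }
  return resp
}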
@@ -1,17 +1,15 @@
 import { Document } from "../document"

+export interface LinkInfo {
+rowId: string
+fieldName: string
+tableId: string
+}
+
 export interface LinkDocument extends Document {
 type: string
-doc1: {
-rowId: string
-fieldName: string
-tableId: string
-}
-doc2: {
-rowId: string
-fieldName: string
-tableId: string
-}
+doc1: LinkInfo
+doc2: LinkInfo
 }

 export interface LinkDocumentValue {
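For reference, a link document under the new LinkInfo-based shape reads as below; the IDs and field names are invented for illustration:

// Both sides of the relationship now share the LinkInfo structure
const exampleLink = {
  type: "link",
  doc1: { rowId: "ro_abc", fieldName: "user relationship", tableId: "ta_example" },
  doc2: { rowId: "ro_user_xyz", fieldName: "test", tableId: "ta_users" },
}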
@@ -1,16 +1,12 @@
 ## Description

 _Describe the problem or feature in addition to a link to the relevant github issues._

-Addresses:
+### Addresses:

 - `<Enter the Link to the issue(s) this PR addresses>`
 - ...more if required

 ## App Export

 - If possible, attach an app export file along with your request template to make QA testing easier, with minimal setup.

 ## Screenshots

 _If a UI facing feature, a short video of the happy path, and some screenshots of the new functionality._

@@ -2,9 +2,9 @@
 if [[ $TARGETARCH == arm* ]] ;
 then
 echo "INSTALLING ARM64 MINIO"
-wget wget https://dl.min.io/server/minio/release/linux-arm64/archive/minio.deb -O minio.deb
+wget https://dl.min.io/server/minio/release/linux-arm64/minio
 else
 echo "INSTALLING AMD64 MINIO"
-wget wget https://dl.min.io/server/minio/release/linux-amd64/archive/minio.deb -O minio.deb
+wget https://dl.min.io/server/minio/release/linux-amd64/minio
 fi
-dpkg -i minio.deb
+chmod +x minio