Merge remote-tracking branch 'origin/v3-ui' into feature/automation-branching-ux

Dean 2024-09-20 16:08:20 +01:00
commit eff76e10d7
29 changed files with 393 additions and 236 deletions

View File

@@ -108,7 +108,7 @@ jobs:
       - name: Pull testcontainers images
         run: |
           docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.3.3 &
+          docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
           docker pull redis &
           wait $(jobs -p)
@@ -179,7 +179,7 @@ jobs:
           docker pull minio/minio &
           docker pull redis &
           docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.3.3 &
+          docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
           wait $(jobs -p)

View File

@@ -641,7 +641,7 @@ couchdb:
     # @ignore
     repository: budibase/couchdb
     # @ignore
-    tag: v3.3.3
+    tag: v3.3.3-sqs-v2.1.1
     # @ignore
     pullPolicy: Always

View File

@@ -46,7 +46,7 @@ export default async function setup() {
   await killContainers(containers)
   try {
-    const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
+    const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",

View File

@ -1,4 +1,4 @@
ARG BASEIMG=budibase/couchdb:v3.3.3 ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
FROM node:20-slim as build FROM node:20-slim as build
# install node-gyp dependencies # install node-gyp dependencies

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.32.5",
+  "version": "2.32.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -143,6 +143,7 @@ const environment = {
   POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
   POSTHOG_PERSONAL_TOKEN: process.env.POSTHOG_PERSONAL_TOKEN,
   POSTHOG_API_HOST: process.env.POSTHOG_API_HOST || "https://us.i.posthog.com",
+  POSTHOG_FEATURE_FLAGS_ENABLED: process.env.POSTHOG_FEATURE_FLAGS_ENABLED,
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
   TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
   CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,

View File

@@ -6,7 +6,12 @@ import tracer from "dd-trace"
 let posthog: PostHog | undefined

 export function init(opts?: PostHogOptions) {
-  if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST && !env.SELF_HOSTED) {
+  if (
+    env.POSTHOG_TOKEN &&
+    env.POSTHOG_API_HOST &&
+    !env.SELF_HOSTED &&
+    env.POSTHOG_FEATURE_FLAGS_ENABLED
+  ) {
     console.log("initializing posthog client...")
     posthog = new PostHog(env.POSTHOG_TOKEN, {
       host: env.POSTHOG_API_HOST,

View File

@@ -148,6 +148,7 @@ describe("feature flags", () => {
       const env: Partial<typeof environment> = {
         TENANT_FEATURE_FLAGS: environmentFlags,
         SELF_HOSTED: false,
+        POSTHOG_FEATURE_FLAGS_ENABLED: "true",
       }
       if (posthogFlags) {

View File

@@ -102,10 +102,6 @@ export const useAppBuilders = () => {
   return useFeature(Feature.APP_BUILDERS)
 }

-export const useViewReadonlyColumns = () => {
-  return useFeature(Feature.VIEW_READONLY_COLUMNS)
-}
-
 // QUOTAS
 export const setAutomationLogsQuota = (value: number) => {

View File

@@ -74,7 +74,7 @@
     display: flex;
     flex-direction: row;
     align-items: center;
-    gap: var(--spacing-l);
+    gap: var(--spacing-s);
   }
   .left {
     width: 0;

View File

@@ -53,7 +53,7 @@
     selected={filterCount > 0}
     accentColor="#004EA6"
   >
-    {filterCount ? `Filter (${filterCount})` : "Filter"}
+    {filterCount ? `Filter: ${filterCount}` : "Filter"}
   </ActionButton>
   <Drawer

View File

@@ -10,7 +10,6 @@
   import { getContext } from "svelte"
   import { ActionButton, Popover } from "@budibase/bbui"
   import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
-  import { licensing } from "stores/portal"
   import { isEnabled } from "helpers/featureFlags"
   import { FeatureFlag } from "@budibase/types"
@@ -21,7 +20,6 @@
   $: anyRestricted = $columns.filter(col => !col.visible || col.readonly).length
   $: text = anyRestricted ? `Columns: ${anyRestricted} restricted` : "Columns"
-  $: allowViewReadonlyColumns = $licensing.isViewReadonlyColumnsEnabled
   $: permissions =
     $datasource.type === "viewV2"
       ? [
@@ -30,9 +28,6 @@
           FieldPermissions.HIDDEN,
         ]
       : [FieldPermissions.WRITABLE, FieldPermissions.HIDDEN]
-  $: disabledPermissions = allowViewReadonlyColumns
-    ? []
-    : [FieldPermissions.READONLY]
 </script>

 <div bind:this={anchor}>
@@ -54,6 +49,5 @@
       columns={$columns}
       canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
       {permissions}
-      {disabledPermissions}
     />
   </Popover>

View File

@@ -4,6 +4,7 @@
     datasources,
     userSelectedResourceMap,
     contextMenuStore,
+    appStore,
   } from "stores/builder"
   import IntegrationIcon from "components/backend/DatasourceNavigator/IntegrationIcon.svelte"
   import { Icon, ActionButton, ActionMenu, MenuItem } from "@budibase/bbui"
@@ -179,11 +180,15 @@
 </script>

 <div class="nav">
-  <IntegrationIcon
-    integrationType={datasource?.source}
-    schema={datasource?.schema}
-    size="24"
-  />
+  <a
+    href={`/builder/app/${$appStore.appId}/data/datasource/${datasource?._id}`}
+  >
+    <IntegrationIcon
+      integrationType={datasource?.source}
+      schema={datasource?.schema}
+      size="24"
+    />
+  </a>
   <a
     href={$tableUrl(tableId)}
     class="nav-item"

View File

@@ -131,23 +131,15 @@ export const createLicensingStore = () => {
       const triggerAutomationRunEnabled = license.features.includes(
         Constants.Features.TRIGGER_AUTOMATION_RUN
       )
       const perAppBuildersEnabled = license.features.includes(
         Constants.Features.APP_BUILDERS
       )
-      const isViewReadonlyColumnsEnabled = license.features.includes(
-        Constants.Features.VIEW_READONLY_COLUMNS
-      )
       const budibaseAIEnabled = license.features.includes(
         Constants.Features.BUDIBASE_AI
       )
       const customAIConfigsEnabled = license.features.includes(
         Constants.Features.AI_CUSTOM_CONFIGS
       )
       store.update(state => {
         return {
           ...state,
@@ -168,7 +160,6 @@ export const createLicensingStore = () => {
           syncAutomationsEnabled,
           triggerAutomationRunEnabled,
           perAppBuildersEnabled,
-          isViewReadonlyColumnsEnabled,
         }
       })
     },

View File

@@ -1,4 +1,5 @@
 import { QueryUtils } from "@budibase/frontend-core"
+import { EmptyFilterOption } from "@budibase/types"

 export const getActiveConditions = conditions => {
   if (!conditions?.length) {
@@ -33,7 +34,8 @@ export const getActiveConditions = conditions => {
       value: condition.referenceValue,
     }

-    const query = QueryUtils.buildQuery([luceneCondition])
+    let query = QueryUtils.buildQuery([luceneCondition])
+    query.onEmptyFilter = EmptyFilterOption.RETURN_NONE
     const result = QueryUtils.runQuery([luceneCondition], query)
     return result.length > 0
   })

View File

@@ -29,8 +29,18 @@ export const createActions = context => {
     })
   }

-  const getRow = () => {
-    throw "Views don't support fetching individual rows"
+  const getRow = async id => {
+    const res = await API.viewV2.fetch({
+      viewId: get(datasource).id,
+      limit: 1,
+      query: {
+        equal: {
+          _id: id,
+        },
+      },
+      paginate: false,
+    })
+    return res?.rows?.[0]
   }

   const isDatasourceValid = datasource => {
@@ -97,9 +107,12 @@ export const initialise = context => {
       order: get(initialSortOrder) || "ascending",
     })

-    // Keep sort and filter state in line with the view definition
+    // Keep sort and filter state in line with the view definition when in builder
     unsubscribers.push(
       definition.subscribe($definition => {
+        if (!get(config).canSaveSchema) {
+          return
+        }
         if ($definition?.id !== $datasource.id) {
           return
         }
@@ -122,7 +135,6 @@ export const initialise = context => {
       sort.subscribe(async $sort => {
         // If we can mutate schema then update the view definition
         if (get(config).canSaveSchema) {
-          // Ensure we're updating the correct view
           const $view = get(definition)
           if ($view?.id !== $datasource.id) {
             return
@@ -144,7 +156,7 @@ export const initialise = context => {
         // Also update the fetch to ensure the new sort is respected.
         // Ensure we're updating the correct fetch.
         const $fetch = get(fetch)
-        if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) {
+        if ($fetch?.options?.datasource?.id !== $datasource.id) {
           return
         }
         $fetch.update({
@@ -157,32 +169,49 @@ export const initialise = context => {
     // When filters change, ensure view definition is kept up to date
     unsubscribers?.push(
       filter.subscribe(async $filter => {
-        // If we can mutate schema then update the view definition
-        if (get(config).canSaveSchema) {
-          // Ensure we're updating the correct view
+        if (!get(config).canSaveSchema) {
+          return
+        }
         const $view = get(definition)
         if ($view?.id !== $datasource.id) {
           return
         }
         if (JSON.stringify($filter) !== JSON.stringify($view.query)) {
           await datasource.actions.saveDefinition({
             ...$view,
             query: $filter,
           })
+        }
         // Refresh data since view definition changed
         await rows.actions.refreshData()
-        }
       })
     )

-    // Keep fetch up to date with filters.
-    // If we're able to save filters against the view then we only need to apply
-    // inline filters to the fetch, as saved filters are applied server side.
-    // If we can't save filters, then all filters must be applied to the fetch.
+    // Keep fetch up to date with inline filters when in the data section
+    unsubscribers.push(
+      inlineFilters.subscribe($inlineFilters => {
+        if (!get(config).canSaveSchema) {
+          return
+        }
+        const $fetch = get(fetch)
+        if ($fetch?.options?.datasource?.id !== $datasource.id) {
+          return
+        }
+        $fetch.update({
+          filter: $inlineFilters,
+        })
+      })
+    )

+    // Keep fetch up to date with all filters when not in the data section
     unsubscribers.push(
       allFilters.subscribe($allFilters => {
-        // Ensure we're updating the correct fetch
+        if (get(config).canSaveSchema) {
+          return
+        }
         const $fetch = get(fetch)
-        if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) {
+        if ($fetch?.options?.datasource?.id !== $datasource.id) {
           return
         }
         $fetch.update({

View File

@@ -1,12 +1,13 @@
-import { writable, get, derived } from "svelte/store"
+import { get, derived } from "svelte/store"
 import { FieldType, FilterGroupLogicalOperator } from "@budibase/types"
+import { memo } from "../../../utils/memo"

 export const createStores = context => {
   const { props } = context
   // Initialise to default props
-  const filter = writable(get(props).initialFilter)
-  const inlineFilters = writable([])
+  const filter = memo(get(props).initialFilter)
+  const inlineFilters = memo([])

   return {
     filter,
@@ -19,19 +20,26 @@ export const deriveStores = context => {
   const allFilters = derived(
     [filter, inlineFilters],
     ([$filter, $inlineFilters]) => {
-      const inlineFilterGroup = $inlineFilters?.length
-        ? {
-            logicalOperator: FilterGroupLogicalOperator.ALL,
-            filters: [...($inlineFilters || [])],
-          }
-        : null
-
-      return inlineFilterGroup
-        ? {
-            logicalOperator: FilterGroupLogicalOperator.ALL,
-            groups: [...($filter?.groups || []), inlineFilterGroup],
-          }
-        : $filter
+      // Just use filter prop if no inline filters
+      if (!$inlineFilters?.length) {
+        return $filter
+      }
+      let allFilters = {
+        logicalOperator: FilterGroupLogicalOperator.ALL,
+        groups: [
+          {
+            logicalOperator: FilterGroupLogicalOperator.ALL,
+            filters: $inlineFilters,
+          },
+        ],
+      }
+      // Just use inline if no filter
+      if (!$filter?.groups?.length) {
+        return allFilters
+      }
+      // Join them together if both
+      allFilters.groups = [...allFilters.groups, ...$filter.groups]
+      return allFilters
     }
   )

View File

@@ -178,7 +178,6 @@ export default class DataFetch {
     // Build the query
     let query = this.options.query
-
     if (!query && this.features.supportsSearch) {
       query = buildQuery(filter)
     }
@@ -365,7 +364,9 @@ export default class DataFetch {
     let refresh = false
     const entries = Object.entries(newOptions || {})
     for (let [key, value] of entries) {
-      if (JSON.stringify(value) !== JSON.stringify(this.options[key])) {
+      const oldVal = this.options[key] == null ? null : this.options[key]
+      const newVal = value == null ? null : value
+      if (JSON.stringify(newVal) !== JSON.stringify(oldVal)) {
        refresh = true
        break
      }

View File

@@ -1,6 +1,5 @@
 import DataFetch from "./DataFetch.js"
 import { get } from "svelte/store"
-import { utils } from "@budibase/shared-core"

 export default class ViewV2Fetch extends DataFetch {
   determineFeatureFlags() {
@@ -36,15 +35,8 @@ export default class ViewV2Fetch extends DataFetch {
   }

   async getData() {
-    const {
-      datasource,
-      limit,
-      sortColumn,
-      sortOrder,
-      sortType,
-      paginate,
-      filter,
-    } = this.options
+    const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
+      this.options
     const { cursor, query, definition } = get(this.store)

     // If sort/filter params are not defined, update options to store the
@@ -55,12 +47,6 @@ export default class ViewV2Fetch extends DataFetch {
       this.options.sortOrder = definition.sort.order
     }

-    const parsed = utils.processSearchFilters(filter)
-    if (!parsed?.groups?.length && definition.query?.groups?.length) {
-      this.options.filter = definition.query
-    }
-
     try {
       const res = await this.API.viewV2.fetch({
         viewId: datasource.id,

View File

@@ -80,7 +80,7 @@
     "dotenv": "8.2.0",
     "form-data": "4.0.0",
     "global-agent": "3.0.0",
-    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.3",
+    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
     "isolated-vm": "^4.7.2",
     "jimp": "0.22.12",

View File

@@ -138,7 +138,7 @@ async function processDeleteRowsRequest(ctx: UserCtx<DeleteRowRequest>) {
   const { tableId } = utils.getSourceId(ctx)

   const processedRows = request.rows.map(row => {
-    let processedRow: Row = typeof row == "string" ? { _id: row } : row
+    let processedRow: Row = typeof row == "string" ? { _id: row, tableId } : row
     return !processedRow._rev
       ? addRev(fixRow(processedRow, ctx.params), tableId)
       : fixRow(processedRow, ctx.params)

View File

@@ -1138,6 +1138,18 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })

+    it("should be able to delete a row with ID only", async () => {
+      const createdRow = await config.api.row.save(table._id!, {})
+      const rowUsage = await getRowUsage()
+      const res = await config.api.row.bulkDelete(table._id!, {
+        rows: [createdRow._id!],
+      })
+      expect(res[0]._id).toEqual(createdRow._id)
+      expect(res[0].tableId).toEqual(table._id!)
+      await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+    })
+
     it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
       const createdRow = await config.api.row.save(table._id!, {})
       const createdRow2 = await config.api.row.save(table._id!, {})

View File

@@ -309,10 +309,6 @@ describe.each([
     })

     describe("readonly fields", () => {
-      beforeEach(() => {
-        mocks.licenses.useViewReadonlyColumns()
-      })
-
       it("readonly fields are persisted", async () => {
         const table = await config.api.table.save(
           saveTableRequest({
@@ -436,7 +432,7 @@
         })
       })

-      it("readonly fields cannot be used on free license", async () => {
+      it("readonly fields can be used on free license", async () => {
         mocks.licenses.useCloudFree()
         const table = await config.api.table.save(
           saveTableRequest({
@@ -466,11 +462,7 @@
         }

         await config.api.viewV2.create(newView, {
-          status: 400,
-          body: {
-            message: "Readonly fields are not enabled",
-            status: 400,
-          },
+          status: 201,
         })
       })
     })
@@ -513,7 +505,6 @@
     })

     it("display fields can be readonly", async () => {
-      mocks.licenses.useViewReadonlyColumns()
       const table = await config.api.table.save(
         saveTableRequest({
           schema: {
@@ -588,7 +579,6 @@
     })

     it("can update all fields", async () => {
-      mocks.licenses.useViewReadonlyColumns()
       const tableId = table._id!

       const updatedData: Required<UpdateViewRequest> = {
@@ -802,71 +792,6 @@
       )
     })

-    it("cannot update views with readonly on on free license", async () => {
-      mocks.licenses.useViewReadonlyColumns()
-      view = await config.api.viewV2.update({
-        ...view,
-        schema: {
-          id: { visible: true },
-          Price: {
-            visible: true,
-            readonly: true,
-          },
-        },
-      })
-      mocks.licenses.useCloudFree()
-      await config.api.viewV2.update(view, {
-        status: 400,
-        body: {
-          message: "Readonly fields are not enabled",
-        },
-      })
-    })
-
-    it("can remove readonly config after license downgrade", async () => {
-      mocks.licenses.useViewReadonlyColumns()
-      view = await config.api.viewV2.update({
-        ...view,
-        schema: {
-          id: { visible: true },
-          Price: {
-            visible: true,
-            readonly: true,
-          },
-          Category: {
-            visible: true,
-            readonly: true,
-          },
-        },
-      })
-      mocks.licenses.useCloudFree()
-      const res = await config.api.viewV2.update({
-        ...view,
-        schema: {
-          id: { visible: true },
-          Price: {
-            visible: true,
-            readonly: false,
-          },
-        },
-      })
-      expect(res).toEqual(
-        expect.objectContaining({
-          ...view,
-          schema: {
-            id: { visible: true },
-            Price: {
-              visible: true,
-              readonly: false,
-            },
-          },
-        })
-      )
-    })
-
     isInternal &&
       it("updating schema will only validate modified field", async () => {
         let view = await config.api.viewV2.create({
@@ -1046,7 +971,6 @@
     })

     it("should be able to fetch readonly config after downgrades", async () => {
-      mocks.licenses.useViewReadonlyColumns()
       const res = await config.api.viewV2.create({
         name: generator.name(),
         tableId: table._id!,
@@ -1112,8 +1036,6 @@
     })

     it("rejects if field is readonly in any view", async () => {
-      mocks.licenses.useViewReadonlyColumns()
-
       await config.api.viewV2.create({
         name: "view a",
         tableId: table._id!,
@@ -1538,7 +1460,6 @@
     })

     it("can't persist readonly columns", async () => {
-      mocks.licenses.useViewReadonlyColumns()
       const view = await config.api.viewV2.create({
         tableId: table._id!,
         name: generator.guid(),
@@ -1607,7 +1528,6 @@
     })

     it("can't update readonly columns", async () => {
-      mocks.licenses.useViewReadonlyColumns()
       const view = await config.api.viewV2.create({
         tableId: table._id!,
         name: generator.guid(),

View File

@@ -330,15 +330,16 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
       return { tables: {}, errors: {} }
     }
     await this.connect()
     const sheets = this.client.sheetsByIndex
     const tables: Record<string, Table> = {}

     let errors: Record<string, string> = {}
     await utils.parallelForeach(
       sheets,
       async sheet => {
+        // must fetch rows to determine schema
         try {
-          await sheet.getRows()
+          await sheet.getRows({ limit: 1 })
         } catch (err) {
           // We expect this to always be an Error so if it's not, rethrow it to
           // make sure we don't fail quietly.
@@ -346,26 +347,34 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
             throw err
           }

-          if (err.message.startsWith("No values in the header row")) {
-            errors[sheet.title] = err.message
-          } else {
-            // If we get an error we don't expect, rethrow to avoid failing
-            // quietly.
-            throw err
-          }
-          return
-        }
-
-        const id = buildExternalTableId(datasourceId, sheet.title)
-        tables[sheet.title] = this.getTableSchema(
-          sheet.title,
-          sheet.headerValues,
-          datasourceId,
-          id
-        )
+          if (
+            err.message.startsWith("No values in the header row") ||
+            err.message.startsWith("All your header cells are blank")
+          ) {
+            errors[
+              sheet.title
+            ] = `Failed to find a header row in sheet "${sheet.title}", is the first row blank?`
+            return
+          }
+
+          // If we get an error we don't expect, rethrow to avoid failing
+          // quietly.
+          throw err
+        }
       },
       10
     )
+
+    for (const sheet of sheets) {
+      const id = buildExternalTableId(datasourceId, sheet.title)
+      tables[sheet.title] = this.getTableSchema(
+        sheet.title,
+        sheet.headerValues,
+        datasourceId,
+        id
+      )
+    }
     let externalTables = finaliseExternalTables(tables, entities)
     errors = { ...errors, ...checkExternalTables(externalTables) }
     return { tables: externalTables, errors }

View File

@@ -244,6 +244,20 @@ describe("Google Sheets Integration", () => {
         expect.arrayContaining(Array.from({ length: 248 }, (_, i) => `${i}`))
       )
     })

+    it("can export rows", async () => {
+      const resp = await config.api.row.exportRows(table._id!, {})
+      const parsed = JSON.parse(resp)
+      expect(parsed.length).toEqual(2)
+      expect(parsed[0]).toMatchObject({
+        name: "Test Contact 1",
+        description: "original description 1",
+      })
+      expect(parsed[1]).toMatchObject({
+        name: "Test Contact 2",
+        description: "original description 2",
+      })
+    })
   })

   describe("update", () => {
@@ -491,4 +505,97 @@ describe("Google Sheets Integration", () => {
       expect(emptyRows.length).toEqual(0)
     })
   })

+  describe("fetch schema", () => {
+    it("should fail to import a completely blank sheet", async () => {
+      mock.createSheet({ title: "Sheet1" })
+      await config.api.datasource.fetchSchema(
+        {
+          datasourceId: datasource._id!,
+          tablesFilter: ["Sheet1"],
+        },
+        {
+          status: 200,
+          body: {
+            errors: {
+              Sheet1:
+                'Failed to find a header row in sheet "Sheet1", is the first row blank?',
+            },
+          },
+        }
+      )
+    })
+
+    it("should fail to import multiple sheets with blank headers", async () => {
+      mock.createSheet({ title: "Sheet1" })
+      mock.createSheet({ title: "Sheet2" })
+      await config.api.datasource.fetchSchema(
+        {
+          datasourceId: datasource!._id!,
+          tablesFilter: ["Sheet1", "Sheet2"],
+        },
+        {
+          status: 200,
+          body: {
+            errors: {
+              Sheet1:
+                'Failed to find a header row in sheet "Sheet1", is the first row blank?',
+              Sheet2:
+                'Failed to find a header row in sheet "Sheet2", is the first row blank?',
+            },
+          },
+        }
+      )
+    })
+
+    it("should only fail the sheet with missing headers", async () => {
+      mock.createSheet({ title: "Sheet1" })
+      mock.createSheet({ title: "Sheet2" })
+      mock.createSheet({ title: "Sheet3" })
+      mock.set("Sheet1!A1", "name")
+      mock.set("Sheet1!B1", "dob")
+      mock.set("Sheet2!A1", "name")
+      mock.set("Sheet2!B1", "dob")
+      await config.api.datasource.fetchSchema(
+        {
+          datasourceId: datasource!._id!,
+          tablesFilter: ["Sheet1", "Sheet2", "Sheet3"],
+        },
+        {
+          status: 200,
+          body: {
+            errors: {
+              Sheet3:
+                'Failed to find a header row in sheet "Sheet3", is the first row blank?',
+            },
+          },
+        }
+      )
+    })
+
+    it("should only succeed if sheet with missing headers is not being imported", async () => {
+      mock.createSheet({ title: "Sheet1" })
+      mock.createSheet({ title: "Sheet2" })
+      mock.createSheet({ title: "Sheet3" })
+      mock.set("Sheet1!A1", "name")
+      mock.set("Sheet1!B1", "dob")
+      mock.set("Sheet2!A1", "name")
+      mock.set("Sheet2!B1", "dob")
+      await config.api.datasource.fetchSchema(
+        {
+          datasourceId: datasource!._id!,
+          tablesFilter: ["Sheet1", "Sheet2"],
+        },
+        {
+          status: 200,
+          body: { errors: {} },
+        }
+      )
+    })
+  })
 })

View File

@@ -22,6 +22,7 @@ import type {
   CellPadding,
   Color,
   GridRange,
+  DataSourceSheetProperties,
 } from "google-spreadsheet/src/lib/types/sheets-types"

 const BLACK: Color = { red: 0, green: 0, blue: 0 }
@@ -91,7 +92,7 @@ interface UpdateValuesResponse {
 // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#AddSheetRequest
 interface AddSheetRequest {
-  properties: WorksheetProperties
+  properties: Partial<WorksheetProperties>
 }

 // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response#AddSheetResponse
@@ -236,6 +237,38 @@ export class GoogleSheetsMock {
     this.mockAPI()
   }

+  public cell(cell: string): Value | undefined {
+    const cellData = this.cellData(cell)
+    if (!cellData) {
+      return undefined
+    }
+    return this.cellValue(cellData)
+  }
+
+  public set(cell: string, value: Value): void {
+    const cellData = this.cellData(cell)
+    if (!cellData) {
+      throw new Error(`Cell ${cell} not found`)
+    }
+    cellData.userEnteredValue = this.createValue(value)
+  }
+
+  public sheet(name: string | number): Sheet | undefined {
+    if (typeof name === "number") {
+      return this.getSheetById(name)
+    }
+    return this.getSheetByName(name)
+  }
+
+  public createSheet(opts: Partial<WorksheetProperties>): Sheet {
+    const properties = this.defaultWorksheetProperties(opts)
+    if (this.getSheetByName(properties.title)) {
+      throw new Error(`Sheet ${properties.title} already exists`)
+    }
+    const resp = this.handleAddSheet({ properties })
+    return this.getSheetById(resp.properties.sheetId)!
+  }
+
   private route(
     method: "get" | "put" | "post",
     path: string | RegExp,
@@ -462,35 +495,39 @@ export class GoogleSheetsMock {
     return response
   }

-  private handleAddSheet(request: AddSheetRequest): AddSheetResponse {
-    const properties: Omit<WorksheetProperties, "dataSourceSheetProperties"> = {
+  private defaultWorksheetProperties(
+    opts: Partial<WorksheetProperties>
+  ): WorksheetProperties {
+    return {
       index: this.spreadsheet.sheets.length,
       hidden: false,
       rightToLeft: false,
       tabColor: BLACK,
       tabColorStyle: { rgbColor: BLACK },
       sheetType: "GRID",
-      title: request.properties.title,
+      title: "Sheet",
       sheetId: this.spreadsheet.sheets.length,
       gridProperties: {
         rowCount: 100,
         columnCount: 26,
+        frozenRowCount: 0,
+        frozenColumnCount: 0,
+        hideGridlines: false,
+        rowGroupControlAfter: false,
+        columnGroupControlAfter: false,
       },
+      dataSourceSheetProperties: {} as DataSourceSheetProperties,
+      ...opts,
     }
+  }
+
+  private handleAddSheet(request: AddSheetRequest): AddSheetResponse {
+    const properties = this.defaultWorksheetProperties(request.properties)
     this.spreadsheet.sheets.push({
-      properties: properties as WorksheetProperties,
-      data: [this.createEmptyGrid(100, 26)],
+      properties,
+      data: [
+        this.createEmptyGrid(
+          properties.gridProperties.rowCount,
+          properties.gridProperties.columnCount
+        ),
+      ],
     })
-    return { properties }
+
+    // dataSourceSheetProperties is only returned by the API if the sheet type is
+    // DATA_SOURCE, which we aren't using, so sadly we need to cast here.
+    return { properties: properties as WorksheetProperties }
   }

   private handleDeleteRange(request: DeleteRangeRequest) {
@@ -767,21 +804,6 @@ export class GoogleSheetsMock {
     return this.getCellNumericIndexes(sheetId, startRowIndex, startColumnIndex)
   }

-  public cell(cell: string): Value | undefined {
-    const cellData = this.cellData(cell)
-    if (!cellData) {
-      return undefined
-    }
-    return this.cellValue(cellData)
-  }
-
-  public sheet(name: string | number): Sheet | undefined {
-    if (typeof name === "number") {
-      return this.getSheetById(name)
-    }
-    return this.getSheetByName(name)
-  }
-
   private getCellNumericIndexes(
     sheet: Sheet | number,
     row: number,

View File

@@ -5,13 +5,11 @@ import {
   Table,
   TableSchema,
   View,
-  ViewFieldMetadata,
   ViewV2,
   ViewV2ColumnEnriched,
   ViewV2Enriched,
 } from "@budibase/types"
 import { HTTPError } from "@budibase/backend-core"
-import { features } from "@budibase/pro"
 import {
   helpers,
   PROTECTED_EXTERNAL_COLUMNS,
@@ -59,13 +57,6 @@ async function guardViewSchema(
   }

   if (viewSchema[field].readonly) {
-    if (
-      !(await features.isViewReadonlyColumnsEnabled()) &&
-      !(tableSchemaField as ViewFieldMetadata).readonly
-    ) {
-      throw new HTTPError(`Readonly fields are not enabled`, 400)
-    }
-
     if (!viewSchema[field].visible) {
       throw new HTTPError(
         `Field "${field}" must be visible if you want to make it readonly`,

View File

@@ -2,4 +2,4 @@
 yarn build:apps
 version=$(./scripts/getCurrentVersion.sh)
-docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs .
+docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1 .

View File

@@ -2053,6 +2053,44 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

+"@budibase/backend-core@2.32.6":
+  version "0.0.0"
+  dependencies:
+    "@budibase/nano" "10.1.5"
+    "@budibase/pouchdb-replication-stream" "1.2.11"
+    "@budibase/shared-core" "0.0.0"
+    "@budibase/types" "0.0.0"
+    aws-cloudfront-sign "3.0.2"
+    aws-sdk "2.1030.0"
+    bcrypt "5.1.0"
+    bcryptjs "2.4.3"
+    bull "4.10.1"
+    correlation-id "4.0.0"
+    dd-trace "5.2.0"
+    dotenv "16.0.1"
+    ioredis "5.3.2"
+    joi "17.6.0"
+    jsonwebtoken "9.0.2"
+    knex "2.4.2"
+    koa-passport "^6.0.0"
+    koa-pino-logger "4.0.0"
+    lodash "4.17.21"
+    node-fetch "2.6.7"
+    passport-google-oauth "2.0.0"
+    passport-local "1.0.0"
+    passport-oauth2-refresh "^2.1.0"
+    pino "8.11.0"
+    pino-http "8.3.3"
+    posthog-node "4.0.1"
+    pouchdb "7.3.0"
+    pouchdb-find "7.2.2"
+    redlock "4.2.0"
+    rotating-file-stream "3.1.0"
+    sanitize-s3-objectkey "0.0.1"
+    semver "^7.5.4"
+    tar-fs "2.1.1"
+    uuid "^8.3.2"
+
 "@budibase/handlebars-helpers@^0.13.2":
   version "0.13.2"
   resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.13.2.tgz#73ab51c464e91fd955b429017648e0257060db77"
@@ -2095,6 +2133,45 @@
     pouchdb-promise "^6.0.4"
     through2 "^2.0.0"

+"@budibase/pro@npm:@budibase/pro@latest":
+  version "2.32.6"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.32.6.tgz#02ddef737ee8f52dafd8fab8f8f277dfc89cd33f"
+  integrity sha512-+XEv4JtMvUKZWyllcw+iFOh44zxsoJLmUdShu4bAjj5zXWgElF6LjFpK51IrQzM6xKfQxn7N2vmxu7175u5dDQ==
+  dependencies:
+    "@budibase/backend-core" "2.32.6"
+    "@budibase/shared-core" "2.32.6"
+    "@budibase/string-templates" "2.32.6"
+    "@budibase/types" "2.32.6"
+    "@koa/router" "8.0.8"
+    bull "4.10.1"
+    dd-trace "5.2.0"
+    joi "17.6.0"
+    jsonwebtoken "9.0.2"
+    lru-cache "^7.14.1"
+    memorystream "^0.3.1"
+    node-fetch "2.6.7"
+    scim-patch "^0.8.1"
+    scim2-parse-filter "^0.2.8"
+
+"@budibase/shared-core@2.32.6":
+  version "0.0.0"
+  dependencies:
+    "@budibase/types" "0.0.0"
+    cron-validate "1.4.5"
+
+"@budibase/string-templates@2.32.6":
+  version "0.0.0"
+  dependencies:
+    "@budibase/handlebars-helpers" "^0.13.2"
+    dayjs "^1.10.8"
+    handlebars "^4.7.8"
+    lodash.clonedeep "^4.5.0"
+
+"@budibase/types@2.32.6":
+  version "0.0.0"
+  dependencies:
+    scim-patch "^0.8.1"
+
 "@bull-board/api@5.10.2":
   version "5.10.2"
   resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-5.10.2.tgz#ae8ff6918b23897bf879a6ead3683f964374c4b3"
@@ -12277,10 +12354,10 @@ google-p12-pem@^4.0.0:
   dependencies:
     node-forge "^1.3.1"

-"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.3":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.3.tgz#bcee7bd9d90f82c54b16a9aca963b87aceb050ad"
-  integrity sha512-03VX3/K5NXIh6+XAIDZgcHPmR76xwd8vIDL7RedMpvM2IcXK0Iq/KU7FmLY0t/mKqORAGC7+0rajd0jLFezC4w==
+"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.5":
+  version "4.1.5"
+  resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.5.tgz#c89ffcbfcb1a3538e910d9275f73efc1d7deb85f"
+  integrity sha512-t1uBjuRSkNLnZ89DYtYQ2GW33xVU84qOyOPbGi+M0w7cAJofs95PwlBLhVol6Pv5VbeL0I1J7M4XyVqp0nSZtQ==
   dependencies:
     axios "^1.4.0"
     lodash "^4.17.21"