Merge remote-tracking branch 'origin/v3-ui' into feature/automation-branching-ux

Dean 2024-09-20 16:08:20 +01:00
commit eff76e10d7
29 changed files with 393 additions and 236 deletions

View File

@@ -108,7 +108,7 @@ jobs:
- name: Pull testcontainers images
run: |
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.3.3 &
docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
docker pull redis &
wait $(jobs -p)
@@ -179,7 +179,7 @@ jobs:
docker pull minio/minio &
docker pull redis &
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.3.3 &
docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
wait $(jobs -p)

View File

@@ -641,7 +641,7 @@ couchdb:
# @ignore
repository: budibase/couchdb
# @ignore
tag: v3.3.3
tag: v3.3.3-sqs-v2.1.1
# @ignore
pullPolicy: Always

View File

@@ -46,7 +46,7 @@ export default async function setup() {
await killContainers(containers)
try {
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
.withExposedPorts(5984, 4984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",
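As a point of reference (not part of this change), a container configured this way is normally started and queried with the testcontainers Node API roughly as in the sketch below; the start(), getHost() and getMappedPort() calls are assumptions drawn from that library, not from this diff.

import { GenericContainer } from "testcontainers"

async function startCouch() {
  // Start the pinned CouchDB image and expose both the CouchDB and SQS ports.
  const container = await new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
    .withExposedPorts(5984, 4984)
    .withEnvironment({ COUCHDB_PASSWORD: "budibase" })
    .start()
  // Exposed ports are mapped to random free host ports; read them back for the test config.
  const host = container.getHost()
  return {
    couchUrl: `http://${host}:${container.getMappedPort(5984)}`,
    sqsUrl: `http://${host}:${container.getMappedPort(4984)}`,
  }
}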

View File

@@ -1,4 +1,4 @@
ARG BASEIMG=budibase/couchdb:v3.3.3
ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
FROM node:20-slim as build
# install node-gyp dependencies

View File

@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.5",
"version": "2.32.6",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@@ -143,6 +143,7 @@ const environment = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
POSTHOG_PERSONAL_TOKEN: process.env.POSTHOG_PERSONAL_TOKEN,
POSTHOG_API_HOST: process.env.POSTHOG_API_HOST || "https://us.i.posthog.com",
POSTHOG_FEATURE_FLAGS_ENABLED: process.env.POSTHOG_FEATURE_FLAGS_ENABLED,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,

View File

@@ -6,7 +6,12 @@ import tracer from "dd-trace"
let posthog: PostHog | undefined
export function init(opts?: PostHogOptions) {
if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST && !env.SELF_HOSTED) {
if (
env.POSTHOG_TOKEN &&
env.POSTHOG_API_HOST &&
!env.SELF_HOSTED &&
env.POSTHOG_FEATURE_FLAGS_ENABLED
) {
console.log("initializing posthog client...")
posthog = new PostHog(env.POSTHOG_TOKEN, {
host: env.POSTHOG_API_HOST,
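The net effect of the extra condition, written out as a standalone predicate purely for illustration (the real module keeps the check inline as shown above): PostHog is only initialised for cloud deployments that explicitly opt in to PostHog-backed feature flags.

function shouldInitPosthog(env: {
  POSTHOG_TOKEN?: string
  POSTHOG_API_HOST?: string
  SELF_HOSTED?: boolean
  POSTHOG_FEATURE_FLAGS_ENABLED?: string
}): boolean {
  // All three PostHog settings must be present and the deployment must not be
  // self-hosted; otherwise no client is constructed at all.
  return !!(
    env.POSTHOG_TOKEN &&
    env.POSTHOG_API_HOST &&
    !env.SELF_HOSTED &&
    env.POSTHOG_FEATURE_FLAGS_ENABLED
  )
}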

View File

@@ -148,6 +148,7 @@ describe("feature flags", () => {
const env: Partial<typeof environment> = {
TENANT_FEATURE_FLAGS: environmentFlags,
SELF_HOSTED: false,
POSTHOG_FEATURE_FLAGS_ENABLED: "true",
}
if (posthogFlags) {

View File

@@ -102,10 +102,6 @@ export const useAppBuilders = () => {
return useFeature(Feature.APP_BUILDERS)
}
export const useViewReadonlyColumns = () => {
return useFeature(Feature.VIEW_READONLY_COLUMNS)
}
// QUOTAS
export const setAutomationLogsQuota = (value: number) => {

View File

@@ -74,7 +74,7 @@
display: flex;
flex-direction: row;
align-items: center;
gap: var(--spacing-l);
gap: var(--spacing-s);
}
.left {
width: 0;

View File

@@ -53,7 +53,7 @@
selected={filterCount > 0}
accentColor="#004EA6"
>
{filterCount ? `Filter (${filterCount})` : "Filter"}
{filterCount ? `Filter: ${filterCount}` : "Filter"}
</ActionButton>
<Drawer

View File

@@ -10,7 +10,6 @@
import { getContext } from "svelte"
import { ActionButton, Popover } from "@budibase/bbui"
import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
import { licensing } from "stores/portal"
import { isEnabled } from "helpers/featureFlags"
import { FeatureFlag } from "@budibase/types"
@@ -21,7 +20,6 @@
$: anyRestricted = $columns.filter(col => !col.visible || col.readonly).length
$: text = anyRestricted ? `Columns: ${anyRestricted} restricted` : "Columns"
$: allowViewReadonlyColumns = $licensing.isViewReadonlyColumnsEnabled
$: permissions =
$datasource.type === "viewV2"
? [
@@ -30,9 +28,6 @@
FieldPermissions.HIDDEN,
]
: [FieldPermissions.WRITABLE, FieldPermissions.HIDDEN]
$: disabledPermissions = allowViewReadonlyColumns
? []
: [FieldPermissions.READONLY]
</script>
<div bind:this={anchor}>
@@ -54,6 +49,5 @@
columns={$columns}
canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
{permissions}
{disabledPermissions}
/>
</Popover>

View File

@@ -4,6 +4,7 @@
datasources,
userSelectedResourceMap,
contextMenuStore,
appStore,
} from "stores/builder"
import IntegrationIcon from "components/backend/DatasourceNavigator/IntegrationIcon.svelte"
import { Icon, ActionButton, ActionMenu, MenuItem } from "@budibase/bbui"
@@ -179,11 +180,15 @@
</script>
<div class="nav">
<IntegrationIcon
integrationType={datasource?.source}
schema={datasource?.schema}
size="24"
/>
<a
href={`/builder/app/${$appStore.appId}/data/datasource/${datasource?._id}`}
>
<IntegrationIcon
integrationType={datasource?.source}
schema={datasource?.schema}
size="24"
/>
</a>
<a
href={$tableUrl(tableId)}
class="nav-item"

View File

@@ -131,23 +131,15 @@ export const createLicensingStore = () => {
const triggerAutomationRunEnabled = license.features.includes(
Constants.Features.TRIGGER_AUTOMATION_RUN
)
const perAppBuildersEnabled = license.features.includes(
Constants.Features.APP_BUILDERS
)
const isViewReadonlyColumnsEnabled = license.features.includes(
Constants.Features.VIEW_READONLY_COLUMNS
)
const budibaseAIEnabled = license.features.includes(
Constants.Features.BUDIBASE_AI
)
const customAIConfigsEnabled = license.features.includes(
Constants.Features.AI_CUSTOM_CONFIGS
)
store.update(state => {
return {
...state,
@@ -168,7 +160,6 @@ export const createLicensingStore = () => {
syncAutomationsEnabled,
triggerAutomationRunEnabled,
perAppBuildersEnabled,
isViewReadonlyColumnsEnabled,
}
})
},

View File

@@ -1,4 +1,5 @@
import { QueryUtils } from "@budibase/frontend-core"
import { EmptyFilterOption } from "@budibase/types"
export const getActiveConditions = conditions => {
if (!conditions?.length) {
@@ -33,7 +34,8 @@ export const getActiveConditions = conditions => {
value: condition.referenceValue,
}
const query = QueryUtils.buildQuery([luceneCondition])
let query = QueryUtils.buildQuery([luceneCondition])
query.onEmptyFilter = EmptyFilterOption.RETURN_NONE
const result = QueryUtils.runQuery([luceneCondition], query)
return result.length > 0
})
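A small illustration of why onEmptyFilter is set to RETURN_NONE here; the type and helper below are illustrative stand-ins rather than the real QueryUtils/EmptyFilterOption API. If a condition's operands produce no usable filters, the built query is empty, and an empty query that "returns all" would match every row and keep the condition permanently active.

type OnEmpty = "all" | "none" // stand-in for EmptyFilterOption

function emptyQueryMatches(onEmptyFilter: OnEmpty): boolean {
  // An empty query matches everything under "all" and nothing under "none".
  return onEmptyFilter === "all"
}

// emptyQueryMatches("none") === false, so a condition whose query collapses to
// empty is treated as inactive by getActiveConditions.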

View File

@@ -29,8 +29,18 @@ export const createActions = context => {
})
}
const getRow = () => {
throw "Views don't support fetching individual rows"
const getRow = async id => {
const res = await API.viewV2.fetch({
viewId: get(datasource).id,
limit: 1,
query: {
equal: {
_id: id,
},
},
paginate: false,
})
return res?.rows?.[0]
}
const isDatasourceValid = datasource => {
@@ -97,9 +107,12 @@ export const initialise = context => {
order: get(initialSortOrder) || "ascending",
})
// Keep sort and filter state in line with the view definition
// Keep sort and filter state in line with the view definition when in builder
unsubscribers.push(
definition.subscribe($definition => {
if (!get(config).canSaveSchema) {
return
}
if ($definition?.id !== $datasource.id) {
return
}
@@ -122,7 +135,6 @@
sort.subscribe(async $sort => {
// If we can mutate schema then update the view definition
if (get(config).canSaveSchema) {
// Ensure we're updating the correct view
const $view = get(definition)
if ($view?.id !== $datasource.id) {
return
@@ -144,7 +156,7 @@
// Also update the fetch to ensure the new sort is respected.
// Ensure we're updating the correct fetch.
const $fetch = get(fetch)
if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) {
if ($fetch?.options?.datasource?.id !== $datasource.id) {
return
}
$fetch.update({
@@ -157,32 +169,49 @@
// When filters change, ensure view definition is kept up to date
unsubscribers?.push(
filter.subscribe(async $filter => {
// If we can mutate schema then update the view definition
if (get(config).canSaveSchema) {
// Ensure we're updating the correct view
const $view = get(definition)
if ($view?.id !== $datasource.id) {
return
}
if (JSON.stringify($filter) !== JSON.stringify($view.query)) {
await datasource.actions.saveDefinition({
...$view,
query: $filter,
})
}
if (!get(config).canSaveSchema) {
return
}
const $view = get(definition)
if ($view?.id !== $datasource.id) {
return
}
if (JSON.stringify($filter) !== JSON.stringify($view.query)) {
await datasource.actions.saveDefinition({
...$view,
query: $filter,
})
// Refresh data since view definition changed
await rows.actions.refreshData()
}
})
)
// Keep fetch up to date with filters.
// If we're able to save filters against the view then we only need to apply
// inline filters to the fetch, as saved filters are applied server side.
// If we can't save filters, then all filters must be applied to the fetch.
// Keep fetch up to date with inline filters when in the data section
unsubscribers.push(
inlineFilters.subscribe($inlineFilters => {
if (!get(config).canSaveSchema) {
return
}
const $fetch = get(fetch)
if ($fetch?.options?.datasource?.id !== $datasource.id) {
return
}
$fetch.update({
filter: $inlineFilters,
})
})
)
// Keep fetch up to date with all filters when not in the data section
unsubscribers.push(
allFilters.subscribe($allFilters => {
// Ensure we're updating the correct fetch
if (get(config).canSaveSchema) {
return
}
const $fetch = get(fetch)
if ($fetch?.options?.datasource?.tableId !== $datasource.tableId) {
if ($fetch?.options?.datasource?.id !== $datasource.id) {
return
}
$fetch.update({
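The getRow action added near the top of this file can be read on its own as roughly the sketch below; it assumes an API client exposing viewV2.fetch as used above, whereas the real action resolves the view id from the datasource store.

async function fetchViewRowById(
  API: { viewV2: { fetch: (req: object) => Promise<{ rows?: any[] }> } },
  viewId: string,
  id: string
) {
  // Views have no row-by-id endpoint, so a single row is fetched by searching
  // the view for an exact _id match with pagination disabled.
  const res = await API.viewV2.fetch({
    viewId,
    limit: 1,
    query: { equal: { _id: id } },
    paginate: false,
  })
  return res?.rows?.[0]
}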

View File

@@ -1,12 +1,13 @@
import { writable, get, derived } from "svelte/store"
import { get, derived } from "svelte/store"
import { FieldType, FilterGroupLogicalOperator } from "@budibase/types"
import { memo } from "../../../utils/memo"
export const createStores = context => {
const { props } = context
// Initialise to default props
const filter = writable(get(props).initialFilter)
const inlineFilters = writable([])
const filter = memo(get(props).initialFilter)
const inlineFilters = memo([])
return {
filter,
@@ -19,19 +20,26 @@ export const deriveStores = context => {
const allFilters = derived(
[filter, inlineFilters],
([$filter, $inlineFilters]) => {
const inlineFilterGroup = $inlineFilters?.length
? {
// Just use filter prop if no inline filters
if (!$inlineFilters?.length) {
return $filter
}
let allFilters = {
logicalOperator: FilterGroupLogicalOperator.ALL,
groups: [
{
logicalOperator: FilterGroupLogicalOperator.ALL,
filters: [...($inlineFilters || [])],
}
: null
return inlineFilterGroup
? {
logicalOperator: FilterGroupLogicalOperator.ALL,
groups: [...($filter?.groups || []), inlineFilterGroup],
}
: $filter
filters: $inlineFilters,
},
],
}
// Just use inline if no filter
if (!$filter?.groups?.length) {
return allFilters
}
// Join them together if both
allFilters.groups = [...allFilters.groups, ...$filter.groups]
return allFilters
}
)
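For concreteness, when both inline filters and a filter prop are present the derived value has roughly the shape sketched below; "all" stands in for the FilterGroupLogicalOperator.ALL enum value and the individual filter entries are illustrative.

const combined = {
  logicalOperator: "all",
  groups: [
    // The inline (grid header) filters are wrapped in their own group first...
    { logicalOperator: "all", filters: [{ field: "name", operator: "string", value: "bud" }] },
    // ...then any groups from the filter prop are appended after it.
    { logicalOperator: "all", filters: [{ field: "age", operator: "equal", value: 30 }] },
  ],
}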

View File

@@ -178,7 +178,6 @@ export default class DataFetch {
// Build the query
let query = this.options.query
if (!query && this.features.supportsSearch) {
query = buildQuery(filter)
}
@@ -365,7 +364,9 @@ export default class DataFetch {
let refresh = false
const entries = Object.entries(newOptions || {})
for (let [key, value] of entries) {
if (JSON.stringify(value) !== JSON.stringify(this.options[key])) {
const oldVal = this.options[key] == null ? null : this.options[key]
const newVal = value == null ? null : value
if (JSON.stringify(newVal) !== JSON.stringify(oldVal)) {
refresh = true
break
}
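The coercion to null exists because JSON.stringify treats undefined and null differently, so an option flipping between the two previously looked like a real change and forced an unnecessary refresh; a quick illustration:

// undefined serialises to undefined, null serialises to the string "null".
JSON.stringify(undefined) === JSON.stringify(null) // false
// Once both "absent" values are coerced to null, they compare equal and no refresh fires.
JSON.stringify(null) === JSON.stringify(null) // true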

View File

@@ -1,6 +1,5 @@
import DataFetch from "./DataFetch.js"
import { get } from "svelte/store"
import { utils } from "@budibase/shared-core"
export default class ViewV2Fetch extends DataFetch {
determineFeatureFlags() {
@@ -36,15 +35,8 @@ export default class ViewV2Fetch extends DataFetch {
}
async getData() {
const {
datasource,
limit,
sortColumn,
sortOrder,
sortType,
paginate,
filter,
} = this.options
const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
this.options
const { cursor, query, definition } = get(this.store)
// If sort/filter params are not defined, update options to store the
@@ -55,12 +47,6 @@
this.options.sortOrder = definition.sort.order
}
const parsed = utils.processSearchFilters(filter)
if (!parsed?.groups?.length && definition.query?.groups?.length) {
this.options.filter = definition.query
}
try {
const res = await this.API.viewV2.fetch({
viewId: datasource.id,

View File

@@ -80,7 +80,7 @@
"dotenv": "8.2.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.3",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
"ioredis": "5.3.2",
"isolated-vm": "^4.7.2",
"jimp": "0.22.12",

View File

@@ -138,7 +138,7 @@ async function processDeleteRowsRequest(ctx: UserCtx<DeleteRowRequest>) {
const { tableId } = utils.getSourceId(ctx)
const processedRows = request.rows.map(row => {
let processedRow: Row = typeof row == "string" ? { _id: row } : row
let processedRow: Row = typeof row == "string" ? { _id: row, tableId } : row
return !processedRow._rev
? addRev(fixRow(processedRow, ctx.params), tableId)
: fixRow(processedRow, ctx.params)

View File

@@ -1138,6 +1138,18 @@ describe.each([
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("should be able to delete a row with ID only", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow._id!],
})
expect(res[0]._id).toEqual(createdRow._id)
expect(res[0].tableId).toEqual(table._id!)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const createdRow2 = await config.api.row.save(table._id!, {})

View File

@@ -309,10 +309,6 @@ describe.each([
})
describe("readonly fields", () => {
beforeEach(() => {
mocks.licenses.useViewReadonlyColumns()
})
it("readonly fields are persisted", async () => {
const table = await config.api.table.save(
saveTableRequest({
@@ -436,7 +432,7 @@
})
})
it("readonly fields cannot be used on free license", async () => {
it("readonly fields can be used on free license", async () => {
mocks.licenses.useCloudFree()
const table = await config.api.table.save(
saveTableRequest({
@@ -466,11 +462,7 @@
}
await config.api.viewV2.create(newView, {
status: 400,
body: {
message: "Readonly fields are not enabled",
status: 400,
},
status: 201,
})
})
})
@@ -513,7 +505,6 @@
})
it("display fields can be readonly", async () => {
mocks.licenses.useViewReadonlyColumns()
const table = await config.api.table.save(
saveTableRequest({
schema: {
@@ -588,7 +579,6 @@
})
it("can update all fields", async () => {
mocks.licenses.useViewReadonlyColumns()
const tableId = table._id!
const updatedData: Required<UpdateViewRequest> = {
@@ -802,71 +792,6 @@
)
})
it("cannot update views with readonly on on free license", async () => {
mocks.licenses.useViewReadonlyColumns()
view = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: true,
},
},
})
mocks.licenses.useCloudFree()
await config.api.viewV2.update(view, {
status: 400,
body: {
message: "Readonly fields are not enabled",
},
})
})
it("can remove readonly config after license downgrade", async () => {
mocks.licenses.useViewReadonlyColumns()
view = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: true,
},
Category: {
visible: true,
readonly: true,
},
},
})
mocks.licenses.useCloudFree()
const res = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: false,
},
},
})
expect(res).toEqual(
expect.objectContaining({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: false,
},
},
})
)
})
isInternal &&
it("updating schema will only validate modified field", async () => {
let view = await config.api.viewV2.create({
@@ -1046,7 +971,6 @@
})
it("should be able to fetch readonly config after downgrades", async () => {
mocks.licenses.useViewReadonlyColumns()
const res = await config.api.viewV2.create({
name: generator.name(),
tableId: table._id!,
@@ -1112,8 +1036,6 @@
})
it("rejects if field is readonly in any view", async () => {
mocks.licenses.useViewReadonlyColumns()
await config.api.viewV2.create({
name: "view a",
tableId: table._id!,
@@ -1538,7 +1460,6 @@
})
it("can't persist readonly columns", async () => {
mocks.licenses.useViewReadonlyColumns()
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
@@ -1607,7 +1528,6 @@
})
it("can't update readonly columns", async () => {
mocks.licenses.useViewReadonlyColumns()
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),

View File

@@ -330,15 +330,16 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return { tables: {}, errors: {} }
}
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, Table> = {}
let errors: Record<string, string> = {}
await utils.parallelForeach(
sheets,
async sheet => {
// must fetch rows to determine schema
try {
await sheet.getRows()
await sheet.getRows({ limit: 1 })
} catch (err) {
// We expect this to always be an Error so if it's not, rethrow it to
// make sure we don't fail quietly.
@@ -346,26 +347,34 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
throw err
}
if (err.message.startsWith("No values in the header row")) {
errors[sheet.title] = err.message
} else {
// If we get an error we don't expect, rethrow to avoid failing
// quietly.
throw err
if (
err.message.startsWith("No values in the header row") ||
err.message.startsWith("All your header cells are blank")
) {
errors[
sheet.title
] = `Failed to find a header row in sheet "${sheet.title}", is the first row blank?`
return
}
return
}
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
sheet.title,
sheet.headerValues,
datasourceId,
id
)
// If we get an error we don't expect, rethrow to avoid failing
// quietly.
throw err
}
},
10
)
for (const sheet of sheets) {
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
sheet.title,
sheet.headerValues,
datasourceId,
id
)
}
let externalTables = finaliseExternalTables(tables, entities)
errors = { ...errors, ...checkExternalTables(externalTables) }
return { tables: externalTables, errors }
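A minimal sketch of the probe-then-build pattern used above, with Promise.all standing in for utils.parallelForeach and the sheet objects reduced to just what the sketch needs; table schemas are then built in a separate pass once the probe has finished.

async function probeSheets(
  sheets: { title: string; getRows: (opts: { limit: number }) => Promise<unknown> }[]
): Promise<Record<string, string>> {
  const errors: Record<string, string> = {}
  await Promise.all(
    sheets.map(async sheet => {
      try {
        // Fetching a single row is enough to make the client parse the header row.
        await sheet.getRows({ limit: 1 })
      } catch (err: any) {
        if (
          err.message.startsWith("No values in the header row") ||
          err.message.startsWith("All your header cells are blank")
        ) {
          errors[sheet.title] = `Failed to find a header row in sheet "${sheet.title}", is the first row blank?`
          return
        }
        // Anything else is unexpected; rethrow rather than failing quietly.
        throw err
      }
    })
  )
  return errors
}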

View File

@@ -244,6 +244,20 @@ describe("Google Sheets Integration", () => {
expect.arrayContaining(Array.from({ length: 248 }, (_, i) => `${i}`))
)
})
it("can export rows", async () => {
const resp = await config.api.row.exportRows(table._id!, {})
const parsed = JSON.parse(resp)
expect(parsed.length).toEqual(2)
expect(parsed[0]).toMatchObject({
name: "Test Contact 1",
description: "original description 1",
})
expect(parsed[1]).toMatchObject({
name: "Test Contact 2",
description: "original description 2",
})
})
})
describe("update", () => {
@@ -491,4 +505,97 @@
expect(emptyRows.length).toEqual(0)
})
})
describe("fetch schema", () => {
it("should fail to import a completely blank sheet", async () => {
mock.createSheet({ title: "Sheet1" })
await config.api.datasource.fetchSchema(
{
datasourceId: datasource._id!,
tablesFilter: ["Sheet1"],
},
{
status: 200,
body: {
errors: {
Sheet1:
'Failed to find a header row in sheet "Sheet1", is the first row blank?',
},
},
}
)
})
it("should fail to import multiple sheets with blank headers", async () => {
mock.createSheet({ title: "Sheet1" })
mock.createSheet({ title: "Sheet2" })
await config.api.datasource.fetchSchema(
{
datasourceId: datasource!._id!,
tablesFilter: ["Sheet1", "Sheet2"],
},
{
status: 200,
body: {
errors: {
Sheet1:
'Failed to find a header row in sheet "Sheet1", is the first row blank?',
Sheet2:
'Failed to find a header row in sheet "Sheet2", is the first row blank?',
},
},
}
)
})
it("should only fail the sheet with missing headers", async () => {
mock.createSheet({ title: "Sheet1" })
mock.createSheet({ title: "Sheet2" })
mock.createSheet({ title: "Sheet3" })
mock.set("Sheet1!A1", "name")
mock.set("Sheet1!B1", "dob")
mock.set("Sheet2!A1", "name")
mock.set("Sheet2!B1", "dob")
await config.api.datasource.fetchSchema(
{
datasourceId: datasource!._id!,
tablesFilter: ["Sheet1", "Sheet2", "Sheet3"],
},
{
status: 200,
body: {
errors: {
Sheet3:
'Failed to find a header row in sheet "Sheet3", is the first row blank?',
},
},
}
)
})
it("should only succeed if sheet with missing headers is not being imported", async () => {
mock.createSheet({ title: "Sheet1" })
mock.createSheet({ title: "Sheet2" })
mock.createSheet({ title: "Sheet3" })
mock.set("Sheet1!A1", "name")
mock.set("Sheet1!B1", "dob")
mock.set("Sheet2!A1", "name")
mock.set("Sheet2!B1", "dob")
await config.api.datasource.fetchSchema(
{
datasourceId: datasource!._id!,
tablesFilter: ["Sheet1", "Sheet2"],
},
{
status: 200,
body: { errors: {} },
}
)
})
})
})

View File

@@ -22,6 +22,7 @@ import type {
CellPadding,
Color,
GridRange,
DataSourceSheetProperties,
} from "google-spreadsheet/src/lib/types/sheets-types"
const BLACK: Color = { red: 0, green: 0, blue: 0 }
@@ -91,7 +92,7 @@ interface UpdateValuesResponse {
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#AddSheetRequest
interface AddSheetRequest {
properties: WorksheetProperties
properties: Partial<WorksheetProperties>
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response#AddSheetResponse
@@ -236,6 +237,38 @@
this.mockAPI()
}
public cell(cell: string): Value | undefined {
const cellData = this.cellData(cell)
if (!cellData) {
return undefined
}
return this.cellValue(cellData)
}
public set(cell: string, value: Value): void {
const cellData = this.cellData(cell)
if (!cellData) {
throw new Error(`Cell ${cell} not found`)
}
cellData.userEnteredValue = this.createValue(value)
}
public sheet(name: string | number): Sheet | undefined {
if (typeof name === "number") {
return this.getSheetById(name)
}
return this.getSheetByName(name)
}
public createSheet(opts: Partial<WorksheetProperties>): Sheet {
const properties = this.defaultWorksheetProperties(opts)
if (this.getSheetByName(properties.title)) {
throw new Error(`Sheet ${properties.title} already exists`)
}
const resp = this.handleAddSheet({ properties })
return this.getSheetById(resp.properties.sheetId)!
}
private route(
method: "get" | "put" | "post",
path: string | RegExp,
@@ -462,35 +495,39 @@
return response
}
private handleAddSheet(request: AddSheetRequest): AddSheetResponse {
const properties: Omit<WorksheetProperties, "dataSourceSheetProperties"> = {
private defaultWorksheetProperties(
opts: Partial<WorksheetProperties>
): WorksheetProperties {
return {
index: this.spreadsheet.sheets.length,
hidden: false,
rightToLeft: false,
tabColor: BLACK,
tabColorStyle: { rgbColor: BLACK },
sheetType: "GRID",
title: request.properties.title,
title: "Sheet",
sheetId: this.spreadsheet.sheets.length,
gridProperties: {
rowCount: 100,
columnCount: 26,
frozenRowCount: 0,
frozenColumnCount: 0,
hideGridlines: false,
rowGroupControlAfter: false,
columnGroupControlAfter: false,
},
dataSourceSheetProperties: {} as DataSourceSheetProperties,
...opts,
}
}
private handleAddSheet(request: AddSheetRequest): AddSheetResponse {
const properties = this.defaultWorksheetProperties(request.properties)
this.spreadsheet.sheets.push({
properties: properties as WorksheetProperties,
data: [this.createEmptyGrid(100, 26)],
properties,
data: [
this.createEmptyGrid(
properties.gridProperties.rowCount,
properties.gridProperties.columnCount
),
],
})
// dataSourceSheetProperties is only returned by the API if the sheet type is
// DATA_SOURCE, which we aren't using, so sadly we need to cast here.
return { properties: properties as WorksheetProperties }
return { properties }
}
private handleDeleteRange(request: DeleteRangeRequest) {
@@ -767,21 +804,6 @@
return this.getCellNumericIndexes(sheetId, startRowIndex, startColumnIndex)
}
public cell(cell: string): Value | undefined {
const cellData = this.cellData(cell)
if (!cellData) {
return undefined
}
return this.cellValue(cellData)
}
public sheet(name: string | number): Sheet | undefined {
if (typeof name === "number") {
return this.getSheetById(name)
}
return this.getSheetByName(name)
}
private getCellNumericIndexes(
sheet: Sheet | number,
row: number,

View File

@@ -5,13 +5,11 @@ import {
Table,
TableSchema,
View,
ViewFieldMetadata,
ViewV2,
ViewV2ColumnEnriched,
ViewV2Enriched,
} from "@budibase/types"
import { HTTPError } from "@budibase/backend-core"
import { features } from "@budibase/pro"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@@ -59,13 +57,6 @@
}
if (viewSchema[field].readonly) {
if (
!(await features.isViewReadonlyColumnsEnabled()) &&
!(tableSchemaField as ViewFieldMetadata).readonly
) {
throw new HTTPError(`Readonly fields are not enabled`, 400)
}
if (!viewSchema[field].visible) {
throw new HTTPError(
`Field "${field}" must be visible if you want to make it readonly`,

View File

@@ -2,4 +2,4 @@
yarn build:apps
version=$(./scripts/getCurrentVersion.sh)
docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs .
docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1 .

View File

@@ -2053,6 +2053,44 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@2.32.6":
version "0.0.0"
dependencies:
"@budibase/nano" "10.1.5"
"@budibase/pouchdb-replication-stream" "1.2.11"
"@budibase/shared-core" "0.0.0"
"@budibase/types" "0.0.0"
aws-cloudfront-sign "3.0.2"
aws-sdk "2.1030.0"
bcrypt "5.1.0"
bcryptjs "2.4.3"
bull "4.10.1"
correlation-id "4.0.0"
dd-trace "5.2.0"
dotenv "16.0.1"
ioredis "5.3.2"
joi "17.6.0"
jsonwebtoken "9.0.2"
knex "2.4.2"
koa-passport "^6.0.0"
koa-pino-logger "4.0.0"
lodash "4.17.21"
node-fetch "2.6.7"
passport-google-oauth "2.0.0"
passport-local "1.0.0"
passport-oauth2-refresh "^2.1.0"
pino "8.11.0"
pino-http "8.3.3"
posthog-node "4.0.1"
pouchdb "7.3.0"
pouchdb-find "7.2.2"
redlock "4.2.0"
rotating-file-stream "3.1.0"
sanitize-s3-objectkey "0.0.1"
semver "^7.5.4"
tar-fs "2.1.1"
uuid "^8.3.2"
"@budibase/handlebars-helpers@^0.13.2":
version "0.13.2"
resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.13.2.tgz#73ab51c464e91fd955b429017648e0257060db77"
@@ -2095,6 +2133,45 @@
pouchdb-promise "^6.0.4"
through2 "^2.0.0"
"@budibase/pro@npm:@budibase/pro@latest":
version "2.32.6"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.32.6.tgz#02ddef737ee8f52dafd8fab8f8f277dfc89cd33f"
integrity sha512-+XEv4JtMvUKZWyllcw+iFOh44zxsoJLmUdShu4bAjj5zXWgElF6LjFpK51IrQzM6xKfQxn7N2vmxu7175u5dDQ==
dependencies:
"@budibase/backend-core" "2.32.6"
"@budibase/shared-core" "2.32.6"
"@budibase/string-templates" "2.32.6"
"@budibase/types" "2.32.6"
"@koa/router" "8.0.8"
bull "4.10.1"
dd-trace "5.2.0"
joi "17.6.0"
jsonwebtoken "9.0.2"
lru-cache "^7.14.1"
memorystream "^0.3.1"
node-fetch "2.6.7"
scim-patch "^0.8.1"
scim2-parse-filter "^0.2.8"
"@budibase/shared-core@2.32.6":
version "0.0.0"
dependencies:
"@budibase/types" "0.0.0"
cron-validate "1.4.5"
"@budibase/string-templates@2.32.6":
version "0.0.0"
dependencies:
"@budibase/handlebars-helpers" "^0.13.2"
dayjs "^1.10.8"
handlebars "^4.7.8"
lodash.clonedeep "^4.5.0"
"@budibase/types@2.32.6":
version "0.0.0"
dependencies:
scim-patch "^0.8.1"
"@bull-board/api@5.10.2":
version "5.10.2"
resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-5.10.2.tgz#ae8ff6918b23897bf879a6ead3683f964374c4b3"
@@ -12277,10 +12354,10 @@ google-p12-pem@^4.0.0:
dependencies:
node-forge "^1.3.1"
"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.3":
version "4.1.3"
resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.3.tgz#bcee7bd9d90f82c54b16a9aca963b87aceb050ad"
integrity sha512-03VX3/K5NXIh6+XAIDZgcHPmR76xwd8vIDL7RedMpvM2IcXK0Iq/KU7FmLY0t/mKqORAGC7+0rajd0jLFezC4w==
"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.5":
version "4.1.5"
resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.5.tgz#c89ffcbfcb1a3538e910d9275f73efc1d7deb85f"
integrity sha512-t1uBjuRSkNLnZ89DYtYQ2GW33xVU84qOyOPbGi+M0w7cAJofs95PwlBLhVol6Pv5VbeL0I1J7M4XyVqp0nSZtQ==
dependencies:
axios "^1.4.0"
lodash "^4.17.21"