Merge branch 'v3-ui' into fix-flaky-export-test

Sam Rose 2024-10-31 17:39:46 +00:00 committed by GitHub
commit 6f92b395af
10 changed files with 141 additions and 30 deletions

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.33.13",
+  "version": "2.33.14",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -316,11 +316,9 @@ class InternalBuilder {
       const columnSchema = schema[column]

       if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
-        // TODO: figure out how to express this safely without string
-        // interpolation.
-        return this.knex.raw(`??::money::numeric as "${field}"`, [
+        return this.knex.raw(`??::money::numeric as ??`, [
           this.rawQuotedIdentifier([table, column].join(".")),
-          field,
+          this.knex.raw(this.quote(field)),
         ])
       }
@@ -330,8 +328,9 @@
         // TODO: figure out how to express this safely without string
         // interpolation.
-        return this.knex.raw(`CONVERT(varchar, ??, 108) as "${field}"`, [
+        return this.knex.raw(`CONVERT(varchar, ??, 108) as ??`, [
           this.rawQuotedIdentifier(field),
+          this.knex.raw(this.quote(field)),
         ])
       }

View File

@@ -90,8 +90,15 @@
   const getFieldOptions = (self, calculations, schema) => {
     return Object.entries(schema)
       .filter(([field, fieldSchema]) => {
-        // Only allow numeric fields that are not calculations themselves
-        if (fieldSchema.calculationType || !isNumeric(fieldSchema.type)) {
+        // Don't allow other calculation columns
+        if (fieldSchema.calculationType) {
+          return false
+        }
+        // Only allow numeric columns for most calculation types
+        if (
+          self.type !== CalculationType.COUNT &&
+          !isNumeric(fieldSchema.type)
+        ) {
           return false
         }
         // Don't allow duplicates
@@ -234,7 +241,7 @@
     <InfoDisplay
       icon="Help"
       quiet
-      body="Calculations only work with numeric columns and a maximum of 5 calculations can be added at once."
+      body="Most calculations only work with numeric columns and a maximum of 5 calculations can be added at once."
     />
     <div>
@@ -257,6 +264,4 @@
   .group-by {
     grid-column: 2 / 5;
   }
-  span {
-  }
 </style>
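
The effect of the new filter: a column can never be another calculation, and non-numeric columns are allowed only when the calculation is a COUNT. A rough sketch of that predicate, with CalculationType and isNumeric standing in for the component's real imports:

const CalculationType = { COUNT: "count", SUM: "sum", AVG: "avg" }
const isNumeric = type => type === "number" || type === "bigint"

// Selectable when the field is not itself a calculation and is either numeric
// or the calculation being configured is a COUNT.
const canUseField = (calcType, fieldSchema) => {
  if (fieldSchema.calculationType) {
    return false
  }
  return calcType === CalculationType.COUNT || isNumeric(fieldSchema.type)
}

canUseField(CalculationType.COUNT, { type: "string" }) // true
canUseField(CalculationType.SUM, { type: "string" })   // false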

View File

@@ -60,8 +60,11 @@
 // Provide additional data context for live binding eval
 export const getAdditionalDataContext = () => {
-  const rows = get(grid?.getContext()?.rows)
-  const goldenRow = generateGoldenSample(rows)
+  const gridContext = grid?.getContext()
+  const rows = get(gridContext?.rows) || []
+  const clean = gridContext?.rows.actions.cleanRow || (x => x)
+  const cleaned = rows.map(clean)
+  const goldenRow = generateGoldenSample(cleaned)
   const id = get(component).id
   return {
     // Not sure what this one is for...

View File

@@ -10,6 +10,8 @@ export const DefaultColumnWidth = 200
 export const MinColumnWidth = 80
 export const NewRowID = "new"
 export const BlankRowID = "blank"
+export const GeneratedIDPrefix = "‽‽"
+export const CellIDSeparator = "‽‽"
 export const RowPageSize = 100
 export const FocusedCellMinOffset = ScrollBarSize * 3
 export const ControlsHeight = 50

View File

@@ -1,18 +1,17 @@
-// We can't use "-" as a separator as this can be present in the ID
-// or column name, so we use something very unusual to avoid this problem
-const JOINING_CHARACTER = "‽‽"
+import { GeneratedIDPrefix, CellIDSeparator } from "./constants"
+import { Helpers } from "@budibase/bbui"

 export const parseCellID = cellId => {
   if (!cellId) {
     return { rowId: undefined, field: undefined }
   }
-  const parts = cellId.split(JOINING_CHARACTER)
+  const parts = cellId.split(CellIDSeparator)
   const field = parts.pop()
-  return { rowId: parts.join(JOINING_CHARACTER), field }
+  return { rowId: parts.join(CellIDSeparator), field }
 }

 export const getCellID = (rowId, fieldName) => {
-  return `${rowId}${JOINING_CHARACTER}${fieldName}`
+  return `${rowId}${CellIDSeparator}${fieldName}`
 }

 export const parseEventLocation = e => {
@@ -21,3 +20,11 @@ export const parseEventLocation = e => {
     y: e.clientY ?? e.touches?.[0]?.clientY,
   }
 }
+
+export const generateRowID = () => {
+  return `${GeneratedIDPrefix}${Helpers.uuid()}`
+}
+
+export const isGeneratedRowID = id => {
+  return id?.startsWith(GeneratedIDPrefix)
+}
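
A brief usage sketch of the helpers above; the relative import path and the example IDs are illustrative only:

import {
  getCellID,
  parseCellID,
  generateRowID,
  isGeneratedRowID,
} from "./utils"

const cellId = getCellID("row-123", "price") // "row-123‽‽price"
parseCellID(cellId)                          // { rowId: "row-123", field: "price" }

// Generated IDs carry the prefix, so they can be detected and stripped later
// for datasources that manage their own identifiers.
const tempId = generateRowID()               // "‽‽<uuid>"
isGeneratedRowID(tempId)                     // true
isGeneratedRowID("external-id")              // false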

View File

@@ -1,7 +1,12 @@
 import { writable, derived, get } from "svelte/store"
 import { fetchData } from "../../../fetch"
 import { NewRowID, RowPageSize } from "../lib/constants"
-import { getCellID, parseCellID } from "../lib/utils"
+import {
+  generateRowID,
+  getCellID,
+  isGeneratedRowID,
+  parseCellID,
+} from "../lib/utils"
 import { tick } from "svelte"
 import { Helpers } from "@budibase/bbui"
 import { sleep } from "../../../utils/utils"
@@ -634,10 +639,10 @@ export const createActions = context => {
       newRow = newRows[i]

       // Ensure we have a unique _id.
-      // This means generating one for non DS+, overwriting any that may already
-      // exist as we cannot allow duplicates.
-      if (!$hasBudibaseIdentifiers) {
-        newRow._id = Helpers.uuid()
+      // We generate one for non DS+ where required, but trust that any existing
+      // _id values are unique (e.g. Mongo)
+      if (!$hasBudibaseIdentifiers && !newRow._id?.length) {
+        newRow._id = generateRowID()
       }

       if (!rowCacheMap[newRow._id]) {
@@ -674,7 +679,7 @@ export const createActions = context => {
     let clone = { ...row }
     delete clone.__idx
     delete clone.__metadata
-    if (!get(hasBudibaseIdentifiers)) {
+    if (!get(hasBudibaseIdentifiers) && isGeneratedRowID(clone._id)) {
       delete clone._id
     }
     return clone
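
Taken together, the two store changes mean an imported row keeps any _id it already has (e.g. a Mongo ObjectId), only receives a prefixed placeholder when it has none, and only that placeholder is stripped again when the row is cleaned for saving. A condensed sketch of that behaviour, not the store code itself:

import { generateRowID, isGeneratedRowID } from "../lib/utils"

const ensureRowID = (row, hasBudibaseIdentifiers) => {
  // Generate an _id only when the datasource has no native identifiers
  // and the row does not already carry one.
  if (!hasBudibaseIdentifiers && !row._id?.length) {
    row._id = generateRowID()
  }
  return row
}

const cleanRow = (row, hasBudibaseIdentifiers) => {
  const clone = { ...row }
  // Strip only placeholder IDs we generated; real external IDs survive.
  if (!hasBudibaseIdentifiers && isGeneratedRowID(clone._id)) {
    delete clone._id
  }
  return clone
}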

View File

@@ -15,7 +15,7 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { context, events, features } from "@budibase/backend-core"
+import { context, events, features, HTTPError } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   Database,
@@ -145,14 +145,21 @@ export async function importToRows(
     // the real schema of the table passed in, not the clone used for
     // incrementing auto IDs
     for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
-      const rowVal = Array.isArray(row[fieldName])
-        ? row[fieldName]
-        : [row[fieldName]]
+      if (schema.type === FieldType.LINK) {
+        throw new HTTPError(
+          `Can't bulk import relationship fields for internal databases, found value in field "${fieldName}"`,
+          400
+        )
+      }
       if (
         (schema.type === FieldType.OPTIONS ||
          schema.type === FieldType.ARRAY) &&
        row[fieldName]
      ) {
+        const rowVal = Array.isArray(row[fieldName])
+          ? row[fieldName]
+          : [row[fieldName]]
         let merged = [...schema.constraints!.inclusion!, ...rowVal]
         let superSet = new Set(merged)
         schema.constraints!.inclusion = Array.from(superSet)

View File

@@ -1823,6 +1823,39 @@ describe.each([
       expect(row.autoId).toEqual(3)
     })

+    isInternal &&
+      it("should reject bulkImporting relationship fields", async () => {
+        const table1 = await config.api.table.save(saveTableRequest())
+        const table2 = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              relationship: {
+                name: "relationship",
+                type: FieldType.LINK,
+                tableId: table1._id!,
+                relationshipType: RelationshipType.ONE_TO_MANY,
+                fieldName: "relationship",
+              },
+            },
+          })
+        )
+
+        const table1Row1 = await config.api.row.save(table1._id!, {})
+        await config.api.row.bulkImport(
+          table2._id!,
+          {
+            rows: [{ relationship: [table1Row1._id!] }],
+          },
+          {
+            status: 400,
+            body: {
+              message:
+                'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"',
+            },
+          }
+        )
+      })
+
     it("should be able to bulkImport rows", async () => {
       const table = await config.api.table.save(
         saveTableRequest({

View File

@@ -1,5 +1,5 @@
 import * as setup from "../api/routes/tests/utilities"
-import { Datasource, FieldType } from "@budibase/types"
+import { Datasource, FieldType, Table } from "@budibase/types"
 import _ from "lodash"
 import { generator } from "@budibase/backend-core/tests"
 import {
@@ -229,4 +229,54 @@ describe("postgres integrations", () => {
       ).toBeUndefined()
     })
   })
+
+  describe("money field 💰", () => {
+    const tableName = "moneytable"
+    let table: Table
+
+    beforeAll(async () => {
+      await client.raw(`
+        CREATE TABLE ${tableName} (
+          id serial PRIMARY KEY,
+          price money
+        )
+      `)
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })
+      table = response.datasource.entities![tableName]
+    })
+
+    it("should be able to import a money field", async () => {
+      expect(table).toBeDefined()
+      expect(table?.schema.price.type).toBe(FieldType.NUMBER)
+    })
+
+    it("should be able to search a money field", async () => {
+      await config.api.row.bulkImport(table._id!, {
+        rows: [{ price: 200 }, { price: 300 }],
+      })
+
+      const { rows } = await config.api.row.search(table._id!, {
+        query: {
+          equal: {
+            price: 200,
+          },
+        },
+      })
+      expect(rows).toHaveLength(1)
+      expect(rows[0].price).toBe("200.00")
+    })
+
+    it("should be able to update a money field", async () => {
+      let row = await config.api.row.save(table._id!, { price: 200 })
+      expect(row.price).toBe("200.00")
+
+      row = await config.api.row.save(table._id!, { ...row, price: 300 })
+      expect(row.price).toBe("300.00")
+
+      row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
+      expect(row.price).toBe("400.00")
+    })
+  })
 })