Merge remote-tracking branch 'origin/feature/automation-branching-ux' into automation-branching-ux-updates
commit 22b0cd4d90
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.33.13",
+  "version": "2.33.14",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -316,11 +316,9 @@ class InternalBuilder {
     const columnSchema = schema[column]

     if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
-      // TODO: figure out how to express this safely without string
-      // interpolation.
-      return this.knex.raw(`??::money::numeric as "${field}"`, [
+      return this.knex.raw(`??::money::numeric as ??`, [
         this.rawQuotedIdentifier([table, column].join(".")),
-        field,
+        this.knex.raw(this.quote(field)),
       ])
     }

@@ -330,8 +328,9 @@ class InternalBuilder {

       // TODO: figure out how to express this safely without string
       // interpolation.
-      return this.knex.raw(`CONVERT(varchar, ??, 108) as "${field}"`, [
+      return this.knex.raw(`CONVERT(varchar, ??, 108) as ??`, [
         this.rawQuotedIdentifier(field),
+        this.knex.raw(this.quote(field)),
       ])
     }
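
[Note, added commentary, not part of the diff: both hunks above replace a string-interpolated alias with a bound identifier. In the diff the alias is additionally pre-quoted via this.quote(field) before binding; the minimal sketch below simplifies that and assumes only a knex instance, to show what the `??` placeholder buys.]

import knex from "knex"

const db = knex({ client: "pg" })

// Before: the alias comes from template-string interpolation, so a field
// name containing a double quote could break out of the identifier.
const unsafe = (field: string) =>
  db.raw(`??::money::numeric as "${field}"`, ["prices.price"])

// After: the alias is bound with a second `??` placeholder, so knex
// escapes it as an identifier (internal quotes are doubled).
const safe = (field: string) =>
  db.raw(`??::money::numeric as ??`, ["prices.price", field])

console.log(safe('a"b').toString())
// roughly: "prices"."price"::money::numeric as "a""b"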
@@ -90,8 +90,15 @@
 const getFieldOptions = (self, calculations, schema) => {
   return Object.entries(schema)
     .filter(([field, fieldSchema]) => {
-      // Only allow numeric fields that are not calculations themselves
-      if (fieldSchema.calculationType || !isNumeric(fieldSchema.type)) {
+      // Don't allow other calculation columns
+      if (fieldSchema.calculationType) {
         return false
       }
+      // Only allow numeric columns for most calculation types
+      if (
+        self.type !== CalculationType.COUNT &&
+        !isNumeric(fieldSchema.type)
+      ) {
+        return false
+      }
       // Don't allow duplicates
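
[Note, added commentary, not part of the diff: the reshaped filter lets COUNT target any non-calculation column, while other calculation types remain numeric-only. A self-contained sketch of that decision logic; the enum values and the isNumeric helper are stand-ins, not taken from the codebase.]

enum CalculationType {
  COUNT = "count", // assumed value
  SUM = "sum", // assumed value
}

// Stand-in for the app's isNumeric helper.
const isNumeric = (type: string) => type === "number"

const isEligibleField = (
  calcType: CalculationType,
  fieldSchema: { type: string; calculationType?: string }
) => {
  // Don't allow other calculation columns
  if (fieldSchema.calculationType) {
    return false
  }
  // Only allow numeric columns for most calculation types
  if (calcType !== CalculationType.COUNT && !isNumeric(fieldSchema.type)) {
    return false
  }
  return true
}

console.log(isEligibleField(CalculationType.COUNT, { type: "string" })) // true
console.log(isEligibleField(CalculationType.SUM, { type: "string" })) // false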
@@ -234,7 +241,7 @@
 <InfoDisplay
   icon="Help"
   quiet
-  body="Calculations only work with numeric columns and a maximum of 5 calculations can be added at once."
+  body="Most calculations only work with numeric columns and a maximum of 5 calculations can be added at once."
 />

 <div>
@@ -257,6 +264,4 @@
   .group-by {
     grid-column: 2 / 5;
   }
-  span {
-  }
 </style>
@@ -60,8 +60,11 @@

 // Provide additional data context for live binding eval
 export const getAdditionalDataContext = () => {
-  const rows = get(grid?.getContext()?.rows)
-  const goldenRow = generateGoldenSample(rows)
+  const gridContext = grid?.getContext()
+  const rows = get(gridContext?.rows) || []
+  const clean = gridContext?.rows.actions.cleanRow || (x => x)
+  const cleaned = rows.map(clean)
+  const goldenRow = generateGoldenSample(cleaned)
   const id = get(component).id
   return {
     // Not sure what this one is for...
@@ -10,6 +10,8 @@ export const DefaultColumnWidth = 200
 export const MinColumnWidth = 80
 export const NewRowID = "new"
 export const BlankRowID = "blank"
+export const GeneratedIDPrefix = "‽‽"
+export const CellIDSeparator = "‽‽"
 export const RowPageSize = 100
 export const FocusedCellMinOffset = ScrollBarSize * 3
 export const ControlsHeight = 50
@ -1,18 +1,17 @@
|
|||
// We can't use "-" as a separator as this can be present in the ID
|
||||
// or column name, so we use something very unusual to avoid this problem
|
||||
const JOINING_CHARACTER = "‽‽"
|
||||
import { GeneratedIDPrefix, CellIDSeparator } from "./constants"
|
||||
import { Helpers } from "@budibase/bbui"
|
||||
|
||||
export const parseCellID = cellId => {
|
||||
if (!cellId) {
|
||||
return { rowId: undefined, field: undefined }
|
||||
}
|
||||
const parts = cellId.split(JOINING_CHARACTER)
|
||||
const parts = cellId.split(CellIDSeparator)
|
||||
const field = parts.pop()
|
||||
return { rowId: parts.join(JOINING_CHARACTER), field }
|
||||
return { rowId: parts.join(CellIDSeparator), field }
|
||||
}
|
||||
|
||||
export const getCellID = (rowId, fieldName) => {
|
||||
return `${rowId}${JOINING_CHARACTER}${fieldName}`
|
||||
return `${rowId}${CellIDSeparator}${fieldName}`
|
||||
}
|
||||
|
||||
export const parseEventLocation = e => {
|
||||
|
@@ -21,3 +20,11 @@ export const parseEventLocation = e => {
     y: e.clientY ?? e.touches?.[0]?.clientY,
   }
 }
+
+export const generateRowID = () => {
+  return `${GeneratedIDPrefix}${Helpers.uuid()}`
+}
+
+export const isGeneratedRowID = id => {
+  return id?.startsWith(GeneratedIDPrefix)
+}
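
[Note, added commentary, not part of the diff: the separator rename keeps the existing parse/build behaviour; only generateRowID and isGeneratedRowID are new. A round-trip sketch of the same logic, using the values from the new constants file.]

const CellIDSeparator = "‽‽" // per the new constants

const getCellID = (rowId: string, fieldName: string) =>
  `${rowId}${CellIDSeparator}${fieldName}`

const parseCellID = (cellId: string) => {
  const parts = cellId.split(CellIDSeparator)
  const field = parts.pop()
  return { rowId: parts.join(CellIDSeparator), field }
}

// "-" can appear in both row IDs and column names, hence the unusual
// separator: splitting on it stays unambiguous.
const id = getCellID("ro-1a2b", "first-name")
console.log(parseCellID(id)) // { rowId: "ro-1a2b", field: "first-name" }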
@@ -1,7 +1,12 @@
 import { writable, derived, get } from "svelte/store"
 import { fetchData } from "../../../fetch"
 import { NewRowID, RowPageSize } from "../lib/constants"
-import { getCellID, parseCellID } from "../lib/utils"
+import {
+  generateRowID,
+  getCellID,
+  isGeneratedRowID,
+  parseCellID,
+} from "../lib/utils"
 import { tick } from "svelte"
 import { Helpers } from "@budibase/bbui"
 import { sleep } from "../../../utils/utils"
@@ -634,10 +639,10 @@ export const createActions = context => {
       newRow = newRows[i]

       // Ensure we have a unique _id.
-      // This means generating one for non DS+, overwriting any that may already
-      // exist as we cannot allow duplicates.
-      if (!$hasBudibaseIdentifiers) {
-        newRow._id = Helpers.uuid()
+      // We generate one for non DS+ where required, but trust that any existing
+      // _id values are unique (e.g. Mongo)
+      if (!$hasBudibaseIdentifiers && !newRow._id?.length) {
+        newRow._id = generateRowID()
       }

       if (!rowCacheMap[newRow._id]) {
@@ -674,7 +679,7 @@ export const createActions = context => {
     let clone = { ...row }
     delete clone.__idx
     delete clone.__metadata
-    if (!get(hasBudibaseIdentifiers)) {
+    if (!get(hasBudibaseIdentifiers) && isGeneratedRowID(clone._id)) {
       delete clone._id
     }
     return clone
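
[Note, added commentary, not part of the diff: together these two hunks mean only client-generated placeholder IDs are stripped before save, so real _id values from sources like Mongo survive the round trip. A sketch of the effect, omitting the hasBudibaseIdentifiers check and using hypothetical rows.]

const GeneratedIDPrefix = "‽‽" // per the new constants

const isGeneratedRowID = (id?: string) => !!id?.startsWith(GeneratedIDPrefix)

const cleanRow = (row: { _id?: string }) => {
  const clone = { ...row }
  if (isGeneratedRowID(clone._id)) {
    delete clone._id // placeholder ID, let the datasource assign its own
  }
  return clone
}

console.log(cleanRow({ _id: "‽‽c0ffee" })) // {} -> generated ID dropped
console.log(cleanRow({ _id: "662a0c1d9e8f" })) // Mongo-style _id kept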
@@ -136,16 +136,15 @@ export async function importToRows(

     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = await inputProcessing(userId, table, row, {
+    row = await inputProcessing(userId, table, row, {
       noAutoRelationships: true,
     })
-    row = processed

     // However here we must reference the original table, as we want to mutate
     // the real schema of the table passed in, not the clone used for
     // incrementing auto IDs
     for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
-      if (schema.type === FieldType.LINK) {
+      if (schema.type === FieldType.LINK && data.find(row => row[fieldName])) {
         throw new HTTPError(
           `Can't bulk import relationship fields for internal databases, found value in field "${fieldName}"`,
           400
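
[Note, added commentary, not part of the diff: the guard now only rejects a bulk import when some imported row actually holds a value in a relationship (LINK) column, instead of failing whenever the schema merely contains one. A sketch with a hypothetical column name and data.]

const fieldName = "assignee" // hypothetical LINK column

const rowsWithout: Record<string, any>[] = [{ name: "a" }, { name: "b" }]
const rowsWith: Record<string, any>[] = [
  { name: "a" },
  { name: "b", assignee: ["ro_1"] },
]

// A schema that merely contains a LINK column no longer fails the import...
console.log(!!rowsWithout.find(row => row[fieldName])) // false -> allowed
// ...only a row that actually populates it triggers the 400 error.
console.log(!!rowsWith.find(row => row[fieldName])) // true -> rejected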
@@ -9,29 +9,33 @@ function getHeaders(
   return headers.map(header => `"${customHeaders[header] || header}"`)
 }

+function escapeCsvString(str: string) {
+  return str.replace(/"/g, '""')
+}
+
 export function csv(
   headers: string[],
   rows: Row[],
   delimiter: string = ",",
   customHeaders: { [key: string]: string } = {}
 ) {
-  let csv = getHeaders(headers, customHeaders).join(delimiter)
+  let csvRows = [getHeaders(headers, customHeaders)]

   for (let row of rows) {
-    csv = `${csv}\n${headers
-      .map(header => {
-        let val = row[header]
-        val =
-          typeof val === "object" && !(val instanceof Date)
-            ? `"${JSON.stringify(val).replace(/"/g, "'")}"`
-            : val !== undefined
-            ? `"${val}"`
-            : ""
-        return val.trim()
-      })
-      .join(delimiter)}`
+    csvRows.push(
+      headers.map(header => {
+        const val = row[header]
+        if (typeof val === "object" && !(val instanceof Date)) {
+          return `"${JSON.stringify(val).replace(/"/g, "'")}"`
+        }
+        if (val !== undefined) {
+          return `"${escapeCsvString(val.toString())}"`
+        }
+        return ""
+      })
+    )
   }
-  return csv
+  return csvRows.map(row => row.join(delimiter)).join("\n")
 }

 export function json(rows: Row[]) {
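
[Note, added commentary, not part of the diff: escapeCsvString applies standard CSV quote-doubling (RFC 4180 style), which is what makes the round-trip test further down pass. A quick check using the same badString that test uses.]

function escapeCsvString(str: string) {
  return str.replace(/"/g, '""')
}

const badString = 'test":, wow", "test": "wow"'
console.log(`"${escapeCsvString(badString)}"`)
// "test"":, wow"", ""test"": ""wow"""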
@@ -1,3 +1,5 @@
+import { DEFAULT_TABLES } from "../../../db/defaultData/datasource_bb_default"
+
 jest.mock("../../../utilities/redis", () => ({
   init: jest.fn(),
   getLocksById: () => {
@@ -447,4 +449,18 @@ describe("/applications", () => {
       expect(devLogs.data.length).toBe(0)
     })
   })
+
+  describe("POST /api/applications/:appId/sample", () => {
+    it("should be able to add sample data", async () => {
+      await config.api.application.addSampleData(config.getAppId())
+      for (let table of DEFAULT_TABLES) {
+        const res = await config.api.row.search(
+          table._id!,
+          { query: {} },
+          { status: 200 }
+        )
+        expect(res.rows.length).not.toEqual(0)
+      }
+    })
+  })
 })
@@ -2630,6 +2630,40 @@ describe.each([
       })
     })

+    it("can handle csv-special characters in strings", async () => {
+      const badString = 'test":, wow", "test": "wow"'
+      const table = await config.api.table.save(
+        saveTableRequest({
+          schema: {
+            string: {
+              type: FieldType.STRING,
+              name: "string",
+            },
+          },
+        })
+      )
+
+      await config.api.row.save(table._id!, { string: badString })
+
+      const exportedValue = await config.api.row.exportRows(
+        table._id!,
+        { query: {} },
+        RowExportFormat.CSV
+      )
+
+      const json = await config.api.table.csvToJson(
+        {
+          csvString: exportedValue,
+        },
+        {
+          status: 200,
+        }
+      )
+
+      expect(json).toHaveLength(1)
+      expect(json[0].string).toEqual(badString)
+    })
+
     it("exported data can be re-imported", async () => {
       // export all
       const exportedValue = await config.api.row.exportRows(
@ -1,5 +1,5 @@
|
|||
import * as setup from "../api/routes/tests/utilities"
|
||||
import { Datasource, FieldType } from "@budibase/types"
|
||||
import { Datasource, FieldType, Table } from "@budibase/types"
|
||||
import _ from "lodash"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import {
|
||||
|
@@ -229,4 +229,54 @@ describe("postgres integrations", () => {
     ).toBeUndefined()
   })
 })
+
+  describe("money field 💰", () => {
+    const tableName = "moneytable"
+    let table: Table
+
+    beforeAll(async () => {
+      await client.raw(`
+        CREATE TABLE ${tableName} (
+          id serial PRIMARY KEY,
+          price money
+        )
+      `)
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })
+      table = response.datasource.entities![tableName]
+    })
+
+    it("should be able to import a money field", async () => {
+      expect(table).toBeDefined()
+      expect(table?.schema.price.type).toBe(FieldType.NUMBER)
+    })
+
+    it("should be able to search a money field", async () => {
+      await config.api.row.bulkImport(table._id!, {
+        rows: [{ price: 200 }, { price: 300 }],
+      })
+
+      const { rows } = await config.api.row.search(table._id!, {
+        query: {
+          equal: {
+            price: 200,
+          },
+        },
+      })
+      expect(rows).toHaveLength(1)
+      expect(rows[0].price).toBe("200.00")
+    })
+
+    it("should be able to update a money field", async () => {
+      let row = await config.api.row.save(table._id!, { price: 200 })
+      expect(row.price).toBe("200.00")
+
+      row = await config.api.row.save(table._id!, { ...row, price: 300 })
+      expect(row.price).toBe("300.00")
+
+      row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
+      expect(row.price).toBe("400.00")
+    })
+  })
 })
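
[Note, added commentary, not part of the diff: Postgres returns money columns as locale-formatted strings such as "$200.00", which is why the InternalBuilder hunk earlier selects the column through ::money::numeric and why these tests expect plain "200.00" strings. A hedged sketch of the equivalent raw query; the connection details are hypothetical.]

import knex from "knex"

// Hypothetical connection; mirrors the test's moneytable.
const db = knex({ client: "pg", connection: "postgres://localhost/test" })

async function readPrices() {
  return db
    .select(db.raw(`??::money::numeric as ??`, ["moneytable.price", "price"]))
    .from("moneytable")
  // The pg driver returns numeric values as strings, hence expectations
  // like "200.00" rather than 200.
}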
@@ -5,8 +5,10 @@ export async function jsonFromCsvString(csvString: string) {
     csvString
   )

-  // By default the csvtojson library casts empty values as empty strings. This is causing issues on conversion.
-  // ignoreEmpty will remove the key completly if empty, so creating this empty object will ensure we return the values with the keys but empty values
+  // By default the csvtojson library casts empty values as empty strings. This
+  // is causing issues on conversion. ignoreEmpty will remove the key completly
+  // if empty, so creating this empty object will ensure we return the values
+  // with the keys but empty values
   const result = await csv({ ignoreEmpty: false }).fromString(csvString)
   result.forEach((r, i) => {
     for (const [key] of Object.entries(r).filter(([, value]) => value === "")) {
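
[Note, added commentary, not part of the diff: the reflowed comment describes real csvtojson behaviour. A small sketch of the difference ignoreEmpty makes, with hypothetical data.]

import csv from "csvtojson"

const input = "name,age\nAlice,\n"

// ignoreEmpty: false keeps empty cells as empty strings...
csv({ ignoreEmpty: false })
  .fromString(input)
  .then(rows => console.log(rows)) // [ { name: 'Alice', age: '' } ]

// ...while ignoreEmpty: true drops the key entirely.
csv({ ignoreEmpty: true })
  .fromString(input)
  .then(rows => console.log(rows)) // [ { name: 'Alice' } ]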