Merge branch 'v3-ui' of github.com:Budibase/budibase into views-openapi
commit 583100c9dc
@@ -61,6 +61,15 @@ export const createGridWebsocket = context => {
       }
     }
   )
+  socket.on(
+    GridSocketEvent.DatasourceChange,
+    ({ datasource: newDatasource }) => {
+      // Listen for builder renames, as these aren't handled otherwise
+      if (newDatasource?.name !== get(definition).name) {
+        definition.set(newDatasource)
+      }
+    }
+  )
 
   // Change websocket connection when table changes
   datasource.subscribe(connectToDatasource)
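The handler added above keeps the grid's definition store in sync when a datasource is renamed in the builder. A minimal sketch of the same pattern, assuming Svelte stores and treating the event wiring as a plain function (the store contents are stand-ins for the real grid context, not Budibase internals):

    import { writable, get } from "svelte/store"

    // Stand-in for the grid context's definition store
    const definition = writable({ name: "old-table" })

    // Stand-in for the GridSocketEvent.DatasourceChange handler: only a
    // name change (e.g. a rename in the builder) updates the store.
    function onDatasourceChange(newDatasource: { name: string }) {
      if (newDatasource?.name !== get(definition).name) {
        definition.set(newDatasource)
      }
    }

    onDatasourceChange({ name: "renamed-table" })
    console.log(get(definition).name) // -> "renamed-table"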
@@ -136,16 +136,15 @@ export async function importToRows(
 
     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = await inputProcessing(userId, table, row, {
+    row = await inputProcessing(userId, table, row, {
       noAutoRelationships: true,
     })
-    row = processed
 
     // However here we must reference the original table, as we want to mutate
     // the real schema of the table passed in, not the clone used for
     // incrementing auto IDs
     for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
-      if (schema.type === FieldType.LINK) {
+      if (schema.type === FieldType.LINK && data.find(row => row[fieldName])) {
         throw new HTTPError(
           `Can't bulk import relationship fields for internal databases, found value in field "${fieldName}"`,
           400
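Besides reassigning row directly from inputProcessing, this hunk tightens the relationship guard: a LINK column now only aborts the bulk import when the imported data actually carries a value for it. A reduced sketch of just that guard, with hypothetical names (assertNoLinkValues is not a real Budibase helper):

    type Row = Record<string, unknown>

    // Throw only if some imported row supplies a value for a link field;
    // link columns that exist in the schema but are unused stay legal.
    function assertNoLinkValues(linkFields: string[], data: Row[]) {
      for (const fieldName of linkFields) {
        if (data.find(row => row[fieldName])) {
          throw new Error(
            `Can't bulk import relationship fields for internal databases, ` +
              `found value in field "${fieldName}"`
          )
        }
      }
    }

    assertNoLinkValues(["owner"], [{ name: "a" }]) // passes: column unused
    // assertNoLinkValues(["owner"], [{ owner: "x" }]) // throws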
@@ -9,29 +9,33 @@ function getHeaders(
   return headers.map(header => `"${customHeaders[header] || header}"`)
 }
 
+function escapeCsvString(str: string) {
+  return str.replace(/"/g, '""')
+}
+
 export function csv(
   headers: string[],
   rows: Row[],
   delimiter: string = ",",
   customHeaders: { [key: string]: string } = {}
 ) {
-  let csv = getHeaders(headers, customHeaders).join(delimiter)
+  let csvRows = [getHeaders(headers, customHeaders)]
 
   for (let row of rows) {
-    csv = `${csv}\n${headers
-      .map(header => {
-        let val = row[header]
-        val =
-          typeof val === "object" && !(val instanceof Date)
-            ? `"${JSON.stringify(val).replace(/"/g, "'")}"`
-            : val !== undefined
-            ? `"${val}"`
-            : ""
-        return val.trim()
-      })
-      .join(delimiter)}`
+    csvRows.push(
+      headers.map(header => {
+        const val = row[header]
+        if (typeof val === "object" && !(val instanceof Date)) {
+          return `"${JSON.stringify(val).replace(/"/g, "'")}"`
+        }
+        if (val !== undefined) {
+          return `"${escapeCsvString(val.toString())}"`
+        }
+        return ""
+      })
+    )
   }
-  return csv
+  return csvRows.map(row => row.join(delimiter)).join("\n")
 }
 
 export function json(rows: Row[]) {
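The exporter change replaces string concatenation plus ad-hoc quoting with RFC 4180-style escaping: a literal double quote inside a quoted field is doubled. (The object branch still swaps quotes for apostrophes via JSON.stringify, unchanged in behaviour.) A standalone sketch of the rule, with the demo value chosen to exercise quotes and delimiters; it matches the test string added further down:

    function escapeCsvString(str: string) {
      // RFC 4180: a `"` inside a quoted field is written as `""`
      return str.replace(/"/g, '""')
    }

    const cell = 'test":, wow", "test": "wow"'
    // Quoting the escaped value keeps the embedded commas and quotes from
    // splitting the row apart on re-import.
    console.log(`"${escapeCsvString(cell)}"`)
    // -> "test"":, wow"", ""test"": ""wow"""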
@@ -1,3 +1,5 @@
+import { DEFAULT_TABLES } from "../../../db/defaultData/datasource_bb_default"
+
 jest.mock("../../../utilities/redis", () => ({
   init: jest.fn(),
   getLocksById: () => {
@@ -447,4 +449,18 @@ describe("/applications", () => {
       expect(devLogs.data.length).toBe(0)
     })
   })
+
+  describe("POST /api/applications/:appId/sample", () => {
+    it("should be able to add sample data", async () => {
+      await config.api.application.addSampleData(config.getAppId())
+      for (let table of DEFAULT_TABLES) {
+        const res = await config.api.row.search(
+          table._id!,
+          { query: {} },
+          { status: 200 }
+        )
+        expect(res.rows.length).not.toEqual(0)
+      }
+    })
+  })
 })
@@ -2630,6 +2630,40 @@ describe.each([
       })
     })
 
+    it("can handle csv-special characters in strings", async () => {
+      const badString = 'test":, wow", "test": "wow"'
+      const table = await config.api.table.save(
+        saveTableRequest({
+          schema: {
+            string: {
+              type: FieldType.STRING,
+              name: "string",
+            },
+          },
+        })
+      )
+
+      await config.api.row.save(table._id!, { string: badString })
+
+      const exportedValue = await config.api.row.exportRows(
+        table._id!,
+        { query: {} },
+        RowExportFormat.CSV
+      )
+
+      const json = await config.api.table.csvToJson(
+        {
+          csvString: exportedValue,
+        },
+        {
+          status: 200,
+        }
+      )
+
+      expect(json).toHaveLength(1)
+      expect(json[0].string).toEqual(badString)
+    })
+
     it("exported data can be re-imported", async () => {
       // export all
       const exportedValue = await config.api.row.exportRows(
@@ -5,8 +5,10 @@ export async function jsonFromCsvString(csvString: string) {
     csvString
   )
 
-  // By default the csvtojson library casts empty values as empty strings. This is causing issues on conversion.
-  // ignoreEmpty will remove the key completly if empty, so creating this empty object will ensure we return the values with the keys but empty values
+  // By default the csvtojson library casts empty values as empty strings. This
+  // is causing issues on conversion. ignoreEmpty will remove the key completely
+  // if empty, so creating this empty object will ensure we return the values
+  // with the keys but empty values
   const result = await csv({ ignoreEmpty: false }).fromString(csvString)
   result.forEach((r, i) => {
     for (const [key] of Object.entries(r).filter(([, value]) => value === "")) {
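For context on the reflowed comment: csvtojson's ignoreEmpty option decides whether empty cells survive the conversion at all. A small sketch assuming the real csvtojson package (the sample data is illustrative; the normalisation loop that follows in the real function is only partially visible in this hunk):

    import csv from "csvtojson"

    const input = "name,notes\nalice,\nbob,hello"

    // With ignoreEmpty: true, alice's empty `notes` key would be dropped
    // entirely; with false it is kept as an empty string, which the
    // forEach loop above can then normalise.
    csv({ ignoreEmpty: false })
      .fromString(input)
      .then(rows => console.log(rows))
    // -> [ { name: "alice", notes: "" }, { name: "bob", notes: "hello" } ]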