Merge pull request #14932 from Budibase/fix-flaky-export-test
Fix flaky export test.
This commit is contained in:
commit
8c09ff830a
|
@ -9,29 +9,33 @@ function getHeaders(
|
|||
return headers.map(header => `"${customHeaders[header] || header}"`)
|
||||
}
|
||||
|
||||
function escapeCsvString(str: string) {
|
||||
return str.replace(/"/g, '""')
|
||||
}
|
||||
|
||||
export function csv(
|
||||
headers: string[],
|
||||
rows: Row[],
|
||||
delimiter: string = ",",
|
||||
customHeaders: { [key: string]: string } = {}
|
||||
) {
|
||||
let csv = getHeaders(headers, customHeaders).join(delimiter)
|
||||
let csvRows = [getHeaders(headers, customHeaders)]
|
||||
|
||||
for (let row of rows) {
|
||||
csv = `${csv}\n${headers
|
||||
.map(header => {
|
||||
let val = row[header]
|
||||
val =
|
||||
typeof val === "object" && !(val instanceof Date)
|
||||
? `"${JSON.stringify(val).replace(/"/g, "'")}"`
|
||||
: val !== undefined
|
||||
? `"${val}"`
|
||||
: ""
|
||||
return val.trim()
|
||||
csvRows.push(
|
||||
headers.map(header => {
|
||||
const val = row[header]
|
||||
if (typeof val === "object" && !(val instanceof Date)) {
|
||||
return `"${JSON.stringify(val).replace(/"/g, "'")}"`
|
||||
}
|
||||
if (val !== undefined) {
|
||||
return `"${escapeCsvString(val.toString())}"`
|
||||
}
|
||||
return ""
|
||||
})
|
||||
.join(delimiter)}`
|
||||
)
|
||||
}
|
||||
return csv
|
||||
return csvRows.map(row => row.join(delimiter)).join("\n")
|
||||
}
|
||||
|
||||
export function json(rows: Row[]) {
|
||||
|
|
|
@ -2630,6 +2630,40 @@ describe.each([
|
|||
})
|
||||
})
|
||||
|
||||
it("can handle csv-special characters in strings", async () => {
|
||||
const badString = 'test":, wow", "test": "wow"'
|
||||
const table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
schema: {
|
||||
string: {
|
||||
type: FieldType.STRING,
|
||||
name: "string",
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
await config.api.row.save(table._id!, { string: badString })
|
||||
|
||||
const exportedValue = await config.api.row.exportRows(
|
||||
table._id!,
|
||||
{ query: {} },
|
||||
RowExportFormat.CSV
|
||||
)
|
||||
|
||||
const json = await config.api.table.csvToJson(
|
||||
{
|
||||
csvString: exportedValue,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
}
|
||||
)
|
||||
|
||||
expect(json).toHaveLength(1)
|
||||
expect(json[0].string).toEqual(badString)
|
||||
})
|
||||
|
||||
it("exported data can be re-imported", async () => {
|
||||
// export all
|
||||
const exportedValue = await config.api.row.exportRows(
|
||||
|
|
|
@ -5,8 +5,10 @@ export async function jsonFromCsvString(csvString: string) {
|
|||
csvString
|
||||
)
|
||||
|
||||
// By default the csvtojson library casts empty values as empty strings. This is causing issues on conversion.
|
||||
// ignoreEmpty will remove the key completly if empty, so creating this empty object will ensure we return the values with the keys but empty values
|
||||
// By default the csvtojson library casts empty values as empty strings. This
|
||||
// is causing issues on conversion. ignoreEmpty will remove the key completely
|
||||
// if empty, so creating this empty object will ensure we return the values
|
||||
// with the keys but empty values
|
||||
const result = await csv({ ignoreEmpty: false }).fromString(csvString)
|
||||
result.forEach((r, i) => {
|
||||
for (const [key] of Object.entries(r).filter(([, value]) => value === "")) {
|
||||
|
|
Loading…
Reference in New Issue