Merge branch 'master' into codeowners

commit fe0f31eef4
@@ -133,9 +133,14 @@ export async function exportRows(
   let result = await search({ tableId, query: requestQuery, sort, sortOrder })
 
   let rows: Row[] = []
+  let headers
 
+  if (!tableName) {
+    throw new HTTPError("Could not find table name.", 400)
+  }
+  const schema = datasource.entities[tableName].schema
 
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.rows.length; i++) {
       rows[i] = {}
@@ -143,22 +148,17 @@ export async function exportRows(
         rows[i][column] = result.rows[i][column]
       }
     }
+    headers = columns
   } else {
     rows = result.rows
   }
 
-  if (!tableName) {
-    throw new HTTPError("Could not find table name.", 400)
-  }
-  const schema = datasource.entities[tableName].schema
   let exportRows = cleanExportRows(rows, schema, format, columns)
 
-  let headers = Object.keys(schema)
-
   let content: string
   switch (format) {
     case exporters.Format.CSV:
-      content = exporters.csv(headers, exportRows)
+      content = exporters.csv(headers ?? Object.keys(schema), exportRows)
       break
     case exporters.Format.JSON:
       content = exporters.json(exportRows)
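A quick sketch of the pattern these hunks introduce, for readers skimming the diff: explicitly requested columns become the export headers, and the schema keys are only a fallback. The Row type and csv/exportCsv helpers below are illustrative stand-ins, not the Budibase exporters.

    // Minimal sketch, assuming a toy Row type and csv() helper.
    type Row = Record<string, any>

    function csv(headers: string[], rows: Row[]): string {
      const head = headers.map(h => `"${h}"`).join(",")
      const body = rows.map(r => headers.map(h => `"${r[h] ?? ""}"`).join(","))
      return [head, ...body].join("\n")
    }

    function exportCsv(schema: Row, rows: Row[], columns?: string[]): string {
      // Mirrors the diff: requested columns win; otherwise fall back
      // to every key in the table schema.
      let headers: string[] | undefined
      if (columns && columns.length) {
        headers = columns
      }
      return csv(headers ?? Object.keys(schema), rows)
    }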

@@ -110,7 +110,7 @@ export async function exportRows(
 
   let rows: Row[] = []
   let schema = table.schema
-
+  let headers
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.length; i++) {
@@ -119,6 +119,7 @@ export async function exportRows(
         rows[i][column] = result[i][column]
       }
     }
+    headers = columns
   } else {
     rows = result
   }
@@ -127,7 +128,7 @@ export async function exportRows(
   if (format === Format.CSV) {
     return {
       fileName: "export.csv",
-      content: csv(Object.keys(rows[0]), exportRows),
+      content: csv(headers ?? Object.keys(rows[0]), exportRows),
     }
   } else if (format === Format.JSON) {
     return {
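The internal variant gets the same fallback. Worth noting why `Object.keys(rows[0])` alone was a fragile header source: it throws on an empty result set, and it silently drops any column the first row happens to lack. A standalone illustration with toy data, not taken from the repo:

    const rows: Record<string, any>[] = [{ name: "A" }, { name: "B", age: 3 }]

    // Headers inferred from the first row miss "age" entirely:
    console.log(Object.keys(rows[0])) // ["name"]

    // An explicit header list, as threaded through by the diff, keeps
    // every requested column in the requested order:
    const headers: string[] | undefined = ["name", "age"]
    console.log(headers ?? Object.keys(rows[0])) // ["name", "age"]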

@@ -18,7 +18,6 @@ jest.mock("../../../utilities/rowProcessor", () => ({
 
 jest.mock("../../../api/controllers/view/exporters", () => ({
   ...jest.requireActual("../../../api/controllers/view/exporters"),
-  csv: jest.fn(),
   Format: {
     CSV: "csv",
   },
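Dropping `csv: jest.fn()` from the mock means the real CSV exporter now runs in these tests, which is what lets the new test below assert on actual CSV output. For reference, a generic sketch of this partial-mock pattern; the module path here is hypothetical:

    // Keep the real module, override only selected exports.
    jest.mock("./exporters", () => ({
      ...jest.requireActual("./exporters"),
      // Stub only what the test must control; csv() stays real.
      json: jest.fn(() => "{}"),
    }))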
@@ -102,5 +101,32 @@ describe("external row sdk", () => {
         new HTTPError("Could not find table name.", 400)
       )
     })
+
+    it("should only export specified columns", async () => {
+      mockDatasourcesGet.mockImplementation(async () => ({
+        entities: {
+          tablename: {
+            schema: {
+              name: {},
+              age: {},
+              dob: {},
+            },
+          },
+        },
+      }))
+      const headers = ["name", "dob"]
+
+      const result = await exportRows({
+        tableId: "datasource__tablename",
+        format: Format.CSV,
+        query: {},
+        columns: headers,
+      })
+
+      expect(result).toEqual({
+        fileName: "export.csv",
+        content: `"name","dob"`,
+      })
+    })
   })
 })
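Note the expected content is just the quoted header line: `age` is excluded despite being in the mocked schema, and no data rows follow, presumably because the mocked search yields no rows. In terms of the toy helper sketched earlier: csv(["name", "dob"], []) === '"name","dob"'.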

@@ -315,7 +315,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
           new Date(docValue).getTime() > new Date(testValue.high).getTime()
         )
       }
-      throw "Cannot perform range filter - invalid type."
+      return false
     }
   )
 
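Replacing the throw with `return false` makes the range matcher fail soft: per the updated test below, a value that cannot be parsed as a date (such as the unresolved binding "{{ Binding.INVALID }}") no longer aborts client-side filtering, and the doc is left in the result set. A hedged standalone sketch of the defensive pattern, not the Budibase implementation:

    // Treat unparsable dates as "cannot evaluate" instead of throwing.
    function failsDateRange(docValue: unknown, low: string, high: string): boolean {
      const t = new Date(String(docValue)).getTime()
      if (Number.isNaN(t)) {
        // Invalid value: report no failure, so the doc is not rejected
        // by a condition that cannot be checked.
        return false
      }
      return t < new Date(low).getTime() || t > new Date(high).getTime()
    }

    // NaN date, so the doc survives the filter:
    console.log(failsDateRange("{{ Binding.INVALID }}", "2016-01-04", "2016-01-11")) // false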

@@ -130,32 +130,28 @@ describe("runLuceneQuery", () => {
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
   })
 
-  it("should throw an error is an invalid doc value is passed into a range filter", async () => {
+  it("should return all docs if an invalid doc value is passed into a range filter", async () => {
+    const docs = [
+      {
+        order_id: 4,
+        customer_id: 1758,
+        order_status: 5,
+        order_date: "{{ Binding.INVALID }}",
+        required_date: "2017-03-05T00:00:00.000Z",
+        shipped_date: "2017-03-03T00:00:00.000Z",
+        store_id: 2,
+        staff_id: 7,
+        description: undefined,
+        label: "",
+      },
+    ]
     const query = buildQuery("range", {
       order_date: {
         low: "2016-01-04T00:00:00.000Z",
         high: "2016-01-11T00:00:00.000Z",
       },
     })
-    expect(() =>
-      runLuceneQuery(
-        [
-          {
-            order_id: 4,
-            customer_id: 1758,
-            order_status: 5,
-            order_date: "INVALID",
-            required_date: "2017-03-05T00:00:00.000Z",
-            shipped_date: "2017-03-03T00:00:00.000Z",
-            store_id: 2,
-            staff_id: 7,
-            description: undefined,
-            label: "",
-          },
-        ],
-        query
-      )
-    ).toThrowError("Cannot perform range filter - invalid type.")
+    expect(runLuceneQuery(docs, query)).toEqual(docs)
   })
 
   it("should return rows with matches on empty filter", () => {