Merge remote-tracking branch 'origin/master' into fix/bindings-fixes

Dean 2023-06-12 09:03:37 +01:00
commit c910ddcc81
7 changed files with 163 additions and 9 deletions

View File

@@ -1,5 +1,5 @@
{
"version": "2.7.9",
"version": "2.7.11",
"npmClient": "yarn",
"packages": [
"packages/backend-core",

View File

@@ -343,6 +343,9 @@ export class QueryBuilder<T> {
}
const oneOf = (key: string, value: any) => {
if (!value) {
return `*:*`
}
if (!Array.isArray(value)) {
if (typeof value === "string") {
value = value.split(",")

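Not part of the diff, just a sketch for context: with the guard added above, a falsey one-of value now short-circuits to the Lucene match-all query instead of building a clause from nothing, while an empty array still produces a clause that matches no rows. The clause construction below is assumed for illustration; only the falsey check and the comma split mirror the change itself.

// Illustration only (not from this commit): how the one-of filter behaves
// once falsey values fall through to the match-all query "*:*".
const oneOf = (key: string, value: any): string => {
  if (!value) {
    // null, undefined and "" now match every row
    return `*:*`
  }
  if (!Array.isArray(value)) {
    if (typeof value === "string") {
      value = value.split(",")
    } else {
      value = [value]
    }
  }
  // hypothetical clause construction, just to make the sketch complete
  return `${key}:(${value.map((v: any) => `"${v}"`).join(" OR ")})`
}

// oneOf("property", null)  -> "*:*"                  (all rows)
// oneOf("property", "a,b") -> 'property:("a" OR "b")'
// oneOf("property", [])    -> 'property:()'          (no rows)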
View File

@@ -114,6 +114,25 @@ describe("lucene", () => {
expect(resp.rows.length).toBe(2)
})
it("should return all rows when doing a one of search against falsey value", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addOneOf("property", null)
let resp = await builder.run()
expect(resp.rows.length).toBe(3)
builder.addOneOf("property", undefined)
resp = await builder.run()
expect(resp.rows.length).toBe(3)
builder.addOneOf("property", "")
resp = await builder.run()
expect(resp.rows.length).toBe(3)
builder.addOneOf("property", [])
resp = await builder.run()
expect(resp.rows.length).toBe(0)
})
it("should be able to perform a contains search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addContains("property", ["word"])

View File

@@ -204,6 +204,12 @@
})
return columns
.sort((a, b) => {
if (a.divider) {
return a
}
if (b.divider) {
return b
}
const orderA = a.order || Number.MAX_SAFE_INTEGER
const orderB = b.order || Number.MAX_SAFE_INTEGER
const nameA = getDisplayName(a)

View File

@@ -148,9 +148,9 @@
class:floating={offset > 0}
style="--offset:{offset}px; --sticky-width:{width}px;"
>
<div class="underlay sticky" transition:fade={{ duration: 130 }} />
<div class="underlay" transition:fade={{ duration: 130 }} />
<div class="sticky-column" transition:fade={{ duration: 130 }}>
<div class="underlay sticky" transition:fade|local={{ duration: 130 }} />
<div class="underlay" transition:fade|local={{ duration: 130 }} />
<div class="sticky-column" transition:fade|local={{ duration: 130 }}>
<GutterCell on:expand={addViaModal} rowHovered>
<Icon name="Add" color="var(--spectrum-global-color-gray-500)" />
{#if isAdding}
@@ -179,7 +179,7 @@
</DataCell>
{/if}
</div>
<div class="normal-columns" transition:fade={{ duration: 130 }}>
<div class="normal-columns" transition:fade|local={{ duration: 130 }}>
<GridScrollWrapper scrollHorizontally wheelInteractive>
<div class="row">
{#each $renderedColumns as column, columnIdx}
@@ -209,7 +209,7 @@
</div>
</GridScrollWrapper>
</div>
<div class="buttons" transition:fade={{ duration: 130 }}>
<div class="buttons" transition:fade|local={{ duration: 130 }}>
<Button size="M" cta on:click={addRow} disabled={isAdding}>
<div class="button-with-keys">
Save

View File

@@ -237,9 +237,15 @@ export async function exportRows(ctx: UserCtx) {
ctx.request.body = {
query: {
oneOf: {
_id: ctx.request.body.rows.map(
(row: string) => JSON.parse(decodeURI(row))[0]
),
_id: ctx.request.body.rows.map((row: string) => {
const ids = JSON.parse(
decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
)
if (ids.length > 1) {
ctx.throw(400, "Export data does not support composite keys.")
}
return ids[0]
}),
},
},
}

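For context only, not part of the commit: the new ID handling above, pulled out as a standalone helper to show how an encoded row-ID string is parsed and how composite keys are rejected. The helper name and the plain Error are assumptions; the controller itself throws a 400 through ctx as shown above.

// Hypothetical helper mirroring the parsing added to exportRows above.
// Accepts strings such as "['d001']" or "['d001'%2C'10111']".
function parseRowId(row: string): string {
  const ids: string[] = JSON.parse(
    decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
  )
  if (ids.length > 1) {
    // composite keys are rejected, matching the 400 thrown in the controller
    throw new Error("Export data does not support composite keys.")
  }
  return ids[0]
}

// parseRowId("['d001']")           -> "d001"
// parseRowId("['d001'%2C'10111']") -> throws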
View File

@@ -0,0 +1,120 @@
import { exportRows } from "../row/external"
import sdk from "../../../sdk"
import { ExternalRequest } from "../row/ExternalRequest"
// @ts-ignore
sdk.datasources = {
get: jest.fn(),
}
jest.mock("../row/ExternalRequest")
jest.mock("../view/exporters", () => ({
csv: jest.fn(),
Format: {
CSV: "csv",
},
}))
jest.mock("../../../utilities/fileSystem")
function getUserCtx() {
return {
params: {
tableId: "datasource__tablename",
},
query: {
format: "csv",
},
request: {
body: {},
},
throw: jest.fn(() => {
throw "Err"
}),
attachment: jest.fn(),
}
}
describe("external row controller", () => {
describe("exportRows", () => {
beforeAll(() => {
//@ts-ignore
jest.spyOn(ExternalRequest.prototype, "run").mockImplementation(() => [])
})
afterEach(() => {
jest.clearAllMocks()
})
it("should throw a 400 if no datasource entities are present", async () => {
let userCtx = getUserCtx()
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Datasource has not been configured for plus API."
)
}
})
it("should handle single quotes from a row ID", async () => {
//@ts-ignore
sdk.datasources.get.mockImplementation(() => ({
entities: {
tablename: {
schema: {},
},
},
}))
let userCtx = getUserCtx()
userCtx.request.body = {
rows: ["['d001']"],
}
//@ts-ignore
await exportRows(userCtx)
expect(userCtx.request.body).toEqual({
query: {
oneOf: {
_id: ["d001"],
},
},
})
})
it("should throw a 400 if any composite keys are present", async () => {
let userCtx = getUserCtx()
userCtx.request.body = {
rows: ["[123]", "['d001'%2C'10111']"],
}
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Export data does not support composite keys."
)
}
})
it("should throw a 400 if no table name was found", async () => {
let userCtx = getUserCtx()
userCtx.params.tableId = "datasource__"
userCtx.request.body = {
rows: ["[123]"],
}
try {
//@ts-ignore
await exportRows(userCtx)
} catch (e) {
expect(userCtx.throw).toHaveBeenCalledWith(
400,
"Could not find table name."
)
}
})
})
})