@@ -110,6 +52,7 @@
{#if filteredScreens?.length}
{#each filteredScreens as screen (screen._id)}
screensHeight.set("210px")}
+ class:disabled={searching}
+ use:resizableHandle
/>
@@ -148,14 +93,12 @@
min-height: 147px;
max-height: calc(100% - 147px);
position: relative;
- transition: height 300ms ease-out;
+ transition: height 300ms ease-out, max-height 300ms ease-out;
+ height: 210px;
}
- .screens.search {
- max-height: none;
- }
- .screens.resizing {
- user-select: none;
- cursor: row-resize;
+ .screens.searching {
+ max-height: 100%;
+ height: 100% !important;
}
.header {
@@ -177,9 +120,6 @@
overflow: auto;
flex-grow: 1;
}
- .screens.resizing .content {
- pointer-events: none;
- }
.screens :global(.nav-item) {
padding-right: 8px !important;
@@ -217,4 +157,10 @@
.divider:hover:after {
background: var(--spectrum-global-color-gray-300);
}
+ .divider.disabled {
+ cursor: auto;
+ }
+ .divider.disabled:after {
+ background: var(--spectrum-global-color-gray-200);
+ }
diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_layout.svelte
index 0e630b4f39..ab29f2ea0d 100644
--- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_layout.svelte
+++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_layout.svelte
@@ -40,6 +40,7 @@
}
.content {
+ width: 100vw;
display: flex;
flex-direction: row;
justify-content: flex-start;
diff --git a/packages/client/manifest.json b/packages/client/manifest.json
index c7a207fc28..fdb0ad9db1 100644
--- a/packages/client/manifest.json
+++ b/packages/client/manifest.json
@@ -6056,18 +6056,6 @@
"options": ["Create", "Update", "View"],
"defaultValue": "Create"
},
- {
- "type": "text",
- "label": "Title",
- "key": "title",
- "nested": true
- },
- {
- "type": "text",
- "label": "Description",
- "key": "description",
- "nested": true
- },
{
"section": true,
"dependsOn": {
@@ -6075,7 +6063,7 @@
"value": "Create",
"invert": true
},
- "name": "Row details",
+ "name": "Row ID",
"info": "How to pass a row ID using bindings",
"settings": [
{
@@ -6095,8 +6083,20 @@
},
{
"section": true,
- "name": "Fields",
+ "name": "Details",
"settings": [
+ {
+ "type": "text",
+ "label": "Title",
+ "key": "title",
+ "nested": true
+ },
+ {
+ "type": "text",
+ "label": "Description",
+ "key": "description",
+ "nested": true
+ },
{
"type": "fieldConfiguration",
"key": "fields",
diff --git a/packages/server/CODEOWNERS b/packages/server/CODEOWNERS
new file mode 100644
index 0000000000..84313fb9cf
--- /dev/null
+++ b/packages/server/CODEOWNERS
@@ -0,0 +1 @@
+* @Budibase/backend
\ No newline at end of file
diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts
index 2fc6caeb39..8465f997e3 100644
--- a/packages/server/src/sdk/app/rows/search/external.ts
+++ b/packages/server/src/sdk/app/rows/search/external.ts
@@ -133,9 +133,14 @@ export async function exportRows(
let result = await search({ tableId, query: requestQuery, sort, sortOrder })
let rows: Row[] = []
+ let headers
+
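+ // Fail fast if the table name could not be resolved; the schema lookup below depends on it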
+ if (!tableName) {
+ throw new HTTPError("Could not find table name.", 400)
+ }
+ const schema = datasource.entities[tableName].schema
// Filter data to only specified columns if required
-
if (columns && columns.length) {
for (let i = 0; i < result.rows.length; i++) {
rows[i] = {}
@@ -143,22 +148,17 @@ export async function exportRows(
rows[i][column] = result.rows[i][column]
}
}
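+ // Only the requested columns should appear as export headers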
+ headers = columns
} else {
rows = result.rows
}
- if (!tableName) {
- throw new HTTPError("Could not find table name.", 400)
- }
- const schema = datasource.entities[tableName].schema
let exportRows = cleanExportRows(rows, schema, format, columns)
- let headers = Object.keys(schema)
-
let content: string
switch (format) {
case exporters.Format.CSV:
- content = exporters.csv(headers, exportRows)
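+ // Fall back to the full schema keys when no explicit column list was provided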
+ content = exporters.csv(headers ?? Object.keys(schema), exportRows)
break
case exporters.Format.JSON:
content = exporters.json(exportRows)
diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts
index 87a33c0ba0..22cb3985b7 100644
--- a/packages/server/src/sdk/app/rows/search/internal.ts
+++ b/packages/server/src/sdk/app/rows/search/internal.ts
@@ -110,7 +110,7 @@ export async function exportRows(
let rows: Row[] = []
let schema = table.schema
-
+ let headers
// Filter data to only specified columns if required
if (columns && columns.length) {
for (let i = 0; i < result.length; i++) {
@@ -119,6 +119,7 @@ export async function exportRows(
rows[i][column] = result[i][column]
}
}
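+ // Use the requested columns as headers rather than every key on the first row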
+ headers = columns
} else {
rows = result
}
@@ -127,7 +128,7 @@ export async function exportRows(
if (format === Format.CSV) {
return {
fileName: "export.csv",
- content: csv(Object.keys(rows[0]), exportRows),
+ content: csv(headers ?? Object.keys(rows[0]), exportRows),
}
} else if (format === Format.JSON) {
return {
diff --git a/packages/server/src/sdk/tests/rows/row.spec.ts b/packages/server/src/sdk/tests/rows/row.spec.ts
index af3d405e15..8b01356e35 100644
--- a/packages/server/src/sdk/tests/rows/row.spec.ts
+++ b/packages/server/src/sdk/tests/rows/row.spec.ts
@@ -18,7 +18,6 @@ jest.mock("../../../utilities/rowProcessor", () => ({
jest.mock("../../../api/controllers/view/exporters", () => ({
...jest.requireActual("../../../api/controllers/view/exporters"),
- csv: jest.fn(),
Format: {
CSV: "csv",
},
@@ -102,5 +101,32 @@ describe("external row sdk", () => {
new HTTPError("Could not find table name.", 400)
)
})
+
+ it("should only export specified columns", async () => {
+ mockDatasourcesGet.mockImplementation(async () => ({
+ entities: {
+ tablename: {
+ schema: {
+ name: {},
+ age: {},
+ dob: {},
+ },
+ },
+ },
+ }))
+ const headers = ["name", "dob"]
+
+ const result = await exportRows({
+ tableId: "datasource__tablename",
+ format: Format.CSV,
+ query: {},
+ columns: headers,
+ })
+
+ expect(result).toEqual({
+ fileName: "export.csv",
+ content: `"name","dob"`,
+ })
+ })
})
})
diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts
index 564e8a52c9..5e24b640d4 100644
--- a/packages/shared-core/src/filters.ts
+++ b/packages/shared-core/src/filters.ts
@@ -315,7 +315,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
new Date(docValue).getTime() > new Date(testValue.high).getTime()
)
}
- throw "Cannot perform range filter - invalid type."
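+ // An invalid doc value no longer aborts the whole query - the row is left in the results, as the updated test below verifies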
+ return false
}
)
diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts
index 6f488cffbd..bddd6cb1f0 100644
--- a/packages/shared-core/src/tests/filters.test.ts
+++ b/packages/shared-core/src/tests/filters.test.ts
@@ -130,32 +130,28 @@ describe("runLuceneQuery", () => {
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
})
- it("should throw an error is an invalid doc value is passed into a range filter", async () => {
+ it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
+ const docs = [
+ {
+ order_id: 4,
+ customer_id: 1758,
+ order_status: 5,
+ order_date: "{{ Binding.INVALID }}",
+ required_date: "2017-03-05T00:00:00.000Z",
+ shipped_date: "2017-03-03T00:00:00.000Z",
+ store_id: 2,
+ staff_id: 7,
+ description: undefined,
+ label: "",
+ },
+ ]
const query = buildQuery("range", {
order_date: {
low: "2016-01-04T00:00:00.000Z",
high: "2016-01-11T00:00:00.000Z",
},
})
- expect(() =>
- runLuceneQuery(
- [
- {
- order_id: 4,
- customer_id: 1758,
- order_status: 5,
- order_date: "INVALID",
- required_date: "2017-03-05T00:00:00.000Z",
- shipped_date: "2017-03-03T00:00:00.000Z",
- store_id: 2,
- staff_id: 7,
- description: undefined,
- label: "",
- },
- ],
- query
- )
- ).toThrowError("Cannot perform range filter - invalid type.")
+ expect(runLuceneQuery(docs, query)).toEqual(docs)
})
it("should return rows with matches on empty filter", () => {
diff --git a/packages/worker/CODEOWNERS b/packages/worker/CODEOWNERS
new file mode 100644
index 0000000000..84313fb9cf
--- /dev/null
+++ b/packages/worker/CODEOWNERS
@@ -0,0 +1 @@
+* @Budibase/backend
\ No newline at end of file