Merge branch 'fix-relationship-filtering' of github.com:Budibase/budibase into fix-relationship-filtering
commit 562f0e2c01
@@ -449,8 +449,12 @@ class InternalBuilder {
        query = query.orderBy(`${aliased}.${key}`, direction, nulls)
      }
    }
-    // always add sorting by the primary key - make sure result is deterministic
-    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+    // add sorting by the primary key if the result isn't already sorted by it,
+    // to make sure result is deterministic
+    if (!sort || sort[primaryKey[0]] === undefined) {
+      query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+    }
    return query
  }
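For context on the hunk above: the old unconditional `orderBy` could emit the primary key twice when the user had already sorted by it. A minimal sketch of the failure mode, assuming a plain Knex setup (the table and alias names are invented):

```ts
import knex from "knex"

const db = knex({ client: "pg" })

// Old behaviour: the tie-breaker sort was appended unconditionally, so a
// user-requested sort on the primary key duplicated the column.
const query = db("users as a")
  .select("*")
  .orderBy("a.id", "asc") // user-requested sort on the primary key
  .orderBy("a.id") // unconditional deterministic tie-breaker

// => select * from "users" as "a" order by "a"."id" asc, "a"."id" asc
// Some engines (notably MS SQL) reject a column repeated in ORDER BY; the new
// `if (!sort || sort[primaryKey[0]] === undefined)` guard skips the duplicate.
console.log(query.toString())
```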
@@ -604,7 +608,8 @@ class InternalBuilder {
    if (!primary) {
      throw new Error("Primary key is required for upsert")
    }
-    return query.insert(parsedBody).onConflict(primary).merge()
+    const ret = query.insert(parsedBody).onConflict(primary).merge()
+    return ret
  } else if (this.client === SqlClient.MS_SQL) {
    // No upsert or onConflict support in MSSQL yet, see:
    // https://github.com/knex/knex/pull/6050
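The `return` → `const ret`/`return ret` change is cosmetic (easier breakpoint placement), but the upsert pattern itself is worth spelling out. A sketch of what `insert().onConflict().merge()` compiles to, with an invented table:

```ts
import knex from "knex"

const db = knex({ client: "pg" })

// Rows whose primary key already exists are updated in place; new keys insert.
const upsert = db("users")
  .insert({ id: 1, name: "Row 1 updated" })
  .onConflict("id")
  .merge()

// => insert into "users" ("id", "name") values (1, 'Row 1 updated')
//    on conflict ("id") do update set "id" = excluded."id", "name" = excluded."name"
console.log(upsert.toString())
```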
@@ -1,9 +1,14 @@
 <script>
-  import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
+  import {
+    FieldType,
+    BBReferenceFieldSubType,
+    SourceName,
+  } from "@budibase/types"
   import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { DB_TYPE_INTERNAL } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
+  import { tables, datasources } from "stores/builder"

   let error = null
   let fileName = null
@@ -80,6 +85,9 @@
     schema = fetchSchema(tableId)
   }

+  $: table = $tables.list.find(table => table._id === tableId)
+  $: datasource = $datasources.list.find(ds => ds._id === table?.sourceId)
+
   async function fetchSchema(tableId) {
     try {
       const definition = await API.fetchTableDefinition(tableId)
@@ -185,20 +193,25 @@
       </div>
     {/each}
   </div>
-  {#if tableType === DB_TYPE_INTERNAL}
-    <br />
+  <br />
+  <!-- SQL Server doesn't yet support overwriting rows by existing keys -->
+  {#if datasource?.source !== SourceName.SQL_SERVER}
     <Toggle
       bind:value={updateExistingRows}
       on:change={() => (identifierFields = [])}
       thin
       text="Update existing rows"
     />
-    {#if updateExistingRows}
+  {/if}
+  {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         label="Identifier field(s)"
         options={Object.keys(validation)}
         bind:value={identifierFields}
       />
+    {:else}
+      <p>Rows will be updated based on the table's primary key.</p>
     {/if}
   {/if}
   {#if invalidColumns.length > 0}
@@ -1 +1 @@
-Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6
+Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2017-latest
+FROM mcr.microsoft.com/mssql/server:2022-latest

 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd
@@ -316,7 +316,13 @@ export class ExternalRequest<T extends Operation> {
     manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
+      if (row[key] === undefined || newRow[key]) {
+        continue
+      }
+      if (
+        !(this.operation === Operation.BULK_UPSERT) &&
+        !isEditableColumn(field)
+      ) {
         continue
       }
       // parse floats/numbers
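A condensed sketch of the new skip rule (the helper name `shouldSkip` and its boolean flags are invented; `Operation.BULK_UPSERT` follows the hunk): non-editable columns such as auto-increment keys are still skipped for ordinary writes, but now pass through during a bulk upsert so the conflict target has a value.

```ts
enum Operation {
  CREATE = "CREATE",
  BULK_UPSERT = "BULK_UPSERT",
}

// Hypothetical condensation of the two `continue` checks above.
function shouldSkip(
  operation: Operation,
  hasValue: boolean,
  alreadySet: boolean,
  isEditable: boolean
): boolean {
  if (!hasValue || alreadySet) {
    return true // nothing to copy, or the new row already has it
  }
  // Non-editable columns are only a reason to skip outside of bulk upserts.
  return operation !== Operation.BULK_UPSERT && !isEditable
}

console.log(shouldSkip(Operation.BULK_UPSERT, true, false, false)) // false: key kept
console.log(shouldSkip(Operation.CREATE, true, false, false)) // true: skipped
```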
@@ -690,6 +696,7 @@ export class ExternalRequest<T extends Operation> {
       },
       meta: {
         table,
+        id: config.id,
       },
     }

@@ -16,6 +16,7 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"

 function getDatasourceId(table: Table) {
   if (!table) {
@@ -85,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema

+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on atm in Budibase is the
+    // primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }

   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
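The comment in the hunk is the crux: SQL `ON CONFLICT` only fires against a unique index. A sketch of the difference, assuming a Postgres table where only `id` is unique (table and columns invented):

```ts
import knex from "knex"

const db = knex({ client: "pg" })

// Fine when executed: "id" is the primary key, backed by a unique index.
const ok = db("users").insert({ id: 1, name: "a" }).onConflict("id").merge()

// Executing this would fail: with no unique index on "name", Postgres raises
// "there is no unique or exclusion constraint matching the ON CONFLICT
// specification" — hence the new 400 when identifierFields differ from
// table.primary.
const bad = db("users").insert({ id: 2, name: "a" }).onConflict("name").merge()

console.log(ok.toString())
console.log(bad.toString())
```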
@@ -178,7 +178,7 @@ export async function handleDataImport(
   }

   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)

   let finalData: any = await importToRows(data, table, user)
@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"

 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@ import {
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"

 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([

   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined

   beforeAll(async () => {
     await config.init()
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })
@@ -307,13 +315,13 @@ describe.each([
       // as quickly as possible.
       await Promise.all(
         sequence.map(async () => {
-          const attempts = 20
+          const attempts = 30
           for (let attempt = 0; attempt < attempts; attempt++) {
             try {
               await config.api.row.save(table._id!, {})
               return
             } catch (e) {
-              await new Promise(r => setTimeout(r, Math.random() * 15))
+              await new Promise(r => setTimeout(r, Math.random() * 50))
             }
           }
           throw new Error(`Failed to create row after ${attempts} attempts`)
@@ -598,6 +606,35 @@ describe.each([
       expect(res.name).toEqual("Updated Name")
       await assertRowUsage(rowUsage)
     })

+    !isInternal &&
+      it("can update a row on an external table with a primary key", async () => {
+        const tableName = uuid.v4().substring(0, 10)
+        await client!.schema.createTable(tableName, table => {
+          table.increments("id").primary()
+          table.string("name")
+        })
+
+        const res = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = res.datasource.entities![tableName]
+
+        const row = await config.api.row.save(table._id!, {
+          id: 1,
+          name: "Row 1",
+        })
+
+        const updatedRow = await config.api.row.save(table._id!, {
+          _id: row._id!,
+          name: "Row 1 Updated",
+        })
+
+        expect(updatedRow.name).toEqual("Row 1 Updated")
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows).toHaveLength(1)
+      })
   })

   describe("patch", () => {
@@ -667,6 +704,7 @@ describe.each([
       expect(event.oldRow.description).toEqual(beforeRow.description)
       expect(event.row.description).toEqual(beforeRow.description)
     })

     it("should throw an error when given improper types", async () => {
       const existing = await config.api.row.save(table._id!, {})
       const rowUsage = await getRowUsage()
@@ -758,7 +796,8 @@ describe.each([
     })

     !isInternal &&
-      // TODO: SQL is having issues creating composite keys
+      // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+      // to identity columns. This is not something Budibase does currently.
       providerType !== DatabaseName.SQL_SERVER &&
       it("should support updating fields that are part of a composite key", async () => {
         const tableRequest = saveTableRequest({
@@ -911,32 +950,21 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })

-    it("Should ignore malformed/invalid delete requests", async () => {
-      const rowUsage = await getRowUsage()
-
-      await config.api.row.delete(table._id!, { not: "valid" } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, { rows: 123 } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, "invalid" as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await assertRowUsage(rowUsage)
-    })
+    it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
+      "Should ignore malformed/invalid delete request: %s",
+      async (request: any) => {
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.delete(table._id!, request, {
+          status: 400,
+          body: {
+            message: "Invalid delete rows request",
+          },
+        })
+
+        await assertRowUsage(rowUsage)
+      }
+    )
   })

   describe("bulkImport", () => {
@@ -1085,6 +1113,121 @@ describe.each([
       expect(rows[2].name).toEqual("Row 3")
       expect(rows[2].description).toEqual("Row 3 description")
     })

+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with an autoID primary key", async () => {
+        const tableName = uuid.v4()
+        await client!.schema.createTable(tableName, table => {
+          table.increments("userId").primary()
+          table.string("name")
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          name: "Clare",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          name: "Jeff",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["userId"],
+          rows: [
+            {
+              userId: row1.userId,
+              name: "Clare updated",
+            },
+            {
+              userId: row2.userId,
+              name: "Jeff updated",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(2)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Clare updated")
+        expect(rows[1].name).toEqual("Jeff updated")
+      })
   })

   describe("enrich", () => {
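The composite-key test above drives the same Knex upsert path with a multi-column conflict target; in Knex terms (table name invented):

```ts
import knex from "knex"

const db = knex({ client: "pg" })

// With a composite primary key, the conflict target is the full column list.
const q = db("company_users")
  .insert({ companyId: 1, userId: 2, name: "Row 2 updated" })
  .onConflict(["companyId", "userId"])
  .merge()

// => insert into "company_users" ("companyId", "name", "userId") values (...)
//    on conflict ("companyId", "userId") do update set ...
console.log(q.toString())
```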
@@ -1,5 +1,9 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 import { db as dbCore, utils } from "@budibase/backend-core"

 import * as setup from "./utilities"
@@ -24,6 +28,8 @@ import _ from "lodash"
 import tk from "timekeeper"
 import { encodeJSBinding } from "@budibase/string-templates"
 import { dataFilters } from "@budibase/shared-core"
+import { Knex } from "knex"
+import { structures } from "@budibase/backend-core/tests"

 describe.each([
   ["in-memory", undefined],
@@ -42,6 +48,7 @@ describe.each([

   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
+  let client: Knex | undefined
   let table: Table
   let rows: Row[]

@@ -63,8 +70,10 @@ describe.each([
     }

     if (dsProvider) {
+      const rawDatasource = await dsProvider
+      client = await knexClient(rawDatasource)
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
     }
   })
@@ -909,6 +918,44 @@ describe.each([
         }).toMatchExactly([{ name: "foo" }, { name: "bar" }])
       })
     })

+    !isInternal &&
+      !isInMemory &&
+      // This test was added because we automatically add in a sort by the
+      // primary key, and we used to do this unconditionally which caused
+      // problems because it was possible for the primary key to appear twice
+      // in the resulting SQL ORDER BY clause, resulting in an SQL error.
+      // We now check first to make sure that the primary key isn't already
+      // in the sort before adding it.
+      describe("sort on primary key", () => {
+        beforeAll(async () => {
+          const tableName = structures.uuid().substring(0, 10)
+          await client!.schema.createTable(tableName, t => {
+            t.string("name").primary()
+          })
+          const resp = await config.api.datasource.fetchSchema({
+            datasourceId: datasource!._id!,
+          })
+
+          table = resp.datasource.entities![tableName]
+
+          await createRows([{ name: "foo" }, { name: "bar" }])
+        })
+
+        it("should be able to sort by a primary key column ascending", async () =>
+          expectSearch({
+            query: {},
+            sort: "name",
+            sortOrder: SortOrder.ASCENDING,
+          }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+        it("should be able to sort by a primary key column descending", async () =>
+          expectSearch({
+            query: {},
+            sort: "name",
+            sortOrder: SortOrder.DESCENDING,
+          }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+      })
   })
 })
@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }

-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}

     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
+
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
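To make the new `parse()` rule concrete, a standalone walk-through with hypothetical schema data: an autocolumn that is also the primary key now survives parsing (so upserts can target existing rows), while other autocolumns are still stripped.

```ts
// Hypothetical table shape, mirroring the check in parse() above.
const table = {
  primary: ["userId"],
  schema: {
    userId: { type: "number", autocolumn: true }, // autocolumn AND primary key
    createdAt: { type: "datetime", autocolumn: true }, // plain autocolumn
    name: { type: "string", autocolumn: false },
  },
}

for (const [columnName, columnSchema] of Object.entries(table.schema)) {
  const stripped =
    columnSchema.autocolumn && !table.primary.includes(columnName)
  console.log(`${columnName}: ${stripped ? "stripped" : "kept"}`)
}
// userId: kept, createdAt: stripped, name: kept
```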