Merge branch 'master' into dean-fixes

commit b440f8d532
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.27",
+  "version": "2.29.28",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -37,10 +37,12 @@ import { helpers } from "@budibase/shared-core"
 
 type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
 
-const envLimit = environment.SQL_MAX_ROWS
-  ? parseInt(environment.SQL_MAX_ROWS)
-  : null
-const BASE_LIMIT = envLimit || 5000
+function getBaseLimit() {
+  const envLimit = environment.SQL_MAX_ROWS
+    ? parseInt(environment.SQL_MAX_ROWS)
+    : null
+  return envLimit || 5000
+}
 
 // Takes a string like foo and returns a quoted string like [foo] for SQL Server
 // and "foo" for Postgres.
@@ -838,7 +840,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   private readonly limit: number
 
   // pass through client to get flavour of SQL
-  constructor(client: SqlClient, limit: number = BASE_LIMIT) {
+  constructor(client: SqlClient, limit: number = getBaseLimit()) {
     super(client)
     this.limit = limit
   }
@@ -882,7 +884,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         query = builder.read(client, json, {
           limits: {
             query: this.limit,
-            base: BASE_LIMIT,
+            base: getBaseLimit(),
           },
         })
         break
@@ -66,9 +66,14 @@ export interface RunConfig {
   includeSqlRelationships?: IncludeRelationship
 }
 
+export type ExternalReadRequestReturnType = {
+  rows: Row[]
+  rawResponseSize: number
+}
+
 export type ExternalRequestReturnType<T extends Operation> =
   T extends Operation.READ
-    ? Row[]
+    ? ExternalReadRequestReturnType
     : T extends Operation.COUNT
     ? number
     : { row: Row; table: Table }
@@ -741,9 +746,11 @@ export class ExternalRequest<T extends Operation> {
     )
     // if reading it'll just be an array of rows, return whole thing
     if (operation === Operation.READ) {
-      return (
-        Array.isArray(output) ? output : [output]
-      ) as ExternalRequestReturnType<T>
+      const rows = Array.isArray(output) ? output : [output]
+      return {
+        rows,
+        rawResponseSize: responseRows.length,
+      } as ExternalRequestReturnType<T>
     } else {
       return { row: output[0], table } as ExternalRequestReturnType<T>
     }
@@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
   const table: Table = tables[tableName]
-  const row = response[0]
+  const row = response.rows[0]
   // this seems like a lot of work, but basically we need to dig deeper for the enrich
   // for a single row, there is probably a better way to do this with some smart multi-layer joins
   for (let [fieldName, field] of Object.entries(table.schema)) {
@@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
       },
       includeSqlRelationships: IncludeRelationship.INCLUDE,
     })
-    row[fieldName] = await outputProcessing(linkedTable, relatedRows, {
-      squash: true,
-      preserveLinks: true,
-    })
+    row[fieldName] = await outputProcessing<Row[]>(
+      linkedTable,
+      relatedRows.rows,
+      {
+        squash: true,
+        preserveLinks: true,
+      }
+    )
   }
   return row
 }
@@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
 export function isFormat(format: any): format is RowExportFormat {
   return Object.values(RowExportFormat).includes(format as RowExportFormat)
 }
-
-export function parseCsvExport<T>(value: string) {
-  return JSON.parse(value) as T
-}
@@ -33,6 +33,7 @@ import {
   UpdatedRowEventEmitter,
   TableSchema,
   JsonFieldSubType,
+  RowExportFormat,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
@@ -1811,6 +1812,7 @@ describe.each([
       await config.api.row.exportRows(
         "1234567",
         { rows: [existing._id!] },
+        RowExportFormat.JSON,
         { status: 404 }
       )
     })
@@ -1849,6 +1851,202 @@ describe.each([
       const results = JSON.parse(res)
       expect(results.length).toEqual(3)
     })
 
+    describe("should allow exporting all column types", () => {
+      let tableId: string
+      let expectedRowData: Row
+
+      beforeAll(async () => {
+        const fullSchema = setup.structures.fullSchemaWithoutLinks({
+          allRequired: true,
+        })
+
+        const table = await config.api.table.save(
+          saveTableRequest({
+            ...setup.structures.basicTable(),
+            schema: fullSchema,
+            primary: ["string"],
+          })
+        )
+        tableId = table._id!
+
+        const rowValues: Record<keyof typeof fullSchema, any> = {
+          [FieldType.STRING]: generator.guid(),
+          [FieldType.LONGFORM]: generator.paragraph(),
+          [FieldType.OPTIONS]: "option 2",
+          [FieldType.ARRAY]: ["options 2", "options 4"],
+          [FieldType.NUMBER]: generator.natural(),
+          [FieldType.BOOLEAN]: generator.bool(),
+          [FieldType.DATETIME]: generator.date().toISOString(),
+          [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
+          [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
+          [FieldType.FORMULA]: undefined, // generated field
+          [FieldType.AUTO]: undefined, // generated field
+          [FieldType.JSON]: { name: generator.guid() },
+          [FieldType.INTERNAL]: generator.guid(),
+          [FieldType.BARCODEQR]: generator.guid(),
+          [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
+          [FieldType.BIGINT]: generator.integer().toString(),
+          [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
+          [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
+        }
+        const row = await config.api.row.save(table._id!, rowValues)
+        expectedRowData = {
+          _id: row._id,
+          [FieldType.STRING]: rowValues[FieldType.STRING],
+          [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
+          [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
+          [FieldType.ARRAY]: rowValues[FieldType.ARRAY],
+          [FieldType.NUMBER]: rowValues[FieldType.NUMBER],
+          [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
+          [FieldType.DATETIME]: rowValues[FieldType.DATETIME],
+          [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
+            (a: any) =>
+              expect.objectContaining({
+                ...a,
+                url: expect.any(String),
+              })
+          ),
+          [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
+            ...rowValues[FieldType.ATTACHMENT_SINGLE],
+            url: expect.any(String),
+          }),
+          [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
+          [FieldType.AUTO]: expect.any(Number),
+          [FieldType.JSON]: rowValues[FieldType.JSON],
+          [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
+          [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
+          [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
+            ...rowValues[FieldType.SIGNATURE_SINGLE],
+            url: expect.any(String),
+          }),
+          [FieldType.BIGINT]: rowValues[FieldType.BIGINT],
+          [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
+            expect.objectContaining
+          ),
+          [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
+            rowValues[FieldType.BB_REFERENCE_SINGLE]
+          ),
+        }
+      })
+
+      it("as csv", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.CSV
+        )
+
+        const jsonResult = await config.api.table.csvToJson({
+          csvString: exportedValue,
+        })
+
+        const stringified = (value: string) =>
+          JSON.stringify(value).replace(/"/g, "'")
+
+        const matchingObject = (key: string, value: any, isArray: boolean) => {
+          const objectMatcher = `{'${key}':'${value[key]}'.*?}`
+          if (isArray) {
+            return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
+          }
+          return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
+        }
+
+        expect(jsonResult).toEqual([
+          {
+            ...expectedRowData,
+            auto: expect.any(String),
+            array: stringified(expectedRowData["array"]),
+            attachment: matchingObject(
+              "key",
+              expectedRowData["attachment"][0].sample,
+              true
+            ),
+            attachment_single: matchingObject(
+              "key",
+              expectedRowData["attachment_single"].sample,
+              false
+            ),
+            boolean: stringified(expectedRowData["boolean"]),
+            json: stringified(expectedRowData["json"]),
+            number: stringified(expectedRowData["number"]),
+            signature_single: matchingObject(
+              "key",
+              expectedRowData["signature_single"].sample,
+              false
+            ),
+            bb_reference: matchingObject(
+              "_id",
+              expectedRowData["bb_reference"][0].sample,
+              true
+            ),
+            bb_reference_single: matchingObject(
+              "_id",
+              expectedRowData["bb_reference_single"].sample,
+              false
+            ),
+          },
+        ])
+      })
+
+      it("as json", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.JSON
+        )
+
+        const json = JSON.parse(exportedValue)
+        expect(json).toEqual([expectedRowData])
+      })
+
+      it("as json with schema", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.JSON_WITH_SCHEMA
+        )
+
+        const json = JSON.parse(exportedValue)
+        expect(json).toEqual({
+          schema: expect.any(Object),
+          rows: [expectedRowData],
+        })
+      })
+
+      it("exported data can be re-imported", async () => {
+        // export all
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.CSV
+        )
+
+        // import all twice
+        const rows = await config.api.table.csvToJson({
+          csvString: exportedValue,
+        })
+        await config.api.row.bulkImport(tableId, {
+          rows,
+        })
+        await config.api.row.bulkImport(tableId, {
+          rows,
+        })
+
+        const { rows: allRows } = await config.api.row.search(tableId)
+
+        const expectedRow = {
+          ...expectedRowData,
+          _id: expect.any(String),
+          _rev: expect.any(String),
+          type: "row",
+          tableId: tableId,
+          createdAt: new Date().toISOString(),
+          updatedAt: new Date().toISOString(),
+        }
+        expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
+      })
+    })
   })
 
   let o2mTable: Table
@@ -53,6 +53,7 @@ describe.each([
   const isLucene = name === "lucene"
   const isInMemory = name === "in-memory"
   const isInternal = isSqs || isLucene || isInMemory
+  const isSql = !isInMemory && !isLucene
   const config = setup.getConfig()
 
   let envCleanup: (() => void) | undefined
@@ -192,7 +193,8 @@ describe.each([
       // different to the one passed in will cause the assertion to fail. Extra
       // rows returned by the query will also cause the assertion to fail.
       async toMatchExactly(expectedRows: any[]) {
-        const { rows: foundRows } = await this.performSearch()
+        const response = await this.performSearch()
+        const foundRows = response.rows
 
         // eslint-disable-next-line jest/no-standalone-expect
         expect(foundRows).toHaveLength(expectedRows.length)
@@ -202,13 +204,15 @@ describe.each([
             expect.objectContaining(this.popRow(expectedRow, foundRows))
           )
         )
+        return response
       }
 
       // Asserts that the query returns rows matching exactly the set of rows
       // passed in. The order of the rows is not important, but extra rows will
       // cause the assertion to fail.
       async toContainExactly(expectedRows: any[]) {
-        const { rows: foundRows } = await this.performSearch()
+        const response = await this.performSearch()
+        const foundRows = response.rows
 
         // eslint-disable-next-line jest/no-standalone-expect
         expect(foundRows).toHaveLength(expectedRows.length)
@@ -220,6 +224,7 @@ describe.each([
             )
           )
         )
+        return response
       }
 
       // Asserts that the query returns some property values - this cannot be used
@@ -236,6 +241,7 @@ describe.each([
            expect(response[key]).toEqual(properties[key])
          }
        }
+        return response
      }
 
      // Asserts that the query doesn't return a property, e.g. pagination parameters.
@@ -245,13 +251,15 @@ describe.each([
          // eslint-disable-next-line jest/no-standalone-expect
          expect(response[property]).toBeUndefined()
        }
+        return response
      }
 
      // Asserts that the query returns rows matching the set of rows passed in.
      // The order of the rows is not important. Extra rows will not cause the
      // assertion to fail.
      async toContain(expectedRows: any[]) {
-        const { rows: foundRows } = await this.performSearch()
+        const response = await this.performSearch()
+        const foundRows = response.rows
 
        // eslint-disable-next-line jest/no-standalone-expect
        expect([...foundRows]).toEqual(
@@ -261,6 +269,7 @@ describe.each([
            )
          )
        )
+        return response
      }
 
      async toFindNothing() {
@@ -2608,4 +2617,79 @@ describe.each([
        }).toContainExactly([row])
      })
    })
+
+  isSql &&
+    describe("pagination edge case with relationships", () => {
+      let mainRows: Row[] = []
+
+      beforeAll(async () => {
+        const toRelateTable = await createTable({
+          name: {
+            name: "name",
+            type: FieldType.STRING,
+          },
+        })
+        table = await createTable({
+          name: {
+            name: "name",
+            type: FieldType.STRING,
+          },
+          rel: {
+            name: "rel",
+            type: FieldType.LINK,
+            relationshipType: RelationshipType.MANY_TO_ONE,
+            tableId: toRelateTable._id!,
+            fieldName: "rel",
+          },
+        })
+        const relatedRows = await Promise.all([
+          config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
+          config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
+          config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
+          config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
+          config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
+          config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
+        ])
+        mainRows = await Promise.all([
+          config.api.row.save(table._id!, {
+            name: "product 1",
+            rel: relatedRows.map(row => row._id),
+          }),
+          config.api.row.save(table._id!, {
+            name: "product 2",
+            rel: [],
+          }),
+          config.api.row.save(table._id!, {
+            name: "product 3",
+            rel: [],
+          }),
+        ])
+      })
+
+      it("can still page when the hard limit is hit", async () => {
+        await config.withCoreEnv(
+          {
+            SQL_MAX_ROWS: "6",
+          },
+          async () => {
+            const params: Omit<RowSearchParams, "tableId"> = {
+              query: {},
+              paginate: true,
+              limit: 3,
+              sort: "name",
+              sortType: SortType.STRING,
+              sortOrder: SortOrder.ASCENDING,
+            }
+            const page1 = await expectSearch(params).toContain([mainRows[0]])
+            expect(page1.hasNextPage).toBe(true)
+            expect(page1.bookmark).toBeDefined()
+            const page2 = await expectSearch({
+              ...params,
+              bookmark: page1.bookmark,
+            }).toContain([mainRows[1], mainRows[2]])
+            expect(page2.hasNextPage).toBe(false)
+          }
+        )
+      })
+    })
 })
@@ -17,8 +17,10 @@ import {
   TableSchema,
   TableSourceType,
   User,
+  ValidateTableImportResponse,
   ViewCalculation,
   ViewV2Enriched,
+  RowExportFormat,
 } from "@budibase/types"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
@@ -1086,7 +1088,10 @@ describe.each([
     })
   })
 
-  describe("import validation", () => {
+  describe.each([
+    [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
+    [RowExportFormat.JSON, (val: any) => val],
+  ])("import validation (%s)", (_, userParser) => {
     const basicSchema: TableSchema = {
       id: {
         type: FieldType.NUMBER,
@@ -1098,9 +1103,41 @@ describe.each([
       },
     }
 
-    describe("validateNewTableImport", () => {
-      it("can validate basic imports", async () => {
-        const result = await config.api.table.validateNewTableImport(
+    const importCases: [
+      string,
+      (rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
+    ][] = [
+      [
+        "validateNewTableImport",
+        async (rows: Row[], schema: TableSchema) => {
+          const result = await config.api.table.validateNewTableImport({
+            rows,
+            schema,
+          })
+          return result
+        },
+      ],
+      [
+        "validateExistingTableImport",
+        async (rows: Row[], schema: TableSchema) => {
+          const table = await config.api.table.save(
+            tableForDatasource(datasource, {
+              primary: ["id"],
+              schema,
+            })
+          )
+          const result = await config.api.table.validateExistingTableImport({
+            tableId: table._id,
+            rows,
+          })
+          return result
+        },
+      ],
+    ]
+
+    describe.each(importCases)("%s", (_, testDelegate) => {
+      it("validates basic imports", async () => {
+        const result = await testDelegate(
           [{ id: generator.natural(), name: generator.first() }],
           basicSchema
         )
@@ -1119,18 +1156,18 @@ describe.each([
       it.each(
         isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
       )("don't allow protected names in schema (%s)", async columnName => {
-        const result = await config.api.table.validateNewTableImport(
-          [
+        const result = await config.api.table.validateNewTableImport({
+          rows: [
             {
               id: generator.natural(),
               name: generator.first(),
               [columnName]: generator.word(),
             },
           ],
-          {
+          schema: {
             ...basicSchema,
-          }
-        )
+          },
+        })
 
         expect(result).toEqual({
           allValid: false,
@@ -1146,25 +1183,53 @@ describe.each([
         })
       })
 
+      it("does not allow imports without rows", async () => {
+        const result = await testDelegate([], basicSchema)
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {},
+        })
+      })
+
+      it("validates imports with some empty rows", async () => {
+        const result = await testDelegate(
+          [{}, { id: generator.natural(), name: generator.first() }, {}],
+          basicSchema
+        )
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
       isInternal &&
         it.each(
           isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
         )("don't allow protected names in the rows (%s)", async columnName => {
-          const result = await config.api.table.validateNewTableImport(
-            [
+          const result = await config.api.table.validateNewTableImport({
+            rows: [
               {
                 id: generator.natural(),
                 name: generator.first(),
               },
             ],
-            {
+            schema: {
               ...basicSchema,
               [columnName]: {
                 name: columnName,
                 type: FieldType.STRING,
               },
-            }
-          )
+            },
+          })
 
           expect(result).toEqual({
             allValid: false,
@@ -1179,20 +1244,24 @@ describe.each([
           },
         })
       })
-    })
 
-    describe("validateExistingTableImport", () => {
-      it("can validate basic imports", async () => {
-        const table = await config.api.table.save(
-          tableForDatasource(datasource, {
-            primary: ["id"],
-            schema: basicSchema,
-          })
-        )
-        const result = await config.api.table.validateExistingTableImport({
-          tableId: table._id,
-          rows: [{ id: generator.natural(), name: generator.first() }],
-        })
+      it("validates required fields and valid rows", async () => {
+        const schema: TableSchema = {
+          ...basicSchema,
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { presence: true },
+          },
+        }
+
+        const result = await testDelegate(
+          [
+            { id: generator.natural(), name: generator.first() },
+            { id: generator.natural(), name: generator.first() },
+          ],
+          schema
+        )
 
         expect(result).toEqual({
           allValid: true,
@@ -1205,6 +1274,154 @@ describe.each([
         })
       })
 
+      it("validates required fields and non-valid rows", async () => {
+        const schema: TableSchema = {
+          ...basicSchema,
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { presence: true },
+          },
+        }
+
+        const result = await testDelegate(
+          [
+            { id: generator.natural(), name: generator.first() },
+            { id: generator.natural(), name: "" },
+          ],
+          schema
+        )
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: false,
+          },
+        })
+      })
+
+      describe("bb references", () => {
+        const getUserValues = () => ({
+          _id: docIds.generateGlobalUserID(),
+          primaryDisplay: generator.first(),
+          email: generator.email({}),
+        })
+
+        it("can validate user column imports", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE_SINGLE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+
+        it("can validate user column imports with invalid data", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE_SINGLE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: "no valid user data",
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: false,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: false,
+            },
+          })
+        })
+
+        it("can validate users column imports", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+              externalType: "array",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser([
+                  getUserValues(),
+                  getUserValues(),
+                  getUserValues(),
+                ]),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+      })
+    })
+
+    describe("validateExistingTableImport", () => {
       isInternal &&
         it("can reimport _id fields for internal tables", async () => {
           const table = await config.api.table.save(
@@ -21,7 +21,8 @@ export async function getRow(
       ? IncludeRelationship.INCLUDE
       : IncludeRelationship.EXCLUDE,
   })
-  return response ? response[0] : response
+  const rows = response?.rows || []
+  return rows[0]
 }
 
 export async function save(
@@ -8,7 +8,6 @@ import {
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
 import * as external from "./search/external"
-import * as sqs from "./search/sqs"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
@@ -55,9 +54,9 @@
   if (isExternalTable) {
     return external.search(options, table)
   } else if (dbCore.isSqsEnabledForTenant()) {
-    return sqs.search(options, table)
+    return internal.sqs.search(options, table)
   } else {
-    return internal.search(options, table)
+    return internal.lucene.search(options, table)
   }
 }
 
@@ -47,7 +47,7 @@ function getPaginationAndLimitParameters(
       limit: limit + 1,
     }
     if (bookmark) {
-      paginateObj.offset = limit * bookmark
+      paginateObj.offset = bookmark
     }
   } else if (limit) {
     paginateObj = {
@@ -105,37 +105,37 @@ export async function search(
     paginate: paginateObj as PaginationJson,
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   }
-  const queries: Promise<Row[] | number>[] = []
-  queries.push(handleRequest(Operation.READ, tableId, parameters))
-  if (countRows) {
-    queries.push(handleRequest(Operation.COUNT, tableId, parameters))
-  }
-  const responses = await Promise.all(queries)
-  let rows = responses[0] as Row[]
-  const totalRows =
-    responses.length > 1 ? (responses[1] as number) : undefined
+  const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
+    handleRequest(Operation.READ, tableId, parameters),
+    countRows
+      ? handleRequest(Operation.COUNT, tableId, parameters)
+      : Promise.resolve(undefined),
+  ])
 
-  let hasNextPage = false
-  // remove the extra row if it's there
-  if (paginate && limit && rows.length > limit) {
-    rows.pop()
-    hasNextPage = true
-  }
-
-  if (options.fields) {
-    const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
-    rows = rows.map((r: any) => pick(r, fields))
-  }
-
-  rows = await outputProcessing<Row[]>(table, rows, {
+  let processed = await outputProcessing<Row[]>(table, rows, {
     preserveLinks: true,
     squash: true,
   })
 
+  let hasNextPage = false
+  // if the raw rows is greater than the limit then we likely need to paginate
+  if (paginate && limit && rawResponseSize > limit) {
+    hasNextPage = true
+    // processed rows has merged relationships down, this might not be more than limit
+    if (processed.length > limit) {
+      processed.pop()
+    }
+  }
+
+  if (options.fields) {
+    const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
+    processed = processed.map((r: any) => pick(r, fields))
+  }
+
   // need wrapper object for bookmarks etc when paginating
-  const response: SearchResponse<Row> = { rows, hasNextPage }
+  const response: SearchResponse<Row> = { rows: processed, hasNextPage }
   if (hasNextPage && bookmark != null) {
-    response.bookmark = bookmark + 1
+    response.bookmark = bookmark + processed.length
   }
   if (totalRows != null) {
     response.totalRows = totalRows
@@ -255,24 +255,21 @@ export async function exportRows(
 }
 
 export async function fetch(tableId: string): Promise<Row[]> {
-  const response = await handleRequest<Operation.READ>(
-    Operation.READ,
-    tableId,
-    {
-      includeSqlRelationships: IncludeRelationship.INCLUDE,
-    }
-  )
+  const response = await handleRequest(Operation.READ, tableId, {
+    includeSqlRelationships: IncludeRelationship.INCLUDE,
+  })
   const table = await sdk.tables.getTable(tableId)
-  return await outputProcessing<Row[]>(table, response, {
+  return await outputProcessing<Row[]>(table, response.rows, {
     preserveLinks: true,
     squash: true,
   })
 }
 
 export async function fetchRaw(tableId: string): Promise<Row[]> {
-  return await handleRequest<Operation.READ>(Operation.READ, tableId, {
+  const response = await handleRequest(Operation.READ, tableId, {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
+  return response.rows
 }
 
 export async function fetchView(viewName: string) {
@@ -0,0 +1,3 @@
+export * as sqs from "./sqs"
+export * as lucene from "./lucene"
+export * from "./internal"
@@ -1,90 +1,30 @@
 import { context, HTTPError } from "@budibase/backend-core"
-import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
-import env from "../../../../environment"
-import { fullSearch, paginatedSearch } from "./utils"
-import { getRowParams, InternalTables } from "../../../../db/utils"
+import env from "../../../../../environment"
+import { getRowParams, InternalTables } from "../../../../../db/utils"
 import {
   Database,
   DocumentType,
   Row,
-  RowSearchParams,
-  SearchResponse,
-  SortType,
   Table,
   TableSchema,
-  User,
 } from "@budibase/types"
-import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
-import { outputProcessing } from "../../../../utilities/rowProcessor"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import {
   csv,
   Format,
   json,
   jsonWithSchema,
-} from "../../../../api/controllers/view/exporters"
-import * as inMemoryViews from "../../../../db/inMemoryView"
+} from "../../../../../api/controllers/view/exporters"
+import * as inMemoryViews from "../../../../../db/inMemoryView"
 import {
   getFromDesignDoc,
   getFromMemoryDoc,
   migrateToDesignView,
   migrateToInMemoryView,
-} from "../../../../api/controllers/view/utils"
-import sdk from "../../../../sdk"
-import { ExportRowsParams, ExportRowsResult } from "./types"
-import pick from "lodash/pick"
-import { breakRowIdField } from "../../../../integrations/utils"
+} from "../../../../../api/controllers/view/utils"
+import sdk from "../../../../../sdk"
+import { ExportRowsParams, ExportRowsResult } from "../types"
+import { breakRowIdField } from "../../../../../integrations/utils"
 
-export async function search(
-  options: RowSearchParams,
-  table: Table
-): Promise<SearchResponse<Row>> {
-  const { tableId } = options
-
-  const { paginate, query } = options
-
-  const params: RowSearchParams = {
-    tableId: options.tableId,
-    sort: options.sort,
-    sortOrder: options.sortOrder,
-    sortType: options.sortType,
-    limit: options.limit,
-    bookmark: options.bookmark,
-    version: options.version,
-    disableEscaping: options.disableEscaping,
-    query: {},
-  }
-
-  if (params.sort && !params.sortType) {
-    const schema = table.schema
-    const sortField = schema[params.sort]
-    params.sortType =
-      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
-  }
-
-  let response
-  if (paginate) {
-    response = await paginatedSearch(query, params)
-  } else {
-    response = await fullSearch(query, params)
-  }
-
-  // Enrich search results with relationships
-  if (response.rows && response.rows.length) {
-    // enrich with global users if from users table
-    if (tableId === InternalTables.USER_METADATA) {
-      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
-    }
-
-    if (options.fields) {
-      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
-      response.rows = response.rows.map((r: any) => pick(r, fields))
-    }
-
-    response.rows = await outputProcessing(table, response.rows)
-  }
-
-  return response
-}
-
 export async function exportRows(
   options: ExportRowsParams
@@ -123,15 +63,12 @@ export async function exportRows(
 
     result = await outputProcessing<Row[]>(table, response)
   } else if (query) {
-    let searchResponse = await search(
-      {
-        tableId,
-        query,
-        sort,
-        sortOrder,
-      },
-      table
-    )
+    let searchResponse = await sdk.rows.search({
+      tableId,
+      query,
+      sort,
+      sortOrder,
+    })
     result = searchResponse.rows
   }
@@ -0,0 +1,66 @@
+import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
+import { fullSearch, paginatedSearch } from "../utils"
+import { InternalTables } from "../../../../../db/utils"
+import {
+  Row,
+  RowSearchParams,
+  SearchResponse,
+  SortType,
+  Table,
+  User,
+} from "@budibase/types"
+import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
+import pick from "lodash/pick"
+
+export async function search(
+  options: RowSearchParams,
+  table: Table
+): Promise<SearchResponse<Row>> {
+  const { tableId } = options
+
+  const { paginate, query } = options
+
+  const params: RowSearchParams = {
+    tableId: options.tableId,
+    sort: options.sort,
+    sortOrder: options.sortOrder,
+    sortType: options.sortType,
+    limit: options.limit,
+    bookmark: options.bookmark,
+    version: options.version,
+    disableEscaping: options.disableEscaping,
+    query: {},
+  }
+
+  if (params.sort && !params.sortType) {
+    const schema = table.schema
+    const sortField = schema[params.sort]
+    params.sortType =
+      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
+  }
+
+  let response
+  if (paginate) {
+    response = await paginatedSearch(query, params)
+  } else {
+    response = await fullSearch(query, params)
+  }
+
+  // Enrich search results with relationships
+  if (response.rows && response.rows.length) {
+    // enrich with global users if from users table
+    if (tableId === InternalTables.USER_METADATA) {
+      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
+    }
+
+    if (options.fields) {
+      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
+      response.rows = response.rows.map((r: any) => pick(r, fields))
+    }
+
+    response.rows = await outputProcessing(table, response.rows)
+  }
+
+  return response
+}
@@ -18,34 +18,35 @@ import {
 import {
   buildInternalRelationships,
   sqlOutputProcessing,
-} from "../../../../api/controllers/row/utils"
+} from "../../../../../api/controllers/row/utils"
 import {
   decodeNonAscii,
   mapToUserColumn,
   USER_COLUMN_PREFIX,
-} from "../../tables/internal/sqs"
-import sdk from "../../../index"
+} from "../../../tables/internal/sqs"
+import sdk from "../../../../index"
 import {
   context,
   sql,
   SQLITE_DESIGN_DOC_ID,
   SQS_DATASOURCE_INTERNAL,
 } from "@budibase/backend-core"
-import { generateJunctionTableID } from "../../../../db/utils"
-import AliasTables from "../sqlAlias"
-import { outputProcessing } from "../../../../utilities/rowProcessor"
+import { generateJunctionTableID } from "../../../../../db/utils"
+import AliasTables from "../../sqlAlias"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
-import { processRowCountResponse } from "../utils"
+import { processRowCountResponse } from "../../utils"
 import {
   updateFilterKeys,
   getRelationshipColumns,
   getTableIDList,
-} from "./filters"
+} from "../filters"
 import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
-import { isSearchingByRowID } from "./utils"
+import { isSearchingByRowID } from "../utils"
 import tracer from "dd-trace"
 
 const builder = new sql.Sql(SqlClient.SQL_LITE)
+const SQLITE_COLUMN_LIMIT = 2000
 const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
 const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
 const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
@@ -56,12 +57,14 @@ function buildInternalFieldList(
   opts?: { relationships?: RelationshipsJson[] }
 ) {
   let fieldList: string[] = []
-  const addJunctionFields = (relatedTable: Table, fields: string[]) => {
+  const getJunctionFields = (relatedTable: Table, fields: string[]) => {
+    const junctionFields: string[] = []
     fields.forEach(field => {
-      fieldList.push(
+      junctionFields.push(
        `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}`
      )
    })
+    return junctionFields
  }
  fieldList = fieldList.concat(
    PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
@@ -71,18 +74,22 @@ function buildInternalFieldList(
     if (!opts?.relationships && isRelationship) {
       continue
     }
-    if (isRelationship) {
+    if (!isRelationship) {
+      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+    } else {
       const linkCol = col as RelationshipFieldMetadata
       const relatedTable = tables.find(table => table._id === linkCol.tableId)
-      // no relationships provided, don't go more than a layer deep
-      if (relatedTable) {
-        fieldList = fieldList.concat(
-          buildInternalFieldList(relatedTable, tables)
-        )
-        addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
+      if (!relatedTable) {
+        continue
       }
-    } else {
-      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+      const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
+        getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
+      )
+      // break out of the loop if we have reached the max number of columns
+      if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
+        break
+      }
+      fieldList = fieldList.concat(relatedFields)
     }
   }
   return [...new Set(fieldList)]
@@ -320,25 +327,19 @@ export async function search(
       paginate = true
       request.paginate = {
         limit: params.limit + 1,
-        offset: bookmark * params.limit,
+        offset: bookmark,
       }
     }
 
     try {
-      const queries: Promise<Row[] | number>[] = []
-      queries.push(runSqlQuery(request, allTables, relationships))
-      if (options.countRows) {
-        // get the total count of rows
-        queries.push(
-          runSqlQuery(request, allTables, relationships, {
-            countTotalRows: true,
-          })
-        )
-      }
-      const responses = await Promise.all(queries)
-      let rows = responses[0] as Row[]
-      const totalRows =
-        responses.length > 1 ? (responses[1] as number) : undefined
+      const [rows, totalRows] = await Promise.all([
+        runSqlQuery(request, allTables, relationships),
+        options.countRows
+          ? runSqlQuery(request, allTables, relationships, {
+              countTotalRows: true,
+            })
+          : Promise.resolve(undefined),
+      ])
 
       // process from the format of tableId.column to expected format also
       // make sure JSON columns corrected
@@ -350,10 +351,13 @@ export async function search(
       )
 
       // check for pagination final row
-      let nextRow: Row | undefined
+      let nextRow: boolean = false
       if (paginate && params.limit && rows.length > params.limit) {
         // remove the extra row that confirmed if there is another row to move to
-        nextRow = processed.pop()
+        nextRow = true
+        if (processed.length > params.limit) {
+          processed.pop()
+        }
       }
 
       // get the rows
@@ -377,7 +381,7 @@ export async function search(
       // check for pagination
       if (paginate && nextRow) {
         response.hasNextPage = true
-        response.bookmark = bookmark + 1
+        response.bookmark = bookmark + processed.length
       }
       if (paginate && !nextRow) {
         response.hasNextPage = false
@@ -11,6 +11,7 @@ import {
   DeleteRows,
   DeleteRow,
   PaginatedSearchRowResponse,
+  RowExportFormat,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
 
@@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
   exportRows = async (
     tableId: string,
     body: ExportRowsRequest,
+    format: RowExportFormat = RowExportFormat.JSON,
     expectations?: Expectations
   ) => {
     const response = await this._requestRaw(
|
||||||
`/api/${tableId}/rows/exportRows`,
|
`/api/${tableId}/rows/exportRows`,
|
||||||
{
|
{
|
||||||
body,
|
body,
|
||||||
query: { format: "json" },
|
query: { format },
|
||||||
expectations,
|
expectations,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
|
@@ -1,13 +1,14 @@
 import {
   BulkImportRequest,
   BulkImportResponse,
+  CsvToJsonRequest,
+  CsvToJsonResponse,
   MigrateRequest,
   MigrateResponse,
-  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
-  TableSchema,
+  ValidateNewTableImportRequest,
   ValidateTableImportRequest,
   ValidateTableImportResponse,
 } from "@budibase/types"
|
||||||
}
|
}
|
||||||
|
|
||||||
validateNewTableImport = async (
|
validateNewTableImport = async (
|
||||||
rows: Row[],
|
body: ValidateNewTableImportRequest,
|
||||||
schema: TableSchema,
|
|
||||||
expectations?: Expectations
|
expectations?: Expectations
|
||||||
): Promise<ValidateTableImportResponse> => {
|
): Promise<ValidateTableImportResponse> => {
|
||||||
return await this._post<ValidateTableImportResponse>(
|
return await this._post<ValidateTableImportResponse>(
|
||||||
`/api/tables/validateNewTableImport`,
|
`/api/tables/validateNewTableImport`,
|
||||||
{
|
{
|
||||||
body: {
|
body,
|
||||||
rows,
|
|
||||||
schema,
|
|
||||||
},
|
|
||||||
expectations,
|
expectations,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
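Callers now assemble the `{ rows, schema }` request body themselves, which also lets tests exercise malformed payloads. A hedged usage sketch; `config.api.table`, `basicTable`, and the `{ status: 200 }` expectation are the usual test-harness conventions and are assumed here:

```ts
// Illustrative call against the reworked helper: the test builds the request
// body directly instead of the API wrapper assembling it.
await config.api.table.validateNewTableImport(
  {
    rows: [{ name: "Alice" }],
    schema: basicTable().schema,
  },
  { status: 200 }
)
```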
@@ -99,4 +96,14 @@ export class TableAPI extends TestAPI {
       }
     )
   }
+
+  csvToJson = async (
+    body: CsvToJsonRequest,
+    expectations?: Expectations
+  ): Promise<CsvToJsonResponse> => {
+    return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
+      body,
+      expectations,
+    })
+  }
 }
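The new helper posts a CSV string to /api/convert/csvToJson and returns the parsed rows. A rough standalone sketch of the conversion it exercises (a naive parser for unquoted CSV, not the server's actual implementation):

```ts
// Naive CSV-to-JSON conversion of the kind the new endpoint performs; it
// ignores quoting and escaping, which a real parser must handle.
function csvToJson(csv: string): Record<string, string>[] {
  const [headerLine, ...lines] = csv.trim().split("\n")
  const headers = headerLine.split(",")
  return lines.map(line => {
    const values = line.split(",")
    return Object.fromEntries(headers.map((h, i) => [h, values[i] ?? ""]))
  })
}

// csvToJson("name,age\nAlice,30\nBob,25")
// -> [{ name: "Alice", age: "30" }, { name: "Bob", age: "25" }]
```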
@@ -26,6 +26,10 @@ import {
   WebhookActionType,
   AutomationEventType,
   LoopStepType,
+  FieldSchema,
+  BBReferenceFieldSubType,
+  JsonFieldSubType,
+  AutoFieldSubType,
 } from "@budibase/types"
 import { LoopInput } from "../../definitions/automations"
 import { merge } from "lodash"

@@ -573,3 +577,161 @@ export function basicEnvironmentVariable(
     development: dev || prod,
   }
 }
+
+export function fullSchemaWithoutLinks({
+  allRequired,
+}: {
+  allRequired?: boolean
+}) {
+  const schema: {
+    [type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
+  } = {
+    [FieldType.STRING]: {
+      name: "string",
+      type: FieldType.STRING,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.LONGFORM]: {
+      name: "longform",
+      type: FieldType.LONGFORM,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.OPTIONS]: {
+      name: "options",
+      type: FieldType.OPTIONS,
+      constraints: {
+        presence: allRequired,
+        inclusion: ["option 1", "option 2", "option 3", "option 4"],
+      },
+    },
+    [FieldType.ARRAY]: {
+      name: "array",
+      type: FieldType.ARRAY,
+      constraints: {
+        presence: allRequired,
+        type: JsonFieldSubType.ARRAY,
+        inclusion: ["options 1", "options 2", "options 3", "options 4"],
+      },
+    },
+    [FieldType.NUMBER]: {
+      name: "number",
+      type: FieldType.NUMBER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BOOLEAN]: {
+      name: "boolean",
+      type: FieldType.BOOLEAN,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.DATETIME]: {
+      name: "datetime",
+      type: FieldType.DATETIME,
+      dateOnly: true,
+      timeOnly: false,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.FORMULA]: {
+      name: "formula",
+      type: FieldType.FORMULA,
+      formula: "any formula",
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BARCODEQR]: {
+      name: "barcodeqr",
+      type: FieldType.BARCODEQR,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BIGINT]: {
+      name: "bigint",
+      type: FieldType.BIGINT,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BB_REFERENCE]: {
+      name: "user",
+      type: FieldType.BB_REFERENCE,
+      subtype: BBReferenceFieldSubType.USER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BB_REFERENCE_SINGLE]: {
+      name: "users",
+      type: FieldType.BB_REFERENCE_SINGLE,
+      subtype: BBReferenceFieldSubType.USER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.ATTACHMENTS]: {
+      name: "attachments",
+      type: FieldType.ATTACHMENTS,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.ATTACHMENT_SINGLE]: {
+      name: "attachment_single",
+      type: FieldType.ATTACHMENT_SINGLE,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.AUTO]: {
+      name: "auto",
+      type: FieldType.AUTO,
+      subtype: AutoFieldSubType.AUTO_ID,
+      autocolumn: true,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.JSON]: {
+      name: "json",
+      type: FieldType.JSON,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.INTERNAL]: {
+      name: "internal",
+      type: FieldType.INTERNAL,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.SIGNATURE_SINGLE]: {
+      name: "signature_single",
+      type: FieldType.SIGNATURE_SINGLE,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+  }
+
+  return schema
+}
+export function basicAttachment() {
+  return {
+    key: generator.guid(),
+    name: generator.word(),
+    extension: generator.word(),
+    size: generator.natural(),
+    url: `/${generator.guid()}`,
+  }
+}
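fullSchemaWithoutLinks builds one column per non-link field type, with allRequired toggling a presence constraint on every column, so a single table can cover the whole type matrix in import and export tests. A hedged usage sketch (the assertion style is illustrative):

```ts
// Sketch of consuming the fixture: every column should carry a presence
// constraint when allRequired is set.
const schema = fullSchemaWithoutLinks({ allRequired: true })

for (const [type, field] of Object.entries(schema)) {
  if (field.constraints?.presence !== true) {
    throw new Error(`expected ${type} column "${field.name}" to be required`)
  }
}
```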
@@ -8,7 +8,6 @@ import {
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
-import { parseCsvExport } from "../api/controllers/view/exporters"
 
 type Rows = Array<Row>
 

@@ -85,7 +84,7 @@ export function validate(
           "Column names can't contain special characters"
       } else if (
         columnData == null &&
-        !schema[columnName].constraints?.presence
+        !helpers.schema.isRequired(constraints)
       ) {
         results.schemaValidation[columnName] = true
       } else if (

@@ -95,6 +94,12 @@ export function validate(
         isAutoColumn
       ) {
         return
+      } else if (
+        [FieldType.STRING].includes(columnType) &&
+        !columnData &&
+        helpers.schema.isRequired(constraints)
+      ) {
+        results.schemaValidation[columnName] = false
       } else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
         // If provided must be a valid number
         results.schemaValidation[columnName] = false
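Two things change in validate: the required check is delegated to helpers.schema.isRequired(constraints) instead of reading constraints?.presence directly, and an empty string in a required string column now fails validation. A minimal sketch of what such a helper plausibly does; presence can be a boolean or an object, and the exact logic of helpers.schema.isRequired is assumed:

```ts
// Assumed shape of the required check: presence may be a plain boolean or an
// object such as { allowEmpty: false }, so a truthiness read is not enough.
type Presence = boolean | { allowEmpty?: boolean }

function isRequired(constraints?: { presence?: Presence }): boolean {
  const presence = constraints?.presence
  if (typeof presence === "boolean") {
    return presence
  }
  return presence?.allowEmpty === false
}

// isRequired({ presence: true })                  -> true
// isRequired({ presence: { allowEmpty: false } }) -> true
// isRequired(undefined)                           -> false
```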
@@ -159,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
 
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
-      if (columnType === FieldType.NUMBER) {
+      if ([FieldType.NUMBER].includes(columnType)) {
         // If provided must be a valid number
         parsedRow[columnName] = columnData ? Number(columnData) : columnData
       } else if (

@@ -171,16 +176,23 @@ export function parse(rows: Rows, table: Table): Rows {
         parsedRow[columnName] = columnData
           ? new Date(columnData).toISOString()
           : columnData
+      } else if (
+        columnType === FieldType.JSON &&
+        typeof columnData === "string"
+      ) {
+        parsedRow[columnName] = parseJsonExport(columnData)
       } else if (columnType === FieldType.BB_REFERENCE) {
         let parsedValues: { _id: string }[] = columnData || []
-        if (columnData) {
-          parsedValues = parseCsvExport<{ _id: string }[]>(columnData)
+        if (columnData && typeof columnData === "string") {
+          parsedValues = parseJsonExport<{ _id: string }[]>(columnData)
         }
 
         parsedRow[columnName] = parsedValues?.map(u => u._id)
       } else if (columnType === FieldType.BB_REFERENCE_SINGLE) {
-        const parsedValue =
-          columnData && parseCsvExport<{ _id: string }>(columnData)
+        let parsedValue = columnData
+        if (columnData && typeof columnData === "string") {
+          parsedValue = parseJsonExport<{ _id: string }>(columnData)
+        }
         parsedRow[columnName] = parsedValue?._id
       } else if (
         (columnType === FieldType.ATTACHMENTS ||

@@ -188,7 +200,7 @@ export function parse(rows: Rows, table: Table): Rows {
           columnType === FieldType.SIGNATURE_SINGLE) &&
         typeof columnData === "string"
       ) {
-        parsedRow[columnName] = parseCsvExport(columnData)
+        parsedRow[columnName] = parseJsonExport(columnData)
       } else {
         parsedRow[columnName] = columnData
       }
@@ -204,32 +216,54 @@ function isValidBBReference(
   subtype: BBReferenceFieldSubType,
   isRequired: boolean
 ): boolean {
-  if (typeof data !== "string") {
-    return false
-  }
-
-  if (type === FieldType.BB_REFERENCE_SINGLE) {
-    if (!data) {
-      return !isRequired
-    }
-    const user = parseCsvExport<{ _id: string }>(data)
-    return db.isGlobalUserID(user._id)
-  }
-
-  switch (subtype) {
-    case BBReferenceFieldSubType.USER:
-    case BBReferenceFieldSubType.USERS: {
-      const userArray = parseCsvExport<{ _id: string }[]>(data)
-      if (!Array.isArray(userArray)) {
-        return false
-      }
-
-      const constainsWrongId = userArray.find(
-        user => !db.isGlobalUserID(user._id)
-      )
-      return !constainsWrongId
-    }
-    default:
-      throw utils.unreachable(subtype)
-  }
-}
+  try {
+    if (type === FieldType.BB_REFERENCE_SINGLE) {
+      if (!data) {
+        return !isRequired
+      }
+      const user = parseJsonExport<{ _id: string }>(data)
+      return db.isGlobalUserID(user._id)
+    }
+
+    switch (subtype) {
+      case BBReferenceFieldSubType.USER:
+      case BBReferenceFieldSubType.USERS: {
+        const userArray = parseJsonExport<{ _id: string }[]>(data)
+        if (!Array.isArray(userArray)) {
+          return false
+        }
+
+        const constainsWrongId = userArray.find(
+          user => !db.isGlobalUserID(user._id)
+        )
+        return !constainsWrongId
+      }
+      default:
+        throw utils.unreachable(subtype)
+    }
+  } catch {
+    return false
+  }
+}
+
+function parseJsonExport<T>(value: any) {
+  if (typeof value !== "string") {
+    return value
+  }
+  try {
+    const parsed = JSON.parse(value)
+
+    return parsed as T
+  } catch (e: any) {
+    if (
+      e.message.startsWith("Expected property name or '}' in JSON at position ")
+    ) {
+      // This was probably converted as CSV and it has single quotes instead of double ones
+      const parsed = JSON.parse(value.replace(/'/g, '"'))
+      return parsed as T
+    }
+
+    // It is not valid JSON
+    throw e
+  }
+}
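parseJsonExport replaces the old parseCsvExport helper: it first tries a plain JSON.parse, and only when the failure looks like single-quoted JSON (the artefact of a CSV round-trip) does it retry with quotes normalised; anything else still throws, which isValidBBReference converts to false via its try/catch. A standalone trace of that behaviour; note the quote replacement is global, so string values containing apostrophes would be corrupted, an accepted trade-off for export artefacts, and the matched error prefix is specific to newer V8 runtimes:

```ts
// Standalone re-sketch of the fallback: valid JSON parses directly; a
// single-quoted variant (as produced by some CSV exports) is retried with
// quotes swapped.
function parseJsonExportSketch<T>(value: unknown): T {
  if (typeof value !== "string") {
    return value as T
  }
  try {
    return JSON.parse(value) as T
  } catch (e: any) {
    if (
      e.message.startsWith("Expected property name or '}' in JSON at position ")
    ) {
      // probably CSV-converted JSON with single quotes instead of double ones
      return JSON.parse(value.replace(/'/g, '"')) as T
    }
    throw e // not valid JSON at all
  }
}

// parseJsonExportSketch<{ _id: string }>('{"_id":"us_1"}')  -> { _id: "us_1" }
// parseJsonExportSketch<{ _id: string }>("{'_id':'us_1'}")  -> { _id: "us_1" }
```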