Merge branch 'master' into fix/automations-ux

deanhannigan 2024-07-10 09:04:16 +01:00 committed by GitHub
commit 7e7447e180
25 changed files with 284 additions and 137 deletions

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.13",
+  "version": "2.29.15",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -1,5 +0,0 @@
-export {
-  CONSTANT_INTERNAL_ROW_COLS,
-  CONSTANT_EXTERNAL_ROW_COLS,
-  isInternalColumnName,
-} from "@budibase/shared-core"

View File

@@ -13,6 +13,7 @@ import {
   isDocument,
   RowResponse,
   RowValue,
+  SqlClient,
   SQLiteDefinition,
   SqlQueryBinding,
 } from "@budibase/types"
@@ -25,6 +26,7 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
 import { DDInstrumentedDatabase } from "../instrumentation"
 import { checkSlashesInUrl } from "../../helpers"
 import env from "../../environment"
+import { sqlLog } from "../../sql/utils"

 const DATABASE_NOT_FOUND = "Database does not exist."
@@ -322,6 +324,7 @@ export class DatabaseImpl implements Database {
   ): Promise<T[]> {
     const dbName = this.name
     const url = `/${dbName}/${SQLITE_DESIGN_DOC_ID}`
+    sqlLog(SqlClient.SQL_LITE, sql, parameters)
     return await this._sqlQuery<T[]>(url, "POST", {
       query: sql,
       args: parameters,

View File

@@ -2,4 +2,3 @@ export * from "./connections"
 export * from "./DatabaseImpl"
 export * from "./utils"
 export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"
-export * from "../constants"

View File

@@ -3,16 +3,20 @@ import * as dbCore from "../db"
 import {
   getNativeSql,
   isExternalTable,
-  isIsoDateString,
+  isValidISODateString,
   isValidFilter,
+  sqlLog,
+  isInvalidISODateString,
 } from "./utils"
 import { SqlStatements } from "./sqlStatements"
 import SqlTableQueryBuilder from "./sqlTable"
 import {
+  AnySearchFilter,
   BBReferenceFieldMetadata,
   FieldSchema,
   FieldType,
   INTERNAL_TABLE_SOURCE_ID,
+  InternalSearchFilterOperator,
   JsonFieldMetadata,
   JsonTypes,
   Operation,
@@ -38,11 +42,7 @@ const envLimit = environment.SQL_MAX_ROWS
   : null
 const BASE_LIMIT = envLimit || 5000

-// these are invalid dates sent by the client, need to convert them to a real max date
-const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
-const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
-
-function likeKey(client: string, key: string): string {
+function likeKey(client: string | string[], key: string): string {
   let start: string, end: string
   switch (client) {
     case SqlClient.MY_SQL:
@@ -75,10 +75,10 @@ function parse(input: any) {
   if (typeof input !== "string") {
     return input
   }
-  if (input === MAX_ISO_DATE || input === MIN_ISO_DATE) {
+  if (isInvalidISODateString(input)) {
     return null
   }
-  if (isIsoDateString(input)) {
+  if (isValidISODateString(input)) {
     return new Date(input.trim())
   }
   return input
@@ -208,17 +208,32 @@ class InternalBuilder {
       return alias || name
     }
     function iterate(
-      structure: { [key: string]: any },
-      fn: (key: string, value: any) => void
+      structure: AnySearchFilter,
+      fn: (key: string, value: any) => void,
+      complexKeyFn?: (key: string[], value: any) => void
     ) {
-      for (let [key, value] of Object.entries(structure)) {
+      for (const key in structure) {
+        const value = structure[key]
         const updatedKey = dbCore.removeKeyNumbering(key)
         const isRelationshipField = updatedKey.includes(".")
-        if (!opts.relationship && !isRelationshipField) {
+
+        let castedTypeValue
+        if (
+          key === InternalSearchFilterOperator.COMPLEX_ID_OPERATOR &&
+          (castedTypeValue = structure[key]) &&
+          complexKeyFn
+        ) {
+          const alias = getTableAlias(tableName)
+          complexKeyFn(
+            castedTypeValue.id.map((x: string) =>
+              alias ? `${alias}.${x}` : x
+            ),
+            castedTypeValue.values
+          )
+        } else if (!opts.relationship && !isRelationshipField) {
           const alias = getTableAlias(tableName)
           fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
-        }
-        if (opts.relationship && isRelationshipField) {
+        } else if (opts.relationship && isRelationshipField) {
           const [filterTableName, property] = updatedKey.split(".")
           const alias = getTableAlias(filterTableName)
           fn(alias ? `${alias}.${property}` : property, value)
@@ -241,7 +256,7 @@ class InternalBuilder {
       }
     }

-    const contains = (mode: object, any: boolean = false) => {
+    const contains = (mode: AnySearchFilter, any: boolean = false) => {
       const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
       const not = mode === filters?.notContains ? "NOT " : ""
       function stringifyArray(value: Array<any>, quoteStyle = '"'): string {
@@ -253,7 +268,7 @@ class InternalBuilder {
         return `[${value.join(",")}]`
       }
       if (this.client === SqlClient.POSTGRES) {
-        iterate(mode, (key: string, value: Array<any>) => {
+        iterate(mode, (key, value) => {
           const wrap = any ? "" : "'"
           const op = any ? "\\?| array" : "@>"
           const fieldNames = key.split(/\./g)
@@ -268,7 +283,7 @@ class InternalBuilder {
         })
       } else if (this.client === SqlClient.MY_SQL) {
         const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
-        iterate(mode, (key: string, value: Array<any>) => {
+        iterate(mode, (key, value) => {
           query = query[rawFnc](
             `${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
               value
@@ -277,7 +292,7 @@ class InternalBuilder {
         })
       } else {
         const andOr = mode === filters?.containsAny ? " OR " : " AND "
-        iterate(mode, (key: string, value: Array<any>) => {
+        iterate(mode, (key, value) => {
           let statement = ""
           for (let i in value) {
             if (typeof value[i] === "string") {
@@ -301,10 +316,16 @@ class InternalBuilder {
       }
     }
     if (filters.oneOf) {
-      iterate(filters.oneOf, (key, array) => {
-        const fnc = allOr ? "orWhereIn" : "whereIn"
-        query = query[fnc](key, Array.isArray(array) ? array : [array])
-      })
+      const fnc = allOr ? "orWhereIn" : "whereIn"
+      iterate(
+        filters.oneOf,
+        (key: string, array) => {
+          query = query[fnc](key, Array.isArray(array) ? array : [array])
+        },
+        (key: string[], array) => {
+          query = query[fnc](key, Array.isArray(array) ? array : [array])
+        }
+      )
     }
     if (filters.string) {
       iterate(filters.string, (key, value) => {
@@ -746,6 +767,7 @@ class InternalBuilder {
 class SqlQueryBuilder extends SqlTableQueryBuilder {
   private readonly limit: number
+
   // pass through client to get flavour of SQL
   constructor(client: string, limit: number = BASE_LIMIT) {
     super(client)
@@ -938,15 +960,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   }

   log(query: string, values?: SqlQueryBinding) {
-    if (!environment.SQL_LOGGING_ENABLE) {
-      return
-    }
-    const sqlClient = this.getSqlClient()
-    let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"`
-    if (values) {
-      string += ` values="${values.join(", ")}"`
-    }
-    console.log(string)
+    sqlLog(this.getSqlClient(), query, values)
   }
 }
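Illustrative note, not part of this PR: the new `complexKeyFn` branch in `iterate` passes an array of column names as the key to knex's `whereIn`, which knex can render as a tuple-style `IN` clause. A minimal standalone sketch of that knex behaviour, with made-up table and column names, assuming the `pg` dialect package is installed:

```ts
import { knex } from "knex"

// Hypothetical example – not Budibase code. Requires the "pg" package for the dialect.
const db = knex({ client: "pg" })

const sql = db("tasks")
  .whereIn(
    ["taskid", "completed"], // composite key columns
    [
      [1, true],
      [2, false],
    ] // one tuple of values per row ID
  )
  .toString()

// Roughly: select * from "tasks" where ("taskid", "completed") in ((1, true), (2, false))
console.log(sql)
```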

View File

@@ -2,10 +2,12 @@ import { DocumentType, SqlQuery, Table, TableSourceType } from "@budibase/types"
 import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
 import { Knex } from "knex"
 import { SEPARATOR } from "../db"
+import environment from "../environment"

 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
 const ENCODED_SPACE = encodeURIComponent(" ")
+const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/

 export function isExternalTableID(tableId: string) {
   return tableId.startsWith(DocumentType.DATASOURCE + SEPARATOR)
@@ -120,15 +122,38 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
   }
 }

-export function isIsoDateString(str: string) {
+export function isInvalidISODateString(str: string) {
   const trimmedValue = str.trim()
-  if (!/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/.test(trimmedValue)) {
+  if (!ISO_DATE_REGEX.test(trimmedValue)) {
     return false
   }
   let d = new Date(trimmedValue)
+  return isNaN(d.getTime())
+}
+
+export function isValidISODateString(str: string) {
+  const trimmedValue = str.trim()
+  if (!ISO_DATE_REGEX.test(trimmedValue)) {
+    return false
+  }
+  let d = new Date(trimmedValue)
+  if (isNaN(d.getTime())) {
+    return false
+  }
   return d.toISOString() === trimmedValue
 }

 export function isValidFilter(value: any) {
   return value != null && value !== ""
 }
+
+export function sqlLog(client: string, query: string, values?: any[]) {
+  if (!environment.SQL_LOGGING_ENABLE) {
+    return
+  }
+  let string = `[SQL] [${client.toUpperCase()}] query="${query}"`
+  if (values) {
+    string += ` values="${values.join(", ")}"`
+  }
+  console.log(string)
+}
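Illustrative note, not part of this PR: the split of the old `isIsoDateString` into the two helpers above matters for `parse()` in the SQL builder, where sentinel values such as `"9999-00-00T00:00:00.000Z"` sent by the client are detected and converted to `null` rather than compared against hard-coded constants. A small sketch of the expected behaviour, assuming the helpers are imported from the utils module shown above (the exact path is not visible in this diff):

```ts
import { isInvalidISODateString, isValidISODateString } from "./utils"

// Sentinel dates previously handled via MIN_ISO_DATE / MAX_ISO_DATE: they match the
// ISO regex but do not parse to a real date, so they are classed as invalid and
// parse() turns them into null (an open-ended range bound).
isInvalidISODateString("0000-00-00T00:00:00.000Z") // true
isInvalidISODateString("9999-00-00T00:00:00.000Z") // true

// A genuine ISO timestamp round-trips through toISOString() and is valid.
isValidISODateString("2024-07-03T10:00:00.000Z") // true

// A date-only string fails the full ISO regex, so both helpers return false.
isInvalidISODateString("2024-07-03") // false
isValidISODateString("2024-07-03") // false
```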

View File

@@ -1,4 +1,7 @@
-import { db } from "../../../src"
+import {
+  CONSTANT_EXTERNAL_ROW_COLS,
+  CONSTANT_INTERNAL_ROW_COLS,
+} from "@budibase/shared-core"

 export function expectFunctionWasCalledTimesWith(
   jestFunction: any,
@@ -11,7 +14,7 @@ export function expectFunctionWasCalledTimesWith(
 }

 export const expectAnyInternalColsAttributes: {
-  [K in (typeof db.CONSTANT_INTERNAL_ROW_COLS)[number]]: any
+  [K in (typeof CONSTANT_INTERNAL_ROW_COLS)[number]]: any
 } = {
   tableId: expect.anything(),
   type: expect.anything(),
@@ -22,7 +25,7 @@ export const expectAnyInternalColsAttributes: {
 }

 export const expectAnyExternalColsAttributes: {
-  [K in (typeof db.CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
+  [K in (typeof CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
 } = {
   tableId: expect.anything(),
   _id: expect.anything(),

View File

@@ -3,12 +3,11 @@
   import { Modal, ActionButton, TooltipType, TempTooltip } from "@budibase/bbui"
   import GridCreateViewModal from "../../modals/grid/GridCreateViewModal.svelte"

-  const { rows, columns, filter } = getContext("grid")
+  const { filter } = getContext("grid")

   let modal
   let firstFilterUsage = false

-  $: disabled = !$columns.length || !$rows.length
   $: {
     if ($filter?.length && !firstFilterUsage) {
       firstFilterUsage = true
@@ -21,7 +20,7 @@
   type={TooltipType.Info}
   condition={firstFilterUsage}
 >
-  <ActionButton {disabled} icon="CollectionAdd" quiet on:click={modal.show}>
+  <ActionButton icon="CollectionAdd" quiet on:click={modal.show}>
     Create view
   </ActionButton>
 </TempTooltip>

View File

@@ -146,13 +146,13 @@ const automationActions = store => ({
       await store.actions.save(automation)
       notifications.success(
         `Automation ${
-          automation.disabled ? "enabled" : "disabled"
+          automation.disabled ? "disabled" : "enabled"
         } successfully`
       )
     } catch (error) {
       notifications.error(
         `Error ${
-          automation && automation.disabled ? "enabling" : "disabling"
+          automation && automation.disabled ? "disabling" : "enabling"
         } automation`
       )
     }

View File

@@ -56,7 +56,7 @@
 <Modal
   on:cancel={handleModalClose}
   bind:this={modal}
-  disableCancel={$builderStore.inBuilder}
+  disableCancel={$builderStore.inBuilder || ignoreClicksOutside}
   zIndex={2}
 >
   <div use:styleable={$component.styles} class={`modal-content ${size}`}>

View File

@@ -1 +1 @@
-Subproject commit 11379517b76264a7f938c2d520bd259f586edada
+Subproject commit 7dbe323aec724ae6336b13c06aaefa4a89837edf

View File

@@ -7,6 +7,7 @@ import {
   FieldType,
   FilterType,
   IncludeRelationship,
+  InternalSearchFilterOperator,
   isManyToOne,
   OneToManyRelationshipFieldMetadata,
   Operation,
@@ -189,15 +190,22 @@ export class ExternalRequest<T extends Operation> {
     if (filters) {
       // need to map over the filters and make sure the _id field isn't present
       let prefix = 1
-      for (let operator of Object.values(filters)) {
-        for (let field of Object.keys(operator || {})) {
+      for (const operator of Object.values(filters)) {
+        for (const field of Object.keys(operator || {})) {
           if (dbCore.removeKeyNumbering(field) === "_id") {
             if (primary) {
               const parts = breakRowIdField(operator[field])
-              for (let field of primary) {
-                operator[`${prefix}:${field}`] = parts.shift()
+              if (primary.length > 1) {
+                operator[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR] = {
+                  id: primary,
+                  values: parts[0],
+                }
+              } else {
+                for (let field of primary) {
+                  operator[`${prefix}:${field}`] = parts.shift()
+                }
+                prefix++
               }
-              prefix++
             }
             // make sure this field doesn't exist on any filter
             delete operator[field]
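Illustrative note, not part of this PR: with the change above, a `_id` filter on a table whose primary key spans more than one column is no longer expanded into numbered per-column keys; the `_id` entry is replaced by the internal complex-ID operator, whose shape matches the `ArrayFilter` type added later in this diff. A hypothetical example of the resulting filter object, for instance under `oneOf` as used by `exportRows` (column names and values are made up):

```ts
import { InternalSearchFilterOperator } from "@budibase/types"

// Sketch of the remapped filter for a table with primary = ["number", "string"].
const remappedFilter = {
  oneOf: {
    [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]: {
      id: ["number", "string"], // the composite primary key columns
      values: ["42", "abc"], // the decoded parts of the row ID
    },
  },
}
```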

View File

@@ -1428,22 +1428,6 @@ describe.each([
       expect(row._id).toEqual(existing._id)
     })

-    it("should return an error on composite keys", async () => {
-      const existing = await config.api.row.save(table._id!, {})
-      await config.api.row.exportRows(
-        table._id!,
-        {
-          rows: [`['${existing._id!}']`, "['d001', '10111']"],
-        },
-        {
-          status: 400,
-          body: {
-            message: "Export data does not support composite keys.",
-          },
-        }
-      )
-    })
-
     it("should return an error if no table is found", async () => {
       const existing = await config.api.row.save(table._id!, {})
       await config.api.row.exportRows(
@@ -1452,6 +1436,46 @@ describe.each([
         { status: 404 }
       )
     })
+
+    // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+    // to identity columns. This is not something Budibase does currently.
+    providerType !== DatabaseName.SQL_SERVER &&
+      it("should handle filtering by composite primary keys", async () => {
+        const tableRequest = saveTableRequest({
+          primary: ["number", "string"],
+          schema: {
+            string: {
+              type: FieldType.STRING,
+              name: "string",
+            },
+            number: {
+              type: FieldType.NUMBER,
+              name: "number",
+            },
+          },
+        })
+        delete tableRequest.schema.id
+
+        const table = await config.api.table.save(tableRequest)
+        const rows = await Promise.all(
+          generator
+            .unique(
+              () => ({
+                string: generator.word({ length: 30 }),
+                number: generator.integer({ min: 0, max: 10000 }),
+              }),
+              10
+            )
+            .map(d => config.api.row.save(table._id!, d))
+        )
+
+        const res = await config.api.row.exportRows(table._id!, {
+          rows: _.sampleSize(rows, 3).map(r => r._id!),
+        })
+        const results = JSON.parse(res)
+        expect(results.length).toEqual(3)
+      })
   })

   let o2mTable: Table

View File

@@ -30,6 +30,7 @@ import { encodeJSBinding } from "@budibase/string-templates"
 import { dataFilters } from "@budibase/shared-core"
 import { Knex } from "knex"
 import { structures } from "@budibase/backend-core/tests"
+import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"

 describe.each([
   ["in-memory", undefined],
@@ -2167,6 +2168,53 @@ describe.each([
     }
   )

+  isInternal &&
+    describe("sample data", () => {
+      beforeAll(async () => {
+        await config.api.application.addSampleData(config.appId!)
+        table = DEFAULT_EMPLOYEE_TABLE_SCHEMA
+      })
+
+      it("should be able to search sample data", async () => {
+        await expectSearch({
+          query: {},
+        }).toContain([
+          {
+            "First Name": "Mandy",
+          },
+        ])
+      })
+    })
+
+  describe.each([
+    { low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" },
+    { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" },
+    { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
+    { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
+  ])("date special cases", ({ low, high }) => {
+    const earlyDate = "2024-07-03T10:00:00.000Z",
+      laterDate = "2024-07-03T11:00:00.000Z"
+
+    beforeAll(async () => {
+      table = await createTable({
+        date: {
+          name: "date",
+          type: FieldType.DATETIME,
+        },
+      })
+      await createRows([{ date: earlyDate }, { date: laterDate }])
+    })
+
+    it("should be able to handle a date search", async () => {
+      await expectSearch({
+        query: {
+          range: {
+            "1:date": { low, high },
+          },
+        },
+      }).toContainExactly([{ date: earlyDate }, { date: laterDate }])
+    })
+  })
+
   describe.each([
     "名前", // Japanese for "name"
     "Benutzer-ID", // German for "user ID", includes a hyphen

View File

@@ -173,3 +173,9 @@ export const DEFAULT_INVENTORY_TABLE_ID = constants.DEFAULT_INVENTORY_TABLE_ID
 export const DEFAULT_EXPENSES_TABLE_ID = constants.DEFAULT_EXPENSES_TABLE_ID
 export const DEFAULT_EMPLOYEE_TABLE_ID = constants.DEFAULT_EMPLOYEE_TABLE_ID
 export const DEFAULT_BB_DATASOURCE_ID = constants.DEFAULT_BB_DATASOURCE_ID
+export const DEFAULT_TABLE_IDS = [
+  DEFAULT_JOBS_TABLE_ID,
+  DEFAULT_INVENTORY_TABLE_ID,
+  DEFAULT_EXPENSES_TABLE_ID,
+  DEFAULT_EMPLOYEE_TABLE_ID,
+]

View File

@@ -619,6 +619,13 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
   },
 }

+export const DEFAULT_TABLES: Table[] = [
+  DEFAULT_INVENTORY_TABLE_SCHEMA,
+  DEFAULT_EMPLOYEE_TABLE_SCHEMA,
+  DEFAULT_JOBS_TABLE_SCHEMA,
+  DEFAULT_EXPENSES_TABLE_SCHEMA,
+]
+
 export async function buildDefaultDocs() {
   const inventoryData = await tableImport(
     DEFAULT_INVENTORY_TABLE_SCHEMA,

View File

@@ -16,9 +16,9 @@ import {
   breakExternalTableId,
   breakRowIdField,
 } from "../../../../integrations/utils"
-import { utils } from "@budibase/shared-core"
+import { utils, CONSTANT_EXTERNAL_ROW_COLS } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
-import { db, HTTPError } from "@budibase/backend-core"
+import { HTTPError } from "@budibase/backend-core"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"
@@ -99,7 +99,7 @@ export async function search(
   }

   if (options.fields) {
-    const fields = [...options.fields, ...db.CONSTANT_EXTERNAL_ROW_COLS]
+    const fields = [...options.fields, ...CONSTANT_EXTERNAL_ROW_COLS]
     rows = rows.map((r: any) => pick(r, fields))
   }
@@ -158,10 +158,7 @@ export async function exportRows(
         _id: rowIds.map((row: string) => {
           const ids = breakRowIdField(row)
           if (ids.length > 1) {
-            throw new HTTPError(
-              "Export data does not support composite keys.",
-              400
-            )
+            return ids
           }
           return ids[0]
         }),

View File

@ -1,4 +1,5 @@
import { context, db, HTTPError } from "@budibase/backend-core" import { context, HTTPError } from "@budibase/backend-core"
import { CONSTANT_INTERNAL_ROW_COLS } from "@budibase/shared-core"
import env from "../../../../environment" import env from "../../../../environment"
import { fullSearch, paginatedSearch } from "./utils" import { fullSearch, paginatedSearch } from "./utils"
import { getRowParams, InternalTables } from "../../../../db/utils" import { getRowParams, InternalTables } from "../../../../db/utils"
@ -74,7 +75,7 @@ export async function search(
} }
if (options.fields) { if (options.fields) {
const fields = [...options.fields, ...db.CONSTANT_INTERNAL_ROW_COLS] const fields = [...options.fields, ...CONSTANT_INTERNAL_ROW_COLS]
response.rows = response.rows.map((r: any) => pick(r, fields)) response.rows = response.rows.map((r: any) => pick(r, fields))
} }

View File

@@ -41,6 +41,7 @@ import {
   getTableIDList,
 } from "./filters"
 import { dataFilters } from "@budibase/shared-core"
+import { DEFAULT_TABLE_IDS } from "../../../../constants"

 const builder = new sql.Sql(SqlClient.SQL_LITE)
 const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
@@ -211,6 +212,18 @@ async function runSqlQuery(
   return response
 }

+function resyncDefinitionsRequired(status: number, message: string) {
+  // pre data_ prefix on column names, need to resync
+  return (
+    (status === 400 && message?.match(USER_COLUMN_PREFIX_REGEX)) ||
+    // default tables aren't included in definition
+    (status === 400 &&
+      DEFAULT_TABLE_IDS.find(tableId => message?.includes(tableId))) ||
+    // no design document found, needs a full sync
+    (status === 404 && message?.includes(SQLITE_DESIGN_DOC_ID))
+  )
+}
+
 export async function search(
   options: RowSearchParams,
   table: Table
@@ -338,10 +351,7 @@ export async function search(
     return response
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
-    const syncAndRepeat =
-      (err.status === 400 && msg?.match(USER_COLUMN_PREFIX_REGEX)) ||
-      (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID))
-    if (syncAndRepeat) {
+    if (resyncDefinitionsRequired(err.status, msg)) {
       await sdk.tables.sqs.syncDefinition()
       return search(options, table)
     }

View File

@@ -15,6 +15,7 @@ import {
   generateJunctionTableID,
 } from "../../../../db/utils"
 import { isEqual } from "lodash"
+import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default"

 const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@@ -126,8 +127,9 @@ function mapTable(table: Table): SQLiteTables {
 // nothing exists, need to iterate though existing tables
 async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {
   const tables = await tablesSdk.getAllInternalTables()
+  const defaultTables = DEFAULT_TABLES
   const definition = sql.designDoc.base("tableId")
-  for (let table of tables) {
+  for (let table of tables.concat(defaultTables)) {
     definition.sql.tables = {
       ...definition.sql.tables,
       ...mapTable(table),

View File

@@ -18,6 +18,7 @@ import sdk from "../../../sdk"
 import { isExternalTableID } from "../../../integrations/utils"
 import { EventType, updateLinks } from "../../../db/linkedRows"
 import { cloneDeep } from "lodash"
+import { isInternalColumnName } from "@budibase/shared-core"

 export interface MigrationResult {
   tablesUpdated: Table[]
@@ -36,7 +37,7 @@ export async function migrate(
     throw new BadRequestError(`Column name cannot be empty`)
   }

-  if (dbCore.isInternalColumnName(newColumnName)) {
+  if (isInternalColumnName(newColumnName)) {
     throw new BadRequestError(`Column name cannot be a reserved column name`)
   }

View File

@@ -6,9 +6,13 @@ import {
   ViewV2,
   ViewV2Enriched,
 } from "@budibase/types"
-import { HTTPError, db as dbCore } from "@budibase/backend-core"
+import { HTTPError } from "@budibase/backend-core"
 import { features } from "@budibase/pro"
-import { helpers } from "@budibase/shared-core"
+import {
+  helpers,
+  CONSTANT_EXTERNAL_ROW_COLS,
+  CONSTANT_INTERNAL_ROW_COLS,
+} from "@budibase/shared-core"
 import { cloneDeep } from "lodash/fp"
 import * as utils from "../../../db/utils"
@@ -144,8 +148,8 @@ export function allowedFields(view: View | ViewV2) {
       const fieldSchema = view.schema![key]
       return fieldSchema.visible && !fieldSchema.readonly
     }),
-    ...dbCore.CONSTANT_EXTERNAL_ROW_COLS,
-    ...dbCore.CONSTANT_INTERNAL_ROW_COLS,
+    ...CONSTANT_EXTERNAL_ROW_COLS,
+    ...CONSTANT_INTERNAL_ROW_COLS,
   ]
 }

View File

@@ -149,4 +149,8 @@ export class ApplicationAPI extends TestAPI {
       query: { status },
     })
   }
+
+  addSampleData = async (appId: string): Promise<void> => {
+    await this._post(`/api/applications/${appId}/sample`)
+  }
 }

View File

@@ -310,16 +310,12 @@ export const buildQuery = (filter: SearchFilter[]) => {
         query.equal = query.equal || {}
         query.equal[field] = true
       } else {
-        query[queryOperator] = {
-          ...query[queryOperator],
-          [field]: value,
-        }
+        query[queryOperator] ??= {}
+        query[queryOperator]![field] = value
       }
     } else {
-      query[queryOperator] = {
-        ...query[queryOperator],
-        [field]: value,
-      }
+      query[queryOperator] ??= {}
+      query[queryOperator]![field] = value
     }
   }
 })
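Illustrative note, not part of this PR: the rewrite above relies on logical nullish assignment, which only assigns when the target is `null` or `undefined`, so the operator bucket is created once and then mutated in place instead of being re-spread for every field. A minimal sketch with a hypothetical object:

```ts
// Hypothetical standalone example of the ??= pattern used above.
const query: Record<string, Record<string, unknown>> = {}

function addFilter(operator: string, field: string, value: unknown) {
  query[operator] ??= {} // create the bucket only on first use
  query[operator][field] = value // then mutate it in place
}

addFilter("string", "name", "foo")
addFilter("string", "city", "bar")
// query is now { string: { name: "foo", city: "bar" } }
```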

View File

@@ -17,51 +17,52 @@ export enum SearchFilterOperator {
   CONTAINS_ANY = "containsAny",
 }

+export enum InternalSearchFilterOperator {
+  COMPLEX_ID_OPERATOR = "_complexIdOperator",
+}
+
+type BasicFilter<T = any> = Record<string, T> & {
+  [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
+}
+
+type ArrayFilter = Record<string, any[]> & {
+  [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: {
+    id: string[]
+    values: string[]
+  }
+}
+
+type RangeFilter = Record<
+  string,
+  | {
+      high: number | string
+      low: number | string
+    }
+  | { high: number | string }
+  | { low: number | string }
+> & {
+  [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
+}
+
+export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter
+
 export interface SearchFilters {
   allOr?: boolean
   // TODO: this is just around for now - we need a better way to do or/and
   // allows just fuzzy to be or - all the fuzzy/like parameters
   fuzzyOr?: boolean
   onEmptyFilter?: EmptyFilterOption
-  [SearchFilterOperator.STRING]?: {
-    [key: string]: string
-  }
-  [SearchFilterOperator.FUZZY]?: {
-    [key: string]: string
-  }
-  [SearchFilterOperator.RANGE]?: {
-    [key: string]:
-      | {
-          high: number | string
-          low: number | string
-        }
-      | { high: number | string }
-      | { low: number | string }
-  }
-  [SearchFilterOperator.EQUAL]?: {
-    [key: string]: any
-  }
-  [SearchFilterOperator.NOT_EQUAL]?: {
-    [key: string]: any
-  }
-  [SearchFilterOperator.EMPTY]?: {
-    [key: string]: any
-  }
-  [SearchFilterOperator.NOT_EMPTY]?: {
-    [key: string]: any
-  }
-  [SearchFilterOperator.ONE_OF]?: {
-    [key: string]: any[]
-  }
-  [SearchFilterOperator.CONTAINS]?: {
-    [key: string]: any[]
-  }
-  [SearchFilterOperator.NOT_CONTAINS]?: {
-    [key: string]: any[]
-  }
-  [SearchFilterOperator.CONTAINS_ANY]?: {
-    [key: string]: any[]
-  }
+  [SearchFilterOperator.STRING]?: BasicFilter<string>
+  [SearchFilterOperator.FUZZY]?: BasicFilter<string>
+  [SearchFilterOperator.RANGE]?: RangeFilter
+  [SearchFilterOperator.EQUAL]?: BasicFilter
+  [SearchFilterOperator.NOT_EQUAL]?: BasicFilter
+  [SearchFilterOperator.EMPTY]?: BasicFilter
+  [SearchFilterOperator.NOT_EMPTY]?: BasicFilter
+  [SearchFilterOperator.ONE_OF]?: ArrayFilter
+  [SearchFilterOperator.CONTAINS]?: ArrayFilter
+  [SearchFilterOperator.NOT_CONTAINS]?: ArrayFilter
+  [SearchFilterOperator.CONTAINS_ANY]?: ArrayFilter
   // specific to SQS/SQLite search on internal tables this can be used
   // to make sure the documents returned are always filtered down to a
   // specific document type (such as just rows)