Merge branch 'master' into feature/automation-grouping
commit 3a1b374f32
@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
-"version": "2.29.13",
+"version": "2.29.15",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -1,5 +0,0 @@
-export {
-CONSTANT_INTERNAL_ROW_COLS,
-CONSTANT_EXTERNAL_ROW_COLS,
-isInternalColumnName,
-} from "@budibase/shared-core"
@@ -13,6 +13,7 @@ import {
isDocument,
RowResponse,
RowValue,
SqlClient,
SQLiteDefinition,
SqlQueryBinding,
} from "@budibase/types"
@@ -25,6 +26,7 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import env from "../../environment"
+import { sqlLog } from "../../sql/utils"

const DATABASE_NOT_FOUND = "Database does not exist."
@@ -322,6 +324,7 @@ export class DatabaseImpl implements Database {
): Promise<T[]> {
const dbName = this.name
const url = `/${dbName}/${SQLITE_DESIGN_DOC_ID}`
+sqlLog(SqlClient.SQL_LITE, sql, parameters)
return await this._sqlQuery<T[]>(url, "POST", {
query: sql,
args: parameters,
@@ -2,4 +2,3 @@ export * from "./connections"
export * from "./DatabaseImpl"
export * from "./utils"
export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"
-export * from "../constants"
@@ -3,16 +3,20 @@ import * as dbCore from "../db"
import {
getNativeSql,
isExternalTable,
-isIsoDateString,
+isValidISODateString,
isValidFilter,
+sqlLog,
+isInvalidISODateString,
} from "./utils"
import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable"
import {
+AnySearchFilter,
BBReferenceFieldMetadata,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
+InternalSearchFilterOperator,
JsonFieldMetadata,
JsonTypes,
Operation,
@@ -38,11 +42,7 @@ const envLimit = environment.SQL_MAX_ROWS
: null
const BASE_LIMIT = envLimit || 5000

-// these are invalid dates sent by the client, need to convert them to a real max date
-const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
-const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"

-function likeKey(client: string, key: string): string {
+function likeKey(client: string | string[], key: string): string {
let start: string, end: string
switch (client) {
case SqlClient.MY_SQL:
@@ -75,10 +75,10 @@ function parse(input: any)
if (typeof input !== "string") {
return input
}
-if (input === MAX_ISO_DATE || input === MIN_ISO_DATE) {
+if (isInvalidISODateString(input)) {
return null
}
-if (isIsoDateString(input)) {
+if (isValidISODateString(input)) {
return new Date(input.trim())
}
return input
@@ -208,17 +208,32 @@ class InternalBuilder
return alias || name
}
function iterate(
-structure: { [key: string]: any },
-fn: (key: string, value: any) => void
+structure: AnySearchFilter,
+fn: (key: string, value: any) => void,
+complexKeyFn?: (key: string[], value: any) => void
) {
-for (let [key, value] of Object.entries(structure)) {
+for (const key in structure) {
+const value = structure[key]
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
-if (!opts.relationship && !isRelationshipField) {

+let castedTypeValue
+if (
+key === InternalSearchFilterOperator.COMPLEX_ID_OPERATOR &&
+(castedTypeValue = structure[key]) &&
+complexKeyFn
+) {
+const alias = getTableAlias(tableName)
+complexKeyFn(
+castedTypeValue.id.map((x: string) =>
+alias ? `${alias}.${x}` : x
+),
+castedTypeValue.values
+)
+} else if (!opts.relationship && !isRelationshipField) {
const alias = getTableAlias(tableName)
fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
-}
-if (opts.relationship && isRelationshipField) {
+} else if (opts.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".")
const alias = getTableAlias(filterTableName)
fn(alias ? `${alias}.${property}` : property, value)
@@ -241,7 +256,7 @@ class InternalBuilder
}
}

-const contains = (mode: object, any: boolean = false) => {
+const contains = (mode: AnySearchFilter, any: boolean = false) => {
const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
const not = mode === filters?.notContains ? "NOT " : ""
function stringifyArray(value: Array<any>, quoteStyle = '"'): string {
@@ -253,7 +268,7 @@ class InternalBuilder
return `[${value.join(",")}]`
}
if (this.client === SqlClient.POSTGRES) {
-iterate(mode, (key: string, value: Array<any>) => {
+iterate(mode, (key, value) => {
const wrap = any ? "" : "'"
const op = any ? "\\?| array" : "@>"
const fieldNames = key.split(/\./g)
@@ -268,7 +283,7 @@
})
} else if (this.client === SqlClient.MY_SQL) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
-iterate(mode, (key: string, value: Array<any>) => {
+iterate(mode, (key, value) => {
query = query[rawFnc](
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
value
@@ -277,7 +292,7 @@
})
} else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
-iterate(mode, (key: string, value: Array<any>) => {
+iterate(mode, (key, value) => {
let statement = ""
for (let i in value) {
if (typeof value[i] === "string") {
@@ -301,10 +316,16 @@
}

if (filters.oneOf) {
-iterate(filters.oneOf, (key, array) => {
const fnc = allOr ? "orWhereIn" : "whereIn"
+iterate(
+filters.oneOf,
+(key: string, array) => {
query = query[fnc](key, Array.isArray(array) ? array : [array])
-})
+},
+(key: string[], array) => {
+query = query[fnc](key, Array.isArray(array) ? array : [array])
+}
+)
}
if (filters.string) {
iterate(filters.string, (key, value) => {
@@ -746,6 +767,7 @@ class InternalBuilder

class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number

// pass through client to get flavour of SQL
constructor(client: string, limit: number = BASE_LIMIT) {
super(client)
@@ -938,15 +960,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
}

log(query: string, values?: SqlQueryBinding) {
-if (!environment.SQL_LOGGING_ENABLE) {
-return
-}
-const sqlClient = this.getSqlClient()
-let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"`
-if (values) {
-string += ` values="${values.join(", ")}"`
-}
-console.log(string)
+sqlLog(this.getSqlClient(), query, values)
}
}
@@ -2,10 +2,12 @@ import { DocumentType, SqlQuery, Table, TableSourceType } from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
import { Knex } from "knex"
import { SEPARATOR } from "../db"
+import environment from "../environment"

const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")
+const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/

export function isExternalTableID(tableId: string) {
return tableId.startsWith(DocumentType.DATASOURCE + SEPARATOR)
@@ -120,15 +122,38 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
}
}

-export function isIsoDateString(str: string) {
+export function isInvalidISODateString(str: string) {
const trimmedValue = str.trim()
-if (!/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/.test(trimmedValue)) {
+if (!ISO_DATE_REGEX.test(trimmedValue)) {
return false
}
let d = new Date(trimmedValue)
return isNaN(d.getTime())
}

export function isValidISODateString(str: string) {
const trimmedValue = str.trim()
if (!ISO_DATE_REGEX.test(trimmedValue)) {
return false
}
let d = new Date(trimmedValue)
if (isNaN(d.getTime())) {
return false
}
return d.toISOString() === trimmedValue
}

export function isValidFilter(value: any) {
return value != null && value !== ""
}

export function sqlLog(client: string, query: string, values?: any[]) {
if (!environment.SQL_LOGGING_ENABLE) {
return
}
let string = `[SQL] [${client.toUpperCase()}] query="${query}"`
if (values) {
string += ` values="${values.join(", ")}"`
}
console.log(string)
}
@@ -1,4 +1,7 @@
-import { db } from "../../../src"
+import {
+CONSTANT_EXTERNAL_ROW_COLS,
+CONSTANT_INTERNAL_ROW_COLS,
+} from "@budibase/shared-core"

export function expectFunctionWasCalledTimesWith(
jestFunction: any,
@@ -11,7 +14,7 @@ export function expectFunctionWasCalledTimesWith(
}

export const expectAnyInternalColsAttributes: {
-[K in (typeof db.CONSTANT_INTERNAL_ROW_COLS)[number]]: any
+[K in (typeof CONSTANT_INTERNAL_ROW_COLS)[number]]: any
} = {
tableId: expect.anything(),
type: expect.anything(),
@@ -22,7 +25,7 @@ export const expectAnyInternalColsAttributes: {
}

export const expectAnyExternalColsAttributes: {
-[K in (typeof db.CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
+[K in (typeof CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
} = {
tableId: expect.anything(),
_id: expect.anything(),
@@ -5,6 +5,7 @@
import { TableNames } from "constants"
import { Grid } from "@budibase/frontend-core"
import { API } from "api"
+import GridCreateAutomationButton from "./buttons/grid/GridCreateAutomationButton.svelte"
import GridAddColumnModal from "components/backend/DataTable/modals/grid/GridCreateColumnModal.svelte"
import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
import GridEditUserModal from "components/backend/DataTable/modals/grid/GridEditUserModal.svelte"
@@ -81,6 +82,9 @@
<GridCreateViewButton />
{/if}
<GridManageAccessButton />
+{#if !isUsersTable}
+<GridCreateAutomationButton />
+{/if}
{#if relationshipsEnabled}
<GridRelationshipButton />
{/if}
@@ -0,0 +1,101 @@
+<script>
+import {
+ActionButton,
+Popover,
+Menu,
+MenuItem,
+notifications,
+} from "@budibase/bbui"
+import { getContext } from "svelte"
+import { automationStore, tables, builderStore } from "stores/builder"
+import { TriggerStepID } from "constants/backend/automations"
+import { goto } from "@roxi/routify"
+
+const { datasource } = getContext("grid")
+
+$: triggers = $automationStore.blockDefinitions.TRIGGER
+
+$: table = $tables.list.find(table => table._id === $datasource.tableId)
+
+async function createAutomation(type) {
+const triggerType = triggers[type]
+if (!triggerType) {
+console.error("Invalid trigger type", type)
+notifications.error("Invalid automation trigger type")
+return
+}
+
+if (!table) {
+notifications.error("Invalid table, cannot create automation")
+return
+}
+
+const automationName = `${table.name} : Row ${
+type === TriggerStepID.ROW_SAVED ? "created" : "updated"
+}`
+const triggerBlock = automationStore.actions.constructBlock(
+"TRIGGER",
+triggerType.stepId,
+triggerType
+)
+
+triggerBlock.inputs = { tableId: $datasource.tableId }
+
+try {
+const response = await automationStore.actions.create(
+automationName,
+triggerBlock
+)
+builderStore.setPreviousTopNavPath(
+"/builder/app/:application/data",
+window.location.pathname
+)
+$goto(`/builder/app/${response.appId}/automation/${response.id}`)
+notifications.success(`Automation created`)
+} catch (e) {
+console.error("Error creating automation", e)
+notifications.error("Error creating automation")
+}
+}
+
+let anchor
+let open
+</script>
+
+<div bind:this={anchor}>
+<ActionButton
+icon="MagicWand"
+quiet
+size="M"
+on:click={() => (open = !open)}
+selected={open}
+>
+Generate
+</ActionButton>
+</div>
+
+<Popover bind:open {anchor} align="left">
+<Menu>
+<MenuItem
+icon="ShareAndroid"
+on:click={() => {
+open = false
+createAutomation(TriggerStepID.ROW_SAVED)
+}}
+>
+Automation: when row is created
+</MenuItem>
+<MenuItem
+icon="ShareAndroid"
+on:click={() => {
+open = false
+createAutomation(TriggerStepID.ROW_UPDATED)
+}}
+>
+Automation: when row is updated
+</MenuItem>
+</Menu>
+</Popover>
+
+<style>
+</style>
@@ -3,12 +3,11 @@
import { Modal, ActionButton, TooltipType, TempTooltip } from "@budibase/bbui"
import GridCreateViewModal from "../../modals/grid/GridCreateViewModal.svelte"

-const { rows, columns, filter } = getContext("grid")
+const { filter } = getContext("grid")

let modal
let firstFilterUsage = false

-$: disabled = !$columns.length || !$rows.length
$: {
if ($filter?.length && !firstFilterUsage) {
firstFilterUsage = true
@@ -21,7 +20,7 @@
type={TooltipType.Info}
condition={firstFilterUsage}
>
-<ActionButton {disabled} icon="CollectionAdd" quiet on:click={modal.show}>
+<ActionButton icon="CollectionAdd" quiet on:click={modal.show}>
Create view
</ActionButton>
</TempTooltip>
@@ -146,13 +146,13 @@ const automationActions = store => ({
await store.actions.save(automation)
notifications.success(
`Automation ${
-automation.disabled ? "enabled" : "disabled"
+automation.disabled ? "disabled" : "enabled"
} successfully`
)
} catch (error) {
notifications.error(
`Error ${
-automation && automation.disabled ? "enabling" : "disabling"
+automation && automation.disabled ? "disabling" : "enabling"
} automation`
)
}
@@ -56,7 +56,7 @@
<Modal
on:cancel={handleModalClose}
bind:this={modal}
-disableCancel={$builderStore.inBuilder}
+disableCancel={$builderStore.inBuilder || ignoreClicksOutside}
zIndex={2}
>
<div use:styleable={$component.styles} class={`modal-content ${size}`}>
@@ -1 +1 @@
-Subproject commit 11379517b76264a7f938c2d520bd259f586edada
+Subproject commit 7dbe323aec724ae6336b13c06aaefa4a89837edf
@@ -7,6 +7,7 @@ import {
FieldType,
FilterType,
IncludeRelationship,
+InternalSearchFilterOperator,
isManyToOne,
OneToManyRelationshipFieldMetadata,
Operation,
@@ -189,16 +190,23 @@ export class ExternalRequest<T extends Operation> {
if (filters) {
// need to map over the filters and make sure the _id field isn't present
let prefix = 1
-for (let operator of Object.values(filters)) {
-for (let field of Object.keys(operator || {})) {
+for (const operator of Object.values(filters)) {
+for (const field of Object.keys(operator || {})) {
if (dbCore.removeKeyNumbering(field) === "_id") {
if (primary) {
const parts = breakRowIdField(operator[field])
+if (primary.length > 1) {
+operator[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR] = {
+id: primary,
+values: parts[0],
+}
+} else {
for (let field of primary) {
operator[`${prefix}:${field}`] = parts.shift()
}
prefix++
}
}
// make sure this field doesn't exist on any filter
delete operator[field]
}
@@ -1428,22 +1428,6 @@ describe.each([
expect(row._id).toEqual(existing._id)
})

-it("should return an error on composite keys", async () => {
-const existing = await config.api.row.save(table._id!, {})
-await config.api.row.exportRows(
-table._id!,
-{
-rows: [`['${existing._id!}']`, "['d001', '10111']"],
-},
-{
-status: 400,
-body: {
-message: "Export data does not support composite keys.",
-},
-}
-)
-})

it("should return an error if no table is found", async () => {
const existing = await config.api.row.save(table._id!, {})
await config.api.row.exportRows(
@@ -1452,6 +1436,46 @@ describe.each([
{ status: 404 }
)
})

+// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+// to identity columns. This is not something Budibase does currently.
+providerType !== DatabaseName.SQL_SERVER &&
+it("should handle filtering by composite primary keys", async () => {
+const tableRequest = saveTableRequest({
+primary: ["number", "string"],
+schema: {
+string: {
+type: FieldType.STRING,
+name: "string",
+},
+number: {
+type: FieldType.NUMBER,
+name: "number",
+},
+},
+})
+delete tableRequest.schema.id
+
+const table = await config.api.table.save(tableRequest)
+
+const rows = await Promise.all(
+generator
+.unique(
+() => ({
+string: generator.word({ length: 30 }),
+number: generator.integer({ min: 0, max: 10000 }),
+}),
+10
+)
+.map(d => config.api.row.save(table._id!, d))
+)
+
+const res = await config.api.row.exportRows(table._id!, {
+rows: _.sampleSize(rows, 3).map(r => r._id!),
+})
+const results = JSON.parse(res)
+expect(results.length).toEqual(3)
+})
})

let o2mTable: Table
@@ -30,6 +30,7 @@ import { encodeJSBinding } from "@budibase/string-templates"
import { dataFilters } from "@budibase/shared-core"
import { Knex } from "knex"
import { structures } from "@budibase/backend-core/tests"
+import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"

describe.each([
["in-memory", undefined],
@@ -2167,6 +2168,53 @@ describe.each([
}
)

+isInternal &&
+describe("sample data", () => {
+beforeAll(async () => {
+await config.api.application.addSampleData(config.appId!)
+table = DEFAULT_EMPLOYEE_TABLE_SCHEMA
+})
+
+it("should be able to search sample data", async () => {
+await expectSearch({
+query: {},
+}).toContain([
+{
+"First Name": "Mandy",
+},
+])
+})
+})
+
+describe.each([
+{ low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" },
+{ low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" },
+{ low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
+{ low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
+])("date special cases", ({ low, high }) => {
+const earlyDate = "2024-07-03T10:00:00.000Z",
+laterDate = "2024-07-03T11:00:00.000Z"
+beforeAll(async () => {
+table = await createTable({
+date: {
+name: "date",
+type: FieldType.DATETIME,
+},
+})
+await createRows([{ date: earlyDate }, { date: laterDate }])
+})
+
+it("should be able to handle a date search", async () => {
+await expectSearch({
+query: {
+range: {
+"1:date": { low, high },
+},
+},
+}).toContainExactly([{ date: earlyDate }, { date: laterDate }])
+})
+})
+
describe.each([
"名前", // Japanese for "name"
"Benutzer-ID", // German for "user ID", includes a hyphen
@@ -173,3 +173,9 @@ export const DEFAULT_INVENTORY_TABLE_ID = constants.DEFAULT_INVENTORY_TABLE_ID
export const DEFAULT_EXPENSES_TABLE_ID = constants.DEFAULT_EXPENSES_TABLE_ID
export const DEFAULT_EMPLOYEE_TABLE_ID = constants.DEFAULT_EMPLOYEE_TABLE_ID
export const DEFAULT_BB_DATASOURCE_ID = constants.DEFAULT_BB_DATASOURCE_ID
+export const DEFAULT_TABLE_IDS = [
+DEFAULT_JOBS_TABLE_ID,
+DEFAULT_INVENTORY_TABLE_ID,
+DEFAULT_EXPENSES_TABLE_ID,
+DEFAULT_EMPLOYEE_TABLE_ID,
+]
@@ -619,6 +619,13 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
},
}

+export const DEFAULT_TABLES: Table[] = [
+DEFAULT_INVENTORY_TABLE_SCHEMA,
+DEFAULT_EMPLOYEE_TABLE_SCHEMA,
+DEFAULT_JOBS_TABLE_SCHEMA,
+DEFAULT_EXPENSES_TABLE_SCHEMA,
+]
+
export async function buildDefaultDocs() {
const inventoryData = await tableImport(
DEFAULT_INVENTORY_TABLE_SCHEMA,
@@ -16,9 +16,9 @@ import {
breakExternalTableId,
breakRowIdField,
} from "../../../../integrations/utils"
-import { utils } from "@budibase/shared-core"
+import { utils, CONSTANT_EXTERNAL_ROW_COLS } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "./types"
-import { db, HTTPError } from "@budibase/backend-core"
+import { HTTPError } from "@budibase/backend-core"
import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import sdk from "../../../"
@@ -99,7 +99,7 @@ export async function search(
}

if (options.fields) {
-const fields = [...options.fields, ...db.CONSTANT_EXTERNAL_ROW_COLS]
+const fields = [...options.fields, ...CONSTANT_EXTERNAL_ROW_COLS]
rows = rows.map((r: any) => pick(r, fields))
}

@@ -158,10 +158,7 @@ export async function exportRows(
_id: rowIds.map((row: string) => {
const ids = breakRowIdField(row)
if (ids.length > 1) {
-throw new HTTPError(
-"Export data does not support composite keys.",
-400
-)
+return ids
}
return ids[0]
}),
@@ -1,4 +1,5 @@
-import { context, db, HTTPError } from "@budibase/backend-core"
+import { context, HTTPError } from "@budibase/backend-core"
+import { CONSTANT_INTERNAL_ROW_COLS } from "@budibase/shared-core"
import env from "../../../../environment"
import { fullSearch, paginatedSearch } from "./utils"
import { getRowParams, InternalTables } from "../../../../db/utils"
@@ -74,7 +75,7 @@ export async function search(
}

if (options.fields) {
-const fields = [...options.fields, ...db.CONSTANT_INTERNAL_ROW_COLS]
+const fields = [...options.fields, ...CONSTANT_INTERNAL_ROW_COLS]
response.rows = response.rows.map((r: any) => pick(r, fields))
}

@@ -41,6 +41,7 @@ import {
getTableIDList,
} from "./filters"
import { dataFilters } from "@budibase/shared-core"
+import { DEFAULT_TABLE_IDS } from "../../../../constants"

const builder = new sql.Sql(SqlClient.SQL_LITE)
const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
@@ -211,6 +212,18 @@ async function runSqlQuery(
return response
}

+function resyncDefinitionsRequired(status: number, message: string) {
+// pre data_ prefix on column names, need to resync
+return (
+(status === 400 && message?.match(USER_COLUMN_PREFIX_REGEX)) ||
+// default tables aren't included in definition
+(status === 400 &&
+DEFAULT_TABLE_IDS.find(tableId => message?.includes(tableId))) ||
+// no design document found, needs a full sync
+(status === 404 && message?.includes(SQLITE_DESIGN_DOC_ID))
+)
+}
+
export async function search(
options: RowSearchParams,
table: Table
@@ -338,10 +351,7 @@ export async function search(
return response
} catch (err: any) {
const msg = typeof err === "string" ? err : err.message
-const syncAndRepeat =
-(err.status === 400 && msg?.match(USER_COLUMN_PREFIX_REGEX)) ||
-(err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID))
-if (syncAndRepeat) {
+if (resyncDefinitionsRequired(err.status, msg)) {
await sdk.tables.sqs.syncDefinition()
return search(options, table)
}
@@ -15,6 +15,7 @@ import {
generateJunctionTableID,
} from "../../../../db/utils"
import { isEqual } from "lodash"
+import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default"

const FieldTypeMap: Record<FieldType, SQLiteType> = {
[FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@@ -126,8 +127,9 @@ function mapTable(table: Table): SQLiteTables {
// nothing exists, need to iterate though existing tables
async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {
const tables = await tablesSdk.getAllInternalTables()
+const defaultTables = DEFAULT_TABLES
const definition = sql.designDoc.base("tableId")
-for (let table of tables) {
+for (let table of tables.concat(defaultTables)) {
definition.sql.tables = {
...definition.sql.tables,
...mapTable(table),
@@ -18,6 +18,7 @@ import sdk from "../../../sdk"
import { isExternalTableID } from "../../../integrations/utils"
import { EventType, updateLinks } from "../../../db/linkedRows"
import { cloneDeep } from "lodash"
+import { isInternalColumnName } from "@budibase/shared-core"

export interface MigrationResult {
tablesUpdated: Table[]
@@ -36,7 +37,7 @@ export async function migrate(
throw new BadRequestError(`Column name cannot be empty`)
}

-if (dbCore.isInternalColumnName(newColumnName)) {
+if (isInternalColumnName(newColumnName)) {
throw new BadRequestError(`Column name cannot be a reserved column name`)
}

@@ -6,9 +6,13 @@ import {
ViewV2,
ViewV2Enriched,
} from "@budibase/types"
-import { HTTPError, db as dbCore } from "@budibase/backend-core"
+import { HTTPError } from "@budibase/backend-core"
import { features } from "@budibase/pro"
-import { helpers } from "@budibase/shared-core"
+import {
+helpers,
+CONSTANT_EXTERNAL_ROW_COLS,
+CONSTANT_INTERNAL_ROW_COLS,
+} from "@budibase/shared-core"
import { cloneDeep } from "lodash/fp"

import * as utils from "../../../db/utils"
@@ -144,8 +148,8 @@ export function allowedFields(view: View | ViewV2) {
const fieldSchema = view.schema![key]
return fieldSchema.visible && !fieldSchema.readonly
}),
-...dbCore.CONSTANT_EXTERNAL_ROW_COLS,
-...dbCore.CONSTANT_INTERNAL_ROW_COLS,
+...CONSTANT_EXTERNAL_ROW_COLS,
+...CONSTANT_INTERNAL_ROW_COLS,
]
}

@@ -149,4 +149,8 @@ export class ApplicationAPI extends TestAPI {
query: { status },
})
}

+addSampleData = async (appId: string): Promise<void> => {
+await this._post(`/api/applications/${appId}/sample`)
+}
}
@@ -310,16 +310,12 @@ export const buildQuery = (filter: SearchFilter[]) => {
query.equal = query.equal || {}
query.equal[field] = true
} else {
-query[queryOperator] = {
-...query[queryOperator],
-[field]: value,
-}
+query[queryOperator] ??= {}
+query[queryOperator]![field] = value
}
} else {
-query[queryOperator] = {
-...query[queryOperator],
-[field]: value,
-}
+query[queryOperator] ??= {}
+query[queryOperator]![field] = value
}
}
})
@@ -17,51 +17,52 @@ export enum SearchFilterOperator {
CONTAINS_ANY = "containsAny",
}

export interface SearchFilters {
allOr?: boolean
// TODO: this is just around for now - we need a better way to do or/and
// allows just fuzzy to be or - all the fuzzy/like parameters
fuzzyOr?: boolean
onEmptyFilter?: EmptyFilterOption
[SearchFilterOperator.STRING]?: {
[key: string]: string
export enum InternalSearchFilterOperator {
COMPLEX_ID_OPERATOR = "_complexIdOperator",
}
[SearchFilterOperator.FUZZY]?: {
[key: string]: string

type BasicFilter<T = any> = Record<string, T> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
}
[SearchFilterOperator.RANGE]?: {
[key: string]:

type ArrayFilter = Record<string, any[]> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: {
id: string[]
values: string[]
}
}

type RangeFilter = Record<
string,
| {
high: number | string
low: number | string
}
| { high: number | string }
| { low: number | string }
> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
}
[SearchFilterOperator.EQUAL]?: {
[key: string]: any
}
[SearchFilterOperator.NOT_EQUAL]?: {
[key: string]: any
}
[SearchFilterOperator.EMPTY]?: {
[key: string]: any
}
[SearchFilterOperator.NOT_EMPTY]?: {
[key: string]: any
}
[SearchFilterOperator.ONE_OF]?: {
[key: string]: any[]
}
[SearchFilterOperator.CONTAINS]?: {
[key: string]: any[]
}
[SearchFilterOperator.NOT_CONTAINS]?: {
[key: string]: any[]
}
[SearchFilterOperator.CONTAINS_ANY]?: {
[key: string]: any[]
}

export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter

export interface SearchFilters {
allOr?: boolean
// TODO: this is just around for now - we need a better way to do or/and
// allows just fuzzy to be or - all the fuzzy/like parameters
fuzzyOr?: boolean
onEmptyFilter?: EmptyFilterOption
[SearchFilterOperator.STRING]?: BasicFilter<string>
[SearchFilterOperator.FUZZY]?: BasicFilter<string>
[SearchFilterOperator.RANGE]?: RangeFilter
[SearchFilterOperator.EQUAL]?: BasicFilter
[SearchFilterOperator.NOT_EQUAL]?: BasicFilter
[SearchFilterOperator.EMPTY]?: BasicFilter
[SearchFilterOperator.NOT_EMPTY]?: BasicFilter
[SearchFilterOperator.ONE_OF]?: ArrayFilter
[SearchFilterOperator.CONTAINS]?: ArrayFilter
[SearchFilterOperator.NOT_CONTAINS]?: ArrayFilter
[SearchFilterOperator.CONTAINS_ANY]?: ArrayFilter
// specific to SQS/SQLite search on internal tables this can be used
// to make sure the documents returned are always filtered down to a
// specific document type (such as just rows)