Merge pull request #11435 from Budibase/budi-7350-filter-equal-for-numbers-not-working-in-data-providers
Filter equal for UNSIGNED, DECIMAL, BigInt, etc not working in MySQL custom query
This commit is contained in: commit c973af86b3
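The root cause, as far as this diff shows, is that the mysql2 driver hands DECIMAL, NEWDECIMAL and LONGLONG (BIGINT, including UNSIGNED) columns back as strings, so an "equals" filter in a data provider compares a string such as "20" against the number 20 and never matches. The fix passes the saved query's schema down to the MySQL integration so its typeCast hook can convert those columns to numbers when the schema marks the field as numeric. A minimal standalone sketch of the technique, not the Budibase code itself (connection options, table and column names are placeholders):

    import mysql from "mysql2/promise"

    type QuerySchema = { [key: string]: { name: string; type: string } }

    // mysql2 lets a typeCast callback override how each column value is decoded
    function buildTypeCast(schema?: QuerySchema) {
      return function (field: any, next: any) {
        const bigNumeric = ["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)
        if (bigNumeric && schema?.[field.name]?.type === "number") {
          const value = field.string()
          return value ? Number(value) : null
        }
        return next() // fall back to the driver's default decoding
      }
    }

    async function example() {
      const connection = await mysql.createConnection({
        host: "localhost",
        user: "root",
        database: "test",
        typeCast: buildTypeCast({ price: { name: "price", type: "number" } }),
      })
      // price is a DECIMAL column; without the cast it would come back as "20.00"
      const [rows] = await connection.query("SELECT id, price FROM products")
      return rows
    }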
@@ -1,42 +1,32 @@
 {
-  // Use IntelliSense to learn about possible attributes.
-  // Hover to view descriptions of existing attributes.
-  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
-  "version": "0.2.0",
-  "configurations": [
-    {
-      "name": "Budibase Server",
-      "type": "node",
-      "request": "launch",
-      "runtimeArgs": [
-        "--nolazy",
-        "-r",
-        "ts-node/register/transpile-only"
-      ],
-      "args": [
-        "${workspaceFolder}/packages/server/src/index.ts"
-      ],
-      "cwd": "${workspaceFolder}/packages/server"
-    },
-    {
-      "name": "Budibase Worker",
-      "type": "node",
-      "request": "launch",
-      "runtimeArgs": [
-        "--nolazy",
-        "-r",
-        "ts-node/register/transpile-only"
-      ],
-      "args": [
-        "${workspaceFolder}/packages/worker/src/index.ts"
-      ],
-      "cwd": "${workspaceFolder}/packages/worker"
-    },
-  ],
-  "compounds": [
-    {
-      "name": "Start Budibase",
-      "configurations": ["Budibase Server", "Budibase Worker"]
-    }
-  ]
-}
+  // Use IntelliSense to learn about possible attributes.
+  // Hover to view descriptions of existing attributes.
+  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "name": "Budibase Server",
+      "type": "node",
+      "request": "launch",
+      "runtimeVersion": "14.20.1",
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
+      "args": ["${workspaceFolder}/packages/server/src/index.ts"],
+      "cwd": "${workspaceFolder}/packages/server"
+    },
+    {
+      "name": "Budibase Worker",
+      "type": "node",
+      "request": "launch",
+      "runtimeVersion": "14.20.1",
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
+      "args": ["${workspaceFolder}/packages/worker/src/index.ts"],
+      "cwd": "${workspaceFolder}/packages/worker"
+    }
+  ],
+  "compounds": [
+    {
+      "name": "Start Budibase",
+      "configurations": ["Budibase Server", "Budibase Worker"]
+    }
+  ]
+}
@@ -17,7 +17,7 @@
   import { generate } from "shortid"
   import { LuceneUtils, Constants } from "@budibase/frontend-core"
   import { getFields } from "helpers/searchFields"
-  import { createEventDispatcher } from "svelte"
+  import { createEventDispatcher, onMount } from "svelte"
 
   export let schemaFields
   export let filters = []
@@ -64,6 +64,15 @@
     })
   }
 
+  onMount(() => {
+    parseFilters(filters)
+    rawFilters.forEach(filter => {
+      filter.type =
+        schemaFields.find(field => field.name === filter.field)?.type ||
+        filter.type
+    })
+  })
+
   // Add field key prefixes and a special metadata filter object to indicate
   // whether to use the "match all" or "match any" behaviour
   const enrichFilters = (rawFilters, matchAny) => {
@@ -100,7 +100,7 @@
     "memorystream": "0.3.1",
     "mongodb": "5.7",
     "mssql": "9.1.1",
-    "mysql2": "2.3.3",
+    "mysql2": "3.5.2",
     "node-fetch": "2.6.7",
     "object-sizeof": "2.6.1",
     "open": "8.4.0",
@@ -127,7 +127,7 @@ export async function preview(ctx: any) {
   const query = ctx.request.body
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
-  const { fields, parameters, queryVerb, transformer, queryId } = query
+  const { fields, parameters, queryVerb, transformer, queryId, schema } = query
 
   const authConfigCtx: any = getAuthConfig(ctx)
 
@@ -140,6 +140,7 @@ export async function preview(ctx: any) {
     parameters,
     transformer,
     queryId,
+    schema,
     // have to pass down to the thread runner - can't put into context now
     environmentVariables: envVars,
     ctx: {
@@ -235,6 +236,7 @@ async function execute(
       user: ctx.user,
       auth: { ...authConfigCtx },
     },
+    schema: query.schema,
   }
   const runFn = () => Runner.run(inputs)
 
@@ -1,11 +1,18 @@
-const setup = require("./utilities")
-const { FilterConditions } = require("../steps/filter")
+import * as setup from "./utilities"
+import { FilterConditions } from "../steps/filter"
 
 describe("test the filter logic", () => {
-  async function checkFilter(field, condition, value, pass = true) {
-    let res = await setup.runStep(setup.actions.FILTER.stepId,
-      { field, condition, value }
-    )
+  async function checkFilter(
+    field: any,
+    condition: string,
+    value: any,
+    pass = true
+  ) {
+    let res = await setup.runStep(setup.actions.FILTER.stepId, {
+      field,
+      condition,
+      value,
+    })
     expect(res.result).toEqual(pass)
     expect(res.success).toEqual(true)
   }
@@ -36,9 +43,9 @@ describe("test the filter logic", () => {
 
   it("check date coercion", async () => {
     await checkFilter(
-      (new Date()).toISOString(),
+      new Date().toISOString(),
       FilterConditions.GREATER_THAN,
-      (new Date(-10000)).toISOString(),
+      new Date(-10000).toISOString(),
      true
    )
  })
@@ -93,6 +93,21 @@ const SCHEMA: Integration = {
   },
 }
 
+const defaultTypeCasting = function (field: any, next: any) {
+  if (
+    field.type == "DATETIME" ||
+    field.type === "DATE" ||
+    field.type === "TIMESTAMP" ||
+    field.type === "LONGLONG"
+  ) {
+    return field.string()
+  }
+  if (field.type === "BIT" && field.length === 1) {
+    return field.buffer()?.[0]
+  }
+  return next()
+}
+
 export function bindingTypeCoerce(bindings: any[]) {
   for (let i = 0; i < bindings.length; i++) {
     const binding = bindings[i]
@@ -147,21 +162,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
     delete config.rejectUnauthorized
     this.config = {
       ...config,
+      typeCast: defaultTypeCasting,
       multipleStatements: true,
-      typeCast: function (field: any, next: any) {
-        if (
-          field.type == "DATETIME" ||
-          field.type === "DATE" ||
-          field.type === "TIMESTAMP" ||
-          field.type === "LONGLONG"
-        ) {
-          return field.string()
-        }
-        if (field.type === "BIT" && field.length === 1) {
-          return field.buffer()?.[0]
-        }
-        return next()
-      },
     }
   }
 
@@ -194,6 +196,37 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
     return `concat(${parts.join(", ")})`
   }
 
+  defineTypeCastingFromSchema(schema: {
+    [key: string]: { name: string; type: string }
+  }): void {
+    if (!schema) {
+      return
+    }
+    this.config.typeCast = function (field: any, next: any) {
+      if (schema[field.name]?.name === field.name) {
+        if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
+          if (schema[field.name]?.type === "number") {
+            const value = field.string()
+            return value ? Number(value) : null
+          } else {
+            return field.string()
+          }
+        }
+      }
+      if (
+        field.type == "DATETIME" ||
+        field.type === "DATE" ||
+        field.type === "TIMESTAMP"
+      ) {
+        return field.string()
+      }
+      if (field.type === "BIT" && field.length === 1) {
+        return field.buffer()?.[0]
+      }
+      return next()
+    }
+  }
+
   async connect() {
     this.client = await mysql.createConnection(this.config)
   }
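With the hook above, the saved query's schema drives the cast: columns that mysql2 would return as strings (DECIMAL, NEWDECIMAL, LONGLONG) are converted with Number() only when the schema types the field as a number, and are otherwise kept as strings so oversized BIGINT values do not lose precision. A rough illustration of the schema object the runner passes down, with invented field names (integration here stands for the MySQLIntegration instance constructed in the query runner hunk further below):

    // Hypothetical schema saved against a custom query
    const schema = {
      price: { name: "price", type: "number" },   // DECIMAL column: rows return 20, not "20.00"
      serial: { name: "serial", type: "string" }, // BIGINT column: stays a string, no precision loss
    }
    integration.defineTypeCastingFromSchema?.(schema)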
@@ -204,7 +237,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
 
   async internalQuery(
     query: SqlQuery,
-    opts: { connect?: boolean; disableCoercion?: boolean } = {
+    opts: {
+      connect?: boolean
+      disableCoercion?: boolean
+    } = {
       connect: true,
       disableCoercion: false,
     }
@@ -11,6 +11,12 @@ export interface QueryEvent {
   queryId: string
   environmentVariables?: Record<string, string>
   ctx?: any
+  schema?: {
+    [key: string]: {
+      name: string
+      type: string
+    }
+  }
 }
 
 export interface QueryVariable {
@@ -8,6 +8,7 @@ import { context, cache, auth } from "@budibase/backend-core"
 import { getGlobalIDFromUserMetadataID } from "../db/utils"
 import sdk from "../sdk"
 import { cloneDeep } from "lodash/fp"
+import { SourceName } from "@budibase/types"
 
 import { isSQL } from "../integrations/utils"
 import { interpolateSQL } from "../integrations/queries/sql"
@@ -28,6 +29,7 @@ class QueryRunner {
   hasRerun: boolean
   hasRefreshedOAuth: boolean
   hasDynamicVariables: boolean
+  schema: any
 
   constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
     this.datasource = input.datasource
@@ -37,6 +39,7 @@
     this.pagination = input.pagination
     this.transformer = input.transformer
     this.queryId = input.queryId
+    this.schema = input.schema
     this.noRecursiveQuery = flags.noRecursiveQuery
     this.cachedVariables = []
     // Additional context items for enrichment
@@ -51,7 +54,7 @@
   }
 
   async execute(): Promise<any> {
-    let { datasource, fields, queryVerb, transformer } = this
+    let { datasource, fields, queryVerb, transformer, schema } = this
     let datasourceClone = cloneDeep(datasource)
     let fieldsClone = cloneDeep(fields)
 
@@ -70,6 +73,9 @@
 
     const integration = new Integration(datasourceClone.config)
 
+    // define the type casting from the schema
+    integration.defineTypeCastingFromSchema?.(schema)
+
     // pre-query, make sure datasource variables are added to parameters
     const parameters = await this.addDatasourceVariables()
 
@@ -166,6 +166,12 @@ export interface IntegrationBase {
   delete?(query: any): Promise<any[] | any>
   testConnection?(): Promise<ConnectionInfo>
   getExternalSchema?(): Promise<string>
+  defineTypeCastingFromSchema?(schema: {
+    [key: string]: {
+      name: string
+      type: string
+    }
+  }): void
 }
 
 export interface DatasourcePlus extends IntegrationBase {
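Since defineTypeCastingFromSchema is optional on IntegrationBase and the runner invokes it with optional chaining, integrations that do not implement it are untouched; in this PR only the MySQL integration does. A sketch of how another integration could opt in, assuming IntegrationBase is importable from @budibase/types as in the existing integrations (the class, column handling and read body are invented for illustration):

    import { IntegrationBase } from "@budibase/types"

    class ExampleIntegration implements IntegrationBase {
      private numericColumns = new Set<string>()

      defineTypeCastingFromSchema(schema: {
        [key: string]: { name: string; type: string }
      }): void {
        if (!schema) return
        for (const field of Object.values(schema)) {
          if (field.type === "number") this.numericColumns.add(field.name)
        }
      }

      async read(query: any): Promise<any[]> {
        const rows: Record<string, any>[] = [] // placeholder for real driver output
        for (const row of rows) {
          for (const col of this.numericColumns) {
            if (typeof row[col] === "string") row[col] = Number(row[col])
          }
        }
        return rows
      }
    }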