Merge branch 'master' into feature/onboarding
commit f777316fc2
@@ -6,7 +6,7 @@ import { invalidateDynamicVariables } from "../../../threads/utils"
 import env from "../../../environment"
 import { events, context, utils, constants } from "@budibase/backend-core"
 import sdk from "../../../sdk"
-import { QueryEvent } from "../../../threads/definitions"
+import { QueryEvent, QueryEventParameters } from "../../../threads/definitions"
 import {
   ConfigType,
   Query,
@@ -18,7 +18,6 @@ import {
   FieldType,
   ExecuteQueryRequest,
   ExecuteQueryResponse,
-  QueryParameter,
   PreviewQueryRequest,
   PreviewQueryResponse,
 } from "@budibase/types"
@@ -29,7 +28,7 @@ const Runner = new Thread(ThreadType.QUERY, {
   timeoutMs: env.QUERY_THREAD_TIMEOUT,
 })
 
-function validateQueryInputs(parameters: Record<string, string>) {
+function validateQueryInputs(parameters: QueryEventParameters) {
   for (let entry of Object.entries(parameters)) {
     const [key, value] = entry
     if (typeof value !== "string") {
@@ -100,10 +99,18 @@ export async function save(ctx: UserCtx<Query, Query>) {
   const datasource = await sdk.datasources.get(query.datasourceId)
 
   let eventFn
-  if (!query._id) {
+  if (!query._id && !query._rev) {
     query._id = generateQueryID(query.datasourceId)
+    // flag to state whether the default bindings are empty strings (old behaviour) or null
+    query.nullDefaultSupport = true
     eventFn = () => events.query.created(datasource, query)
   } else {
+    // check if flag has previously been set, don't let it change
+    // allow it to be explicitly set to false via API incase this is ever needed
+    const existingQuery = await db.get<Query>(query._id)
+    if (existingQuery.nullDefaultSupport && query.nullDefaultSupport == null) {
+      query.nullDefaultSupport = true
+    }
     eventFn = () => events.query.updated(datasource, query)
   }
   const response = await db.put(query)
@@ -135,16 +142,20 @@ function getAuthConfig(ctx: UserCtx) {
 }
 
 function enrichParameters(
-  queryParameters: QueryParameter[],
-  requestParameters: Record<string, string> = {}
-): Record<string, string> {
+  query: Query,
+  requestParameters: QueryEventParameters = {}
+): QueryEventParameters {
+  const paramNotSet = (val: unknown) => val === "" || val == undefined
   // first check parameters are all valid
   validateQueryInputs(requestParameters)
   // make sure parameters are fully enriched with defaults
-  for (let parameter of queryParameters) {
-    if (!requestParameters[parameter.name]) {
-      requestParameters[parameter.name] = parameter.default
+  for (const parameter of query.parameters) {
+    let value: string | null =
+      requestParameters[parameter.name] || parameter.default
+    if (query.nullDefaultSupport && paramNotSet(value)) {
+      value = null
     }
+    requestParameters[parameter.name] = value
   }
   return requestParameters
 }
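A minimal sketch of the behaviour change in enrichParameters above (not part of the diff; the query literal and the commented calls are illustrative only and assume a query saved with the new flag):

// Illustrative shapes taken from the diff; values are invented.
const query = {
  nullDefaultSupport: true,
  parameters: [{ name: "bindingNumber", default: "" }],
}

// enrichParameters(query as Query, {})
//   => { bindingNumber: null }   // old behaviour returned { bindingNumber: "" }
// enrichParameters(query as Query, { bindingNumber: "42" })
//   => { bindingNumber: "42" }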
@@ -157,10 +168,15 @@ export async function preview(
   )
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
-  const { fields, parameters, queryVerb, transformer, queryId, schema } =
-    ctx.request.body
+  const queryId = ctx.request.body.queryId
+  // the body contains the makings of a query, which has not been saved yet
+  const query: Query = ctx.request.body
+  // hasn't been saved, new query
+  if (!queryId && !query._id) {
+    query.nullDefaultSupport = true
+  }
 
-  let existingSchema = schema
+  let existingSchema = query.schema
   if (queryId && !existingSchema) {
     try {
       const db = context.getAppDB()
@@ -268,13 +284,14 @@ export async function preview(
   try {
     const inputs: QueryEvent = {
       appId: ctx.appId,
-      datasource,
-      queryVerb,
-      fields,
-      parameters: enrichParameters(parameters),
-      transformer,
+      queryVerb: query.queryVerb,
+      fields: query.fields,
+      parameters: enrichParameters(query),
+      transformer: query.transformer,
+      schema: query.schema,
+      nullDefaultSupport: query.nullDefaultSupport,
       queryId,
-      schema,
+      datasource,
       // have to pass down to the thread runner - can't put into context now
       environmentVariables: envVars,
       ctx: {
@@ -336,14 +353,12 @@ async function execute(
     queryVerb: query.queryVerb,
     fields: query.fields,
     pagination: ctx.request.body.pagination,
-    parameters: enrichParameters(
-      query.parameters,
-      ctx.request.body.parameters
-    ),
+    parameters: enrichParameters(query, ctx.request.body.parameters),
     transformer: query.transformer,
     queryId: ctx.params.queryId,
     // have to pass down to the thread runner - can't put into context now
     environmentVariables: envVars,
+    nullDefaultSupport: query.nullDefaultSupport,
     ctx: {
       user: ctx.user,
       auth: { ...authConfigCtx },
@@ -143,7 +143,10 @@ describe("/api/env/variables", () => {
     delete response.body.datasource.config
     expect(events.query.previewed).toHaveBeenCalledWith(
       response.body.datasource,
-      queryPreview
+      {
+        ...queryPreview,
+        nullDefaultSupport: true,
+      }
     )
     expect(pg.Client).toHaveBeenCalledWith({ password: "test", ssl: undefined })
   })
@@ -12,19 +12,22 @@ const createTableSQL: Record<string, string> = {
     CREATE TABLE test_table (
       id serial PRIMARY KEY,
       name VARCHAR ( 50 ) NOT NULL,
-      birthday TIMESTAMP
+      birthday TIMESTAMP,
+      number INT
     );`,
   [SourceName.MYSQL]: `
     CREATE TABLE test_table (
       id INT AUTO_INCREMENT PRIMARY KEY,
       name VARCHAR(50) NOT NULL,
-      birthday TIMESTAMP
+      birthday TIMESTAMP,
+      number INT
     );`,
   [SourceName.SQL_SERVER]: `
     CREATE TABLE test_table (
       id INT IDENTITY(1,1) PRIMARY KEY,
       name NVARCHAR(50) NOT NULL,
-      birthday DATETIME
+      birthday DATETIME,
+      number INT
     );`,
 }
 
@@ -36,7 +39,7 @@ describe.each([
   ["mysql", databaseTestProviders.mysql],
   ["mssql", databaseTestProviders.mssql],
   ["mariadb", databaseTestProviders.mariadb],
-])("queries (%s)", (__, dsProvider) => {
+])("queries (%s)", (dbName, dsProvider) => {
   const config = setup.getConfig()
   let datasource: Datasource
 
@@ -51,7 +54,7 @@ describe.each([
       transformer: "return data",
       readable: true,
     }
-    return await config.api.query.create({ ...defaultQuery, ...query })
+    return await config.api.query.save({ ...defaultQuery, ...query })
   }
 
   async function rawQuery(sql: string): Promise<any> {
@@ -221,26 +224,31 @@ describe.each([
           id: 1,
           name: "one",
           birthday: null,
+          number: null,
         },
         {
           id: 2,
           name: "two",
           birthday: null,
+          number: null,
         },
         {
           id: 3,
           name: "three",
           birthday: null,
+          number: null,
         },
         {
           id: 4,
           name: "four",
           birthday: null,
+          number: null,
         },
         {
           id: 5,
           name: "five",
           birthday: null,
+          number: null,
         },
       ])
     })
@@ -263,6 +271,7 @@ describe.each([
           id: 2,
           name: "one",
           birthday: null,
+          number: null,
         },
       ])
     })
@@ -291,6 +300,7 @@ describe.each([
           id: 1,
           name: "one",
           birthday: null,
+          number: null,
         },
       ])
     })
@@ -329,7 +339,9 @@ describe.each([
       ])
 
       const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
-      expect(rows).toEqual([{ id: 1, name: "foo", birthday: null }])
+      expect(rows).toEqual([
+        { id: 1, name: "foo", birthday: null, number: null },
+      ])
     })
 
     it("should be able to execute an update that updates no rows", async () => {
|
@ -398,4 +410,55 @@ describe.each([
|
||||||
expect(rows).toHaveLength(0)
|
expect(rows).toHaveLength(0)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// this parameter really only impacts SQL queries
|
||||||
|
describe("confirm nullDefaultSupport", () => {
|
||||||
|
const queryParams = {
|
||||||
|
fields: {
|
||||||
|
sql: "INSERT INTO test_table (name, number) VALUES ({{ bindingName }}, {{ bindingNumber }})",
|
||||||
|
},
|
||||||
|
parameters: [
|
||||||
|
{
|
||||||
|
name: "bindingName",
|
||||||
|
default: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "bindingNumber",
|
||||||
|
default: "",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
queryVerb: "create",
|
||||||
|
}
|
||||||
|
|
||||||
|
it("should error for old queries", async () => {
|
||||||
|
const query = await createQuery(queryParams)
|
||||||
|
await config.api.query.save({ ...query, nullDefaultSupport: false })
|
||||||
|
let error: string | undefined
|
||||||
|
try {
|
||||||
|
await config.api.query.execute(query._id!, {
|
||||||
|
parameters: {
|
||||||
|
bindingName: "testing",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (err: any) {
|
||||||
|
error = err.message
|
||||||
|
}
|
||||||
|
if (dbName === "mssql") {
|
||||||
|
expect(error).toBeUndefined()
|
||||||
|
} else {
|
||||||
|
expect(error).toBeDefined()
|
||||||
|
expect(error).toContain("integer")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not error for new queries", async () => {
|
||||||
|
const query = await createQuery(queryParams)
|
||||||
|
const results = await config.api.query.execute(query._id!, {
|
||||||
|
parameters: {
|
||||||
|
bindingName: "testing",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
expect(results).toEqual({ data: [{ created: true }] })
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@@ -31,7 +31,7 @@ describe("/queries", () => {
     ) {
       combinedQuery.fields.extra.collection = collection
     }
-    return await config.api.query.create(combinedQuery)
+    return await config.api.query.save(combinedQuery)
   }
 
   async function withClient<T>(
@@ -464,7 +464,7 @@ describe("/queries", () => {
       })
     })
 
-    it("should ignore be able to save deeply nested data", async () => {
+    it("should be able to save deeply nested data", async () => {
       const data = {
         foo: "bar",
         data: [
@@ -78,6 +78,7 @@ describe("/queries", () => {
        _rev: res.body._rev,
        _id: res.body._id,
        ...query,
+        nullDefaultSupport: true,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      })
@@ -103,6 +104,7 @@ describe("/queries", () => {
        _rev: res.body._rev,
        _id: res.body._id,
        ...query,
+        nullDefaultSupport: true,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      })
@@ -130,6 +132,7 @@ describe("/queries", () => {
          _id: query._id,
          createdAt: new Date().toISOString(),
          ...basicQuery(datasource._id),
+          nullDefaultSupport: true,
          updatedAt: new Date().toISOString(),
          readable: true,
        },
@@ -245,10 +248,10 @@ describe("/queries", () => {
      expect(responseBody.rows.length).toEqual(1)
      expect(events.query.previewed).toHaveBeenCalledTimes(1)
      delete datasource.config
-      expect(events.query.previewed).toHaveBeenCalledWith(
-        datasource,
-        queryPreview
-      )
+      expect(events.query.previewed).toHaveBeenCalledWith(datasource, {
+        ...queryPreview,
+        nullDefaultSupport: true,
+      })
    })

    it("should apply authorization to endpoint", async () => {
@@ -5,9 +5,10 @@ import sdk from "../../sdk"
 const CONST_CHAR_REGEX = new RegExp("'[^']*'", "g")
 
 export async function interpolateSQL(
-  fields: { [key: string]: any },
+  fields: { sql: string; bindings: any[] },
   parameters: { [key: string]: any },
-  integration: DatasourcePlus
+  integration: DatasourcePlus,
+  opts: { nullDefaultSupport: boolean }
 ) {
   let sql = fields.sql
   if (!sql || typeof sql !== "string") {
@@ -64,7 +65,14 @@ export async function interpolateSQL(
   }
   // replicate the knex structure
   fields.sql = sql
-  fields.bindings = await sdk.queries.enrichContext(variables, parameters)
+  fields.bindings = await sdk.queries.enrichArrayContext(variables, parameters)
+  if (opts.nullDefaultSupport) {
+    for (let index in fields.bindings) {
+      if (fields.bindings[index] === "") {
+        fields.bindings[index] = null
+      }
+    }
+  }
   // check for arrays in the data
   let updated: string[] = []
   for (let i = 0; i < variables.length; i++) {
@@ -65,14 +65,33 @@ export async function fetch(opts: { enrich: boolean } = { enrich: true }) {
   return updateSchemas(queries)
 }
 
+export async function enrichArrayContext(
+  fields: any[],
+  inputs = {}
+): Promise<any[]> {
+  const map: Record<string, any> = {}
+  for (let index in fields) {
+    map[index] = fields[index]
+  }
+  const output = await enrichContext(map, inputs)
+  const outputArray: any[] = []
+  for (let [key, value] of Object.entries(output)) {
+    outputArray[parseInt(key)] = value
+  }
+  return outputArray
+}
+
 export async function enrichContext(
   fields: Record<string, any>,
   inputs = {}
 ): Promise<Record<string, any>> {
-  const enrichedQuery: Record<string, any> = Array.isArray(fields) ? [] : {}
+  const enrichedQuery: Record<string, any> = {}
   if (!fields || !inputs) {
     return enrichedQuery
   }
+  if (Array.isArray(fields)) {
+    return enrichArrayContext(fields, inputs)
+  }
   const env = await getEnvironmentVariables()
   const parameters = { ...inputs, env }
   // enrich the fields with dynamic parameters
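A hedged usage sketch for the new enrichArrayContext helper above (not part of the diff; the values are invented): it preserves the positional order of SQL bindings while delegating to the existing map-based enrichContext.

// Illustrative bindings and parameters only.
const variables = ["{{ bindingName }}", "{{ bindingNumber }}"]
const parameters = { bindingName: "testing", bindingNumber: "42" }

// await sdk.queries.enrichArrayContext(variables, parameters)
//   => ["testing", "42"]   (same order as the input array)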
@@ -8,7 +8,7 @@ import {
 import { Expectations, TestAPI } from "./base"
 
 export class QueryAPI extends TestAPI {
-  create = async (body: Query): Promise<Query> => {
+  save = async (body: Query): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body })
  }
 
@@ -1,21 +1,20 @@
-import { Datasource, QuerySchema, Row } from "@budibase/types"
+import { Datasource, Row, Query } from "@budibase/types"
 
 export type WorkerCallback = (error: any, response?: any) => void
 
-export interface QueryEvent {
+export interface QueryEvent
+  extends Omit<Query, "datasourceId" | "name" | "parameters" | "readable"> {
   appId?: string
   datasource: Datasource
-  queryVerb: string
-  fields: { [key: string]: any }
-  parameters: { [key: string]: unknown }
   pagination?: any
-  transformer: any
   queryId?: string
   environmentVariables?: Record<string, string>
+  parameters: QueryEventParameters
   ctx?: any
-  schema?: Record<string, QuerySchema | string>
 }
 
+export type QueryEventParameters = Record<string, string | null>
+
 export interface QueryResponse {
   rows: Row[]
   keys: string[]
@@ -26,10 +26,11 @@ class QueryRunner {
   fields: any
   parameters: any
   pagination: any
-  transformer: string
+  transformer: string | null
   cachedVariables: any[]
   ctx: any
   queryResponse: any
+  nullDefaultSupport: boolean
   noRecursiveQuery: boolean
   hasRerun: boolean
   hasRefreshedOAuth: boolean
@@ -45,6 +46,7 @@ class QueryRunner {
     this.transformer = input.transformer
     this.queryId = input.queryId!
     this.schema = input.schema
+    this.nullDefaultSupport = !!input.nullDefaultSupport
     this.noRecursiveQuery = flags.noRecursiveQuery
     this.cachedVariables = []
     // Additional context items for enrichment
@@ -59,7 +61,14 @@ class QueryRunner {
   }
 
   async execute(): Promise<QueryResponse> {
-    let { datasource, fields, queryVerb, transformer, schema } = this
+    let {
+      datasource,
+      fields,
+      queryVerb,
+      transformer,
+      schema,
+      nullDefaultSupport,
+    } = this
     let datasourceClone = cloneDeep(datasource)
     let fieldsClone = cloneDeep(fields)
 
@@ -100,10 +109,12 @@ class QueryRunner {
       )
     }
 
-    let query
+    let query: Record<string, any>
     // handle SQL injections by interpolating the variables
     if (isSQL(datasourceClone)) {
-      query = await interpolateSQL(fieldsClone, enrichedContext, integration)
+      query = await interpolateSQL(fieldsClone, enrichedContext, integration, {
+        nullDefaultSupport,
+      })
     } else {
       query = await sdk.queries.enrichContext(fieldsClone, enrichedContext)
     }
@@ -137,7 +148,9 @@ class QueryRunner {
        data: rows,
        params: enrichedParameters,
      }
-      rows = vm.withContext(ctx, () => vm.execute(transformer))
+      if (transformer != null) {
+        rows = vm.withContext(ctx, () => vm.execute(transformer!))
+      }
    }

    // if the request fails we retry once, invalidating the cached value
@@ -191,13 +204,15 @@ class QueryRunner {
     })
     return new QueryRunner(
       {
-        datasource,
+        schema: query.schema,
         queryVerb: query.queryVerb,
         fields: query.fields,
-        parameters,
         transformer: query.transformer,
-        queryId,
+        nullDefaultSupport: query.nullDefaultSupport,
         ctx: this.ctx,
+        parameters,
+        datasource,
+        queryId,
       },
       { noRecursiveQuery: true }
     ).execute()
@@ -15,6 +15,8 @@ export interface Query extends Document {
   schema: Record<string, QuerySchema | string>
   readable: boolean
   queryVerb: string
+  // flag to state whether the default bindings are empty strings (old behaviour) or null
+  nullDefaultSupport?: boolean
 }
 
 export interface QueryPreview extends Omit<Query, "_id"> {
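For reference (not part of the diff), a saved query document that satisfies the extended interface could now look roughly like this; all field values and the datasource id below are illustrative only:

// Shape sketch only - values are invented, not taken from the diff.
const savedQuery = {
  datasourceId: "datasource_plus_abc123", // illustrative id
  name: "Insert row",
  queryVerb: "create",
  fields: { sql: "INSERT INTO test_table (name) VALUES ({{ bindingName }})" },
  parameters: [{ name: "bindingName", default: "" }],
  transformer: "return data",
  schema: {},
  readable: true,
  nullDefaultSupport: true, // new flag: unset bindings resolve to null rather than ""
}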