Merge remote-tracking branch 'origin/master' into feature/form-screen-template

Dean 2024-02-23 11:32:16 +00:00
commit ea978923cb
21 changed files with 423 additions and 277 deletions

View File

@ -1,5 +1,5 @@
{
"version": "2.20.8",
"version": "2.20.10",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit a851eeacabfaad8bff6e781f5e5a62063cbc31f3
Subproject commit ab324e35d855012bd0f49caa53c6dd765223c6fa

View File

@ -60,6 +60,7 @@
let authConfigId
let dynamicVariables, addVariableModal, varBinding, globalDynamicBindings
let restBindings = getRestBindings()
let nestedSchemaFields = {}
$: staticVariables = datasource?.config?.staticVariables || {}
@ -160,6 +161,7 @@
newQuery.fields.authConfigId = authConfigId
newQuery.fields.disabledHeaders = restUtils.flipHeaderState(enabledHeaders)
newQuery.schema = schema || {}
newQuery.nestedSchemaFields = nestedSchemaFields || {}
return newQuery
}
@ -238,6 +240,7 @@
}
}
schema = response.schema
nestedSchemaFields = response.nestedSchemaFields
notifications.success("Request sent successfully")
}
} catch (error) {

View File

@ -10,13 +10,18 @@
$redirect("../")
}
if ($admin?.checklist?.branding) {
if ($admin?.cloud && $admin?.checklist?.branding) {
let url = new URL(window.location.href)
let hostname = url.hostname
let parts = hostname.split(".")
let tenantId = parts[0]
let newTenantId = parts[0]
let domain = parts.slice(-2).join(".")
CookieUtils.setCookie("tenantId", tenantId, domain)
let existingTenantId = CookieUtils.getCookie("tenantId")
if (!existingTenantId || existingTenantId !== newTenantId) {
CookieUtils.setCookie("tenantId", newTenantId, domain)
}
}
if (

View File

@ -76,17 +76,7 @@ export function createQueriesStore() {
}
const preview = async query => {
const parameters = query.parameters.reduce(
(acc, next) => ({
...acc,
[next.name]: next.default,
}),
{}
)
const result = await API.previewQuery({
...query,
parameters,
})
const result = await API.previewQuery(query)
// Assume all the fields are strings and create a basic schema from the
// unique fields returned by the server
const schema = {}

View File

@ -1,13 +1,20 @@
import { getDefinition, getDefinitions } from "../../integrations"
import { BBContext } from "@budibase/types"
import { SourceName, UserCtx } from "@budibase/types"
export async function fetch(ctx: BBContext) {
ctx.status = 200
ctx.body = await getDefinitions()
const DISABLED_EXTERNAL_INTEGRATIONS = [SourceName.AIRTABLE]
export async function fetch(ctx: UserCtx) {
const definitions = await getDefinitions()
for (let disabledIntegration of DISABLED_EXTERNAL_INTEGRATIONS) {
delete definitions[disabledIntegration]
}
ctx.body = definitions
}
export async function find(ctx: BBContext) {
const def = await getDefinition(ctx.params.type)
ctx.body = def
ctx.status = 200
export async function find(ctx: UserCtx) {
const sourceType = ctx.params?.type
if (DISABLED_EXTERNAL_INTEGRATIONS.indexOf(sourceType) !== -1) {
ctx.throw(400, `Invalid source type - ${sourceType} is not supported.`)
}
ctx.body = await getDefinition(ctx.params.type)
}
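The net effect of this change: Airtable is stripped from the definitions returned by fetch, and requesting it by type is rejected outright. A minimal sketch of that filtering, using a stand-in definitions map in place of the real result of getDefinitions():

// Sketch only - illustrates the disabled-integration filtering, not the real route handlers.
import { SourceName } from "@budibase/types"

const DISABLED_EXTERNAL_INTEGRATIONS = [SourceName.AIRTABLE]

// stand-in for the map returned by getDefinitions()
const definitions: Partial<Record<SourceName, { friendlyName: string }>> = {
  [SourceName.POSTGRES]: { friendlyName: "PostgreSQL" },
  [SourceName.AIRTABLE]: { friendlyName: "Airtable" },
}

for (const disabled of DISABLED_EXTERNAL_INTEGRATIONS) {
  delete definitions[disabled]
}
// definitions now only lists POSTGRES; a find() for "airtable" hits the
// indexOf guard above and throws a 400 instead.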

View File

@ -20,6 +20,7 @@ import {
type ExecuteQueryRequest,
type ExecuteQueryResponse,
type Row,
QueryParameter,
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
@ -118,6 +119,21 @@ function getAuthConfig(ctx: UserCtx) {
return authConfigCtx
}
function enrichParameters(
queryParameters: QueryParameter[],
requestParameters: { [key: string]: string } = {}
): {
[key: string]: string
} {
// make sure parameters are fully enriched with defaults
for (let parameter of queryParameters) {
if (!requestParameters[parameter.name]) {
requestParameters[parameter.name] = parameter.default
}
}
return requestParameters
}
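enrichParameters is now the single place where parameter defaults are applied: preview passes only the query's own parameters, while execute (further down) layers the request body's parameters over the defaults. A small sketch of the behaviour, with made-up parameter names:

// Sketch: defaults fill in any parameter the caller did not supply.
const queryParameters: QueryParameter[] = [
  { name: "limit", default: "10" },
  { name: "offset", default: "0" },
]
const enriched = enrichParameters(queryParameters, { limit: "25" })
// enriched -> { limit: "25", offset: "0" }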
export async function preview(ctx: UserCtx) {
const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
ctx.request.body.datasourceId
@ -142,6 +158,68 @@ export async function preview(ctx: UserCtx) {
const authConfigCtx: any = getAuthConfig(ctx)
function getFieldMetadata(field: any, key: string): QuerySchema {
const makeQuerySchema = (
type: FieldType,
name: string,
subtype?: string
): QuerySchema => ({
type,
name,
subtype,
})
// Because custom queries have no fixed schema, we dynamically determine the schema,
// however types cannot be determined from null. We have no 'unknown' type, so we default to string.
let type = typeof field,
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
if (field != null)
switch (type) {
case "boolean":
fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
break
case "object":
if (field instanceof Date) {
fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
} else if (Array.isArray(field)) {
if (field.some(item => JsonUtils.hasSchema(item))) {
fieldMetadata = makeQuerySchema(
FieldType.JSON,
key,
JsonFieldSubType.ARRAY
)
} else {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
}
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}
break
case "number":
fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
break
}
return fieldMetadata
}
function buildNestedSchema(
nestedSchemaFields: {
[key: string]: Record<string, string | QuerySchema>
},
key: string,
fieldArray: any[]
) {
let schema: { [key: string]: any } = {}
// build the schema by aggregating all row objects in the array
for (const item of fieldArray) {
if (JsonUtils.hasSchema(item)) {
for (const [key, value] of Object.entries(item)) {
schema[key] = getFieldMetadata(value, key)
}
}
}
nestedSchemaFields[key] = schema
}
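getFieldMetadata maps a sample value to a QuerySchema entry (null and anything unrecognised fall back to string), and buildNestedSchema aggregates every row object in the array rather than trusting only the first element. A rough sketch of the resulting shape, with illustrative field names:

// Sketch of the aggregation produced by the helpers above, not the controller itself.
const nestedSchemaFields: {
  [key: string]: Record<string, string | QuerySchema>
} = {}
buildNestedSchema(nestedSchemaFields, "contacts", [
  { address: "123 Lane" },
  { lat: 54.59, isActive: false },
])
// nestedSchemaFields.contacts ->
//   address:  { type: "string",  name: "address" }
//   lat:      { type: "number",  name: "lat" }
//   isActive: { type: "boolean", name: "isActive" }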
function getSchemaFields(
rows: any[],
keys: string[]
@ -155,51 +233,16 @@ export async function preview(ctx: UserCtx) {
const nestedSchemaFields: {
[key: string]: Record<string, string | QuerySchema>
} = {}
const makeQuerySchema = (
type: FieldType,
name: string,
subtype?: string
): QuerySchema => ({
type,
name,
subtype,
})
if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) {
const field = rows[0][key]
let type = typeof field,
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
if (field)
switch (type) {
case "boolean":
fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
break
case "object":
if (field instanceof Date) {
fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
} else if (Array.isArray(field)) {
if (JsonUtils.hasSchema(field[0])) {
fieldMetadata = makeQuerySchema(
FieldType.JSON,
key,
JsonFieldSubType.ARRAY
)
} else {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
}
nestedSchemaFields[key] = getSchemaFields(
field,
Object.keys(field[0])
).previewSchema
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}
break
case "number":
fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
break
}
for (let key of new Set(keys)) {
const fieldMetadata = getFieldMetadata(rows[0][key], key)
previewSchema[key] = fieldMetadata
if (
fieldMetadata.type === FieldType.JSON &&
fieldMetadata.subtype === JsonFieldSubType.ARRAY
) {
buildNestedSchema(nestedSchemaFields, key, rows[0][key])
}
}
}
return { previewSchema, nestedSchemaFields }
@ -211,7 +254,7 @@ export async function preview(ctx: UserCtx) {
datasource,
queryVerb,
fields,
parameters,
parameters: enrichParameters(parameters),
transformer,
queryId,
schema,
@ -266,15 +309,6 @@ async function execute(
if (!opts.isAutomation) {
authConfigCtx = getAuthConfig(ctx)
}
const enrichedParameters = ctx.request.body.parameters || {}
// make sure parameters are fully enriched with defaults
if (query && query.parameters) {
for (let parameter of query.parameters) {
if (!enrichedParameters[parameter.name]) {
enrichedParameters[parameter.name] = parameter.default
}
}
}
// call the relevant CRUD method on the integration class
try {
@ -284,7 +318,10 @@ async function execute(
queryVerb: query.queryVerb,
fields: query.fields,
pagination: ctx.request.body.pagination,
parameters: enrichedParameters,
parameters: enrichParameters(
query.parameters,
ctx.request.body.parameters
),
transformer: query.transformer,
queryId: ctx.params.queryId,
// have to pass down to the thread runner - can't put into context now

View File

@ -3,11 +3,10 @@ import Joi from "joi"
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
export function queryValidation() {
return Joi.object({
_id: Joi.string(),
_rev: Joi.string(),
name: Joi.string().required(),
function baseQueryValidation() {
return {
_id: OPTIONAL_STRING,
_rev: OPTIONAL_STRING,
fields: Joi.object().required(),
datasourceId: Joi.string().required(),
readable: Joi.boolean(),
@ -17,11 +16,19 @@ export function queryValidation() {
default: Joi.string().allow(""),
})
),
queryVerb: Joi.string().allow().required(),
queryVerb: Joi.string().required(),
extra: Joi.object().optional(),
schema: Joi.object({}).required().unknown(true),
transformer: OPTIONAL_STRING,
flags: Joi.object().optional(),
queryId: OPTIONAL_STRING,
}
}
export function queryValidation() {
return Joi.object({
...baseQueryValidation(),
name: Joi.string().required(),
}).unknown(true)
}
@ -32,19 +39,10 @@ export function generateQueryValidation() {
export function generateQueryPreviewValidation() {
// prettier-ignore
return auth.joiValidator.body(Joi.object({
_id: OPTIONAL_STRING,
_rev: OPTIONAL_STRING,
readable: Joi.boolean().optional(),
fields: Joi.object().required(),
queryVerb: Joi.string().required(),
name: OPTIONAL_STRING,
flags: Joi.object().optional(),
schema: Joi.object().optional(),
extra: Joi.object().optional(),
datasourceId: Joi.string().required(),
transformer: OPTIONAL_STRING,
parameters: Joi.object({}).required().unknown(true),
queryId: OPTIONAL_STRING,
}).unknown(true))
return auth.joiValidator.body(
Joi.object({
...baseQueryValidation(),
name: OPTIONAL_STRING,
}).unknown(true)
)
}
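With the shared fields pulled into baseQueryValidation(), the only difference between saving and previewing a query is whether name is required. A quick sketch of how the two schemas treat the same payload (calling Joi's validate() directly, purely for illustration):

// Sketch: same payload, different name requirement.
const payload = {
  datasourceId: "ds_123",
  fields: {},
  queryVerb: "read",
  parameters: [],
  schema: {},
}
Joi.object({ ...baseQueryValidation(), name: Joi.string().required() })
  .unknown(true)
  .validate(payload).error // "name" is required when saving a query
Joi.object({ ...baseQueryValidation(), name: OPTIONAL_STRING })
  .unknown(true)
  .validate(payload).error // undefined - preview accepts a missing name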

View File

@ -8,8 +8,8 @@ import {
paramResource,
} from "../../middleware/resourceId"
import {
generateQueryPreviewValidation,
generateQueryValidation,
generateQueryPreviewValidation,
} from "../controllers/query/validation"
const { BUILDER, PermissionType, PermissionLevel } = permissions

View File

@ -7,6 +7,7 @@ import sdk from "../../../sdk"
import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview } from "@budibase/types"
tk.freeze(mocks.date.MOCK_DATE)
@ -63,14 +64,17 @@ describe("/datasources", () => {
datasource: any,
fields: { path: string; queryString: string }
) {
return config.previewQuery(
request,
config,
datasource,
const queryPreview: QueryPreview = {
fields,
undefined,
""
)
datasourceId: datasource._id,
parameters: [],
transformer: null,
queryVerb: "read",
name: datasource.name,
schema: {},
readable: true,
}
return config.api.query.previewQuery(queryPreview)
}
it("should invalidate changed or removed variables", async () => {

View File

@ -14,6 +14,7 @@ jest.mock("pg", () => {
import * as setup from "./utilities"
import { mocks } from "@budibase/backend-core/tests"
import { env, events } from "@budibase/backend-core"
import { QueryPreview } from "@budibase/types"
const structures = setup.structures
@ -120,16 +121,19 @@ describe("/api/env/variables", () => {
.expect(200)
expect(response.body.datasource._id).toBeDefined()
const query = {
const queryPreview: QueryPreview = {
datasourceId: response.body.datasource._id,
parameters: {},
parameters: [],
fields: {},
queryVerb: "read",
name: response.body.datasource.name,
transformer: null,
schema: {},
readable: true,
}
const res = await request
.post(`/api/queries/preview`)
.send(query)
.send(queryPreview)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
@ -139,7 +143,7 @@ describe("/api/env/variables", () => {
delete response.body.datasource.config
expect(events.query.previewed).toBeCalledWith(
response.body.datasource,
query
queryPreview
)
expect(pg.Client).toHaveBeenCalledWith({ password: "test", ssl: undefined })
})

View File

@ -1,5 +1,7 @@
import tk from "timekeeper"
const pg = require("pg")
// Mock out postgres for this
jest.mock("pg")
jest.mock("node-fetch")
@ -22,7 +24,13 @@ import { checkCacheForDynamicVariable } from "../../../../threads/utils"
const { basicQuery, basicDatasource } = setup.structures
import { events, db as dbCore } from "@budibase/backend-core"
import { Datasource, Query, SourceName } from "@budibase/types"
import {
Datasource,
Query,
SourceName,
QueryPreview,
QueryParameter,
} from "@budibase/types"
tk.freeze(Date.now())
@ -218,28 +226,26 @@ describe("/queries", () => {
describe("preview", () => {
it("should be able to preview the query", async () => {
const query = {
const queryPreview: QueryPreview = {
datasourceId: datasource._id,
parameters: {},
fields: {},
queryVerb: "read",
name: datasource.name,
fields: {},
parameters: [],
transformer: "return data",
name: datasource.name!,
schema: {},
readable: true,
}
const res = await request
.post(`/api/queries/preview`)
.send(query)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const responseBody = await config.api.query.previewQuery(queryPreview)
// these responses come from the mock
expect(res.body.schema).toEqual({
expect(responseBody.schema).toEqual({
a: { type: "string", name: "a" },
b: { type: "number", name: "b" },
})
expect(res.body.rows.length).toEqual(1)
expect(responseBody.rows.length).toEqual(1)
expect(events.query.previewed).toBeCalledTimes(1)
delete datasource.config
expect(events.query.previewed).toBeCalledWith(datasource, query)
expect(events.query.previewed).toBeCalledWith(datasource, queryPreview)
})
it("should apply authorization to endpoint", async () => {
@ -249,6 +255,128 @@ describe("/queries", () => {
url: `/api/queries/preview`,
})
})
it("should not error when trying to generate a nested schema for an empty array", async () => {
const queryPreview: QueryPreview = {
datasourceId: datasource._id,
parameters: [],
fields: {},
queryVerb: "read",
name: datasource.name!,
transformer: "return data",
schema: {},
readable: true,
}
const rows = [
{
contacts: [],
},
]
pg.queryMock.mockImplementation(() => ({
rows,
}))
const responseBody = await config.api.query.previewQuery(queryPreview)
expect(responseBody).toEqual({
nestedSchemaFields: {},
rows,
schema: {
contacts: { type: "array", name: "contacts" },
},
})
expect(responseBody.rows.length).toEqual(1)
delete datasource.config
})
it("should generate a nested schema based on all the nested items", async () => {
const queryPreview: QueryPreview = {
datasourceId: datasource._id,
parameters: [],
fields: {},
queryVerb: "read",
name: datasource.name!,
transformer: "return data",
schema: {},
readable: true,
}
const rows = [
{
contacts: [
{
address: "123 Lane",
},
{
address: "456 Drive",
},
{
postcode: "BT1 12N",
lat: 54.59,
long: -5.92,
},
{
city: "Belfast",
},
{
address: "789 Avenue",
phoneNumber: "0800-999-5555",
},
{
name: "Name",
isActive: false,
},
],
},
]
pg.queryMock.mockImplementation(() => ({
rows,
}))
const responseBody = await config.api.query.previewQuery(queryPreview)
expect(responseBody).toEqual({
nestedSchemaFields: {
contacts: {
address: {
type: "string",
name: "address",
},
postcode: {
type: "string",
name: "postcode",
},
lat: {
type: "number",
name: "lat",
},
long: {
type: "number",
name: "long",
},
city: {
type: "string",
name: "city",
},
phoneNumber: {
type: "string",
name: "phoneNumber",
},
name: {
type: "string",
name: "name",
},
isActive: {
type: "boolean",
name: "isActive",
},
},
},
rows,
schema: {
contacts: { type: "json", name: "contacts", subtype: "array" },
},
})
expect(responseBody.rows.length).toEqual(1)
delete datasource.config
})
})
describe("execute", () => {
@ -283,7 +411,17 @@ describe("/queries", () => {
describe("variables", () => {
async function preview(datasource: Datasource, fields: any) {
return config.previewQuery(request, config, datasource, fields, undefined)
const queryPreview: QueryPreview = {
datasourceId: datasource._id!,
parameters: [],
fields,
queryVerb: "read",
name: datasource.name!,
transformer: "return data",
schema: {},
readable: true,
}
return await config.api.query.previewQuery(queryPreview)
}
it("should work with static variables", async () => {
@ -293,31 +431,31 @@ describe("/queries", () => {
variable2: "1",
},
})
const res = await preview(datasource, {
const responseBody = await preview(datasource, {
path: "www.{{ variable }}.com",
queryString: "test={{ variable2 }}",
})
// these responses come from the mock
expect(res.body.schema).toEqual({
expect(responseBody.schema).toEqual({
opts: { type: "json", name: "opts" },
url: { type: "string", name: "url" },
value: { type: "string", name: "value" },
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=1")
expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
})
it("should work with dynamic variables", async () => {
const { datasource } = await config.dynamicVariableDatasource()
const res = await preview(datasource, {
const responseBody = await preview(datasource, {
path: "www.google.com",
queryString: "test={{ variable3 }}",
})
expect(res.body.schema).toEqual({
expect(responseBody.schema).toEqual({
opts: { type: "json", name: "opts" },
url: { type: "string", name: "url" },
value: { type: "string", name: "value" },
})
expect(res.body.rows[0].url).toContain("doctype%20html")
expect(responseBody.rows[0].url).toContain("doctype%20html")
})
it("check that it automatically retries on fail with cached dynamics", async () => {
@ -331,16 +469,16 @@ describe("/queries", () => {
// check its in cache
const contents = await checkCacheForDynamicVariable(base._id, "variable3")
expect(contents.rows.length).toEqual(1)
const res = await preview(datasource, {
const responseBody = await preview(datasource, {
path: "www.failonce.com",
queryString: "test={{ variable3 }}",
})
expect(res.body.schema).toEqual({
expect(responseBody.schema).toEqual({
fails: { type: "number", name: "fails" },
opts: { type: "json", name: "opts" },
url: { type: "string", name: "url" },
})
expect(res.body.rows[0].fails).toEqual(1)
expect(responseBody.rows[0].fails).toEqual(1)
})
it("deletes variables when linked query is deleted", async () => {
@ -371,24 +509,37 @@ describe("/queries", () => {
async function previewGet(
datasource: Datasource,
fields: any,
params: any
params: QueryParameter[]
) {
return config.previewQuery(request, config, datasource, fields, params)
const queryPreview: QueryPreview = {
datasourceId: datasource._id!,
parameters: params,
fields,
queryVerb: "read",
name: datasource.name!,
transformer: "return data",
schema: {},
readable: true,
}
return await config.api.query.previewQuery(queryPreview)
}
async function previewPost(
datasource: Datasource,
fields: any,
params: any
params: QueryParameter[]
) {
return config.previewQuery(
request,
config,
datasource,
const queryPreview: QueryPreview = {
datasourceId: datasource._id!,
parameters: params,
fields,
params,
"create"
)
queryVerb: "create",
name: datasource.name!,
transformer: null,
schema: {},
readable: false,
}
return await config.api.query.previewQuery(queryPreview)
}
it("should parse global and query level header mappings", async () => {
@ -400,7 +551,7 @@ describe("/queries", () => {
emailHdr: "{{[user].[email]}}",
},
})
const res = await previewGet(
const responseBody = await previewGet(
datasource,
{
path: "www.google.com",
@ -410,17 +561,17 @@ describe("/queries", () => {
secondHdr: "1234",
},
},
undefined
[]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
expect(parsedRequest.opts.headers).toEqual({
test: "headerVal",
emailHdr: userDetails.email,
queryHdr: userDetails.firstName,
secondHdr: "1234",
})
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?email=" + userDetails.email.replace("@", "%40")
)
})
@ -430,21 +581,21 @@ describe("/queries", () => {
const datasource = await config.restDatasource()
const res = await previewGet(
const responseBody = await previewGet(
datasource,
{
path: "www.google.com",
queryString:
"test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
},
{
myEmail: "{{[user].[email]}}",
myName: "{{[user].[firstName]}}",
testParam: "1234",
}
[
{ name: "myEmail", default: "{{[user].[email]}}" },
{ name: "myName", default: "{{[user].[firstName]}}" },
{ name: "testParam", default: "1234" },
]
)
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?test=" +
userDetails.email.replace("@", "%40") +
"&testName=" +
@ -457,7 +608,7 @@ describe("/queries", () => {
const userDetails = config.getUserDetails()
const datasource = await config.restDatasource()
const res = await previewPost(
const responseBody = await previewPost(
datasource,
{
path: "www.google.com",
@ -466,16 +617,14 @@ describe("/queries", () => {
"This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
bodyType: "text",
},
{
testParam: "1234",
}
[{ name: "testParam", default: "1234" }]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
expect(parsedRequest.opts.body).toEqual(
`This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
)
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?testParam=1234"
)
})
@ -484,7 +633,7 @@ describe("/queries", () => {
const userDetails = config.getUserDetails()
const datasource = await config.restDatasource()
const res = await previewPost(
const responseBody = await previewPost(
datasource,
{
path: "www.google.com",
@ -493,16 +642,16 @@ describe("/queries", () => {
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
bodyType: "json",
},
{
testParam: "1234",
userRef: "{{[user].[firstName]}}",
}
[
{ name: "testParam", default: "1234" },
{ name: "userRef", default: "{{[user].[firstName]}}" },
]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
expect(parsedRequest.opts.body).toEqual(test)
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?testParam=1234"
)
})
@ -511,7 +660,7 @@ describe("/queries", () => {
const userDetails = config.getUserDetails()
const datasource = await config.restDatasource()
const res = await previewPost(
const responseBody = await previewPost(
datasource,
{
path: "www.google.com",
@ -521,17 +670,17 @@ describe("/queries", () => {
"<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
bodyType: "xml",
},
{
testParam: "1234",
userId: "{{[user].[firstName]}}",
}
[
{ name: "testParam", default: "1234" },
{ name: "userId", default: "{{[user].[firstName]}}" },
]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>${userDetails.firstName}</ref> <somestring>testing</somestring> </note>`
expect(parsedRequest.opts.body).toEqual(test)
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?testParam=1234"
)
})
@ -540,7 +689,7 @@ describe("/queries", () => {
const userDetails = config.getUserDetails()
const datasource = await config.restDatasource()
const res = await previewPost(
const responseBody = await previewPost(
datasource,
{
path: "www.google.com",
@ -549,13 +698,13 @@ describe("/queries", () => {
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
bodyType: "form",
},
{
testParam: "1234",
userRef: "{{[user].[firstName]}}",
}
[
{ name: "testParam", default: "1234" },
{ name: "userRef", default: "{{[user].[firstName]}}" },
]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
const emailData = parsedRequest.opts.body._streams[1]
expect(emailData).toEqual(userDetails.email)
@ -566,7 +715,7 @@ describe("/queries", () => {
const userRef = parsedRequest.opts.body._streams[7]
expect(userRef).toEqual(userDetails.firstName)
expect(res.body.rows[0].url).toEqual(
expect(responseBody.rows[0].url).toEqual(
"http://www.google.com?testParam=1234"
)
})
@ -575,7 +724,7 @@ describe("/queries", () => {
const userDetails = config.getUserDetails()
const datasource = await config.restDatasource()
const res = await previewPost(
const responseBody = await previewPost(
datasource,
{
path: "www.google.com",
@ -584,12 +733,12 @@ describe("/queries", () => {
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
bodyType: "encoded",
},
{
testParam: "1234",
userRef: "{{[user].[firstName]}}",
}
[
{ name: "testParam", default: "1234" },
{ name: "userRef", default: "{{[user].[firstName]}}" },
]
)
const parsedRequest = JSON.parse(res.body.extra.raw)
const parsedRequest = JSON.parse(responseBody.extra.raw)
expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
expect(parsedRequest.opts.body.queryCode).toEqual("1234")

View File

@ -19,7 +19,7 @@ function parseIntSafe(number?: string) {
}
const DEFAULTS = {
QUERY_THREAD_TIMEOUT: 10000,
QUERY_THREAD_TIMEOUT: 15000,
AUTOMATION_THREAD_TIMEOUT: 12000,
AUTOMATION_SYNC_TIMEOUT: 120000,
AUTOMATION_MAX_ITERATIONS: 200,

View File

@ -7,6 +7,7 @@ import {
ConnectionInfo,
} from "@budibase/types"
import {
Document,
MongoClient,
ObjectId,
Filter,
@ -15,6 +16,10 @@ import {
UpdateOptions,
OperationOptions,
MongoClientOptions,
DeleteResult,
UpdateResult,
InsertOneResult,
InsertManyResult,
} from "mongodb"
import environment from "../environment"
@ -458,7 +463,9 @@ class MongoIntegration implements IntegrationBase {
}
}
async create(query: MongoDBQuery) {
async create(
query: MongoDBQuery
): Promise<InsertOneResult | InsertManyResult> {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -488,7 +495,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async read(query: MongoDBQuery) {
async read(query: MongoDBQuery): Promise<NonNullable<unknown>> {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -504,7 +511,7 @@ class MongoIntegration implements IntegrationBase {
}
}
case "findOne": {
return await collection.findOne(json)
return (await collection.findOne(json)) || {}
}
case "findOneAndUpdate": {
if (typeof query.json === "string") {
@ -544,7 +551,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async update(query: MongoDBQuery) {
async update(query: MongoDBQuery): Promise<UpdateResult> {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -588,7 +595,7 @@ class MongoIntegration implements IntegrationBase {
}
}
async delete(query: MongoDBQuery) {
async delete(query: MongoDBQuery): Promise<DeleteResult> {
try {
await this.connect()
const db = this.client.db(this.config.db)
@ -633,7 +640,7 @@ class MongoIntegration implements IntegrationBase {
json: object
steps: any[]
extra: { [key: string]: string }
}) {
}): Promise<Document[]> {
try {
await this.connect()
const db = this.client.db(this.config.db)
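Typing the Mongo CRUD methods makes the driver's result shapes explicit; create in particular can return either an InsertOneResult or an InsertManyResult depending on the action, so callers narrow the union before touching action-specific fields. A hedged sketch, assuming an already-configured integration instance named mongo and a prepared MongoDBQuery called query:

// Sketch only: narrowing the create() result union.
const result = await mongo.create(query)
if ("insertedId" in result) {
  // InsertOneResult - a single document was written
  console.log("created one document:", result.insertedId)
} else {
  // InsertManyResult - several documents were written
  console.log("created", result.insertedCount, "documents")
}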

View File

@ -866,28 +866,6 @@ export default class TestConfiguration {
// QUERY
async previewQuery(
request: any,
config: any,
datasource: any,
fields: any,
params: any,
verb?: string
) {
return request
.post(`/api/queries/preview`)
.send({
datasourceId: datasource._id,
parameters: params || {},
fields,
queryVerb: verb || "read",
name: datasource.name,
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
}
async createQuery(config?: any) {
if (!this.datasource && !config) {
throw "No datasource created for query."

View File

@ -1,6 +1,7 @@
import TestConfiguration from "../TestConfiguration"
import {
Query,
QueryPreview,
type ExecuteQueryRequest,
type ExecuteQueryResponse,
} from "@budibase/types"
@ -41,4 +42,19 @@ export class QueryAPI extends TestAPI {
return res.body
}
previewQuery = async (queryPreview: QueryPreview) => {
const res = await this.request
.post(`/api/queries/preview`)
.send(queryPreview)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body
}
}

View File

@ -366,7 +366,7 @@ export function basicDatasource(): { datasource: Datasource } {
export function basicQuery(datasourceId: string): Query {
return {
datasourceId: datasourceId,
datasourceId,
name: "New Query",
parameters: [],
fields: {},

View File

@ -7,10 +7,10 @@ export interface QueryEvent {
datasource: Datasource
queryVerb: string
fields: { [key: string]: any }
parameters: { [key: string]: any }
parameters: { [key: string]: unknown }
pagination?: any
transformer: any
queryId: string
queryId?: string
environmentVariables?: Record<string, string>
ctx?: any
schema?: Record<string, QuerySchema | string>

View File

@ -43,7 +43,7 @@ class QueryRunner {
this.parameters = input.parameters
this.pagination = input.pagination
this.transformer = input.transformer
this.queryId = input.queryId
this.queryId = input.queryId!
this.schema = input.schema
this.noRecursiveQuery = flags.noRecursiveQuery
this.cachedVariables = []

View File

@ -19,7 +19,7 @@ export interface Query extends Document {
}
export interface QueryPreview extends Omit<Query, "_id"> {
queryId: string
queryId?: string
}
export interface QueryParameter {

View File

@ -2235,13 +2235,6 @@
enabled "2.0.x"
kuler "^2.0.0"
"@datadog/native-appsec@6.0.0":
version "6.0.0"
resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-6.0.0.tgz#da753f8566ec5180ad9e83014cb44984b4bc878e"
integrity sha512-e7vH5usFoqov7FraPcA99fe80t2/qm4Cmno1T3iBhYlhyO6HD01ArDsCZ/sUvNIUR1ujxtbr8Z9WRGJ0qQ/FDA==
dependencies:
node-gyp-build "^3.9.0"
"@datadog/native-appsec@7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-7.0.0.tgz#a380174dd49aef2d9bb613a0ec8ead6dc7822095"
@ -9036,46 +9029,6 @@ dc-polyfill@^0.1.2:
resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.3.tgz#fe9eefc86813439dd46d6f9ad9582ec079c39720"
integrity sha512-Wyk5n/5KUj3GfVKV2jtDbtChC/Ff9fjKsBcg4ZtYW1yQe3DXNHcGURvmoxhqQdfOQ9TwyMjnfyv1lyYcOkFkFA==
dd-trace@5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/dd-trace/-/dd-trace-5.0.0.tgz#1e9848d6b6212ca67f8a3d62ce1f9ecd93fb5ebb"
integrity sha512-MmbM05l0qFeM73kDyyQAHWvyeZl2m6FYlv3hgtBU8GSpFmNu/33llyYp4TDpoEJ7hqd5LWT7mKKQFq8lRbTH3w==
dependencies:
"@datadog/native-appsec" "6.0.0"
"@datadog/native-iast-rewriter" "2.2.2"
"@datadog/native-iast-taint-tracking" "1.6.4"
"@datadog/native-metrics" "^2.0.0"
"@datadog/pprof" "5.0.0"
"@datadog/sketches-js" "^2.1.0"
"@opentelemetry/api" "^1.0.0"
"@opentelemetry/core" "^1.14.0"
crypto-randomuuid "^1.0.0"
dc-polyfill "^0.1.2"
ignore "^5.2.4"
import-in-the-middle "^1.7.1"
int64-buffer "^0.1.9"
ipaddr.js "^2.1.0"
istanbul-lib-coverage "3.2.0"
jest-docblock "^29.7.0"
koalas "^1.0.2"
limiter "1.1.5"
lodash.kebabcase "^4.1.1"
lodash.pick "^4.4.0"
lodash.sortby "^4.7.0"
lodash.uniq "^4.5.0"
lru-cache "^7.14.0"
methods "^1.1.2"
module-details-from-path "^1.0.3"
msgpack-lite "^0.1.26"
node-abort-controller "^3.1.1"
opentracing ">=0.12.1"
path-to-regexp "^0.1.2"
pprof-format "^2.0.7"
protobufjs "^7.2.5"
retry "^0.13.1"
semver "^7.5.4"
tlhunter-sorted-set "^0.1.0"
dd-trace@5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/dd-trace/-/dd-trace-5.2.0.tgz#6ca2d76ece95f08d98468d7782c22f24192afa53"
@ -12464,7 +12417,7 @@ import-from@^3.0.0:
dependencies:
resolve-from "^5.0.0"
import-in-the-middle@^1.7.1, import-in-the-middle@^1.7.3:
import-in-the-middle@^1.7.3:
version "1.7.3"
resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.3.tgz#ffa784cdd57a47d2b68d2e7dd33070ff06baee43"
integrity sha512-R2I11NRi0lI3jD2+qjqyVlVEahsejw7LDnYEbGb47QEFjczE3bZYsmWheCTQA+LFs2DzOQxR7Pms7naHW1V4bQ==
@ -14927,11 +14880,6 @@ lodash.isstring@^4.0.1:
resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==
lodash.kebabcase@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36"
integrity sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==
lodash.keys@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-4.2.0.tgz#a08602ac12e4fb83f91fc1fb7a360a4d9ba35205"
@ -14962,7 +14910,7 @@ lodash.once@^4.0.0:
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==
lodash.pick@^4.0.0, lodash.pick@^4.4.0:
lodash.pick@^4.0.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3"
integrity sha512-hXt6Ul/5yWjfklSGvLQl8vM//l3FtyHZeuelpzK6mm99pNvN9yTDruNZPEJZD1oWrqo+izBmB7oUfWgcCX7s4Q==