Merge branch 'master' into revert-13398-revert-13356-BUDI-8122/single-attachment-column-type
commit 615e27c798
@@ -49,7 +49,10 @@
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
},
{
label: "Attachment",
value: FIELDS.ATTACHMENT.type,
},
{
label: "User",
value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
@@ -1,25 +0,0 @@
const query = jest.fn(() => ({
rows: [
{
a: "string",
b: 1,
},
],
}))

class Client {
query = query
end = jest.fn(cb => {
if (cb) cb()
})
connect = jest.fn()
release = jest.fn()
}

const on = jest.fn()

module.exports = {
Client,
queryMock: query,
on,
}
@@ -42,12 +42,6 @@ if (fs.existsSync("../pro/src")) {

const config: Config.InitialOptions = {
projects: [
{
...baseConfig,
displayName: "sequential test",
testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
runner: "jest-serial-runner",
},
{
...baseConfig,
testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
@@ -60,6 +54,9 @@ const config: Config.InitialOptions = {
"!src/db/views/staticViews.*",
"!src/**/*.spec.{js,ts}",
"!src/tests/**/*.{js,ts}",
// The use of coverage in the JS runner breaks tests by inserting
// coverage functions into code that will run inside of the isolate.
"!src/jsRunner/**/*.{js,ts}",
],
coverageReporters: ["lcov", "json", "clover"],
}
@@ -143,7 +143,7 @@
"jest": "29.7.0",
"jest-openapi": "0.14.2",
"jest-runner": "29.7.0",
"jest-serial-runner": "1.2.1",
"nock": "13.5.4",
"nodemon": "2.0.15",
"openapi-typescript": "5.2.0",
"path-to-regexp": "6.2.0",
@@ -4,11 +4,9 @@ set -e
if [[ -n $CI ]]
then
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
echo "jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
# --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
echo "jest --coverage --maxWorkers=2 --forceExit $@"
jest --coverage --maxWorkers=2 --forceExit $@
fi
@@ -1,6 +1,6 @@
import { getQueryParams, getTableParams } from "../../db/utils"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { invalidateCachedVariable } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
import {
BuildSchemaFromSourceRequest,
@@ -121,7 +121,7 @@ async function invalidateVariables(
}
})
}
await invalidateDynamicVariables(toInvalidate)
await invalidateCachedVariable(toInvalidate)
}

export async function update(
@@ -2,7 +2,7 @@ import { generateQueryID } from "../../../db/utils"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
import { invalidateDynamicVariables } from "../../../threads/utils"
import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import sdk from "../../../sdk"
@@ -281,7 +281,6 @@ export async function preview(
return { previewSchema, nestedSchemaFields }
}

try {
const inputs: QueryEvent = {
appId: ctx.appId,
queryVerb: query.queryVerb,
@@ -300,7 +299,14 @@ export async function preview(
},
}

const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
let queryResponse: QueryResponse
try {
queryResponse = await Runner.run<QueryResponse>(inputs)
} catch (err: any) {
ctx.throw(400, err)
}

const { rows, keys, info, extra } = queryResponse
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)

// if existing schema, update to include any previous schema keys
@@ -321,9 +327,6 @@ export async function preview(
info,
extra,
}
} catch (err: any) {
ctx.throw(400, err)
}
}

async function execute(
@@ -416,7 +419,7 @@ const removeDynamicVariables = async (queryId: string) => {
const variablesToDelete = dynamicVariables!.filter(
(dv: any) => dv.queryId === queryId
)
await invalidateDynamicVariables(variablesToDelete)
await invalidateCachedVariable(variablesToDelete)
}
}
@@ -1,18 +1,16 @@
jest.mock("pg")
import * as setup from "./utilities"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../threads/utils"
import { getCachedVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"

import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview } from "@budibase/types"
import { QueryPreview, SourceName } from "@budibase/types"

tk.freeze(mocks.date.MOCK_DATE)

let { basicDatasource } = setup.structures
const pg = require("pg")

describe("/datasources", () => {
let request = setup.getRequest()
@@ -42,6 +40,23 @@ describe("/datasources", () => {
expect(res.body.errors).toEqual({})
expect(events.datasource.created).toHaveBeenCalledTimes(1)
})

it("should fail if the datasource is invalid", async () => {
await config.api.datasource.create(
{
name: "Test",
type: "test",
source: "invalid" as SourceName,
config: {},
},
{
status: 500,
body: {
message: "No datasource implementation found.",
},
}
)
})
})

describe("update", () => {
@@ -74,7 +89,7 @@ describe("/datasources", () => {
schema: {},
readable: true,
}
return config.api.query.previewQuery(queryPreview)
return config.api.query.preview(queryPreview)
}

it("should invalidate changed or removed variables", async () => {
@@ -85,10 +100,7 @@ describe("/datasources", () => {
queryString: "test={{ variable3 }}",
})
// check variables in cache
let contents = await checkCacheForDynamicVariable(
query._id!,
"variable3"
)
let contents = await getCachedVariable(query._id!, "variable3")
expect(contents.rows.length).toEqual(1)

// update the datasource to remove the variables
@@ -102,7 +114,7 @@ describe("/datasources", () => {
expect(res.body.errors).toBeUndefined()

// check variables no longer in cache
contents = await checkCacheForDynamicVariable(query._id!, "variable3")
contents = await getCachedVariable(query._id!, "variable3")
expect(contents).toBe(null)
})
})
@@ -149,35 +161,6 @@ describe("/datasources", () => {
})
})

describe("query", () => {
it("should be able to query a pg datasource", async () => {
const res = await request
.post(`/api/datasources/query`)
.send({
endpoint: {
datasourceId: datasource._id,
operation: "READ",
// table name below
entityId: "users",
},
resource: {
fields: ["users.name", "users.age"],
},
filters: {
string: {
name: "John",
},
},
})
.set(config.defaultHeaders())
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
})
})

describe("destroy", () => {
beforeAll(setupTest)
@ -1,12 +1,18 @@
|
|||
import { Datasource, Query, SourceName } from "@budibase/types"
|
||||
import {
|
||||
Datasource,
|
||||
Operation,
|
||||
Query,
|
||||
QueryPreview,
|
||||
SourceName,
|
||||
} from "@budibase/types"
|
||||
import * as setup from "../utilities"
|
||||
import {
|
||||
DatabaseName,
|
||||
getDatasource,
|
||||
rawQuery,
|
||||
} from "../../../../integrations/tests/utils"
|
||||
|
||||
jest.unmock("pg")
|
||||
import { Expectations } from "src/tests/utilities/api/base"
|
||||
import { events } from "@budibase/backend-core"
|
||||
|
||||
const createTableSQL: Record<string, string> = {
|
||||
[SourceName.POSTGRES]: `
|
||||
|
@ -47,7 +53,10 @@ describe.each(
|
|||
let rawDatasource: Datasource
|
||||
let datasource: Datasource
|
||||
|
||||
async function createQuery(query: Partial<Query>): Promise<Query> {
|
||||
async function createQuery(
|
||||
query: Partial<Query>,
|
||||
expectations?: Expectations
|
||||
): Promise<Query> {
|
||||
const defaultQuery: Query = {
|
||||
datasourceId: datasource._id!,
|
||||
name: "New Query",
|
||||
|
@ -58,28 +67,340 @@ describe.each(
|
|||
transformer: "return data",
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.save({ ...defaultQuery, ...query })
|
||||
return await config.api.query.save(
|
||||
{ ...defaultQuery, ...query },
|
||||
expectations
|
||||
)
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
rawDatasource = await dsProvider
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await rawQuery(rawDatasource, createTableSQL[datasource.source])
|
||||
await rawQuery(rawDatasource, insertSQL)
|
||||
rawDatasource = await dsProvider
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
|
||||
// The Datasource API does not return the password, but we need
|
||||
// it later to connect to the underlying database, so we fill it
|
||||
// back in here.
|
||||
datasource.config!.password = rawDatasource.config!.password
|
||||
|
||||
await rawQuery(datasource, createTableSQL[datasource.source])
|
||||
await rawQuery(datasource, insertSQL)
|
||||
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await rawQuery(rawDatasource, dropTableSQL)
|
||||
const ds = await config.api.datasource.get(datasource._id!)
|
||||
config.api.datasource.delete(ds)
|
||||
await rawQuery(datasource, dropTableSQL)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
describe("query admin", () => {
|
||||
describe("create", () => {
|
||||
it("should be able to create a query", async () => {
|
||||
const query = await createQuery({
|
||||
name: "New Query",
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
})
|
||||
|
||||
expect(query).toMatchObject({
|
||||
datasourceId: datasource._id!,
|
||||
name: "New Query",
|
||||
parameters: [],
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
})
|
||||
|
||||
expect(events.query.created).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.updated).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("update", () => {
|
||||
it("should be able to update a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
})
|
||||
|
||||
jest.clearAllMocks()
|
||||
|
||||
const updatedQuery = await config.api.query.save({
|
||||
...query,
|
||||
name: "Updated Query",
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = 1",
|
||||
},
|
||||
})
|
||||
|
||||
expect(updatedQuery).toMatchObject({
|
||||
datasourceId: datasource._id!,
|
||||
name: "Updated Query",
|
||||
parameters: [],
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table WHERE id = 1",
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(events.query.created).not.toHaveBeenCalled()
|
||||
expect(events.query.updated).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("delete", () => {
|
||||
it("should be able to delete a query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.query.delete(query)
|
||||
await config.api.query.get(query._id!, { status: 404 })
|
||||
|
||||
const queries = await config.api.query.fetch()
|
||||
expect(queries).not.toContainEqual(query)
|
||||
|
||||
expect(events.query.deleted).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
|
||||
})
|
||||
})
|
||||
|
||||
describe("read", () => {
|
||||
it("should be able to list queries", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
})
|
||||
|
||||
const queries = await config.api.query.fetch()
|
||||
expect(queries).toContainEqual(query)
|
||||
})
|
||||
|
||||
it("should strip sensitive fields for prod apps", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT * FROM test_table",
|
||||
},
|
||||
})
|
||||
|
||||
await config.publish()
|
||||
const prodQuery = await config.api.query.getProd(query._id!)
|
||||
|
||||
expect(prodQuery._id).toEqual(query._id)
|
||||
expect(prodQuery.fields).toBeUndefined()
|
||||
expect(prodQuery.parameters).toBeUndefined()
|
||||
expect(prodQuery.schema).toBeDefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("preview", () => {
|
||||
it("should be able to preview a query", async () => {
|
||||
const request: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
queryVerb: "read",
|
||||
fields: {
|
||||
sql: `SELECT * FROM test_table WHERE id = 1`,
|
||||
},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const response = await config.api.query.preview(request)
|
||||
expect(response.schema).toEqual({
|
||||
birthday: {
|
||||
name: "birthday",
|
||||
type: "string",
|
||||
},
|
||||
id: {
|
||||
name: "id",
|
||||
type: "number",
|
||||
},
|
||||
name: {
|
||||
name: "name",
|
||||
type: "string",
|
||||
},
|
||||
number: {
|
||||
name: "number",
|
||||
type: "string",
|
||||
},
|
||||
})
|
||||
expect(response.rows).toEqual([
|
||||
{
|
||||
birthday: null,
|
||||
id: 1,
|
||||
name: "one",
|
||||
number: null,
|
||||
},
|
||||
])
|
||||
expect(events.query.previewed).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it("should work with static variables", async () => {
|
||||
await config.api.datasource.update({
|
||||
...datasource,
|
||||
config: {
|
||||
...datasource.config,
|
||||
staticVariables: {
|
||||
foo: "bar",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const request: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
queryVerb: "read",
|
||||
fields: {
|
||||
sql: `SELECT '{{ foo }}' as foo`,
|
||||
},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
|
||||
const response = await config.api.query.preview(request)
|
||||
|
||||
expect(response.schema).toEqual({
|
||||
foo: {
|
||||
name: "foo",
|
||||
type: "string",
|
||||
},
|
||||
})
|
||||
|
||||
expect(response.rows).toEqual([
|
||||
{
|
||||
foo: "bar",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should work with dynamic variables", async () => {
|
||||
const basedOnQuery = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT name FROM test_table WHERE id = 1",
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.datasource.update({
|
||||
...datasource,
|
||||
config: {
|
||||
...datasource.config,
|
||||
dynamicVariables: [
|
||||
{
|
||||
queryId: basedOnQuery._id!,
|
||||
name: "foo",
|
||||
value: "{{ data[0].name }}",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
queryVerb: "read",
|
||||
fields: {
|
||||
sql: `SELECT '{{ foo }}' as foo`,
|
||||
},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview.schema).toEqual({
|
||||
foo: {
|
||||
name: "foo",
|
||||
type: "string",
|
||||
},
|
||||
})
|
||||
|
||||
expect(preview.rows).toEqual([
|
||||
{
|
||||
foo: "one",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should handle the dynamic base query being deleted", async () => {
|
||||
const basedOnQuery = await createQuery({
|
||||
fields: {
|
||||
sql: "SELECT name FROM test_table WHERE id = 1",
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.datasource.update({
|
||||
...datasource,
|
||||
config: {
|
||||
...datasource.config,
|
||||
dynamicVariables: [
|
||||
{
|
||||
queryId: basedOnQuery._id!,
|
||||
name: "foo",
|
||||
value: "{{ data[0].name }}",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.query.delete(basedOnQuery)
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
queryVerb: "read",
|
||||
fields: {
|
||||
sql: `SELECT '{{ foo }}' as foo`,
|
||||
},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview.schema).toEqual({
|
||||
foo: {
|
||||
name: "foo",
|
||||
type: "string",
|
||||
},
|
||||
})
|
||||
|
||||
expect(preview.rows).toEqual([
|
||||
{
|
||||
foo: datasource.source === SourceName.SQL_SERVER ? "" : null,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("query verbs", () => {
|
||||
describe("create", () => {
|
||||
it("should be able to insert with bindings", async () => {
|
||||
const query = await createQuery({
|
||||
|
@ -108,12 +429,43 @@ describe.each(
|
|||
])
|
||||
|
||||
const rows = await rawQuery(
|
||||
rawDatasource,
|
||||
datasource,
|
||||
"SELECT * FROM test_table WHERE name = 'baz'"
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
})
|
||||
|
||||
it("should not allow handlebars as parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
|
||||
},
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "bar",
|
||||
},
|
||||
],
|
||||
queryVerb: "create",
|
||||
})
|
||||
|
||||
await config.api.query.execute(
|
||||
query._id!,
|
||||
{
|
||||
parameters: {
|
||||
foo: "{{ 'test' }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
body: {
|
||||
message:
|
||||
"Parameter 'foo' input contains a handlebars binding - this is not allowed.",
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
|
||||
"should coerce %s into a date",
|
||||
async datetimeStr => {
|
||||
|
@ -138,7 +490,7 @@ describe.each(
|
|||
expect(result.data).toEqual([{ created: true }])
|
||||
|
||||
const rows = await rawQuery(
|
||||
rawDatasource,
|
||||
datasource,
|
||||
`SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
|
@ -170,7 +522,7 @@ describe.each(
|
|||
expect(result.data).toEqual([{ created: true }])
|
||||
|
||||
const rows = await rawQuery(
|
||||
rawDatasource,
|
||||
datasource,
|
||||
`SELECT * FROM test_table WHERE name = '${notDateStr}'`
|
||||
)
|
||||
expect(rows).toHaveLength(1)
|
||||
|
@ -308,7 +660,7 @@ describe.each(
|
|||
])
|
||||
|
||||
const rows = await rawQuery(
|
||||
rawDatasource,
|
||||
datasource,
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toEqual([
|
||||
|
@ -379,12 +731,55 @@ describe.each(
|
|||
])
|
||||
|
||||
const rows = await rawQuery(
|
||||
rawDatasource,
|
||||
datasource,
|
||||
"SELECT * FROM test_table WHERE id = 1"
|
||||
)
|
||||
expect(rows).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("query through datasource", () => {
|
||||
it("should be able to query a pg datasource", async () => {
|
||||
const res = await config.api.datasource.query({
|
||||
endpoint: {
|
||||
datasourceId: datasource._id!,
|
||||
operation: Operation.READ,
|
||||
entityId: "test_table",
|
||||
},
|
||||
resource: {
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
filters: {
|
||||
string: {
|
||||
name: "two",
|
||||
},
|
||||
},
|
||||
})
|
||||
expect(res).toHaveLength(1)
|
||||
expect(res[0]).toEqual({
|
||||
id: 2,
|
||||
name: "two",
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to execute an update that updates no rows", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
updated: true,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
// this parameter really only impacts SQL queries
|
||||
describe("confirm nullDefaultSupport", () => {
|
||||
|
@ -418,7 +813,7 @@ describe.each(
|
|||
} catch (err: any) {
|
||||
error = err.message
|
||||
}
|
||||
if (dbName === DatabaseName.SQL_SERVER) {
|
||||
if (dbName === "mssql") {
|
||||
expect(error).toBeUndefined()
|
||||
} else {
|
||||
expect(error).toBeDefined()
|
||||
|
|
|
@ -88,12 +88,155 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await withCollection(async collection => {
|
||||
await collection.drop()
|
||||
await withCollection(collection => collection.drop())
|
||||
})
|
||||
|
||||
describe("preview", () => {
|
||||
it("should generate a nested schema with an empty array", async () => {
|
||||
const name = generator.guid()
|
||||
await withCollection(
|
||||
async collection => await collection.insertOne({ name, nested: [] })
|
||||
)
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {},
|
||||
rows: [{ _id: expect.any(String), name, nested: [] }],
|
||||
schema: {
|
||||
_id: {
|
||||
type: "string",
|
||||
name: "_id",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
nested: {
|
||||
type: "array",
|
||||
name: "nested",
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a count query", async () => {
|
||||
it("should generate a nested schema based on all of the nested items", async () => {
|
||||
const name = generator.guid()
|
||||
const item = {
|
||||
name,
|
||||
contacts: [
|
||||
{
|
||||
address: "123 Lane",
|
||||
},
|
||||
{
|
||||
address: "456 Drive",
|
||||
},
|
||||
{
|
||||
postcode: "BT1 12N",
|
||||
lat: 54.59,
|
||||
long: -5.92,
|
||||
},
|
||||
{
|
||||
city: "Belfast",
|
||||
},
|
||||
{
|
||||
address: "789 Avenue",
|
||||
phoneNumber: "0800-999-5555",
|
||||
},
|
||||
{
|
||||
name: "Name",
|
||||
isActive: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await withCollection(collection => collection.insertOne(item))
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {
|
||||
contacts: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "address",
|
||||
},
|
||||
postcode: {
|
||||
type: "string",
|
||||
name: "postcode",
|
||||
},
|
||||
lat: {
|
||||
type: "number",
|
||||
name: "lat",
|
||||
},
|
||||
long: {
|
||||
type: "number",
|
||||
name: "long",
|
||||
},
|
||||
city: {
|
||||
type: "string",
|
||||
name: "city",
|
||||
},
|
||||
phoneNumber: {
|
||||
type: "string",
|
||||
name: "phoneNumber",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
isActive: {
|
||||
type: "boolean",
|
||||
name: "isActive",
|
||||
},
|
||||
},
|
||||
},
|
||||
rows: [{ ...item, _id: expect.any(String) }],
|
||||
schema: {
|
||||
_id: { type: "string", name: "_id" },
|
||||
name: { type: "string", name: "name" },
|
||||
contacts: { type: "json", name: "contacts", subtype: "array" },
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("execute", () => {
|
||||
it("a count query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
|
@ -108,7 +251,45 @@ describe("/queries", () => {
|
|||
expect(result.data).toEqual([{ value: 5 }])
|
||||
})
|
||||
|
||||
it("should execute a count query with a transformer", async () => {
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c =>
|
||||
c.insertOne({ name: "one" })
|
||||
)
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("a count query with a transformer", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
|
@ -124,7 +305,7 @@ describe("/queries", () => {
|
|||
expect(result.data).toEqual([{ value: 6 }])
|
||||
})
|
||||
|
||||
it("should execute a find query", async () => {
|
||||
it("a find query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
|
@ -145,7 +326,7 @@ describe("/queries", () => {
|
|||
])
|
||||
})
|
||||
|
||||
it("should execute a findOne query", async () => {
|
||||
it("a findOne query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
|
@ -160,7 +341,7 @@ describe("/queries", () => {
|
|||
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
|
||||
})
|
||||
|
||||
it("should execute a findOneAndUpdate query", async () => {
|
||||
it("a findOneAndUpdate query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
|
@ -194,7 +375,7 @@ describe("/queries", () => {
|
|||
})
|
||||
})
|
||||
|
||||
it("should execute a distinct query", async () => {
|
||||
it("a distinct query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: "name",
|
||||
|
@ -209,7 +390,7 @@ describe("/queries", () => {
|
|||
expect(values).toEqual(["five", "four", "one", "three", "two"])
|
||||
})
|
||||
|
||||
it("should execute a create query with parameters", async () => {
|
||||
it("a create query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { foo: "{{ foo }}" },
|
||||
|
@ -246,7 +427,7 @@ describe("/queries", () => {
|
|||
})
|
||||
})
|
||||
|
||||
it("should execute a delete query with parameters", async () => {
|
||||
it("a delete query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { name: { $eq: "{{ name }}" } },
|
||||
|
@ -280,7 +461,7 @@ describe("/queries", () => {
|
|||
})
|
||||
})
|
||||
|
||||
it("should execute an update query with parameters", async () => {
|
||||
it("an update query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
|
@ -330,42 +511,6 @@ describe("/queries", () => {
|
|||
})
|
||||
})
|
||||
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c => c.insertOne({ name: "one" }))
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete all records", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
|
@ -429,6 +574,7 @@ describe("/queries", () => {
|
|||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should throw an error if the incorrect actionType is specified", async () => {
|
||||
const verbs = ["read", "create", "update", "delete"]
|
||||
|
|
|
@@ -0,0 +1,47 @@
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Datasource, Query, SourceName } from "@budibase/types"

describe("query permissions", () => {
let config: TestConfiguration
let datasource: Datasource
let query: Query

beforeAll(async () => {
config = setup.getConfig()
await config.init()
datasource = await config.api.datasource.create({
name: "test datasource",
type: "test",
source: SourceName.REST,
config: {},
})
query = await config.api.query.save({
name: "test query",
datasourceId: datasource._id!,
parameters: [],
fields: {},
transformer: "",
schema: {},
readable: true,
queryVerb: "read",
})
})

it("delete should require builder", async () => {
await checkBuilderEndpoint({
config,
method: "DELETE",
url: `/api/queries/${query._id}/${query._rev}`,
})
})

it("preview should require builder", async () => {
await checkBuilderEndpoint({
config,
method: "POST",
url: `/api/queries/preview`,
})
})
})
@ -1,774 +0,0 @@
|
|||
import tk from "timekeeper"
|
||||
|
||||
const pg = require("pg")
|
||||
|
||||
// Mock out postgres for this
|
||||
jest.mock("pg")
|
||||
jest.mock("node-fetch")
|
||||
|
||||
// Mock isProdAppID to we can later mock the implementation and pretend we are
|
||||
// using prod app IDs
|
||||
jest.mock("@budibase/backend-core", () => {
|
||||
const core = jest.requireActual("@budibase/backend-core")
|
||||
return {
|
||||
...core,
|
||||
db: {
|
||||
...core.db,
|
||||
isProdAppID: jest.fn(),
|
||||
},
|
||||
}
|
||||
})
|
||||
import * as setup from "../utilities"
|
||||
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
|
||||
import { checkCacheForDynamicVariable } from "../../../../threads/utils"
|
||||
|
||||
const { basicQuery, basicDatasource } = setup.structures
|
||||
import { events, db as dbCore } from "@budibase/backend-core"
|
||||
import {
|
||||
Datasource,
|
||||
Query,
|
||||
SourceName,
|
||||
QueryPreview,
|
||||
QueryParameter,
|
||||
} from "@budibase/types"
|
||||
|
||||
tk.freeze(Date.now())
|
||||
|
||||
const mockIsProdAppID = dbCore.isProdAppID as jest.MockedFunction<
|
||||
typeof dbCore.isProdAppID
|
||||
>
|
||||
|
||||
describe("/queries", () => {
|
||||
let request = setup.getRequest()
|
||||
let config = setup.getConfig()
|
||||
let datasource: Datasource & Required<Pick<Datasource, "_id">>, query: Query
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
const setupTest = async () => {
|
||||
await config.init()
|
||||
datasource = await config.createDatasource()
|
||||
query = await config.createQuery()
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
const createQuery = async (query: Query) => {
|
||||
return request
|
||||
.post(`/api/queries`)
|
||||
.send(query)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
}
|
||||
|
||||
describe("create", () => {
|
||||
it("should create a new query", async () => {
|
||||
const { _id } = await config.createDatasource()
|
||||
const query = basicQuery(_id)
|
||||
jest.clearAllMocks()
|
||||
const res = await createQuery(query)
|
||||
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
_rev: res.body._rev,
|
||||
_id: res.body._id,
|
||||
...query,
|
||||
nullDefaultSupport: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
})
|
||||
expect(events.query.created).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.updated).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("update", () => {
|
||||
it("should update query", async () => {
|
||||
const { _id } = await config.createDatasource()
|
||||
const query = basicQuery(_id)
|
||||
const res = await createQuery(query)
|
||||
jest.clearAllMocks()
|
||||
query._id = res.body._id
|
||||
query._rev = res.body._rev
|
||||
await createQuery(query)
|
||||
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
_rev: res.body._rev,
|
||||
_id: res.body._id,
|
||||
...query,
|
||||
nullDefaultSupport: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
})
|
||||
expect(events.query.created).not.toHaveBeenCalled()
|
||||
expect(events.query.updated).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetch", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("returns all the queries from the server", async () => {
|
||||
const res = await request
|
||||
.get(`/api/queries`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
|
||||
const queries = res.body
|
||||
expect(queries).toEqual([
|
||||
{
|
||||
_rev: query._rev,
|
||||
_id: query._id,
|
||||
createdAt: new Date().toISOString(),
|
||||
...basicQuery(datasource._id),
|
||||
nullDefaultSupport: true,
|
||||
updatedAt: new Date().toISOString(),
|
||||
readable: true,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "GET",
|
||||
url: `/api/datasources`,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("find", () => {
|
||||
it("should find a query in builder", async () => {
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body._id).toEqual(query._id)
|
||||
})
|
||||
|
||||
it("should find a query in cloud", async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(await config.defaultHeaders())
|
||||
.expect(200)
|
||||
.expect("Content-Type", /json/)
|
||||
expect(res.body.fields).toBeDefined()
|
||||
expect(res.body.parameters).toBeDefined()
|
||||
expect(res.body.schema).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
it("should remove sensitive info for prod apps", async () => {
|
||||
// Mock isProdAppID to pretend we are using a prod app
|
||||
mockIsProdAppID.mockClear()
|
||||
mockIsProdAppID.mockImplementation(() => true)
|
||||
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(await config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body._id).toEqual(query._id)
|
||||
expect(res.body.fields).toBeUndefined()
|
||||
expect(res.body.parameters).toBeUndefined()
|
||||
expect(res.body.schema).toBeDefined()
|
||||
|
||||
// Reset isProdAppID mock
|
||||
expect(dbCore.isProdAppID).toHaveBeenCalledTimes(1)
|
||||
mockIsProdAppID.mockImplementation(() => false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("destroy", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("deletes a query and returns a success message", async () => {
|
||||
await request
|
||||
.delete(`/api/queries/${query._id}/${query._rev}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
const res = await request
|
||||
.get(`/api/queries`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
|
||||
expect(res.body).toEqual([])
|
||||
expect(events.query.deleted).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
const query = await config.createQuery()
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "DELETE",
|
||||
url: `/api/queries/${query._id}/${query._rev}`,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("preview", () => {
|
||||
it("should be able to preview the query", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
queryVerb: "read",
|
||||
fields: {},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
// these responses come from the mock
|
||||
expect(responseBody.schema).toEqual({
|
||||
a: { type: "string", name: "a" },
|
||||
b: { type: "number", name: "b" },
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
expect(events.query.previewed).toHaveBeenCalledTimes(1)
|
||||
delete datasource.config
|
||||
expect(events.query.previewed).toHaveBeenCalledWith(datasource, {
|
||||
...queryPreview,
|
||||
nullDefaultSupport: true,
|
||||
})
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "POST",
|
||||
url: `/api/queries/preview`,
|
||||
})
|
||||
})
|
||||
|
||||
it("should not error when trying to generate a nested schema for an empty array", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
parameters: [],
|
||||
fields: {},
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const rows = [
|
||||
{
|
||||
contacts: [],
|
||||
},
|
||||
]
|
||||
pg.queryMock.mockImplementation(() => ({
|
||||
rows,
|
||||
}))
|
||||
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
expect(responseBody).toEqual({
|
||||
nestedSchemaFields: {},
|
||||
rows,
|
||||
schema: {
|
||||
contacts: { type: "array", name: "contacts" },
|
||||
},
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
delete datasource.config
|
||||
})
|
||||
|
||||
it("should generate a nested schema based on all the nested items", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
parameters: [],
|
||||
fields: {},
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const rows = [
|
||||
{
|
||||
contacts: [
|
||||
{
|
||||
address: "123 Lane",
|
||||
},
|
||||
{
|
||||
address: "456 Drive",
|
||||
},
|
||||
{
|
||||
postcode: "BT1 12N",
|
||||
lat: 54.59,
|
||||
long: -5.92,
|
||||
},
|
||||
{
|
||||
city: "Belfast",
|
||||
},
|
||||
{
|
||||
address: "789 Avenue",
|
||||
phoneNumber: "0800-999-5555",
|
||||
},
|
||||
{
|
||||
name: "Name",
|
||||
isActive: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
pg.queryMock.mockImplementation(() => ({
|
||||
rows,
|
||||
}))
|
||||
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
expect(responseBody).toEqual({
|
||||
nestedSchemaFields: {
|
||||
contacts: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "address",
|
||||
},
|
||||
postcode: {
|
||||
type: "string",
|
||||
name: "postcode",
|
||||
},
|
||||
lat: {
|
||||
type: "number",
|
||||
name: "lat",
|
||||
},
|
||||
long: {
|
||||
type: "number",
|
||||
name: "long",
|
||||
},
|
||||
city: {
|
||||
type: "string",
|
||||
name: "city",
|
||||
},
|
||||
phoneNumber: {
|
||||
type: "string",
|
||||
name: "phoneNumber",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
isActive: {
|
||||
type: "boolean",
|
||||
name: "isActive",
|
||||
},
|
||||
},
|
||||
},
|
||||
rows,
|
||||
schema: {
|
||||
contacts: { type: "json", name: "contacts", subtype: "array" },
|
||||
},
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
delete datasource.config
|
||||
})
|
||||
})
|
||||
|
||||
describe("execute", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("should be able to execute the query", async () => {
|
||||
const res = await request
|
||||
.post(`/api/queries/${query._id}`)
|
||||
.send({
|
||||
parameters: {},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body.length).toEqual(1)
|
||||
})
|
||||
|
||||
it("should fail with invalid integration type", async () => {
|
||||
const datasource: Datasource = {
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION" as SourceName,
|
||||
}
|
||||
await config.api.datasource.create(datasource, {
|
||||
status: 500,
|
||||
body: {
|
||||
message: "No datasource implementation found.",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("shouldn't allow handlebars to be passed as parameters", async () => {
|
||||
const res = await request
|
||||
.post(`/api/queries/${query._id}`)
|
||||
.send({
|
||||
parameters: {
|
||||
a: "{{ 'test' }}",
|
||||
},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect(400)
|
||||
expect(res.body.message).toEqual(
|
||||
"Parameter 'a' input contains a handlebars binding - this is not allowed."
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("variables", () => {
|
||||
async function preview(datasource: Datasource, fields: any) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: [],
|
||||
fields,
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
it("should work with static variables", async () => {
|
||||
const datasource = await config.restDatasource({
|
||||
staticVariables: {
|
||||
variable: "google",
|
||||
variable2: "1",
|
||||
},
|
||||
})
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.{{ variable }}.com",
|
||||
queryString: "test={{ variable2 }}",
|
||||
})
|
||||
// these responses come from the mock
|
||||
expect(responseBody.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
|
||||
})
|
||||
|
||||
it("should work with dynamic variables", async () => {
|
||||
const { datasource } = await config.dynamicVariableDatasource()
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(responseBody.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(responseBody.rows[0].url).toContain("doctype%20html")
|
||||
})
|
||||
|
||||
it("check that it automatically retries on fail with cached dynamics", async () => {
|
||||
const { datasource, query: base } =
|
||||
await config.dynamicVariableDatasource()
|
||||
// preview once to cache
|
||||
await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
// check its in cache
|
||||
const contents = await checkCacheForDynamicVariable(
|
||||
base._id!,
|
||||
"variable3"
|
||||
)
|
||||
expect(contents.rows.length).toEqual(1)
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.failonce.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(responseBody.schema).toEqual({
|
||||
fails: { type: "number", name: "fails" },
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
})
|
||||
expect(responseBody.rows[0].fails).toEqual(1)
|
||||
})
|
||||
|
||||
it("deletes variables when linked query is deleted", async () => {
|
||||
const { datasource, query: base } =
|
||||
await config.dynamicVariableDatasource()
|
||||
// preview once to cache
|
||||
await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
// check its in cache
|
||||
let contents = await checkCacheForDynamicVariable(base._id!, "variable3")
|
||||
expect(contents.rows.length).toEqual(1)
|
||||
|
||||
// delete the query
|
||||
await request
|
||||
.delete(`/api/queries/${base._id}/${base._rev}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
// check variables no longer in cache
|
||||
contents = await checkCacheForDynamicVariable(base._id!, "variable3")
|
||||
expect(contents).toBe(null)
|
||||
})
|
||||
})
|
||||
|
||||
describe("Current User Request Mapping", () => {
|
||||
async function previewGet(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: QueryParameter[]
|
||||
) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: params,
|
||||
fields,
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
async function previewPost(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: QueryParameter[]
|
||||
) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: params,
|
||||
fields,
|
||||
queryVerb: "create",
|
||||
name: datasource.name!,
|
||||
transformer: null,
|
||||
schema: {},
|
||||
readable: false,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
it("should parse global and query level header mappings", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
|
||||
const datasource = await config.restDatasource({
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
})
|
||||
const responseBody = await previewGet(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "email={{[user].[email]}}",
|
||||
headers: {
|
||||
queryHdr: "{{[user].[firstName]}}",
|
||||
secondHdr: "1234",
|
||||
},
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
expect(parsedRequest.opts.headers).toEqual({
|
||||
test: "headerVal",
|
||||
emailHdr: userDetails.email,
|
||||
queryHdr: userDetails.firstName,
|
||||
secondHdr: "1234",
|
||||
})
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?email=" + userDetails.email.replace("@", "%40")
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user to query parameters", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewGet(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString:
|
||||
"test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
|
||||
},
|
||||
[
|
||||
{ name: "myEmail", default: "{{[user].[email]}}" },
|
||||
{ name: "myName", default: "{{[user].[firstName]}}" },
|
||||
{ name: "testParam", default: "1234" },
|
||||
]
|
||||
)
|
||||
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?test=" +
|
||||
userDetails.email.replace("@", "%40") +
|
||||
"&testName=" +
|
||||
userDetails.firstName +
|
||||
"&testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - plain text", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
"This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
|
||||
bodyType: "text",
|
||||
},
|
||||
[{ name: "testParam", default: "1234" }]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
expect(parsedRequest.opts.body).toEqual(
|
||||
`This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
|
||||
)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - json", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "json",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
|
||||
expect(parsedRequest.opts.body).toEqual(test)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - xml", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
"<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
|
||||
"<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
|
||||
bodyType: "xml",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userId", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>${userDetails.firstName}</ref> <somestring>testing</somestring> </note>`
|
||||
|
||||
expect(parsedRequest.opts.body).toEqual(test)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - form-data", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "form",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
|
||||
const emailData = parsedRequest.opts.body._streams[1]
|
||||
expect(emailData).toEqual(userDetails.email)
|
||||
|
||||
const queryCodeData = parsedRequest.opts.body._streams[4]
|
||||
expect(queryCodeData).toEqual("1234")
|
||||
|
||||
const userRef = parsedRequest.opts.body._streams[7]
|
||||
expect(userRef).toEqual(userDetails.firstName)
|
||||
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - encoded", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "encoded",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
|
||||
expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
|
||||
expect(parsedRequest.opts.body.queryCode).toEqual("1234")
|
||||
expect(parsedRequest.opts.body.userRef).toEqual(userDetails.firstName)
|
||||
})
|
||||
})
|
||||
})
@@ -0,0 +1,406 @@
import * as setup from "../utilities"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Datasource, SourceName } from "@budibase/types"
import { getCachedVariable } from "../../../../threads/utils"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"

jest.unmock("node-fetch")

describe("rest", () => {
  let config: TestConfiguration
  let datasource: Datasource

  async function createQuery(fields: any) {
    return await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields,
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  }

  beforeAll(async () => {
    config = setup.getConfig()
    await config.init()
    datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {},
    })
  })

  afterEach(() => {
    nock.cleanAll()
  })

  it("should automatically retry on fail with cached dynamics", async () => {
    const basedOnQuery = await createQuery({
      path: "one.example.com",
    })

    let cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached).toBeNull()

    await config.api.datasource.update({
      ...datasource,
      config: {
        ...datasource.config,
        dynamicVariables: [
          {
            queryId: basedOnQuery._id!,
            name: "foo",
            value: "{{ data[0].name }}",
          },
        ],
      },
    })

    cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached).toBeNull()

    nock("http://one.example.com")
      .get("/")
      .reply(200, [{ name: "one" }])
    nock("http://two.example.com").get("/?test=one").reply(500)
    nock("http://two.example.com")
      .get("/?test=one")
      .reply(200, [{ name: "two" }])

    const res = await config.api.query.preview({
      datasourceId: datasource._id!,
      name: "test query",
      parameters: [],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "two.example.com",
        queryString: "test={{ foo }}",
      },
    })
    expect(res.schema).toEqual({
      name: { type: "string", name: "name" },
    })

    cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached.rows.length).toEqual(1)
    expect(cached.rows[0].name).toEqual("one")
  })

  it("should parse global and query level header mappings", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com", {
      reqheaders: {
        test: "headerVal",
        emailhdr: user.email,
        queryhdr: user.firstName!,
        secondhdr: "1234",
      },
    })
      .get("/?email=" + user.email.replace("@", "%40"))
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        queryString: "email={{[user].[email]}}",
        headers: {
          queryHdr: "{{[user].[firstName]}}",
          secondHdr: "1234",
        },
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to query params", async () => {
    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .get(
        "/?test=" +
          user.email.replace("@", "%40") +
          "&testName=" +
          user.firstName +
          "&testParam=1234"
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "myEmail", default: "{{[user].[email]}}" },
        { name: "myName", default: "{{[user].[firstName]}}" },
        { name: "testParam", default: "1234" },
      ],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        queryString:
          "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - plain text", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post(
        "/?testParam=1234",
        "This is plain text and this is my email: " +
          user.email +
          ". This is a test param: 1234"
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [{ name: "testParam", default: "1234" }],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "text",
        queryString: "&testParam={{testParam}}",
        requestBody:
          "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - json", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", {
        email: user.email,
        queryCode: 1234,
        userRef: user.firstName,
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "json",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - xml", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post(
        "/?testParam=1234",
        `<note> <email>${user.email}</email> <code>1234</code> <ref>${user.firstName}</ref> <somestring>testing</somestring> </note>`
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userId", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "xml",
        queryString: "&testParam={{testParam}}",
        requestBody:
          "<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
          "<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - form-data", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", body => {
        return (
          body.includes('name="email"\r\n\r\n' + user.email + "\r\n") &&
          body.includes('name="queryCode"\r\n\r\n1234\r\n') &&
          body.includes('name="userRef"\r\n\r\n' + user.firstName + "\r\n")
        )
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "form",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - encoded", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", {
        email: user.email,
        queryCode: 1234,
        userRef: user.firstName,
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "encoded",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })
})
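Every preview call in the suite above repeats the same payload shape (datasourceId, parameters, queryVerb, transformer, schema, readable, fields). A minimal sketch of a test-only helper that could factor that out — buildPreviewRequest is a hypothetical name, not something added by this change:

// Hypothetical test helper: fills in the boilerplate fields used by every
// config.api.query.preview call in this suite; callers pass only what varies.
function buildPreviewRequest(
  datasourceId: string,
  fields: Record<string, any>,
  opts: {
    parameters?: { name: string; default: string }[]
    queryVerb?: string
  } = {}
) {
  return {
    datasourceId,
    name: generator.guid(),
    parameters: opts.parameters ?? [],
    queryVerb: opts.queryVerb ?? "read",
    transformer: "",
    schema: {},
    readable: true,
    fields,
  }
}
// e.g. await config.api.query.preview(
//   buildPreviewRequest(datasource._id!, { path: "www.example.com" })
// )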

@@ -30,7 +30,6 @@ const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)

jest.unmock("mssql")
jest.unmock("pg")

describe.each([
  ["internal", undefined],

@@ -1296,7 +1295,7 @@ describe.each([

  describe("Formula JS protection", () => {
    it("should time out JS execution if a single cell takes too long", async () => {
      await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => {
      await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
        const js = Buffer.from(
          `
          let i = 0;

@@ -1336,8 +1335,8 @@ describe.each([
    it("should time out JS execution if multiple cells take too long", async () => {
      await config.withEnv(
        {
          JS_PER_INVOCATION_TIMEOUT_MS: 20,
          JS_PER_REQUEST_TIMEOUT_MS: 40,
          JS_PER_INVOCATION_TIMEOUT_MS: 40,
          JS_PER_REQUEST_TIMEOUT_MS: 80,
        },
        async () => {
          const js = Buffer.from(

@@ -25,7 +25,6 @@ import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core"

jest.unmock("mssql")
jest.unmock("pg")

describe.each([
  ["internal", undefined],

@@ -1,39 +0,0 @@
const setup = require("./utilities")

describe("test the execute query action", () => {
  let query
  let config = setup.getConfig()

  beforeAll(async () => {
    await config.init()

    await config.createDatasource()
    query = await config.createQuery()
  })

  afterAll(setup.afterAll)

  it("should be able to execute a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: query._id },
    })
    expect(res.response).toEqual([{ a: "string", b: 1 }])
    expect(res.success).toEqual(true)
  })

  it("should handle a null query value", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: null,
    })
    expect(res.response.message).toEqual("Invalid inputs")
    expect(res.success).toEqual(false)
  })

  it("should handle an error executing a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: "wrong_id" },
    })
    expect(res.response).toEqual("Error: missing")
    expect(res.success).toEqual(false)
  })
})

@@ -0,0 +1,94 @@
import { Datasource, Query, SourceName } from "@budibase/types"
import * as setup from "./utilities"
import { DatabaseName, getDatasource } from "../../integrations/tests/utils"
import knex, { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

function getKnexClientName(source: SourceName) {
  switch (source) {
    case SourceName.MYSQL:
      return "mysql2"
    case SourceName.SQL_SERVER:
      return "mssql"
    case SourceName.POSTGRES:
      return "pg"
  }
  throw new Error(`Unsupported source: ${source}`)
}
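getKnexClientName only maps a Budibase SourceName onto the string knex expects for its client adapter; the suite below then hands that, together with the integration's own connection config, straight to knex. A small usage sketch — the connection values here are placeholders, not real settings:

// Illustrative only: connection details are placeholders.
const exampleClient: Knex = knex({
  client: getKnexClientName(SourceName.POSTGRES), // "pg"
  connection: { host: "localhost", user: "test", password: "test", database: "test" },
})
// exampleClient(tableName).select("*") then builds the SQL the query under test runs.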

describe.each(
  [
    DatabaseName.POSTGRES,
    DatabaseName.MYSQL,
    DatabaseName.SQL_SERVER,
    DatabaseName.MARIADB,
  ].map(name => [name, getDatasource(name)])
)("execute query action (%s)", (_, dsProvider) => {
  let tableName: string
  let client: Knex
  let datasource: Datasource
  let query: Query
  let config = setup.getConfig()

  beforeAll(async () => {
    await config.init()

    const ds = await dsProvider
    datasource = await config.api.datasource.create(ds)
    client = knex({
      client: getKnexClientName(ds.source),
      connection: ds.config,
    })
  })

  beforeEach(async () => {
    tableName = generator.guid()
    await client.schema.createTable(tableName, table => {
      table.string("a")
      table.integer("b")
    })
    await client(tableName).insert({ a: "string", b: 1 })
    query = await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields: {
        sql: client(tableName).select("*").toSQL().toNative().sql,
      },
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  })

  afterEach(async () => {
    await client.schema.dropTable(tableName)
  })

  afterAll(setup.afterAll)

  it("should be able to execute a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: query._id },
    })
    expect(res.response).toEqual([{ a: "string", b: 1 }])
    expect(res.success).toEqual(true)
  })

  it("should handle a null query value", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: null,
    })
    expect(res.response.message).toEqual("Invalid inputs")
    expect(res.success).toEqual(false)
  })

  it("should handle an error executing a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: "wrong_id" },
    })
    expect(res.response).toEqual("Error: missing")
    expect(res.success).toEqual(false)
  })
})

@@ -27,7 +27,6 @@ fetch.mockSearch()

const config = setup.getConfig()!

jest.unmock("pg")
jest.mock("../websockets")

describe("postgres integrations", () => {

@@ -1,5 +1,3 @@
jest.unmock("pg")

import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"

@@ -1,4 +1,12 @@
import newid from "../../db/newid"
import TestConfig from "../../tests/utilities/TestConfiguration"
import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"
import {
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  TableSourceType,
} from "@budibase/types"
import { FIND_LIMIT } from "../app/rows/attachments"

const attachment = {
  size: 73479,

@@ -8,69 +16,48 @@ const attachment = {
  key: "app_bbb/attachments/a.png",
}

const row = {
  _id: "ro_ta_aaa",
  photo: [attachment],
  otherCol: "string",
}
describe("should be able to re-write attachment URLs", () => {
  const config = new TestConfig()

  const table = {
    _id: "ta_aaa",
  beforeAll(async () => {
    await config.init()
  })

  it("should update URLs on a number of rows over the limit", async () => {
    const table = await config.api.table.save({
      name: "photos",
      type: "table",
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      schema: {
        photo: {
          type: "attachment",
          type: FieldType.ATTACHMENT,
          name: "photo",
        },
        otherCol: {
          type: "string",
          type: FieldType.STRING,
          name: "otherCol",
        },
      },
    }

jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    db: {
      ...core.db,
      directCouchFind: jest.fn(),
    },
  }
})

import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"

describe("should be able to re-write attachment URLs", () => {
  it("should update URLs on a number of rows over the limit", async () => {
    const db = dbCore.getDB("app_aaa")
    await db.put(table)
    const limit = 30
    let rows = []
    for (let i = 0; i < limit; i++) {
      const rowToWrite = {
        ...row,
        _id: `${row._id}_${newid()}`,
      }
      const { rev } = await db.put(rowToWrite)
      rows.push({
        ...rowToWrite,
        _rev: rev,
    for (let i = 0; i < FIND_LIMIT * 4; i++) {
      await config.api.row.save(table._id!, {
        photo: [attachment],
        otherCol: "string",
      })
    }

    dbCore.directCouchFind
      // @ts-ignore
      .mockReturnValueOnce({ rows: rows.slice(0, 25), bookmark: "aaa" })
      .mockReturnValueOnce({ rows: rows.slice(25, limit), bookmark: "bbb" })
    const db = dbCore.getDB(config.getAppId())
    await sdk.backups.updateAttachmentColumns(db.name, db)
    const finalRows = await sdk.rows.getAllInternalRows(db.name)
    for (let rowToCheck of finalRows) {
      expect(rowToCheck.otherCol).toBe(row.otherCol)
      expect(rowToCheck.photo[0].url).toBe("")
      expect(rowToCheck.photo[0].key).toBe(`${db.name}/attachments/a.png`)

    const rows = (await sdk.rows.getAllInternalRows(db.name)).filter(
      row => row.tableId === table._id
    )
    for (const row of rows) {
      expect(row.otherCol).toBe("string")
      expect(row.photo[0].url).toBe("")
      expect(row.photo[0].key).toBe(`${db.name}/attachments/a.png`)
    }
  })
})
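The assertions above pin down the observable behaviour of sdk.backups.updateAttachmentColumns: after restoring into another app, each attachment's url is cleared and its key is re-prefixed with the destination database's name. A rough sketch of that per-row rewrite, under the row shape the test uses — this is illustrative only, not the actual implementation, which pages through rows with directCouchFind in FIND_LIMIT batches:

// Illustrative sketch of the rewrite the test asserts on (not the real code).
function rewriteAttachmentColumn(row: any, targetDbName: string) {
  row.photo = (row.photo || []).map((att: any) => ({
    ...att,
    url: "", // signed URLs are not portable across apps, so they are dropped
    key: `${targetDbName}/attachments/${att.key.split("/").pop()}`,
  }))
  return row
}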

@@ -35,11 +35,20 @@ describe("syncGlobalUsers", () => {
      builder: { global: true },
    })
    await config.doInContext(config.appId, async () => {
      expect(await rawUserMetadata()).toHaveLength(1)
      let metadata = await rawUserMetadata()
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user1._id!),
        })
      )
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user2._id!),
        })
      )
      await syncGlobalUsers()

      const metadata = await rawUserMetadata()
      expect(metadata).toHaveLength(3)
      metadata = await rawUserMetadata()
      expect(metadata).toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user1._id!),

@@ -62,7 +71,6 @@ describe("syncGlobalUsers", () => {
      await syncGlobalUsers()

      const metadata = await rawUserMetadata()
      expect(metadata).toHaveLength(1)
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user._id!),

@@ -4,6 +4,7 @@ import {
  CreateDatasourceResponse,
  UpdateDatasourceResponse,
  UpdateDatasourceRequest,
  QueryJson,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -45,4 +46,24 @@ export class DatasourceAPI extends TestAPI {
      expectations,
    })
  }

  delete = async (datasource: Datasource, expectations?: Expectations) => {
    return await this._delete(
      `/api/datasources/${datasource._id!}/${datasource._rev!}`,
      { expectations }
    )
  }

  get = async (id: string, expectations?: Expectations) => {
    return await this._get<Datasource>(`/api/datasources/${id}`, {
      expectations,
    })
  }

  query = async (query: QueryJson, expectations?: Expectations) => {
    return await this._post<any>(`/api/datasources/query`, {
      body: query,
      expectations,
    })
  }
}
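A quick usage sketch of the helpers added above, as they might appear in a test — `datasource` is assumed to have been created earlier through config.api.datasource.create:

// Assumes `datasource` was created via config.api.datasource.create(...)
const fetched = await config.api.datasource.get(datasource._id!)
await config.api.datasource.delete(fetched)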

@@ -6,10 +6,11 @@ import {
  PreviewQueryResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import { constants } from "@budibase/backend-core"

export class QueryAPI extends TestAPI {
  save = async (body: Query): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body })
  save = async (body: Query, expectations?: Expectations): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body, expectations })
  }

  execute = async (

@@ -26,9 +27,36 @@ export class QueryAPI extends TestAPI {
    )
  }

  previewQuery = async (queryPreview: PreviewQueryRequest) => {
  preview = async (
    queryPreview: PreviewQueryRequest,
    expectations?: Expectations
  ) => {
    return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
      body: queryPreview,
      expectations,
    })
  }

  delete = async (query: Query, expectations?: Expectations) => {
    return await this._delete(`/api/queries/${query._id!}/${query._rev!}`, {
      expectations,
    })
  }

  get = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, { expectations })
  }

  getProd = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, {
      expectations,
      headers: {
        [constants.Header.APP_ID]: this.config.getProdAppId(),
      },
    })
  }

  fetch = async (expectations?: Expectations) => {
    return await this._get<Query[]>(`/api/queries`, { expectations })
  }
}
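getProd differs from get only in that it pins the request to the production app via the APP_ID header. A short usage sketch, assuming the query has been saved and the app published:

// Both calls hit GET /api/queries/:id; the second carries the prod app id header.
const devQuery = await config.api.query.get(query._id!)
const prodQuery = await config.api.query.getProd(query._id!)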

@@ -167,7 +167,7 @@ class QueryRunner {
      this.hasRerun = true
    }

    await threadUtils.invalidateDynamicVariables(this.cachedVariables)
    await threadUtils.invalidateCachedVariable(this.cachedVariables)
    return this.execute()
  }

@@ -254,7 +254,7 @@ class QueryRunner {
    let { parameters } = this
    const queryId = variable.queryId,
      name = variable.name
    let value = await threadUtils.checkCacheForDynamicVariable(queryId, name)
    let value = await threadUtils.getCachedVariable(queryId, name)
    if (!value) {
      value = this.queryResponse[queryId]
        ? this.queryResponse[queryId]

@@ -5,7 +5,7 @@ import { redis, db as dbCore } from "@budibase/backend-core"
import * as jsRunner from "../jsRunner"

const VARIABLE_TTL_SECONDS = 3600
let client: any
let client: redis.Client | null = null

async function getClient() {
  if (!client) {

@@ -36,24 +36,16 @@ export function threadSetup() {
  db.init()
}

export async function checkCacheForDynamicVariable(
  queryId: string,
  variable: string
) {
  const cache = await getClient()
  return cache.get(makeVariableKey(queryId, variable))
export async function getCachedVariable(queryId: string, variable: string) {
  return (await getClient()).get(makeVariableKey(queryId, variable))
}

export async function invalidateDynamicVariables(cachedVars: QueryVariable[]) {
export async function invalidateCachedVariable(vars: QueryVariable[]) {
  const cache = await getClient()
  let promises = []
  for (let variable of cachedVars) {
    promises.push(
      cache.delete(makeVariableKey(variable.queryId, variable.name))
  await Promise.all(
    vars.map(v => cache.delete(makeVariableKey(v.queryId, v.name)))
  )
  }
  await Promise.all(promises)
}

export async function storeDynamicVariable(
  queryId: string,

@@ -93,7 +85,7 @@ export default {
  hasExtraData,
  formatResponse,
  storeDynamicVariable,
  invalidateDynamicVariables,
  checkCacheForDynamicVariable,
  invalidateCachedVariable,
  getCachedVariable,
  threadSetup,
}
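Taken together, the renamed helpers form the cached dynamic-variable lifecycle used by QueryRunner and the REST tests above. A minimal sketch of the expected call pattern — the third storeDynamicVariable argument is assumed from its truncated signature in this hunk, not shown in the diff:

// Sketch of the call pattern, not new API surface.
await threadUtils.storeDynamicVariable(queryId, "foo", value) // value arg assumed
const cached = await threadUtils.getCachedVariable(queryId, "foo") // null until stored
// e.g. before retrying a query that failed with stale variables:
await threadUtils.invalidateCachedVariable([{ queryId, name: "foo" }])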

@@ -16042,7 +16042,7 @@ nice-try@^1.0.4:
  resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
  integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==

nock@^13.5.4:
nock@13.5.4, nock@^13.5.4:
  version "13.5.4"
  resolved "https://registry.yarnpkg.com/nock/-/nock-13.5.4.tgz#8918f0addc70a63736170fef7106a9721e0dc479"
  integrity sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==

@@ -17948,9 +17948,9 @@ postgres-interval@^1.1.0:
    xtend "^4.0.0"

posthog-js@^1.116.6:
  version "1.116.6"
  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.116.6.tgz#9a5c9f49230a76642f4c44d93b96710f886c2880"
  integrity sha512-rvt8HxzJD4c2B/xsUa4jle8ApdqljeBI2Qqjp4XJMohQf18DXRyM6b96H5/UMs8jxYuZG14Er0h/kEIWeU6Fmw==
  version "1.117.0"
  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.117.0.tgz#59c3e520f6269f76ea82dce8760fbc33cdd7f48f"
  integrity sha512-+I8q5G9YG6r6wOLKPT+C+AV7MRhyVFJMTJS7dfwLmmT+mkVxQ5bfC59hBkJUObOR+YRn5jn2JT/sgIslU94EZg==
  dependencies:
    fflate "^0.4.8"
    preact "^10.19.3"