Merge remote-tracking branch 'origin/execute-script-v2' into execute-script-v2-frontend
commit c35adfa109
@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.4.22",
"version": "3.4.23",
"npmClient": "yarn",
"concurrency": 20,
"command": {
@@ -1,22 +1,26 @@
<script>
<script lang="ts" context="module">
type Option = any
</script>

<script lang="ts">
import Picker from "./Picker.svelte"
import { createEventDispatcher } from "svelte"

export let value = []
export let id = null
export let placeholder = null
export let disabled = false
export let options = []
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let readonly = false
export let autocomplete = false
export let sort = false
export let autoWidth = false
export let searchTerm = null
export let customPopoverHeight = undefined
export let open = false
export let loading
export let value: string[] = []
export let id: string | undefined = undefined
export let placeholder: string | null = null
export let disabled: boolean = false
export let options: Option[] = []
export let getOptionLabel = (option: Option, _index?: number) => option
export let getOptionValue = (option: Option, _index?: number) => option
export let readonly: boolean = false
export let autocomplete: boolean = false
export let sort: boolean = false
export let autoWidth: boolean = false
export let searchTerm: string | null = null
export let customPopoverHeight: string | undefined = undefined
export let open: boolean = false
export let loading: boolean
export let onOptionMouseenter = () => {}
export let onOptionMouseleave = () => {}
@@ -27,10 +31,15 @@
$: optionLookupMap = getOptionLookupMap(options)

$: fieldText = getFieldText(arrayValue, optionLookupMap, placeholder)
$: isOptionSelected = optionValue => selectedLookupMap[optionValue] === true
$: isOptionSelected = (optionValue: string) =>
selectedLookupMap[optionValue] === true
$: toggleOption = makeToggleOption(selectedLookupMap, arrayValue)

const getFieldText = (value, map, placeholder) => {
const getFieldText = (
value: string[],
map: Record<string, any> | null,
placeholder: string | null
) => {
if (Array.isArray(value) && value.length > 0) {
if (!map) {
return ""
@@ -42,8 +51,8 @@
}
}

const getSelectedLookupMap = value => {
let map = {}
const getSelectedLookupMap = (value: string[]) => {
const map: Record<string, boolean> = {}
if (Array.isArray(value) && value.length > 0) {
value.forEach(option => {
if (option) {
@@ -54,22 +63,23 @@
return map
}

const getOptionLookupMap = options => {
let map = null
if (options?.length) {
map = {}
options.forEach((option, idx) => {
const optionValue = getOptionValue(option, idx)
if (optionValue != null) {
map[optionValue] = getOptionLabel(option, idx) || ""
}
})
const getOptionLookupMap = (options: Option[]) => {
if (!options?.length) {
return null
}

const map: Record<string, any> = {}
options.forEach((option, idx) => {
const optionValue = getOptionValue(option, idx)
if (optionValue != null) {
map[optionValue] = getOptionLabel(option, idx) || ""
}
})
return map
}

const makeToggleOption = (map, value) => {
return optionValue => {
const makeToggleOption = (map: Record<string, boolean>, value: string[]) => {
return (optionValue: string) => {
if (map[optionValue]) {
const filtered = value.filter(option => option !== optionValue)
dispatch("change", filtered)
@@ -5,11 +5,12 @@
import { memo } from "@budibase/frontend-core"
import Placeholder from "../Placeholder.svelte"
import InnerForm from "./InnerForm.svelte"
import type { FieldApi } from "."

export let label: string | undefined = undefined
export let field: string | undefined = undefined
export let fieldState: any
export let fieldApi: any
export let fieldApi: FieldApi
export let fieldSchema: any
export let defaultValue: string | undefined = undefined
export let type: any
@@ -1,6 +1,6 @@
<script lang="ts">
import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
import { FieldType, InternalTable } from "@budibase/types"
import { BasicOperator, FieldType, InternalTable } from "@budibase/types"
import { fetchData, Utils } from "@budibase/frontend-core"
import { getContext } from "svelte"
import Field from "./Field.svelte"
@@ -9,10 +9,11 @@
RelationshipFieldMetadata,
Row,
} from "@budibase/types"
import type { FieldApi, FieldState } from "."

export let field: string | undefined = undefined
export let label: string | undefined = undefined
export let placeholder: any = undefined
export let placeholder: string | undefined = undefined
export let disabled: boolean = false
export let readonly: boolean = false
export let validation: any
@@ -35,12 +36,13 @@
const { API } = getContext("sdk")

// Field state
let fieldState: any
let fieldApi: any
let fieldState: FieldState<string | string[]> | undefined

let fieldApi: FieldApi
let fieldSchema: RelationshipFieldMetadata | undefined

// Local UI state
let searchTerm: any
let searchTerm: string
let open: boolean = false

// Options state
@@ -106,17 +108,14 @@
filter: SearchFilter[],
linkedTableId?: string
) => {
if (!linkedTableId) {
return undefined
}
const datasource =
datasourceType === "table"
dsType === "table"
? {
type: datasourceType,
tableId: fieldSchema?.tableId!,
type: dsType,
tableId: linkedTableId!,
}
: {
type: datasourceType,
type: dsType,
tableId: InternalTable.USER_METADATA,
}
return fetchData({
@@ -306,14 +305,14 @@
}

// Ensure we match all filters, rather than any
let newFilter: any = filter
let newFilter = filter
if (searchTerm) {
// @ts-expect-error this doesn't fit types, but don't want to change it yet
newFilter = (newFilter || []).filter(x => x.operator !== "allOr")
newFilter.push({
// Use a big numeric prefix to avoid clashing with an existing filter
field: `999:${primaryDisplay}`,
operator: "string",
operator: BasicOperator.STRING,
value: searchTerm,
})
}
@@ -19,3 +19,15 @@ export { default as codescanner } from "./CodeScannerField.svelte"
export { default as signaturesinglefield } from "./SignatureField.svelte"
export { default as bbreferencefield } from "./BBReferenceField.svelte"
export { default as bbreferencesinglefield } from "./BBReferenceSingleField.svelte"

export interface FieldApi {
setValue(value: any): boolean
deregister(): void
}

export interface FieldState<T> {
value: T
fieldId: string
disabled: boolean
readonly: boolean
}
@@ -9,6 +9,7 @@ import {
const DISABLED_EXTERNAL_INTEGRATIONS = [
SourceName.AIRTABLE,
SourceName.BUDIBASE,
SourceName.ARANGODB,
]

export async function fetch(ctx: UserCtx<void, FetchIntegrationsResponse>) {
@@ -26,7 +26,7 @@ export function csv(
headers.map(header => {
const val = row[header]
if (typeof val === "object" && !(val instanceof Date)) {
return `"${JSON.stringify(val).replace(/"/g, "'")}"`
return `"${escapeCsvString(JSON.stringify(val))}"`
}
if (val !== undefined) {
return `"${escapeCsvString(val.toString())}"`
@@ -1,19 +1,6 @@
import { DEFAULT_TABLES } from "../../../db/defaultData/datasource_bb_default"
import { setEnv } from "../../../environment"

jest.mock("../../../utilities/redis", () => ({
init: jest.fn(),
getLocksById: () => {
return {}
},
doesUserHaveLock: () => {
return true
},
updateLock: jest.fn(),
setDebounce: jest.fn(),
checkDebounce: jest.fn(),
shutdown: jest.fn(),
}))
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
@@ -2515,15 +2515,14 @@ if (descriptions.length) {
csvString: exportedValue,
})

const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const stringified = (value: string) => JSON.stringify(value)

const matchingObject = (
key: string,
value: any,
isArray: boolean
) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
const objectMatcher = `{"${key}":"${value[key]}".*?}`
if (isArray) {
return expect.stringMatching(
new RegExp(`^\\[${objectMatcher}\\]$`)
@@ -1246,10 +1246,7 @@ if (descriptions.length) {
})

describe.each([
[
RowExportFormat.CSV,
(val: any) => JSON.stringify(val).replace(/"/g, "'"),
],
[RowExportFormat.CSV, (val: any) => JSON.stringify(val)],
[RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
const basicSchema: TableSchema = {
@@ -16,6 +16,7 @@ describe("Execute Bash Automations", () => {
name: "test row",
description: "test description",
})
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -33,6 +33,7 @@ describe("test the create row action", () => {
name: "test",
description: "test",
}
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -6,6 +6,7 @@ describe("test the delay logic", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -13,6 +13,7 @@ describe("test the delete row action", () => {
await config.init()
table = await config.api.table.save(basicTable())
row = await config.api.row.save(table._id!, {})
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -7,6 +7,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -26,6 +26,7 @@ if (descriptions.length) {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
await config.api.automation.deleteAll()
})

beforeEach(async () => {
@@ -13,6 +13,7 @@ describe("Execute Script Automations", () => {
await config.init()
table = await config.api.table.save(basicTable())
await config.api.row.save(table._id!, {})
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -26,6 +26,7 @@ describe("test the filter logic", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -22,10 +22,7 @@ describe("Attempt to run a basic loop automation", () => {
})

beforeEach(async () => {
const { automations } = await config.api.automation.fetch()
for (const automation of automations) {
await config.api.automation.delete(automation)
}
await config.api.automation.deleteAll()

table = await config.api.table.save(basicTable())
await config.api.row.save(table._id!, {})
@@ -7,6 +7,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -8,6 +8,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -16,6 +16,7 @@ describe("test the openai action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

beforeEach(() => {
@@ -8,6 +8,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -21,6 +21,7 @@ describe("Test a query step automation", () => {
}
await config.api.row.save(table._id!, row)
await config.api.row.save(table._id!, row)
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -28,6 +28,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -6,6 +6,7 @@ describe("test the server log action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -9,6 +9,7 @@ describe("Test triggering an automation from another automation", () => {
beforeAll(async () => {
await automation.init()
await config.init()
await config.api.automation.deleteAll()
})

afterAll(async () => {
@@ -23,6 +23,7 @@ describe("test the update row action", () => {
await config.init()
table = await config.createTable()
row = await config.createRow()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -7,6 +7,7 @@ describe("test the outgoing webhook action", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -9,6 +9,8 @@ describe("app action trigger", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()

automation = await createAutomationBuilder(config)
.onAppAction()
.serverLog({
@@ -16,6 +16,7 @@ describe("cron trigger", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
})

afterAll(() => {
@@ -11,6 +11,7 @@ describe("row deleted trigger", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowDeleted({ tableId: table._id! })
@@ -11,6 +11,7 @@ describe("row saved trigger", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowSaved({ tableId: table._id! })
@@ -11,6 +11,7 @@ describe("row updated trigger", () => {

beforeAll(async () => {
await config.init()
await config.api.automation.deleteAll()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowUpdated({ tableId: table._id! })
@@ -37,6 +37,7 @@ describe("Webhook trigger test", () => {

beforeEach(async () => {
await config.init()
await config.api.automation.deleteAll()
table = await config.createTable()
})
@@ -9,6 +9,11 @@ import {

import { Database, aql } from "arangojs"

/**
* @deprecated 3rd March 2025
* datasource disabled - this datasource is marked for deprecation and removal
*/

interface ArangodbConfig {
url: string
username: string
@@ -33,15 +33,17 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
[SourceName.COUCHDB]: couchdb.schema,
[SourceName.SQL_SERVER]: sqlServer.schema,
[SourceName.S3]: s3.schema,
[SourceName.AIRTABLE]: airtable.schema,
[SourceName.MYSQL]: mysql.schema,
[SourceName.ARANGODB]: arangodb.schema,
[SourceName.REST]: rest.schema,
[SourceName.FIRESTORE]: firebase.schema,
[SourceName.GOOGLE_SHEETS]: googlesheets.schema,
[SourceName.REDIS]: redis.schema,
[SourceName.SNOWFLAKE]: snowflake.schema,
[SourceName.ORACLE]: oracle.schema,
/* deprecated - not available through UI */
[SourceName.ARANGODB]: arangodb.schema,
[SourceName.AIRTABLE]: airtable.schema,
/* un-used */
[SourceName.BUDIBASE]: undefined,
}
@@ -56,15 +58,17 @@ const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
[SourceName.COUCHDB]: couchdb.integration,
[SourceName.SQL_SERVER]: sqlServer.integration,
[SourceName.S3]: s3.integration,
[SourceName.AIRTABLE]: airtable.integration,
[SourceName.MYSQL]: mysql.integration,
[SourceName.ARANGODB]: arangodb.integration,
[SourceName.REST]: rest.integration,
[SourceName.FIRESTORE]: firebase.integration,
[SourceName.GOOGLE_SHEETS]: googlesheets.integration,
[SourceName.REDIS]: redis.integration,
[SourceName.SNOWFLAKE]: snowflake.integration,
[SourceName.ORACLE]: oracle.integration,
/* deprecated - not available through UI */
[SourceName.ARANGODB]: arangodb.integration,
[SourceName.AIRTABLE]: airtable.integration,
/* un-used */
[SourceName.BUDIBASE]: undefined,
}
@@ -1,76 +0,0 @@
import { default as AirtableIntegration } from "../airtable"

jest.mock("airtable")

class TestConfiguration {
integration: any
client: any

constructor(config: any = {}) {
this.integration = new AirtableIntegration.integration(config)
this.client = {
create: jest.fn(),
select: jest.fn(() => ({
firstPage: jest.fn(() => []),
})),
update: jest.fn(),
destroy: jest.fn(),
}
this.integration.client = () => this.client
}
}

describe("Airtable Integration", () => {
let config: any

beforeEach(() => {
config = new TestConfiguration()
})

it("calls the create method with the correct params", async () => {
await config.integration.create({
table: "test",
json: {},
})
expect(config.client.create).toHaveBeenCalledWith([
{
fields: {},
},
])
})

it("calls the read method with the correct params", async () => {
await config.integration.read({
table: "test",
view: "Grid view",
})
expect(config.client.select).toHaveBeenCalledWith({
maxRecords: 10,
view: "Grid view",
})
})

it("calls the update method with the correct params", async () => {
await config.integration.update({
table: "table",
id: "123",
json: {
name: "test",
},
})
expect(config.client.update).toHaveBeenCalledWith([
{
id: "123",
fields: { name: "test" },
},
])
})

it("calls the delete method with the correct params", async () => {
const ids = [1, 2, 3, 4]
await config.integration.delete({
ids,
})
expect(config.client.destroy).toHaveBeenCalledWith(ids)
})
})
@@ -1,38 +0,0 @@
import { default as ArangoDBIntegration } from "../arangodb"

jest.mock("arangojs")

class TestConfiguration {
integration: any

constructor(config: any = {}) {
this.integration = new ArangoDBIntegration.integration(config)
}
}

describe("ArangoDB Integration", () => {
let config: any

beforeEach(() => {
config = new TestConfiguration()
})

it("calls the create method with the correct params", async () => {
const body = {
json: "Hello",
}

await config.integration.create(body)
expect(config.integration.client.query).toHaveBeenCalledWith(
`INSERT Hello INTO collection RETURN NEW`
)
})

it("calls the read method with the correct params", async () => {
const query = {
sql: `test`,
}
await config.integration.read(query)
expect(config.integration.client.query).toHaveBeenCalledWith(query.sql)
})
})
@@ -133,4 +133,11 @@ export class AutomationAPI extends TestAPI {
}
)
}

deleteAll = async (expectations?: Expectations): Promise<void> => {
const { automations } = await this.fetch()
await Promise.all(
automations.map(automation => this.delete(automation, expectations))
)
}
}
@@ -270,6 +270,7 @@ function parseJsonExport<T>(value: any) {
if (typeof value !== "string") {
return value
}

try {
const parsed = JSON.parse(value)
@@ -278,12 +279,17 @@ function parseJsonExport<T>(value: any) {
if (
e.message.startsWith("Expected property name or '}' in JSON at position ")
) {
// This was probably converted as CSV and it has single quotes instead of double ones
// In order to store JSON within CSVs what we used to do is replace double
// quotes with single quotes. This results in invalid JSON, so the line
// below is a workaround to parse it. However, this method of storing JSON
// was never valid, and we don't do it anymore. However, people may have
// exported data and stored it, hoping to be able to restore it later, so
// we leave this in place to support that.
const parsed = JSON.parse(value.replace(/'/g, '"'))
return parsed as T
}

// It is no a valid JSON
// It is not valid JSON
throw e
}
}
@@ -117,7 +117,8 @@ export function isSupportedUserSearch(
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (const [key, operation] of Object.entries(query)) {
const { allOr, onEmptyFilter, ...filters } = query
for (const [key, operation] of Object.entries(filters)) {
if (typeof operation !== "object") {
return false
}