Merge branch 'sustaining/typescript-5.5.2' of github.com:Budibase/budibase into sustaining/typescript-5.5.2
commit cbcedd5d1e
@@ -1,5 +1,5 @@
{
"version": "2.29.1",
"version": "2.29.3",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -1 +1 @@
Subproject commit b600cca314a5cc9971e44d46047d1a0019b46b08
Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2
@@ -1,5 +1,14 @@
export {
CONSTANT_INTERNAL_ROW_COLS,
CONSTANT_EXTERNAL_ROW_COLS,
isInternalColumnName,
} from "@budibase/shared-core"
export const CONSTANT_INTERNAL_ROW_COLS = [
"_id",
"_rev",
"type",
"createdAt",
"updatedAt",
"tableId",
] as const

export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const

export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
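For reference, a minimal usage sketch of the helpers exported from @budibase/shared-core above (the call sites are illustrative assumptions, not part of the commit):

import { isInternalColumnName } from "@budibase/shared-core"

isInternalColumnName("_id") // true - "_id" is one of CONSTANT_INTERNAL_ROW_COLS
isInternalColumnName("firstName") // false - a user-defined column name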
@@ -24,7 +24,6 @@ export const account = (partial: Partial<Account> = {}): Account => {
createdAt: Date.now(),
verified: true,
verificationSent: true,
tier: "FREE", // DEPRECATED
authType: AuthType.PASSWORD,
name: generator.name(),
size: "10+",
@@ -17,8 +17,6 @@
SWITCHABLE_TYPES,
ValidColumnNameRegex,
helpers,
CONSTANT_INTERNAL_ROW_COLS,
CONSTANT_EXTERNAL_ROW_COLS,
} from "@budibase/shared-core"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
@@ -54,6 +52,7 @@
const DATE_TYPE = FieldType.DATETIME

const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
const { dispatch: gridDispatch, rows } = getContext("grid")

export let field
@@ -488,27 +487,20 @@
})
}
const newError = {}
const prohibited = externalTable
? CONSTANT_EXTERNAL_ROW_COLS
: CONSTANT_INTERNAL_ROW_COLS
if (!externalTable && fieldInfo.name?.startsWith("_")) {
newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
newError.name = `Illegal character; must be alpha-numeric.`
} else if (
prohibited.some(
name => fieldInfo?.name?.toLowerCase() === name.toLowerCase()
)
) {
newError.name = `${prohibited.join(
} else if (PROHIBITED_COLUMN_NAMES.some(name => fieldInfo.name === name)) {
newError.name = `${PROHIBITED_COLUMN_NAMES.join(
", "
)} are not allowed as column names - case insensitive.`
)} are not allowed as column names`
} else if (inUse($tables.selected, fieldInfo.name, originalName)) {
newError.name = `Column name already in use.`
}

if (fieldInfo.type === FieldType.AUTO && !fieldInfo.subtype) {
newError.subtype = `Auto Column requires a type.`
newError.subtype = `Auto Column requires a type`
}

if (fieldInfo.fieldName && fieldInfo.tableId) {
@@ -233,9 +233,9 @@
response.info = response.info || { code: 200 }
// if existing schema, copy over what it is
if (schema) {
for (let [name, field] of Object.entries(schema)) {
if (response.schema[name]) {
response.schema[name] = field
for (let [name, field] of Object.entries(response.schema)) {
if (!schema[name]) {
schema[name] = field
}
}
}
@@ -23,17 +23,21 @@
{ "type": "bigint", "message": "stringAsNumber" },
{ "type": "options", "message": "stringAsNumber" },
{ "type": "formula", "message": "stringAsNumber" },
{ "type": "datetime", "message": "dateAsNumber"}
{ "type": "datetime", "message": "dateAsNumber" }
],
"unsupported": [
{ "type": "json", "message": "jsonPrimitivesOnly" }
]
"unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }]
},
"stringLike": {
"supported": ["string", "number", "bigint", "options", "longform", "boolean", "datetime"],
"unsupported": [
{ "type": "json", "message": "jsonPrimitivesOnly" }
]
"supported": [
"string",
"number",
"bigint",
"options",
"longform",
"boolean",
"datetime"
],
"unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }]
},
"datetimeLike": {
"supported": ["datetime"],
@@ -43,11 +47,9 @@
{ "type": "options", "message": "stringAsDate" },
{ "type": "formula", "message": "stringAsDate" },
{ "type": "bigint", "message": "stringAsDate" },
{ "type": "number", "message": "numberAsDate"}
{ "type": "number", "message": "numberAsDate" }
],
"unsupported": [
{ "type": "json", "message": "jsonPrimitivesOnly" }
]
"unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }]
}
},
"layout": {
@@ -41,7 +41,7 @@
allSettings.push(setting)
}
})
return allSettings.filter(setting => setting.showInBar)
return allSettings.filter(setting => setting.showInBar && !setting.hidden)
}

const updatePosition = () => {
@@ -311,8 +311,8 @@ export async function preview(

// if existing schema, update to include any previous schema keys
if (existingSchema) {
for (let key of Object.keys(previewSchema)) {
if (existingSchema[key]) {
for (let key of Object.keys(existingSchema)) {
if (!previewSchema[key]) {
previewSchema[key] = existingSchema[key]
}
}
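To make the merge direction in this hunk concrete, a small worked sketch (the schema values are hypothetical; only the loop mirrors the change): previously saved keys are copied across only when the regenerated preview does not already define them, so a column whose type has changed keeps its new type.

const existingSchema: Record<string, { type: string; name: string }> = {
  data: { type: "number", name: "data" },
}
const previewSchema: Record<string, { type: string; name: string }> = {
  data: { type: "string", name: "data" },
  id: { type: "string", name: "id" },
}
for (let key of Object.keys(existingSchema)) {
  if (!previewSchema[key]) {
    previewSchema[key] = existingSchema[key]
  }
}
// previewSchema.data is still { type: "string", name: "data" } - the regenerated type wins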
@@ -250,6 +250,67 @@ describe.each(
expect(events.query.previewed).toHaveBeenCalledTimes(1)
})

it("should update schema when column type changes from number to string", async () => {
const tableName = "schema_change_test"
await client.schema.dropTableIfExists(tableName)

await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
table.integer("data")
})

await client(tableName).insert({
name: "test",
data: 123,
})

const firstPreview = await config.api.query.preview({
datasourceId: datasource._id!,
name: "Test Query",
queryVerb: "read",
fields: {
sql: `SELECT * FROM ${tableName}`,
},
parameters: [],
transformer: "return data",
schema: {},
readable: true,
})

expect(firstPreview.schema).toEqual(
expect.objectContaining({
data: { type: "number", name: "data" },
})
)

await client.schema.alterTable(tableName, table => {
table.string("data").alter()
})

await client(tableName).update({
data: "string value",
})

const secondPreview = await config.api.query.preview({
datasourceId: datasource._id!,
name: "Test Query",
queryVerb: "read",
fields: {
sql: `SELECT * FROM ${tableName}`,
},
parameters: [],
transformer: "return data",
schema: firstPreview.schema,
readable: true,
})

expect(secondPreview.schema).toEqual(
expect.objectContaining({
data: { type: "string", name: "data" },
})
)
})
it("should work with static variables", async () => {
await config.api.datasource.update({
...datasource,
@@ -137,6 +137,67 @@ describe("/queries", () => {
})
})

it("should update schema when structure changes from object to array", async () => {
const name = generator.guid()

await withCollection(async collection => {
await collection.insertOne({ name, field: { subfield: "value" } })
})

const firstPreview = await config.api.query.preview({
name: "Test Query",
datasourceId: datasource._id!,
fields: {
json: { name: { $eq: name } },
extra: {
collection,
actionType: "findOne",
},
},
schema: {},
queryVerb: "read",
parameters: [],
transformer: "return data",
readable: true,
})

expect(firstPreview.schema).toEqual(
expect.objectContaining({
field: { type: "json", name: "field" },
})
)

await withCollection(async collection => {
await collection.updateOne(
{ name },
{ $set: { field: ["value1", "value2"] } }
)
})

const secondPreview = await config.api.query.preview({
name: "Test Query",
datasourceId: datasource._id!,
fields: {
json: { name: { $eq: name } },
extra: {
collection,
actionType: "findOne",
},
},
schema: firstPreview.schema,
queryVerb: "read",
parameters: [],
transformer: "return data",
readable: true,
})

expect(secondPreview.schema).toEqual(
expect.objectContaining({
field: { type: "array", name: "field" },
})
)
})

it("should generate a nested schema based on all of the nested items", async () => {
const name = generator.guid()
const item = {
@@ -92,6 +92,61 @@ describe("rest", () => {
expect(cached.rows[0].name).toEqual("one")
})

it("should update schema when structure changes from JSON to array", async () => {
const datasource = await config.api.datasource.create({
name: generator.guid(),
type: "test",
source: SourceName.REST,
config: {},
})

nock("http://www.example.com")
.get("/")
.reply(200, [{ obj: {}, id: "1" }])

const firstResponse = await config.api.query.preview({
datasourceId: datasource._id!,
name: "test query",
parameters: [],
queryVerb: "read",
transformer: "",
schema: {},
readable: true,
fields: {
path: "www.example.com",
},
})

expect(firstResponse.schema).toEqual({
obj: { type: "json", name: "obj" },
id: { type: "string", name: "id" },
})

nock.cleanAll()

nock("http://www.example.com")
.get("/")
.reply(200, [{ obj: [], id: "1" }])

const secondResponse = await config.api.query.preview({
datasourceId: datasource._id!,
name: "test query",
parameters: [],
queryVerb: "read",
transformer: "",
schema: firstResponse.schema,
readable: true,
fields: {
path: "www.example.com",
},
})

expect(secondResponse.schema).toEqual({
obj: { type: "array", name: "obj" },
id: { type: "string", name: "id" },
})
})

it("should parse global and query level header mappings", async () => {
const datasource = await config.api.datasource.create({
name: generator.guid(),
@@ -276,34 +276,6 @@ describe.each([
})
})

isInternal &&
it("shouldn't allow duplicate column names", async () => {
const saveTableRequest: SaveTableRequest = {
...basicTable(),
}
saveTableRequest.schema["Type"] = {
type: FieldType.STRING,
name: "Type",
}
await config.api.table.save(saveTableRequest, {
status: 400,
body: {
message:
'Column(s) "type" are duplicated - check for other columns with these name (case in-sensitive)',
},
})
saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" }
saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" }

await config.api.table.save(saveTableRequest, {
status: 400,
body: {
message:
'Column(s) "type, foo" are duplicated - check for other columns with these name (case in-sensitive)',
},
})
})

it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
...basicTable(),
@@ -0,0 +1,36 @@
import * as automationUtils from "./automationUtils"

type ObjValue = {
[key: string]: string | ObjValue
}

export function replaceFakeBindings(
originalStepInput: Record<string, any>,
loopStepNumber: number
) {
for (const [key, value] of Object.entries(originalStepInput)) {
originalStepInput[key] = replaceBindingsRecursive(value, loopStepNumber)
}
return originalStepInput
}

function replaceBindingsRecursive(
value: string | ObjValue,
loopStepNumber: number
) {
if (typeof value === "object") {
for (const [innerKey, innerValue] of Object.entries(value)) {
if (typeof innerValue === "string") {
value[innerKey] = automationUtils.substituteLoopStep(
innerValue,
`steps.${loopStepNumber}`
)
} else if (typeof innerValue === "object") {
value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
}
}
} else if (typeof value === "string") {
value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`)
}
return value
}
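A hedged usage sketch of replaceFakeBindings as added above (the input shape and the "{{ loop.currentItem }}" binding text are assumptions for illustration; substituteLoopStep is the existing helper in automationUtils):

// Rewrites the front end's "fake" loop bindings inside a step's inputs so they
// reference the concrete loop step index before the step executes.
const stepInputs = {
  row: { description: "{{ loop.currentItem }}" }, // hypothetical binding string
}
const resolved = replaceFakeBindings(stepInputs, 3)
// every nested string value has been passed through
// automationUtils.substituteLoopStep(value, "steps.3")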
@@ -73,7 +73,12 @@ export async function run({ inputs }: AutomationStepInput) {
try {
let { field, condition, value } = inputs
// coerce types so that we can use them
if (!isNaN(value) && !isNaN(field)) {
if (
!isNaN(value) &&
!isNaN(field) &&
typeof field !== "boolean" &&
typeof value !== "boolean"
) {
value = parseFloat(value)
field = parseFloat(field)
} else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) {
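The extra typeof guards in this hunk matter because isNaN coerces booleans; a short standalone illustration (not from the commit):

isNaN(true as any) // false - true coerces to 1, so the old check let booleans into the numeric branch
parseFloat(true as any) // NaN - a boolean field or value would then be mangled by the coercion
// the added typeof checks keep booleans out of parseFloat entirely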
@@ -17,7 +17,6 @@ import { cloneDeep } from "lodash/fp"
import isEqual from "lodash/isEqual"
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
import { context } from "@budibase/backend-core"
import { findDuplicateInternalColumns } from "@budibase/shared-core"
import { getTable } from "../getters"
import { checkAutoColumns } from "./utils"
import * as viewsSdk from "../../views"
@@ -45,17 +44,6 @@ export async function save(
if (hasTypeChanged(table, oldTable)) {
throw new Error("A column type has changed.")
}

// check for case sensitivity - we don't want to allow duplicated columns
const duplicateColumn = findDuplicateInternalColumns(table)
if (duplicateColumn.length) {
throw new Error(
`Column(s) "${duplicateColumn.join(
", "
)}" are duplicated - check for other columns with these name (case in-sensitive)`
)
}

// check that subtypes have been maintained
table = checkAutoColumns(table, oldTable)

@@ -7,6 +7,8 @@ import {
} from "../automations/utils"
import * as actions from "../automations/actions"
import * as automationUtils from "../automations/automationUtils"
import { replaceFakeBindings } from "../automations/loopUtils"

import { default as AutomationEmitter } from "../events/AutomationEmitter"
import { generateAutomationMetadataID, isProdAppID } from "../db/utils"
import { definitions as triggerDefs } from "../automations/triggerInfo"
@@ -214,15 +216,15 @@
}

updateContextAndOutput(
loopStepNumber: number | undefined,
currentLoopStepIndex: number | undefined,
step: AutomationStep,
output: any,
result: { success: boolean; status: string }
) {
if (!loopStepNumber) {
if (!currentLoopStepIndex) {
throw new Error("No loop step number provided.")
}
this.executionOutput.steps.splice(loopStepNumber, 0, {
this.executionOutput.steps.splice(currentLoopStepIndex, 0, {
id: step.id,
stepId: step.stepId,
outputs: {
@@ -232,7 +234,7 @@
},
inputs: step.inputs,
})
this._context.steps.splice(loopStepNumber, 0, {
this._context.steps.splice(currentLoopStepIndex, 0, {
...output,
success: result.success,
status: result.status,
@@ -256,7 +258,7 @@
let loopStep: LoopStep | undefined = undefined

let stepCount = 0
let loopStepNumber: any = undefined
let currentLoopStepIndex: number = 0
let loopSteps: LoopStep[] | undefined = []
let metadata
let timeoutFlag = false
@@ -290,7 +292,7 @@
},
})

let input: any,
let input: LoopInput | undefined,
iterations = 1,
iterationCount = 0

@@ -309,19 +311,19 @@
stepCount++
if (step.stepId === LOOP_STEP_ID) {
loopStep = step as LoopStep
loopStepNumber = stepCount
currentLoopStepIndex = stepCount
continue
}

if (loopStep) {
input = await processObject(loopStep.inputs, this._context)
iterations = getLoopIterations(loopStep as LoopStep)
iterations = getLoopIterations(loopStep)
stepSpan?.addTags({ step: { iterations } })
}
for (let index = 0; index < iterations; index++) {

for (let stepIndex = 0; stepIndex < iterations; stepIndex++) {
let originalStepInput = cloneDeep(step.inputs)
// Handle if the user has set a max iteration count or if it reaches the max limit set by us
if (loopStep && input.binding) {
if (loopStep && input?.binding) {
let tempOutput = {
items: loopSteps,
iterations: iterationCount,
@@ -332,7 +334,7 @@
)
} catch (err) {
this.updateContextAndOutput(
loopStepNumber,
currentLoopStepIndex,
step,
tempOutput,
{
@@ -353,55 +355,22 @@
} else if (Array.isArray(loopStep.inputs.binding)) {
item = loopStep.inputs.binding
}
this._context.steps[loopStepNumber] = {
currentItem: item[index],
this._context.steps[currentLoopStepIndex] = {
currentItem: item[stepIndex],
}

// The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
// Pretty hacky because we need to account for the row object
for (let [key, value] of Object.entries(originalStepInput)) {
if (typeof value === "object") {
for (let [innerKey, innerValue] of Object.entries(
originalStepInput[key]
)) {
if (typeof innerValue === "string") {
originalStepInput[key][innerKey] =
automationUtils.substituteLoopStep(
innerValue,
`steps.${loopStepNumber}`
)
} else if (typeof value === "object") {
for (let [innerObject, innerValue] of Object.entries(
originalStepInput[key][innerKey]
)) {
if (typeof innerValue === "string") {
originalStepInput[key][innerKey][innerObject] =
automationUtils.substituteLoopStep(
innerValue,
`steps.${loopStepNumber}`
)
}
}
}
}
} else {
if (typeof value === "string") {
originalStepInput[key] =
automationUtils.substituteLoopStep(
value,
`steps.${loopStepNumber}`
)
}
}
}
originalStepInput = replaceFakeBindings(
originalStepInput,
currentLoopStepIndex
)

if (
index === env.AUTOMATION_MAX_ITERATIONS ||
stepIndex === env.AUTOMATION_MAX_ITERATIONS ||
(loopStep.inputs.iterations &&
index === parseInt(loopStep.inputs.iterations))
stepIndex === parseInt(loopStep.inputs.iterations))
) {
this.updateContextAndOutput(
loopStepNumber,
currentLoopStepIndex,
step,
tempOutput,
{
@@ -416,7 +385,7 @@

let isFailure = false
const currentItem =
this._context.steps[loopStepNumber]?.currentItem
this._context.steps[currentLoopStepIndex]?.currentItem
if (currentItem && typeof currentItem === "object") {
isFailure = Object.keys(currentItem).some(value => {
return currentItem[value] === loopStep?.inputs.failure
@@ -428,7 +397,7 @@

if (isFailure) {
this.updateContextAndOutput(
loopStepNumber,
currentLoopStepIndex,
step,
tempOutput,
{
@@ -453,7 +422,6 @@
continue
}

// If it's a loop step, we need to manually add the bindings to the context
let stepFn = await this.getStepFunctionality(step.stepId)
let inputs = await processObject(originalStepInput, this._context)
inputs = automationUtils.cleanInputValues(
@@ -502,9 +470,9 @@

if (loopStep) {
iterationCount++
if (index === iterations - 1) {
if (stepIndex === iterations - 1) {
loopStep = undefined
this._context.steps.splice(loopStepNumber, 1)
this._context.steps.splice(currentLoopStepIndex, 1)
break
}
}
@@ -515,7 +483,7 @@

if (loopStep && iterations === 0) {
loopStep = undefined
this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
id: step.id,
stepId: step.stepId,
outputs: {
@@ -525,14 +493,14 @@
inputs: {},
})

this._context.steps.splice(loopStepNumber, 1)
this._context.steps.splice(currentLoopStepIndex, 1)
iterations = 1
}

// Delete the step after the loop step as it's irrelevant, since information is included
// in the loop step
if (wasLoopStep && !loopStep) {
this._context.steps.splice(loopStepNumber + 1, 1)
this._context.steps.splice(currentLoopStepIndex + 1, 1)
wasLoopStep = false
}
if (loopSteps && loopSteps.length) {
@@ -541,13 +509,13 @@
items: loopSteps,
iterations: iterationCount,
}
this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
id: step.id,
stepId: step.stepId,
outputs: tempOutput,
inputs: step.inputs,
})
this._context.steps[loopStepNumber] = tempOutput
this._context.steps[currentLoopStepIndex] = tempOutput

wasLoopStep = true
loopSteps = []
@@ -1,6 +1,5 @@
export * from "./api"
export * from "./fields"
export * from "./rows"

export const OperatorOptions = {
Equals: {
@@ -1,14 +0,0 @@
export const CONSTANT_INTERNAL_ROW_COLS = [
"_id",
"_rev",
"type",
"createdAt",
"updatedAt",
"tableId",
] as const

export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const

export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
@@ -1,5 +1,4 @@
import { FieldType, Table } from "@budibase/types"
import { CONSTANT_INTERNAL_ROW_COLS } from "./constants"
import { FieldType } from "@budibase/types"

const allowDisplayColumnByType: Record<FieldType, boolean> = {
[FieldType.STRING]: true,
@@ -52,22 +51,3 @@ export function canBeDisplayColumn(type: FieldType): boolean {
export function canBeSortColumn(type: FieldType): boolean {
return !!allowSortColumnByType[type]
}

export function findDuplicateInternalColumns(table: Table): string[] {
// get the column names
const columnNames = Object.keys(table.schema)
.concat(CONSTANT_INTERNAL_ROW_COLS)
.map(colName => colName.toLowerCase())
// there are duplicates
const set = new Set(columnNames)
let duplicates: string[] = []
if (set.size !== columnNames.length) {
for (let key of set.keys()) {
const count = columnNames.filter(name => name === key).length
if (count > 1) {
duplicates.push(key)
}
}
}
return duplicates
}
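For context, a small sketch of what the duplicate check above reports (the table value is hypothetical): user-defined columns are compared case-insensitively against the reserved internal column names.

const table = {
  schema: {
    Type: { type: "string", name: "Type" }, // clashes with the internal "type" column
    city: { type: "string", name: "city" },
  },
} as any
findDuplicateInternalColumns(table) // ["type"]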
@@ -42,10 +42,7 @@ export interface Account extends CreateAccount {
verified: boolean
verificationSent: boolean
// licensing
tier: string // deprecated
planType?: PlanType
/** @deprecated */
planTier?: number
license?: License
installId?: string
installTenantId?: string
@@ -144,7 +144,7 @@ interface BaseIOStructure {
required?: string[]
}

interface InputOutputBlock {
export interface InputOutputBlock {
properties: {
[key: string]: BaseIOStructure
}