Merge branch 'master' of github.com:Budibase/budibase into table-improvements-2
commit c5a2f9ce7d
@@ -1,5 +1,5 @@
 {
-  "version": "2.29.1",
+  "version": "2.29.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -1 +1 @@
-Subproject commit b600cca314a5cc9971e44d46047d1a0019b46b08
+Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2
@@ -1,5 +1,14 @@
-export {
-  CONSTANT_INTERNAL_ROW_COLS,
-  CONSTANT_EXTERNAL_ROW_COLS,
-  isInternalColumnName,
-} from "@budibase/shared-core"
+export const CONSTANT_INTERNAL_ROW_COLS = [
+  "_id",
+  "_rev",
+  "type",
+  "createdAt",
+  "updatedAt",
+  "tableId",
+] as const
+
+export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
+
+export function isInternalColumnName(name: string): boolean {
+  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
+}
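Note on the pattern above: `as const` narrows CONSTANT_INTERNAL_ROW_COLS to a readonly tuple of string literals, so `includes` will not accept an arbitrary string until the tuple is widened back to `readonly string[]`. A minimal standalone sketch of the same pattern (names here are illustrative, not Budibase code):

    const COLS = ["_id", "_rev", "tableId"] as const
    // typeof COLS is readonly ["_id", "_rev", "tableId"]; calling
    // COLS.includes(name) with a plain string fails to type-check,
    // so the tuple is widened back to readonly string[] first.
    function isReserved(name: string): boolean {
      return (COLS as readonly string[]).includes(name)
    }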
@@ -449,8 +449,12 @@ class InternalBuilder
       query = query.orderBy(`${aliased}.${key}`, direction, nulls)
     }
   }
-  // always add sorting by the primary key - make sure result is deterministic
-  query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+  // add sorting by the primary key if the result isn't already sorted by it,
+  // to make sure result is deterministic
+  if (!sort || sort[primaryKey[0]] === undefined) {
+    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
+  }
   return query
 }
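The guard above keeps pagination deterministic without ever naming the primary key twice: if the caller already sorts by the primary key, unconditionally appending it again can produce an ORDER BY clause that lists the same column twice, which some databases reject. A hedged knex sketch of the idea (table and column names are illustrative):

    import knex from "knex"

    const sort: Record<string, string> = { name: "asc" } // hypothetical sort input
    let query = knex({ client: "pg" })("people").select("*")
    for (const [column, direction] of Object.entries(sort)) {
      query = query.orderBy(column, direction as "asc" | "desc")
    }
    // Append the primary key only when it isn't already part of the sort,
    // so the generated SQL never repeats a column in ORDER BY.
    if (sort["id"] === undefined) {
      query = query.orderBy("id")
    }
    console.log(query.toString())
    // select * from "people" order by "name" asc, "id" asc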
@@ -604,7 +608,8 @@ class InternalBuilder
     if (!primary) {
       throw new Error("Primary key is required for upsert")
     }
-    return query.insert(parsedBody).onConflict(primary).merge()
+    const ret = query.insert(parsedBody).onConflict(primary).merge()
+    return ret
   } else if (this.client === SqlClient.MS_SQL) {
     // No upsert or onConflict support in MSSQL yet, see:
     // https://github.com/knex/knex/pull/6050
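For context, the chain kept here is knex's documented upsert path: insert, then onConflict on the primary key, then merge. A minimal sketch with illustrative table and column names:

    import knex from "knex"

    const db = knex({ client: "pg" })
    const upsert = db("users")
      .insert({ id: 1, name: "Alice" })
      .onConflict("id")
      .merge()
    // Roughly: insert into "users" ("id", "name") values (1, 'Alice')
    //   on conflict ("id") do update set "name" = excluded."name", ...
    console.log(upsert.toString())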
@@ -24,7 +24,6 @@ export const account = (partial: Partial<Account> = {}): Account => {
     createdAt: Date.now(),
     verified: true,
     verificationSent: true,
-    tier: "FREE", // DEPRECATED
     authType: AuthType.PASSWORD,
     name: generator.name(),
     size: "10+",
@@ -17,8 +17,6 @@
     SWITCHABLE_TYPES,
     ValidColumnNameRegex,
     helpers,
-    CONSTANT_INTERNAL_ROW_COLS,
-    CONSTANT_EXTERNAL_ROW_COLS,
   } from "@budibase/shared-core"
   import { createEventDispatcher, getContext, onMount } from "svelte"
   import { cloneDeep } from "lodash/fp"
@@ -54,6 +52,7 @@
   const DATE_TYPE = FieldType.DATETIME

   const dispatch = createEventDispatcher()
+  const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
   const { dispatch: gridDispatch, rows } = getContext("grid")

   export let field
@@ -488,27 +487,20 @@
       })
     }
     const newError = {}
-    const prohibited = externalTable
-      ? CONSTANT_EXTERNAL_ROW_COLS
-      : CONSTANT_INTERNAL_ROW_COLS
     if (!externalTable && fieldInfo.name?.startsWith("_")) {
       newError.name = `Column name cannot start with an underscore.`
     } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
       newError.name = `Illegal character; must be alpha-numeric.`
-    } else if (
-      prohibited.some(
-        name => fieldInfo?.name?.toLowerCase() === name.toLowerCase()
-      )
-    ) {
-      newError.name = `${prohibited.join(
+    } else if (PROHIBITED_COLUMN_NAMES.some(name => fieldInfo.name === name)) {
+      newError.name = `${PROHIBITED_COLUMN_NAMES.join(
         ", "
-      )} are not allowed as column names - case insensitive.`
+      )} are not allowed as column names`
     } else if (inUse($tables.selected, fieldInfo.name, originalName)) {
       newError.name = `Column name already in use.`
     }

     if (fieldInfo.type === FieldType.AUTO && !fieldInfo.subtype) {
-      newError.subtype = `Auto Column requires a type.`
+      newError.subtype = `Auto Column requires a type`
     }

     if (fieldInfo.fieldName && fieldInfo.tableId) {
@@ -1,9 +1,14 @@
 <script>
-  import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
+  import {
+    FieldType,
+    BBReferenceFieldSubType,
+    SourceName,
+  } from "@budibase/types"
   import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { DB_TYPE_INTERNAL } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
+  import { tables, datasources } from "stores/builder"

   let error = null
   let fileName = null
@@ -80,6 +85,9 @@
     schema = fetchSchema(tableId)
   }

+  $: table = $tables.list.find(table => table._id === tableId)
+  $: datasource = $datasources.list.find(ds => ds._id === table?.sourceId)
+
   async function fetchSchema(tableId) {
     try {
       const definition = await API.fetchTableDefinition(tableId)
@@ -185,20 +193,25 @@
       </div>
     {/each}
   </div>
-  {#if tableType === DB_TYPE_INTERNAL}
   <br />
+  <!-- SQL Server doesn't yet support overwriting rows by existing keys -->
+  {#if datasource?.source !== SourceName.SQL_SERVER}
     <Toggle
       bind:value={updateExistingRows}
       on:change={() => (identifierFields = [])}
       thin
       text="Update existing rows"
     />
+  {/if}
   {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         label="Identifier field(s)"
         options={Object.keys(validation)}
         bind:value={identifierFields}
       />
+    {:else}
+      <p>Rows will be updated based on the table's primary key.</p>
     {/if}
   {/if}
   {#if invalidColumns.length > 0}
@@ -233,9 +233,9 @@
     response.info = response.info || { code: 200 }
     // if existing schema, copy over what it is
     if (schema) {
-      for (let [name, field] of Object.entries(schema)) {
-        if (response.schema[name]) {
-          response.schema[name] = field
+      for (let [name, field] of Object.entries(response.schema)) {
+        if (!schema[name]) {
+          schema[name] = field
         }
       }
     }
@@ -1 +1 @@
-Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6
+Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2017-latest
+FROM mcr.microsoft.com/mssql/server:2022-latest

 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd
@@ -311,8 +311,8 @@ export async function preview(

   // if existing schema, update to include any previous schema keys
   if (existingSchema) {
-    for (let key of Object.keys(previewSchema)) {
-      if (existingSchema[key]) {
+    for (let key of Object.keys(existingSchema)) {
+      if (!previewSchema[key]) {
         previewSchema[key] = existingSchema[key]
       }
     }
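This hunk and the queries-controller hunk above invert the same merge: previously every existing key overwrote the freshly detected schema, which pinned stale types forever; now only keys missing from the fresh result are carried over, so a column whose type changed (number to string, object to array) is picked up on the next preview. A standalone sketch of the corrected semantics:

    type FieldSchema = { type: string; name: string }
    type Schema = Record<string, FieldSchema>

    // Fresh detection wins; existing entries survive only when the fresh
    // preview didn't detect them at all.
    function mergeSchemas(previewSchema: Schema, existingSchema?: Schema): Schema {
      if (existingSchema) {
        for (const key of Object.keys(existingSchema)) {
          if (!previewSchema[key]) {
            previewSchema[key] = existingSchema[key]
          }
        }
      }
      return previewSchema
    }

    const merged = mergeSchemas(
      { data: { type: "string", name: "data" } },
      { data: { type: "number", name: "data" }, note: { type: "string", name: "note" } }
    )
    // merged.data.type === "string" (fresh wins); merged.note is carried over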
@@ -316,7 +316,13 @@ export class ExternalRequest<T extends Operation> {
     manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
+      if (row[key] === undefined || newRow[key]) {
+        continue
+      }
+      if (
+        !(this.operation === Operation.BULK_UPSERT) &&
+        !isEditableColumn(field)
+      ) {
         continue
       }
       // parse floats/numbers
@@ -690,6 +696,7 @@ export class ExternalRequest<T extends Operation> {
       },
       meta: {
         table,
+        id: config.id,
       },
     }
@@ -16,6 +16,7 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"

 function getDatasourceId(table: Table) {
   if (!table) {
@@ -85,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
+
+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on at the moment in Budibase
+    // is the primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }

   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
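The 400 above exists because SQL ON CONFLICT only fires against a unique index, and the primary key is the only unique index Budibase can rely on here, so identifier fields that differ from it cannot drive an upsert. A standalone sketch of the guard (not the actual Budibase handler):

    import { isEqual } from "lodash"

    function assertUpsertableIdentifiers(
      identifierFields: string[] | undefined,
      primary: string[] | undefined
    ) {
      // Any identifier other than the primary key would silently insert
      // duplicates instead of updating, since no unique index matches it.
      if (identifierFields?.length && !isEqual(identifierFields, primary)) {
        throw new Error("identifier fields must match the table's primary key")
      }
    }

    assertUpsertableIdentifiers(["companyId", "userId"], ["companyId", "userId"]) // ok
    // assertUpsertableIdentifiers(["email"], ["id"]) would throw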
@@ -178,7 +178,7 @@ export async function handleDataImport(
   }

   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)

   let finalData: any = await importToRows(data, table, user)
@@ -250,6 +250,67 @@ describe.each(
     expect(events.query.previewed).toHaveBeenCalledTimes(1)
   })

+  it("should update schema when column type changes from number to string", async () => {
+    const tableName = "schema_change_test"
+    await client.schema.dropTableIfExists(tableName)
+
+    await client.schema.createTable(tableName, table => {
+      table.increments("id").primary()
+      table.string("name")
+      table.integer("data")
+    })
+
+    await client(tableName).insert({
+      name: "test",
+      data: 123,
+    })
+
+    const firstPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: {},
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "number", name: "data" },
+      })
+    )
+
+    await client.schema.alterTable(tableName, table => {
+      table.string("data").alter()
+    })
+
+    await client(tableName).update({
+      data: "string value",
+    })
+
+    const secondPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: firstPreview.schema,
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "string", name: "data" },
+      })
+    )
+  })
+
   it("should work with static variables", async () => {
     await config.api.datasource.update({
       ...datasource,
@@ -137,6 +137,67 @@ describe("/queries", () => {
     })
   })

+  it("should update schema when structure changes from object to array", async () => {
+    const name = generator.guid()
+
+    await withCollection(async collection => {
+      await collection.insertOne({ name, field: { subfield: "value" } })
+    })
+
+    const firstPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: {},
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "json", name: "field" },
+      })
+    )
+
+    await withCollection(async collection => {
+      await collection.updateOne(
+        { name },
+        { $set: { field: ["value1", "value2"] } }
+      )
+    })
+
+    const secondPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: firstPreview.schema,
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "array", name: "field" },
+      })
+    )
+  })
+
   it("should generate a nested schema based on all of the nested items", async () => {
     const name = generator.guid()
     const item = {
@@ -92,6 +92,61 @@ describe("rest", () => {
     expect(cached.rows[0].name).toEqual("one")
   })

+  it("should update schema when structure changes from JSON to array", async () => {
+    const datasource = await config.api.datasource.create({
+      name: generator.guid(),
+      type: "test",
+      source: SourceName.REST,
+      config: {},
+    })
+
+    nock("http://www.example.com")
+      .get("/")
+      .reply(200, [{ obj: {}, id: "1" }])
+
+    const firstResponse = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "test query",
+      parameters: [],
+      queryVerb: "read",
+      transformer: "",
+      schema: {},
+      readable: true,
+      fields: {
+        path: "www.example.com",
+      },
+    })
+
+    expect(firstResponse.schema).toEqual({
+      obj: { type: "json", name: "obj" },
+      id: { type: "string", name: "id" },
+    })
+
+    nock.cleanAll()
+
+    nock("http://www.example.com")
+      .get("/")
+      .reply(200, [{ obj: [], id: "1" }])
+
+    const secondResponse = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "test query",
+      parameters: [],
+      queryVerb: "read",
+      transformer: "",
+      schema: firstResponse.schema,
+      readable: true,
+      fields: {
+        path: "www.example.com",
+      },
+    })
+
+    expect(secondResponse.schema).toEqual({
+      obj: { type: "array", name: "obj" },
+      id: { type: "string", name: "id" },
+    })
+  })
+
   it("should parse global and query level header mappings", async () => {
     const datasource = await config.api.datasource.create({
       name: generator.guid(),
@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"

 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@ import {
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"

 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([

   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined

   beforeAll(async () => {
     await config.init()
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })

@@ -307,13 +315,13 @@ describe.each([
       // as quickly as possible.
       await Promise.all(
         sequence.map(async () => {
-          const attempts = 20
+          const attempts = 30
           for (let attempt = 0; attempt < attempts; attempt++) {
             try {
               await config.api.row.save(table._id!, {})
               return
             } catch (e) {
-              await new Promise(r => setTimeout(r, Math.random() * 15))
+              await new Promise(r => setTimeout(r, Math.random() * 50))
             }
           }
           throw new Error(`Failed to create row after ${attempts} attempts`)
@@ -598,6 +606,35 @@ describe.each([
       expect(res.name).toEqual("Updated Name")
       await assertRowUsage(rowUsage)
     })
+
+    !isInternal &&
+      it("can update a row on an external table with a primary key", async () => {
+        const tableName = uuid.v4().substring(0, 10)
+        await client!.schema.createTable(tableName, table => {
+          table.increments("id").primary()
+          table.string("name")
+        })
+
+        const res = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = res.datasource.entities![tableName]
+
+        const row = await config.api.row.save(table._id!, {
+          id: 1,
+          name: "Row 1",
+        })
+
+        const updatedRow = await config.api.row.save(table._id!, {
+          _id: row._id!,
+          name: "Row 1 Updated",
+        })
+
+        expect(updatedRow.name).toEqual("Row 1 Updated")
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows).toHaveLength(1)
+      })
   })

   describe("patch", () => {
@@ -667,6 +704,7 @@ describe.each([
       expect(event.oldRow.description).toEqual(beforeRow.description)
       expect(event.row.description).toEqual(beforeRow.description)
     })
+
     it("should throw an error when given improper types", async () => {
       const existing = await config.api.row.save(table._id!, {})
       const rowUsage = await getRowUsage()
@@ -758,7 +796,8 @@ describe.each([
     })

     !isInternal &&
-      // TODO: SQL is having issues creating composite keys
+      // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+      // to identity columns. This is not something Budibase does currently.
       providerType !== DatabaseName.SQL_SERVER &&
       it("should support updating fields that are part of a composite key", async () => {
         const tableRequest = saveTableRequest({
@@ -911,24 +950,12 @@
       await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })

-    it("Should ignore malformed/invalid delete requests", async () => {
-      const rowUsage = await getRowUsage()
-
-      await config.api.row.delete(table._id!, { not: "valid" } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, { rows: 123 } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, "invalid" as any, {
+    it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
+      "Should ignore malformed/invalid delete request: %s",
+      async (request: any) => {
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.delete(table._id!, request, {
           status: 400,
           body: {
             message: "Invalid delete rows request",
@@ -936,7 +963,8 @@ describe.each([
         })

         await assertRowUsage(rowUsage)
-    })
+      }
+    )
   })

   describe("bulkImport", () => {
@@ -1085,6 +1113,121 @@ describe.each([
       expect(rows[2].name).toEqual("Row 3")
       expect(rows[2].description).toEqual("Row 3 description")
     })

+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with an autoID primary key", async () => {
+        const tableName = uuid.v4()
+        await client!.schema.createTable(tableName, table => {
+          table.increments("userId").primary()
+          table.string("name")
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          name: "Clare",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          name: "Jeff",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["userId"],
+          rows: [
+            {
+              userId: row1.userId,
+              name: "Clare updated",
+            },
+            {
+              userId: row2.userId,
+              name: "Jeff updated",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(2)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Clare updated")
+        expect(rows[1].name).toEqual("Jeff updated")
+      })
   })

   describe("enrich", () => {
@@ -1,5 +1,9 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 import { db as dbCore, utils } from "@budibase/backend-core"

 import * as setup from "./utilities"
@@ -24,6 +28,8 @@ import _ from "lodash"
 import tk from "timekeeper"
 import { encodeJSBinding } from "@budibase/string-templates"
 import { dataFilters } from "@budibase/shared-core"
+import { Knex } from "knex"
+import { structures } from "@budibase/backend-core/tests"

 describe.each([
   ["in-memory", undefined],
@@ -42,6 +48,7 @@ describe.each([

   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
+  let client: Knex | undefined
   let table: Table
   let rows: Row[]

@@ -63,8 +70,10 @@ describe.each([
     }

     if (dsProvider) {
+      const rawDatasource = await dsProvider
+      client = await knexClient(rawDatasource)
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
     }
   })
@@ -909,6 +918,44 @@ describe.each([
         }).toMatchExactly([{ name: "foo" }, { name: "bar" }])
       })
     })

+    !isInternal &&
+      !isInMemory &&
+      // This test was added because we automatically add in a sort by the
+      // primary key, and we used to do this unconditionally which caused
+      // problems because it was possible for the primary key to appear twice
+      // in the resulting SQL ORDER BY clause, resulting in an SQL error.
+      // We now check first to make sure that the primary key isn't already
+      // in the sort before adding it.
+      describe("sort on primary key", () => {
+        beforeAll(async () => {
+          const tableName = structures.uuid().substring(0, 10)
+          await client!.schema.createTable(tableName, t => {
+            t.string("name").primary()
+          })
+          const resp = await config.api.datasource.fetchSchema({
+            datasourceId: datasource!._id!,
+          })
+
+          table = resp.datasource.entities![tableName]
+
+          await createRows([{ name: "foo" }, { name: "bar" }])
+        })
+
+        it("should be able to sort by a primary key column ascending", async () =>
+          expectSearch({
+            query: {},
+            sort: "name",
+            sortOrder: SortOrder.ASCENDING,
+          }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+        it("should be able to sort by a primary key column descending", async () =>
+          expectSearch({
+            query: {},
+            sort: "name",
+            sortOrder: SortOrder.DESCENDING,
+          }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+      })
   })
 })
@@ -276,34 +276,6 @@
     })
   })

-  isInternal &&
-    it("shouldn't allow duplicate column names", async () => {
-      const saveTableRequest: SaveTableRequest = {
-        ...basicTable(),
-      }
-      saveTableRequest.schema["Type"] = {
-        type: FieldType.STRING,
-        name: "Type",
-      }
-      await config.api.table.save(saveTableRequest, {
-        status: 400,
-        body: {
-          message:
-            'Column(s) "type" are duplicated - check for other columns with these name (case in-sensitive)',
-        },
-      })
-      saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" }
-      saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" }
-
-      await config.api.table.save(saveTableRequest, {
-        status: 400,
-        body: {
-          message:
-            'Column(s) "type, foo" are duplicated - check for other columns with these name (case in-sensitive)',
-        },
-      })
-    })
-
   it("should add a new column for an internal DB table", async () => {
     const saveTableRequest: SaveTableRequest = {
       ...basicTable(),
@@ -0,0 +1,36 @@
+import * as automationUtils from "./automationUtils"
+
+type ObjValue = {
+  [key: string]: string | ObjValue
+}
+
+export function replaceFakeBindings(
+  originalStepInput: Record<string, any>,
+  loopStepNumber: number
+) {
+  for (const [key, value] of Object.entries(originalStepInput)) {
+    originalStepInput[key] = replaceBindingsRecursive(value, loopStepNumber)
+  }
+  return originalStepInput
+}
+
+function replaceBindingsRecursive(
+  value: string | ObjValue,
+  loopStepNumber: number
+) {
+  if (typeof value === "object") {
+    for (const [innerKey, innerValue] of Object.entries(value)) {
+      if (typeof innerValue === "string") {
+        value[innerKey] = automationUtils.substituteLoopStep(
+          innerValue,
+          `steps.${loopStepNumber}`
+        )
+      } else if (typeof innerValue === "object") {
+        value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
+      }
+    }
+  } else if (typeof value === "string") {
+    value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`)
+  }
+  return value
+}
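The new helper walks a step's inputs and rewrites every string leaf so the front end's placeholder loop binding points at the real steps.<n> context entry. A standalone illustration of the traversal, with substitute() standing in for automationUtils.substituteLoopStep, whose body isn't shown in this diff (the exact binding syntax here is an assumption):

    type Value = string | { [key: string]: Value }

    // Stand-in for automationUtils.substituteLoopStep; the real
    // replacement logic may differ.
    const substitute = (s: string, path: string) => s.replace(/loop\./g, `${path}.`)

    function replaceRecursive(value: Value, loopStepNumber: number): Value {
      if (typeof value === "string") {
        return substitute(value, `steps.${loopStepNumber}`)
      }
      for (const [key, inner] of Object.entries(value)) {
        value[key] = replaceRecursive(inner, loopStepNumber)
      }
      return value
    }

    console.log(replaceRecursive({ row: { name: "{{ loop.currentItem.name }}" } }, 3))
    // => { row: { name: "{{ steps.3.currentItem.name }}" } }

Unlike the hand-unrolled loops it replaces (see the orchestrator hunk below), the recursion handles arbitrary nesting depth instead of exactly two levels.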
@@ -73,7 +73,12 @@ export async function run({ inputs }: AutomationStepInput) {
   try {
     let { field, condition, value } = inputs
     // coerce types so that we can use them
-    if (!isNaN(value) && !isNaN(field)) {
+    if (
+      !isNaN(value) &&
+      !isNaN(field) &&
+      typeof field !== "boolean" &&
+      typeof value !== "boolean"
+    ) {
       value = parseFloat(value)
       field = parseFloat(field)
     } else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) {
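The extra typeof checks matter because isNaN coerces its argument with Number(), so booleans look numeric, while parseFloat stringifies its argument and returns NaN for them. A two-line illustration:

    console.log(isNaN(true as any))      // false — Number(true) === 1
    console.log(parseFloat(true as any)) // NaN — parses the string "true"
    // Without the guard, a boolean field/value pair would be coerced to
    // NaN floats; with it, booleans skip the numeric branch entirely.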
@@ -17,7 +17,6 @@ import { cloneDeep } from "lodash/fp"
 import isEqual from "lodash/isEqual"
 import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
 import { context } from "@budibase/backend-core"
-import { findDuplicateInternalColumns } from "@budibase/shared-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
@@ -45,17 +44,6 @@ export async function save(
   if (hasTypeChanged(table, oldTable)) {
     throw new Error("A column type has changed.")
   }
-
-  // check for case sensitivity - we don't want to allow duplicated columns
-  const duplicateColumn = findDuplicateInternalColumns(table)
-  if (duplicateColumn.length) {
-    throw new Error(
-      `Column(s) "${duplicateColumn.join(
-        ", "
-      )}" are duplicated - check for other columns with these name (case in-sensitive)`
-    )
-  }

   // check that subtypes have been maintained
   table = checkAutoColumns(table, oldTable)
@@ -7,6 +7,8 @@ import {
 } from "../automations/utils"
 import * as actions from "../automations/actions"
 import * as automationUtils from "../automations/automationUtils"
+import { replaceFakeBindings } from "../automations/loopUtils"
+
 import { default as AutomationEmitter } from "../events/AutomationEmitter"
 import { generateAutomationMetadataID, isProdAppID } from "../db/utils"
 import { definitions as triggerDefs } from "../automations/triggerInfo"
@@ -214,15 +216,15 @@ class Orchestrator {
   }

   updateContextAndOutput(
-    loopStepNumber: number | undefined,
+    currentLoopStepIndex: number | undefined,
     step: AutomationStep,
     output: any,
     result: { success: boolean; status: string }
   ) {
-    if (!loopStepNumber) {
+    if (!currentLoopStepIndex) {
       throw new Error("No loop step number provided.")
     }
-    this.executionOutput.steps.splice(loopStepNumber, 0, {
+    this.executionOutput.steps.splice(currentLoopStepIndex, 0, {
       id: step.id,
       stepId: step.stepId,
       outputs: {
@@ -232,7 +234,7 @@ class Orchestrator {
       },
       inputs: step.inputs,
     })
-    this._context.steps.splice(loopStepNumber, 0, {
+    this._context.steps.splice(currentLoopStepIndex, 0, {
       ...output,
       success: result.success,
       status: result.status,
@@ -256,7 +258,7 @@ class Orchestrator {
     let loopStep: LoopStep | undefined = undefined

     let stepCount = 0
-    let loopStepNumber: any = undefined
+    let currentLoopStepIndex: number = 0
     let loopSteps: LoopStep[] | undefined = []
     let metadata
     let timeoutFlag = false
@@ -290,7 +292,7 @@ class Orchestrator {
         },
       })

-      let input: any,
+      let input: LoopInput | undefined,
         iterations = 1,
         iterationCount = 0

@@ -309,19 +311,19 @@ class Orchestrator {
         stepCount++
         if (step.stepId === LOOP_STEP_ID) {
           loopStep = step as LoopStep
-          loopStepNumber = stepCount
+          currentLoopStepIndex = stepCount
           continue
         }

         if (loopStep) {
           input = await processObject(loopStep.inputs, this._context)
-          iterations = getLoopIterations(loopStep as LoopStep)
+          iterations = getLoopIterations(loopStep)
           stepSpan?.addTags({ step: { iterations } })
         }
-        for (let index = 0; index < iterations; index++) {
+
+        for (let stepIndex = 0; stepIndex < iterations; stepIndex++) {
           let originalStepInput = cloneDeep(step.inputs)
-          // Handle if the user has set a max iteration count or if it reaches the max limit set by us
-          if (loopStep && input.binding) {
+          if (loopStep && input?.binding) {
             let tempOutput = {
               items: loopSteps,
               iterations: iterationCount,
@@ -332,7 +334,7 @@ class Orchestrator {
             )
           } catch (err) {
             this.updateContextAndOutput(
-              loopStepNumber,
+              currentLoopStepIndex,
               step,
               tempOutput,
               {
@@ -353,55 +355,22 @@ class Orchestrator {
            } else if (Array.isArray(loopStep.inputs.binding)) {
              item = loopStep.inputs.binding
            }
-            this._context.steps[loopStepNumber] = {
-              currentItem: item[index],
+            this._context.steps[currentLoopStepIndex] = {
+              currentItem: item[stepIndex],
            }

-            // The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
-            // Pretty hacky because we need to account for the row object
-            for (let [key, value] of Object.entries(originalStepInput)) {
-              if (typeof value === "object") {
-                for (let [innerKey, innerValue] of Object.entries(
-                  originalStepInput[key]
-                )) {
-                  if (typeof innerValue === "string") {
-                    originalStepInput[key][innerKey] =
-                      automationUtils.substituteLoopStep(
-                        innerValue,
-                        `steps.${loopStepNumber}`
-                      )
-                  } else if (typeof value === "object") {
-                    for (let [innerObject, innerValue] of Object.entries(
-                      originalStepInput[key][innerKey]
-                    )) {
-                      if (typeof innerValue === "string") {
-                        originalStepInput[key][innerKey][innerObject] =
-                          automationUtils.substituteLoopStep(
-                            innerValue,
-                            `steps.${loopStepNumber}`
-                          )
-                      }
-                    }
-                  }
-                }
-              } else {
-                if (typeof value === "string") {
-                  originalStepInput[key] =
-                    automationUtils.substituteLoopStep(
-                      value,
-                      `steps.${loopStepNumber}`
-                    )
-                }
-              }
-            }
+            originalStepInput = replaceFakeBindings(
+              originalStepInput,
+              currentLoopStepIndex
+            )

            if (
-              index === env.AUTOMATION_MAX_ITERATIONS ||
+              stepIndex === env.AUTOMATION_MAX_ITERATIONS ||
              (loopStep.inputs.iterations &&
-                index === parseInt(loopStep.inputs.iterations))
+                stepIndex === parseInt(loopStep.inputs.iterations))
            ) {
              this.updateContextAndOutput(
-                loopStepNumber,
+                currentLoopStepIndex,
                step,
                tempOutput,
                {
@@ -416,7 +385,7 @@ class Orchestrator {

          let isFailure = false
          const currentItem =
-            this._context.steps[loopStepNumber]?.currentItem
+            this._context.steps[currentLoopStepIndex]?.currentItem
          if (currentItem && typeof currentItem === "object") {
            isFailure = Object.keys(currentItem).some(value => {
              return currentItem[value] === loopStep?.inputs.failure
@@ -428,7 +397,7 @@ class Orchestrator {

          if (isFailure) {
            this.updateContextAndOutput(
-              loopStepNumber,
+              currentLoopStepIndex,
              step,
              tempOutput,
              {
@@ -453,7 +422,6 @@ class Orchestrator {
            continue
          }

-          // If it's a loop step, we need to manually add the bindings to the context
          let stepFn = await this.getStepFunctionality(step.stepId)
          let inputs = await processObject(originalStepInput, this._context)
          inputs = automationUtils.cleanInputValues(
@@ -502,9 +470,9 @@ class Orchestrator {

          if (loopStep) {
            iterationCount++
-            if (index === iterations - 1) {
+            if (stepIndex === iterations - 1) {
              loopStep = undefined
-              this._context.steps.splice(loopStepNumber, 1)
+              this._context.steps.splice(currentLoopStepIndex, 1)
              break
            }
          }
@@ -515,7 +483,7 @@ class Orchestrator {

      if (loopStep && iterations === 0) {
        loopStep = undefined
-        this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+        this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
          id: step.id,
          stepId: step.stepId,
          outputs: {
@@ -525,14 +493,14 @@ class Orchestrator {
          inputs: {},
        })

-        this._context.steps.splice(loopStepNumber, 1)
+        this._context.steps.splice(currentLoopStepIndex, 1)
        iterations = 1
      }

      // Delete the step after the loop step as it's irrelevant, since information is included
      // in the loop step
      if (wasLoopStep && !loopStep) {
-        this._context.steps.splice(loopStepNumber + 1, 1)
+        this._context.steps.splice(currentLoopStepIndex + 1, 1)
        wasLoopStep = false
      }
      if (loopSteps && loopSteps.length) {
@@ -541,13 +509,13 @@ class Orchestrator {
          items: loopSteps,
          iterations: iterationCount,
        }
-        this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+        this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
          id: step.id,
          stepId: step.stepId,
          outputs: tempOutput,
          inputs: step.inputs,
        })
-        this._context.steps[loopStepNumber] = tempOutput
+        this._context.steps[currentLoopStepIndex] = tempOutput

        wasLoopStep = true
        loopSteps = []
@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }

-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}

     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
+
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
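The relaxed check keeps user-supplied values out of autocolumns except when the column is part of the primary key, which is exactly what a bulkImport upsert needs in order to address existing rows. A standalone sketch of the decision (types are simplified, not the real Budibase ones):

    type Field = { autocolumn?: boolean }
    type TableLike = { schema: Record<string, Field>; primary?: string[] }

    // Keep a column's imported value unless it's a non-primary autocolumn.
    function shouldKeepColumn(table: TableLike, columnName: string): boolean {
      const field = table.schema[columnName]
      if (!field) {
        return false // not in the schema at all
      }
      return !field.autocolumn || !!table.primary?.includes(columnName)
    }

    const table: TableLike = {
      schema: { userId: { autocolumn: true }, name: {} },
      primary: ["userId"],
    }
    console.log(shouldKeepColumn(table, "userId")) // true — primary-key autocolumn kept
    console.log(shouldKeepColumn(table, "name"))   // true — ordinary column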
@@ -1,6 +1,5 @@
 export * from "./api"
 export * from "./fields"
-export * from "./rows"

 export const OperatorOptions = {
   Equals: {
@@ -1,14 +0,0 @@
-export const CONSTANT_INTERNAL_ROW_COLS = [
-  "_id",
-  "_rev",
-  "type",
-  "createdAt",
-  "updatedAt",
-  "tableId",
-] as const
-
-export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
-
-export function isInternalColumnName(name: string): boolean {
-  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
-}
@@ -1,5 +1,4 @@
-import { FieldType, Table } from "@budibase/types"
-import { CONSTANT_INTERNAL_ROW_COLS } from "./constants"
+import { FieldType } from "@budibase/types"

 const allowDisplayColumnByType: Record<FieldType, boolean> = {
   [FieldType.STRING]: true,
@@ -52,22 +51,3 @@ export function canBeDisplayColumn(type: FieldType): boolean {
 export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }
-
-export function findDuplicateInternalColumns(table: Table): string[] {
-  // get the column names
-  const columnNames = Object.keys(table.schema)
-    .concat(CONSTANT_INTERNAL_ROW_COLS)
-    .map(colName => colName.toLowerCase())
-  // there are duplicates
-  const set = new Set(columnNames)
-  let duplicates: string[] = []
-  if (set.size !== columnNames.length) {
-    for (let key of set.keys()) {
-      const count = columnNames.filter(name => name === key).length
-      if (count > 1) {
-        duplicates.push(key)
-      }
-    }
-  }
-  return duplicates
-}
@@ -42,10 +42,7 @@ export interface Account extends CreateAccount {
   verified: boolean
   verificationSent: boolean
   // licensing
-  tier: string // deprecated
   planType?: PlanType
-  /** @deprecated */
-  planTier?: number
   license?: License
   installId?: string
   installTenantId?: string
@@ -144,7 +144,7 @@ interface BaseIOStructure {
   required?: string[]
 }

-interface InputOutputBlock {
+export interface InputOutputBlock {
   properties: {
     [key: string]: BaseIOStructure
   }