Merge branch 'develop' into chore/npmless-builds
commit 1c0da8ff29
@@ -1,5 +1,5 @@
 {
-  "version": "2.5.10-alpha.0",
+  "version": "2.5.10-alpha.3",
   "npmClient": "yarn",
   "packages": [
     "packages/backend-core",
@@ -147,6 +147,9 @@ const automationActions = store => ({
       testData,
     })
     if (!result?.trigger && !result?.steps?.length) {
+      if (result?.err?.code === "usage_limit_exceeded") {
+        throw "You have exceeded your automation quota"
+      }
       throw "Something went wrong testing your automation"
     }
     store.update(state => {
@@ -52,7 +52,7 @@
       await automationStore.actions.test($selectedAutomation, testData)
       $automationStore.showTestPanel = true
     } catch (error) {
-      notifications.error("Error testing automation")
+      notifications.error(error)
     }
   }
 </script>
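Note: the two hunks above are linked. The store action now throws a specific string for the quota case ahead of the generic fallback, and the builder component passes the caught value straight to the toast instead of a hardcoded message. A minimal sketch of the flow, assuming only what the diff shows (the wrapper name runTest is hypothetical):

async function runTest(automation: any, testData: any) {
  try {
    await automationStore.actions.test(automation, testData)
    // success: the component above then opens the test panel
  } catch (error) {
    // `error` is the plain string thrown by the store action, e.g.
    // "You have exceeded your automation quota"
    notifications.error(error)
  }
}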
@@ -5,6 +5,8 @@
   const { styleable, builderStore } = getContext("sdk")
   const component = getContext("component")
 
+  let handlingOnClick = false
+
   export let disabled = false
   export let text = ""
   export let onClick
@@ -16,6 +18,16 @@
   export let icon = null
   export let active = false
 
+  const handleOnClick = async () => {
+    handlingOnClick = true
+
+    if (onClick) {
+      await onClick()
+    }
+
+    handlingOnClick = false
+  }
+
   let node
 
   $: $component.editing && node?.focus()
@@ -37,9 +49,9 @@
 <button
   class={`spectrum-Button spectrum-Button--size${size} spectrum-Button--${type}`}
   class:spectrum-Button--quiet={quiet}
-  {disabled}
+  disabled={disabled || handlingOnClick}
   use:styleable={$component.styles}
-  on:click={onClick}
+  on:click={handleOnClick}
   contenteditable={$component.editing && !icon}
   on:blur={$component.editing ? updateText : null}
   bind:this={node}
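Note: handlingOnClick is a re-entrancy guard. While an async onClick is in flight, disabled={disabled || handlingOnClick} keeps the button inert, so a double click cannot fire the handler twice. The same pattern as a standalone helper (a sketch only; the name guardAsyncClick is hypothetical, and the try/finally is added here so a throwing handler still releases the guard):

function guardAsyncClick(handler: () => Promise<void>): () => Promise<void> {
  let inFlight = false
  return async () => {
    if (inFlight) return // ignore clicks while a handler is already running
    inFlight = true
    try {
      await handler()
    } finally {
      inFlight = false
    }
  }
}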
@@ -37,7 +37,7 @@ import {
   Table,
 } from "@budibase/types"
 
-const { cleanExportRows } = require("./utils")
+import { cleanExportRows } from "./utils"
 
 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -391,6 +391,9 @@ export async function exportRows(ctx: UserCtx) {
   const table = await db.get(ctx.params.tableId)
   const rowIds = ctx.request.body.rows
   let format = ctx.query.format
+  if (typeof format !== "string") {
+    ctx.throw(400, "Format parameter is not valid")
+  }
   const { columns, query } = ctx.request.body
 
   let result
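Note: in Koa, ctx.query values are typed string | string[] | undefined (a repeated ?format= parameter arrives as an array), so the typeof check both rejects malformed requests with a 400 and narrows format to string for the code below it. The same narrowing as a standalone helper (a sketch; the helper name is hypothetical):

function requireStringParam(
  value: string | string[] | undefined,
  name: string
): string {
  if (typeof value !== "string") {
    // the handler above uses ctx.throw(400, ...) for this case
    throw new Error(`${name} parameter is not valid`)
  }
  return value
}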
@@ -137,8 +137,8 @@ export function cleanExportRows(
     delete schema[column]
   })
 
-  // Intended to avoid 'undefined' in export
   if (format === Format.CSV) {
+    // Intended to append empty values in export
     const schemaKeys = Object.keys(schema)
     for (let key of schemaKeys) {
       if (columns?.length && columns.indexOf(key) > 0) {
@@ -146,7 +146,7 @@ export function cleanExportRows(
       }
       for (let row of cleanRows) {
         if (row[key] == null) {
-          row[key] = ""
+          row[key] = undefined
         }
       }
     }
@@ -10,7 +10,7 @@ import { getDatasourceParams } from "../../../db/utils"
 import { context, events } from "@budibase/backend-core"
 import { Table, UserCtx } from "@budibase/types"
 import sdk from "../../../sdk"
-import csv from "csvtojson"
+import { jsonFromCsvString } from "../../../utilities/csv"
 
 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && !tableId) {
@@ -104,7 +104,7 @@ export async function bulkImport(ctx: UserCtx) {
 export async function csvToJson(ctx: UserCtx) {
   const { csvString } = ctx.request.body
 
-  const result = await csv().fromString(csvString)
+  const result = await jsonFromCsvString(csvString)
 
   ctx.status = 200
   ctx.body = result
@@ -10,7 +10,9 @@ export function csv(headers: string[], rows: Row[]) {
         val =
           typeof val === "object" && !(val instanceof Date)
             ? `"${JSON.stringify(val).replace(/"/g, "'")}"`
-            : `"${val}"`
+            : val !== undefined
+            ? `"${val}"`
+            : ""
         return val.trim()
       })
       .join(",")}`
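Note: this hunk pairs with the row[key] = undefined change above. cleanExportRows now marks absent CSV values as undefined instead of "", and the serializer turns undefined into a truly empty cell while every defined scalar, including an empty string, stays quoted. The cell rules as a standalone sketch (serializeCell is a hypothetical name; the explicit null check is added here only because cleanExportRows has already converted nulls to undefined upstream):

function serializeCell(val: unknown): string {
  if (val !== null && typeof val === "object" && !(val instanceof Date)) {
    // objects are JSON-stringified, with inner quotes swapped for apostrophes
    return `"${JSON.stringify(val).replace(/"/g, "'")}"`
  }
  if (val !== undefined) {
    return `"${val}"` // defined scalars, including "", stay quoted
  }
  return "" // missing values become an empty cell
}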
@@ -0,0 +1,22 @@
+import csv from "csvtojson"
+
+export async function jsonFromCsvString(csvString: string) {
+  const castedWithEmptyValues = await csv({ ignoreEmpty: true }).fromString(
+    csvString
+  )
+
+  // By default the csvtojson library casts empty values as empty strings. This is causing issues on conversion.
+  // ignoreEmpty will remove the key completely if empty, so parsing a second time without it ensures we return every key, with empty values
+  const result = await csv({ ignoreEmpty: false }).fromString(csvString)
+  result.forEach((r, i) => {
+    for (const [key] of Object.entries(r).filter(
+      ([key, value]) => value === ""
+    )) {
+      if (castedWithEmptyValues[i][key] === undefined) {
+        r[key] = null
+      }
+    }
+  })
+
+  return result
+}
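Note: the double parse is the crux of jsonFromCsvString. The ignoreEmpty: true pass drops keys whose cells were genuinely blank in the CSV, the ignoreEmpty: false pass keeps every key, and comparing the two converts only the genuinely blank cells to null. A usage sketch consistent with the tests added later in this commit:

async function demo() {
  const rows = await jsonFromCsvString('"id","optional"\n"1",\n"2","x"')
  // rows: [{ id: "1", optional: null }, { id: "2", optional: "x" }]
}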
@@ -4,6 +4,9 @@ interface SchemaColumn {
   readonly name: string
   readonly type: FieldTypes
   readonly autocolumn?: boolean
+  readonly constraints?: {
+    presence: boolean
+  }
 }
 
 interface Schema {
@@ -76,6 +79,11 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
     // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
     if (typeof columnType !== "string") {
       results.invalidColumns.push(columnName)
+    } else if (
+      columnData == null &&
+      !schema[columnName].constraints?.presence
+    ) {
+      results.schemaValidation[columnName] = true
     } else if (
       // If there's no data for this field don't bother with further checks
       // If the field is already marked as invalid there's no need for further checks
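Note: the new branch makes empty cells pass validation for columns that do not declare a presence constraint: when columnData is null or undefined and the column is optional, it is marked valid without running the type checks. The decision order condensed into one function (a sketch; everything outside the diff's if/else-if chain is hypothetical):

function cellIsValid(
  columnData: unknown,
  columnType: unknown,
  constraints?: { presence?: boolean }
): boolean {
  if (typeof columnType !== "string") return false // column not in the schema
  if (columnData == null && !constraints?.presence) return true // optional and empty
  // ...the remaining type-specific checks from validate() would run here
  return true
}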
@@ -0,0 +1,33 @@
+import { jsonFromCsvString } from "../csv"
+
+describe("csv", () => {
+  describe("jsonFromCsvString", () => {
+    test("multiple lines csv can be casted", async () => {
+      const csvString = '"id","title"\n"1","aaa"\n"2","bbb"'
+
+      const result = await jsonFromCsvString(csvString)
+
+      expect(result).toEqual([
+        { id: "1", title: "aaa" },
+        { id: "2", title: "bbb" },
+      ])
+      result.forEach(r => expect(Object.keys(r)).toEqual(["id", "title"]))
+    })
+
+    test("empty values are casted as undefined", async () => {
+      const csvString =
+        '"id","optional","title"\n1,,"aaa"\n2,"value","bbb"\n3,,"ccc"'
+
+      const result = await jsonFromCsvString(csvString)
+
+      expect(result).toEqual([
+        { id: "1", optional: null, title: "aaa" },
+        { id: "2", optional: "value", title: "bbb" },
+        { id: "3", optional: null, title: "ccc" },
+      ])
+      result.forEach(r =>
+        expect(Object.keys(r)).toEqual(["id", "optional", "title"])
+      )
+    })
+  })
+})
@@ -9950,9 +9950,9 @@ engine.io-parser@~5.0.3:
   integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==
 
 engine.io@~6.4.1:
-  version "6.4.1"
-  resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.1.tgz#8056b4526a88e779f9c280d820422d4e3eeaaae5"
-  integrity sha512-JFYQurD/nbsA5BSPmbaOSLa3tSVj8L6o4srSwXXY3NqE+gGUNmmPTbhn8tjzcCtSqhFgIeqef81ngny8JM25hw==
+  version "6.4.2"
+  resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
+  integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
 dependencies:
   "@types/cookie" "^0.4.1"
   "@types/cors" "^2.8.12"