Merge branch 'develop' of github.com:Budibase/budibase into cheeks-lab-day-portal-redesign

commit ad1109972a
@@ -1,5 +1,5 @@
 {
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",
@@ -23,7 +23,7 @@
   },
   "dependencies": {
     "@budibase/nano": "10.1.1",
-    "@budibase/types": "2.2.12-alpha.16",
+    "@budibase/types": "2.2.12-alpha.21",
     "@shopify/jest-koa-mocks": "5.0.1",
     "@techpass/passport-openidconnect": "0.3.2",
     "aws-cloudfront-sign": "2.2.0",

@@ -3,7 +3,6 @@ import {
   Event,
   RowsImportedEvent,
   RowsCreatedEvent,
-  RowImportFormat,
   Table,
 } from "@budibase/types"
 
@@ -16,14 +15,9 @@ const created = async (count: number, timestamp?: string | number) => {
   await publishEvent(Event.ROWS_CREATED, properties, timestamp)
 }
 
-const imported = async (
-  table: Table,
-  format: RowImportFormat,
-  count: number
-) => {
+const imported = async (table: Table, count: number) => {
   const properties: RowsImportedEvent = {
     tableId: table._id as string,
-    format,
     count,
   }
   await publishEvent(Event.ROWS_IMPORTED, properties)

@@ -2,7 +2,6 @@ import { publishEvent } from "../events"
 import {
   Event,
   TableExportFormat,
-  TableImportFormat,
   Table,
   TableCreatedEvent,
   TableUpdatedEvent,
@@ -40,10 +39,9 @@ async function exported(table: Table, format: TableExportFormat) {
   await publishEvent(Event.TABLE_EXPORTED, properties)
 }
 
-async function imported(table: Table, format: TableImportFormat) {
+async function imported(table: Table) {
   const properties: TableImportedEvent = {
     tableId: table._id as string,
-    format,
   }
   await publishEvent(Event.TABLE_IMPORTED, properties)
 }

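Note: both publishers above drop their import-format parameter, since imports are no longer CSV-specific. A minimal sketch of the new call sites (the caller and its arguments are illustrative):

    import { events } from "@budibase/backend-core"
    import { Table } from "@budibase/types"

    // Hypothetical caller showing the reduced signatures
    async function recordImport(table: Table, rowCount: number) {
      await events.table.imported(table) // was imported(table, format)
      await events.rows.imported(table, rowCount) // was imported(table, format, count)
    }
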
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "1.2.1",
-    "@budibase/string-templates": "2.2.12-alpha.16",
+    "@budibase/string-templates": "2.2.12-alpha.21",
     "@spectrum-css/actionbutton": "1.0.1",
     "@spectrum-css/actiongroup": "1.0.1",
     "@spectrum-css/avatar": "3.0.2",

@@ -2,7 +2,7 @@ import filterTests from "../support/filterTests"
 const interact = require('../support/interact')
 
 filterTests(["smoke", "all"], () => {
-  context("Screen Tests", () => {
+  xcontext("Screen Tests", () => {
     before(() => {
      cy.login()
      cy.createTestApp()
@@ -25,7 +25,7 @@ filterTests(["smoke", "all"], () => {
 
    it.skip("should delete all screens then create first screen via button", () => {
      cy.deleteAllScreens()
 
      cy.contains("Create first screen").click()
      cy.get(interact.BODY, { timeout: 2000 }).should('contain', '/home')
    })
@@ -33,7 +33,7 @@ filterTests(["smoke", "all"], () => {
    it("Should create and filter screens by access level", () => {
      const accessLevels = ["Basic", "Admin", "Public", "Power"]
 
-      for (const access of accessLevels){
+      for (const access of accessLevels) {
        // Create screen with specified access level
        cy.createScreen(access, access)
        // Filter by access level and confirm screen visible
@@ -46,9 +46,9 @@ filterTests(["smoke", "all"], () => {
      // Filter by All screens - Confirm all screens visible
      cy.filterScreensAccessLevel("All screens")
      cy.get(interact.BODY).should('contain', accessLevels[0])
-      .and('contain', accessLevels[1])
-      .and('contain', accessLevels[2])
-      .and('contain', accessLevels[3])
+        .and('contain', accessLevels[1])
+        .and('contain', accessLevels[2])
+        .and('contain', accessLevels[3])
    })
  })
 })

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -71,10 +71,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "2.2.12-alpha.16",
-    "@budibase/client": "2.2.12-alpha.16",
-    "@budibase/frontend-core": "2.2.12-alpha.16",
-    "@budibase/string-templates": "2.2.12-alpha.16",
+    "@budibase/bbui": "2.2.12-alpha.21",
+    "@budibase/client": "2.2.12-alpha.21",
+    "@budibase/frontend-core": "2.2.12-alpha.21",
+    "@budibase/string-templates": "2.2.12-alpha.21",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

@@ -12,6 +12,10 @@
       name: "JSON",
       key: "json",
     },
+    {
+      name: "JSON with Schema",
+      key: "jsonWithSchema",
+    },
   ]
 
   export let view
@@ -24,7 +28,7 @@
         viewName: view,
         format: exportFormat,
       })
-      download(data, `export.${exportFormat}`)
+      download(data, `export.${exportFormat === "csv" ? "csv" : "json"}`)
     } catch (error) {
       notifications.error(`Unable to export ${exportFormat.toUpperCase()} data`)
     }

@@ -6,22 +6,22 @@
     Body,
     Layout,
   } from "@budibase/bbui"
-  import TableDataImport from "../../TableNavigator/TableDataImport.svelte"
+  import TableDataImport from "../../TableNavigator/ExistingTableDataImport.svelte"
   import { API } from "api"
   import { createEventDispatcher } from "svelte"
 
   const dispatch = createEventDispatcher()
 
   export let tableId
-  let dataImport
-
-  $: valid = dataImport?.csvString != null && dataImport?.valid
+  let rows = []
+  let allValid = false
+  let displayColumn = null
 
   async function importData() {
     try {
       await API.importTableData({
         tableId,
-        data: dataImport,
+        rows,
       })
       notifications.success("Rows successfully imported")
     } catch (error) {
@@ -37,14 +37,14 @@
   title="Import Data"
   confirmText="Import"
   onConfirm={importData}
-  disabled={!valid}
+  disabled={!allValid}
 >
   <Body size="S">
-    Import rows to an existing table from a CSV. Only columns from the CSV which
-    exist in the table will be imported.
+    Import rows to an existing table from a CSV or JSON file. Only columns from
+    the file which exist in the table will be imported.
   </Body>
   <Layout gap="XS" noPadding>
-    <Label grey extraSmall>CSV to import</Label>
-    <TableDataImport bind:dataImport bind:existingTableId={tableId} />
+    <Label grey extraSmall>CSV or JSON file to import</Label>
+    <TableDataImport {tableId} bind:rows bind:allValid bind:displayColumn />
   </Layout>
 </ModalContent>

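Note: the modal now submits parsed row objects instead of a CSV string plus schema. A sketch of the resulting request, assuming a hypothetical table ID:

    // Rows are parsed client-side (from CSV or JSON) before being sent
    await API.importTableData({
      tableId: "ta_abc123", // hypothetical ID
      rows: [{ name: "test-name", description: "test-desc" }],
    })
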
@@ -340,9 +340,7 @@
         {:else if isManyToOne && toTable}
           <Select
             label={`Foreign Key (${toTable?.name})`}
-            options={Object.keys(toTable?.schema).filter(
-              field => toTable?.primary.indexOf(field) === -1
-            )}
+            options={Object.keys(toTable?.schema)}
             on:change={() => ($touched.foreign = true)}
             bind:error={errors.foreign}
             bind:value={fromRelationship.fieldName}

@@ -0,0 +1,251 @@
+<script>
+  import { Select } from "@budibase/bbui"
+  import { FIELDS } from "constants/backend"
+  import { API } from "api"
+  import { parseFile } from "./utils"
+
+  let error = null
+  let fileName = null
+  let fileType = null
+
+  let loading = false
+  let validation = {}
+  let validateHash = ""
+  let schema = null
+  let invalidColumns = []
+
+  export let tableId = null
+  export let rows = []
+  export let allValid = false
+
+  const typeOptions = [
+    {
+      label: "Text",
+      value: FIELDS.STRING.type,
+    },
+    {
+      label: "Number",
+      value: FIELDS.NUMBER.type,
+    },
+    {
+      label: "Date",
+      value: FIELDS.DATETIME.type,
+    },
+    {
+      label: "Options",
+      value: FIELDS.OPTIONS.type,
+    },
+    {
+      label: "Multi-select",
+      value: FIELDS.ARRAY.type,
+    },
+    {
+      label: "Barcode/QR",
+      value: FIELDS.BARCODEQR.type,
+    },
+    {
+      label: "Long Form Text",
+      value: FIELDS.LONGFORM.type,
+    },
+  ]
+
+  $: {
+    schema = fetchSchema(tableId)
+  }
+
+  async function fetchSchema(tableId) {
+    try {
+      const definition = await API.fetchTableDefinition(tableId)
+      schema = definition.schema
+    } catch (e) {
+      error = e
+    }
+  }
+
+  async function handleFile(e) {
+    loading = true
+    error = null
+    validation = {}
+
+    try {
+      const response = await parseFile(e)
+      rows = response.rows
+      fileName = response.fileName
+      fileType = response.fileType
+    } catch (e) {
+      loading = false
+      error = e
+    }
+  }
+
+  async function validate(rows) {
+    loading = true
+    error = null
+    validation = {}
+    allValid = false
+
+    try {
+      if (rows.length > 0) {
+        const response = await API.validateExistingTableImport({
+          rows,
+          tableId,
+        })
+
+        validation = response.schemaValidation
+        invalidColumns = response.invalidColumns
+        allValid = response.allValid
+      }
+    } catch (e) {
+      error = e.message
+    }
+
+    loading = false
+  }
+
+  $: {
+    // binding in consumer is causing double renders here
+    const newValidateHash = JSON.stringify(rows)
+
+    if (newValidateHash !== validateHash) {
+      validate(rows)
+    }
+
+    validateHash = newValidateHash
+  }
+</script>
+
+<div class="dropzone">
+  <input
+    disabled={!schema || loading}
+    id="file-upload"
+    accept="text/csv,application/json"
+    type="file"
+    on:change={handleFile}
+  />
+  <label for="file-upload" class:uploaded={rows.length > 0}>
+    {#if loading}
+      loading...
+    {:else if error}
+      error: {error}
+    {:else if fileName}
+      {fileName}
+    {:else}
+      Upload
+    {/if}
+  </label>
+</div>
+{#if fileName && Object.keys(validation).length === 0}
+  <p>No valid fields, try another file</p>
+{:else if rows.length > 0 && !error}
+  <div class="schema-fields">
+    {#each Object.keys(validation) as name}
+      <div class="field">
+        <span>{name}</span>
+        <Select
+          value={schema[name]?.type}
+          options={typeOptions}
+          placeholder={null}
+          getOptionLabel={option => option.label}
+          getOptionValue={option => option.value}
+          disabled
+        />
+        <span
+          class={loading || validation[name]
+            ? "fieldStatusSuccess"
+            : "fieldStatusFailure"}
+        >
+          {validation[name] ? "Success" : "Failure"}
+        </span>
+      </div>
+    {/each}
+  </div>
+  {#if invalidColumns.length > 0}
+    <p class="spectrum-FieldLabel spectrum-FieldLabel--sizeM">
+      The following columns are present in the data you wish to import, but do
+      not match the schema of this table and will be ignored.
+    </p>
+    <ul class="ignoredList">
+      {#each invalidColumns as column}
+        <li>{column}</li>
+      {/each}
+    </ul>
+  {/if}
+{/if}
+
+<style>
+  .dropzone {
+    text-align: center;
+    display: flex;
+    align-items: center;
+    flex-direction: column;
+    border-radius: 10px;
+    transition: all 0.3s;
+  }
+
+  input {
+    display: none;
+  }
+
+  label {
+    font-family: var(--font-sans);
+    cursor: pointer;
+    font-weight: 600;
+    box-sizing: border-box;
+    overflow: hidden;
+    border-radius: var(--border-radius-s);
+    color: var(--ink);
+    padding: var(--spacing-m) var(--spacing-l);
+    transition: all 0.2s ease 0s;
+    display: inline-flex;
+    text-rendering: optimizeLegibility;
+    min-width: auto;
+    outline: none;
+    font-feature-settings: "case" 1, "rlig" 1, "calt" 0;
+    -webkit-box-align: center;
+    user-select: none;
+    flex-shrink: 0;
+    align-items: center;
+    justify-content: center;
+    width: 100%;
+    background-color: var(--grey-2);
+    font-size: var(--font-size-xs);
+    line-height: normal;
+    border: var(--border-transparent);
+  }
+
+  .uploaded {
+    color: var(--blue);
+  }
+
+  .schema-fields {
+    margin-top: var(--spacing-xl);
+  }
+
+  .field {
+    display: grid;
+    grid-template-columns: 2fr 2fr 1fr auto;
+    margin-top: var(--spacing-m);
+    align-items: center;
+    grid-gap: var(--spacing-m);
+    font-size: var(--spectrum-global-dimension-font-size-75);
+  }
+
+  .fieldStatusSuccess {
+    color: var(--green);
+    justify-self: center;
+    font-weight: 600;
+  }
+
+  .fieldStatusFailure {
+    color: var(--red);
+    justify-self: center;
+    font-weight: 600;
+  }
+
+  .ignoredList {
+    margin: 0;
+    padding: 0;
+    list-style: none;
+    font-size: var(--spectrum-global-dimension-font-size-75);
+  }
+</style>

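Note: the new component above validates the chosen file against the existing table's schema rather than inferring one. A sketch of the round trip it performs (field names taken from the component code above; exact response typing assumed):

    // What the component sends and what it reads back
    const response = await API.validateExistingTableImport({ rows, tableId })
    // response.schemaValidation -> { [columnName]: boolean } per matched column
    // response.invalidColumns   -> file columns absent from the table, ignored on import
    // response.allValid         -> true when every matched column validates
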
@@ -1,107 +1,21 @@
 <script>
-  import { Select, InlineAlert, notifications } from "@budibase/bbui"
+  import { Select } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
   import { API } from "api"
+  import { parseFile } from "./utils"
 
-  const BYTES_IN_MB = 1000000
-  const FILE_SIZE_LIMIT = BYTES_IN_MB * 5
+  let error = null
+  let fileName = null
+  let fileType = null
 
-  export let files = []
-  export let dataImport = {
-    valid: true,
-    schema: {},
-  }
-  export let existingTableId
+  let loading = false
+  let validation = {}
+  let validateHash = ""
 
-  let csvString = undefined
-  let primaryDisplay = undefined
-  let schema = {}
-  let fields = []
-  let hasValidated = false
-
-  $: valid =
-    !schema ||
-    (fields.every(column => schema[column].success) &&
-      (!hasValidated || Object.keys(schema).length > 0))
-  $: dataImport = {
-    valid,
-    schema: buildTableSchema(schema),
-    csvString,
-    primaryDisplay,
-  }
-  $: noFieldsError = existingTableId
-    ? "No columns in CSV match existing table schema"
-    : "Could not find any columns to import"
-
-  function buildTableSchema(schema) {
-    const tableSchema = {}
-    for (let key in schema) {
-      const type = schema[key].type
-
-      if (type === "omit") continue
-
-      tableSchema[key] = {
-        name: key,
-        type,
-        constraints: FIELDS[type.toUpperCase()].constraints,
-      }
-    }
-    return tableSchema
-  }
-
-  async function validateCSV() {
-    try {
-      const parseResult = await API.validateTableCSV({
-        csvString,
-        schema: schema || {},
-        tableId: existingTableId,
-      })
-      schema = parseResult?.schema
-      fields = Object.keys(schema || {}).filter(
-        key => schema[key].type !== "omit"
-      )
-
-      // Check primary display is valid
-      if (!primaryDisplay || fields.indexOf(primaryDisplay) === -1) {
-        primaryDisplay = fields[0]
-      }
-
-      hasValidated = true
-    } catch (error) {
-      notifications.error("CSV Invalid, please try another CSV file")
-    }
-  }
-
-  async function handleFile(evt) {
-    const fileArray = Array.from(evt.target.files)
-    if (fileArray.some(file => file.size >= FILE_SIZE_LIMIT)) {
-      notifications.error(
-        `Files cannot exceed ${
-          FILE_SIZE_LIMIT / BYTES_IN_MB
-        }MB. Please try again with smaller files.`
-      )
-      return
-    }
-
-    // Read CSV as plain text to upload alongside schema
-    let reader = new FileReader()
-    reader.addEventListener("load", function (e) {
-      csvString = e.target.result
-      files = fileArray
-      validateCSV()
-    })
-    reader.readAsText(fileArray[0])
-  }
-
-  async function omitColumn(columnName) {
-    schema[columnName].type = "omit"
-    await validateCSV()
-  }
-
-  const handleTypeChange = column => evt => {
-    schema[column].type = evt.detail
-    validateCSV()
-  }
+  export let rows = []
+  export let schema = {}
+  export let allValid = true
+  export let displayColumn = null
 
   const typeOptions = [
     {
@@ -133,54 +47,114 @@
       value: FIELDS.LONGFORM.type,
     },
   ]
 
+  async function handleFile(e) {
+    loading = true
+    error = null
+    validation = {}
+
+    try {
+      const response = await parseFile(e)
+      rows = response.rows
+      schema = response.schema
+      fileName = response.fileName
+      fileType = response.fileType
+    } catch (e) {
+      loading = false
+      error = e
+    }
+  }
+
+  async function validate(rows, schema) {
+    loading = true
+    error = null
+    validation = {}
+    allValid = false
+
+    try {
+      if (rows.length > 0) {
+        const response = await API.validateNewTableImport({ rows, schema })
+        validation = response.schemaValidation
+        allValid = response.allValid
+      }
+    } catch (e) {
+      error = e.message
+    }
+
+    loading = false
+  }
+
+  $: {
+    // binding in consumer is causing double renders here
+    const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
+
+    if (newValidateHash !== validateHash) {
+      validate(rows, schema)
+    }
+
+    validateHash = newValidateHash
+  }
 </script>
 
 <div class="dropzone">
-  <input id="file-upload" accept=".csv" type="file" on:change={handleFile} />
-  <label for="file-upload" class:uploaded={files[0]}>
-    {#if files[0]}{files[0].name}{:else}Upload{/if}
+  <input
+    disabled={loading}
+    id="file-upload"
+    accept="text/csv,application/json"
+    type="file"
+    on:change={handleFile}
+  />
+  <label for="file-upload" class:uploaded={rows.length > 0}>
+    {#if loading}
+      loading...
+    {:else if error}
+      error: {error}
+    {:else if fileName}
+      {fileName}
+    {:else}
+      Upload
+    {/if}
   </label>
 </div>
-{#if fields.length}
+{#if rows.length > 0 && !error}
   <div class="schema-fields">
-    {#each fields as columnName}
+    {#each Object.values(schema) as column}
       <div class="field">
-        <span>{columnName}</span>
+        <span>{column.name}</span>
         <Select
-          bind:value={schema[columnName].type}
-          on:change={handleTypeChange(columnName)}
+          bind:value={column.type}
+          on:change={e => (column.type = e.detail)}
          options={typeOptions}
          placeholder={null}
          getOptionLabel={option => option.label}
          getOptionValue={option => option.value}
-          disabled={!!existingTableId}
+          disabled={loading}
        />
-        <span class="field-status" class:error={!schema[columnName].success}>
-          {schema[columnName].success ? "Success" : "Failure"}
+        <span
+          class={loading || validation[column.name]
+            ? "fieldStatusSuccess"
+            : "fieldStatusFailure"}
+        >
+          {validation[column.name] ? "Success" : "Failure"}
        </span>
        <i
-          class="omit-button ri-close-circle-fill"
-          on:click={() => omitColumn(columnName)}
+          class={`omit-button ri-close-circle-fill ${
+            loading ? "omit-button-disabled" : ""
+          }`}
+          on:click={() => {
+            delete schema[column.name]
+            schema = schema
+          }}
        />
      </div>
    {/each}
  </div>
-  {#if !existingTableId}
-    <div class="display-column">
-      <Select
-        label="Display Column"
-        bind:value={primaryDisplay}
-        options={fields}
-        sort
-      />
-    </div>
-  {/if}
-{:else if hasValidated}
-  <div>
-    <InlineAlert
-      header="Invalid CSV"
-      bind:message={noFieldsError}
-      type="error"
+  <div class="display-column">
+    <Select
+      label="Display Column"
+      bind:value={displayColumn}
+      options={Object.keys(schema)}
+      sort
+    />
+  </div>
 {/if}
@@ -195,28 +169,10 @@
     transition: all 0.3s;
   }
 
-  .field-status {
-    color: var(--green);
-    justify-self: center;
-    font-weight: 600;
-  }
-
-  .error {
-    color: var(--red);
-  }
-
-  .uploaded {
-    color: var(--blue);
-  }
-
-  input[type="file"] {
+  input {
     display: none;
   }
 
-  .schema-fields {
-    margin-top: var(--spacing-xl);
-  }
-
   label {
     cursor: pointer;
     font-weight: 600;
@@ -243,11 +199,12 @@
     border: var(--border-transparent);
   }
 
-  .omit-button {
-    font-size: 1.2em;
-    color: var(--grey-7);
-    cursor: pointer;
-    justify-self: flex-end;
+  .uploaded {
+    color: var(--blue);
   }
 
+  .schema-fields {
+    margin-top: var(--spacing-xl);
+  }
+
   .field {
@@ -259,6 +216,30 @@
     font-size: var(--spectrum-global-dimension-font-size-75);
   }
 
+  .fieldStatusSuccess {
+    color: var(--green);
+    justify-self: center;
+    font-weight: 600;
+  }
+
+  .fieldStatusFailure {
+    color: var(--red);
+    justify-self: center;
+    font-weight: 600;
+  }
+
+  .omit-button {
+    font-size: 1.2em;
+    color: var(--grey-7);
+    cursor: pointer;
+    justify-self: flex-end;
+  }
+
+  .omit-button-disabled {
+    pointer-events: none;
+    opacity: 70%;
+  }
+
+  .display-column {
+    margin-top: var(--spacing-xl);
+  }

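Note: both importer components guard their reactive validation behind a serialised hash, because two-way bindings in the consumer trigger double renders. The pattern, distilled from the code above:

    // Re-validate only when the inputs actually change, not on every render
    let validateHash = ""

    $: {
      const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
      if (newValidateHash !== validateHash) {
        validate(rows, schema)
      }
      validateHash = newValidateHash
    }
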
@@ -29,18 +29,27 @@
     : BUDIBASE_INTERNAL_DB_ID
 
   export let name
-  let dataImport
   let error = ""
   let autoColumns = getAutoColumnInformation()
+  let schema = {}
+  let rows = []
+  let allValid = true
+  let displayColumn = null
 
-  function addAutoColumns(tableName, schema) {
-    for (let [subtype, col] of Object.entries(autoColumns)) {
-      if (!col.enabled) {
-        continue
+  function getAutoColumns() {
+    const selectedAutoColumns = {}
+
+    Object.entries(autoColumns).forEach(([subtype, column]) => {
+      if (column.enabled) {
+        selectedAutoColumns[column.name] = buildAutoColumn(
+          name,
+          column.name,
+          subtype
+        )
       }
-      schema[col.name] = buildAutoColumn(tableName, col.name, subtype)
-    }
-    return schema
+    })
+
+    return selectedAutoColumns
   }
 
   function checkValid(evt) {
@@ -55,15 +64,15 @@
   async function saveTable() {
     let newTable = {
       name,
-      schema: addAutoColumns(name, dataImport.schema || {}),
-      dataImport,
+      schema: { ...schema, ...getAutoColumns() },
+      rows,
       type: "internal",
       sourceId: targetDatasourceId,
     }
 
     // Only set primary display if defined
-    if (dataImport.primaryDisplay && dataImport.primaryDisplay.length) {
-      newTable.primaryDisplay = dataImport.primaryDisplay
+    if (displayColumn && displayColumn.length) {
+      newTable.primaryDisplay = displayColumn
     }
 
     // Create table
@@ -90,7 +99,7 @@
   title="Create Table"
   confirmText="Create"
   onConfirm={saveTable}
-  disabled={error || !name || (dataImport && !dataImport.valid)}
+  disabled={error || !name || (rows.length && !allValid)}
 >
   <Input
     data-cy="table-name-input"
@@ -117,8 +126,10 @@
     </div>
     <div>
       <Layout gap="XS" noPadding>
-        <Label grey extraSmall>Create Table from CSV (Optional)</Label>
-        <TableDataImport bind:dataImport />
+        <Label grey extraSmall
+          >Create a Table from a CSV or JSON file (Optional)</Label
+        >
+        <TableDataImport bind:rows bind:schema bind:allValid bind:displayColumn />
      </Layout>
    </div>
 </ModalContent>

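Note: with dataImport gone, the table body assembled by saveTable() above carries the file-derived schema, the enabled auto columns and the raw rows side by side. Recapped in one place (values illustrative):

    const newTable = {
      name,                                       // from the name input
      schema: { ...schema, ...getAutoColumns() }, // file schema plus enabled auto columns
      rows,                                       // parsed rows, if a file was chosen
      type: "internal",
      sourceId: targetDatasourceId,
    }
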
@@ -0,0 +1,71 @@
+import { API } from "api"
+import { FIELDS } from "constants/backend"
+
+const BYTES_IN_MB = 1000000
+const FILE_SIZE_LIMIT = BYTES_IN_MB * 5
+
+const getDefaultSchema = rows => {
+  const newSchema = {}
+
+  rows.forEach(row => {
+    Object.keys(row).forEach(column => {
+      newSchema[column] = {
+        name: column,
+        type: "string",
+        constraints: FIELDS["STRING"].constraints,
+      }
+    })
+  })
+
+  return newSchema
+}
+
+export const parseFile = e => {
+  return new Promise((resolve, reject) => {
+    const file = Array.from(e.target.files)[0]
+
+    if (file.size >= FILE_SIZE_LIMIT) {
+      reject("file too large")
+      return
+    }
+
+    let reader = new FileReader()
+
+    const resolveRows = (rows, schema = null) => {
+      resolve({
+        rows,
+        schema: schema ?? getDefaultSchema(rows),
+        fileName: file.name,
+        fileType: file.type,
+      })
+    }
+
+    reader.addEventListener("load", function (e) {
+      const fileData = e.target.result
+
+      if (file.type === "text/csv") {
+        API.csvToJson(fileData)
+          .then(rows => {
+            resolveRows(rows)
+          })
+          .catch(() => {
+            reject("can't convert csv to json")
+          })
+      } else if (file.type === "application/json") {
+        const parsedFileData = JSON.parse(fileData)
+
+        if (Array.isArray(parsedFileData)) {
+          resolveRows(parsedFileData)
+        } else if (typeof parsedFileData === "object") {
+          resolveRows(parsedFileData.rows, parsedFileData.schema)
+        } else {
+          reject("invalid json format")
+        }
+      } else {
+        reject("invalid file type")
+      }
+    })
+
+    reader.readAsText(file)
+  })
+}

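Note: parseFile() accepts two JSON payload shapes in addition to CSV, per the branches above. Illustrative examples of each:

    // 1. A bare array of row objects; a default all-string schema is generated
    const bareRows = [{ name: "test-name", description: "test-desc" }]

    // 2. An object carrying rows and an explicit schema, matching the
    //    "JSON with Schema" export format introduced in this commit
    const withSchema = {
      schema: { name: { name: "name", type: "string" } },
      rows: bareRows,
    }
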
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {
@@ -26,9 +26,9 @@
     "outputPath": "build"
   },
   "dependencies": {
-    "@budibase/backend-core": "2.2.12-alpha.16",
-    "@budibase/string-templates": "2.2.12-alpha.16",
-    "@budibase/types": "2.2.12-alpha.16",
+    "@budibase/backend-core": "2.2.12-alpha.21",
+    "@budibase/string-templates": "2.2.12-alpha.21",
+    "@budibase/types": "2.2.12-alpha.21",
     "axios": "0.21.2",
     "chalk": "4.1.0",
     "cli-progress": "3.11.2",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "2.2.12-alpha.16",
-    "@budibase/frontend-core": "2.2.12-alpha.16",
-    "@budibase/string-templates": "2.2.12-alpha.16",
+    "@budibase/bbui": "2.2.12-alpha.21",
+    "@budibase/frontend-core": "2.2.12-alpha.21",
+    "@budibase/string-templates": "2.2.12-alpha.21",
     "@spectrum-css/button": "^3.0.3",
     "@spectrum-css/card": "^3.0.3",
     "@spectrum-css/divider": "^1.0.3",

@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "2.2.12-alpha.16",
+    "@budibase/bbui": "2.2.12-alpha.21",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }

@@ -64,32 +64,22 @@ export const buildTableEndpoints = API => ({
    * @param tableId the table ID to import to
    * @param data the data import object
    */
-  importTableData: async ({ tableId, data }) => {
+  importTableData: async ({ tableId, rows }) => {
     return await API.post({
       url: `/api/tables/${tableId}/import`,
       body: {
-        dataImport: data,
+        rows,
       },
     })
   },
 
-  /**
-   * Validates a candidate CSV to be imported for a certain table.
-   * @param tableId the table ID to import to
-   * @param csvString the CSV contents as a string
-   * @param schema the proposed schema
-   */
-  validateTableCSV: async ({ tableId, csvString, schema }) => {
+  csvToJson: async csvString => {
     return await API.post({
-      url: "/api/tables/csv/validate",
+      url: "/api/convert/csvToJson",
       body: {
         csvString,
-        schema,
-        tableId,
       },
     })
   },
 
   /**
    * Gets a list o tables.
    */
@@ -120,4 +110,22 @@ export const buildTableEndpoints = API => ({
       url: `/api/tables/${tableId}/${tableRev}`,
     })
   },
+  validateNewTableImport: async ({ rows, schema }) => {
+    return await API.post({
+      url: "/api/tables/validateNewTableImport",
+      body: {
+        rows,
+        schema,
+      },
+    })
+  },
+  validateExistingTableImport: async ({ rows, tableId }) => {
+    return await API.post({
+      url: "/api/tables/validateExistingTableImport",
+      body: {
+        rows,
+        tableId,
+      },
+    })
+  },
 })

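Note: a sketch of the two new validation helpers in use (payloads are illustrative; the table ID is hypothetical):

    const newTableCheck = await API.validateNewTableImport({
      rows: [{ a: "1" }],
      schema: { a: { name: "a", type: "string" } },
    })

    const existingTableCheck = await API.validateExistingTableImport({
      rows: [{ a: "1" }],
      tableId: "ta_abc123",
    })
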
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/sdk",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase Public API SDK",
   "author": "Budibase",
   "license": "MPL-2.0",

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {
@@ -43,11 +43,11 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "10.0.3",
-    "@budibase/backend-core": "2.2.12-alpha.16",
-    "@budibase/client": "2.2.12-alpha.16",
-    "@budibase/pro": "2.2.12-alpha.16",
-    "@budibase/string-templates": "2.2.12-alpha.16",
-    "@budibase/types": "2.2.12-alpha.16",
+    "@budibase/backend-core": "2.2.12-alpha.21",
+    "@budibase/client": "2.2.12-alpha.21",
+    "@budibase/pro": "2.2.12-alpha.21",
+    "@budibase/string-templates": "2.2.12-alpha.21",
+    "@budibase/types": "2.2.12-alpha.21",
     "@bull-board/api": "3.7.0",
     "@bull-board/koa": "3.9.4",
     "@elastic/elasticsearch": "7.10.0",

@@ -27,7 +27,7 @@ import {
 import { cloneDeep } from "lodash/fp"
 import { context, db as dbCore } from "@budibase/backend-core"
 import { finaliseRow, updateRelatedFormula } from "./staticFormula"
-import * as exporters from "../view/exporters"
+import { csv, json, jsonWithSchema, Format, isFormat } from "../view/exporters"
 import { apiFileReturn } from "../../../utilities/fileSystem"
 import {
   Ctx,
@@ -412,14 +412,15 @@ export async function exportRows(ctx: Ctx) {
     rows = result
   }
 
-  let headers = Object.keys(rows[0])
-  // @ts-ignore
-  const exporter = exporters[format]
-  const filename = `export.${format}`
-
-  // send down the file
-  ctx.attachment(filename)
-  return apiFileReturn(exporter(headers, rows))
+  if (format === Format.CSV) {
+    ctx.attachment("export.csv")
+    return apiFileReturn(csv(Object.keys(rows[0]), rows))
+  } else if (format === Format.JSON) {
+    ctx.attachment("export.json")
+    return apiFileReturn(json(rows))
+  } else {
+    throw "Format not recognised"
+  }
 }
 
 export async function fetchEnrichedRow(ctx: Ctx) {

@@ -10,9 +10,9 @@ import {
 } from "./utils"
 import { FieldTypes, RelationshipTypes } from "../../../constants"
 import { makeExternalQuery } from "../../../integrations/base/query"
-import * as csvParser from "../../../utilities/csvParser"
 import { handleRequest } from "../row/external"
 import { events, context } from "@budibase/backend-core"
+import { parse, isRows, isSchema } from "../../../utilities/schema"
 import {
   Datasource,
   Table,
@@ -197,7 +197,7 @@ export async function save(ctx: BBContext) {
   const table: TableRequest = ctx.request.body
   const renamed = table?._rename
   // can't do this right now
-  delete table.dataImport
+  delete table.rows
   const datasourceId = getDatasourceId(ctx.request.body)!
   // table doesn't exist already, note that it is created
   if (!table._id) {
@@ -338,17 +338,17 @@ export async function destroy(ctx: BBContext) {
 
 export async function bulkImport(ctx: BBContext) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { dataImport } = ctx.request.body
-  if (!dataImport || !dataImport.schema || !dataImport.csvString) {
+  const { rows }: { rows: unknown } = ctx.request.body
+  const schema: unknown = table.schema
+
+  if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }
-  const rows = await csvParser.transform({
-    ...dataImport,
-    existingTable: table,
-  })
-
+
+  const parsedRows = await parse(rows, schema)
   await handleRequest(Operation.BULK_CREATE, table._id!, {
-    rows,
+    rows: parsedRows,
   })
-  await events.rows.imported(table, "csv", rows.length)
+  await events.rows.imported(table, parsedRows.length)
   return table
 }

@@ -1,11 +1,16 @@
 import * as internal from "./internal"
 import * as external from "./external"
-import * as csvParser from "../../../utilities/csvParser"
+import {
+  validate as validateSchema,
+  isSchema,
+  isRows,
+} from "../../../utilities/schema"
 import { isExternalTable, isSQL } from "../../../integrations/utils"
 import { getDatasourceParams } from "../../../db/utils"
 import { context, events } from "@budibase/backend-core"
 import { Table, BBContext } from "@budibase/types"
 import sdk from "../../../sdk"
+import csv from "csvtojson"
 
 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && !tableId) {
@@ -56,16 +61,16 @@ export async function find(ctx: BBContext) {
 export async function save(ctx: BBContext) {
   const appId = ctx.appId
   const table = ctx.request.body
-  const importFormat =
-    table.dataImport && table.dataImport.csvString ? "csv" : undefined
+  const isImport = table.rows
 
   const savedTable = await pickApi({ table }).save(ctx)
   if (!table._id) {
     await events.table.created(savedTable)
   } else {
     await events.table.updated(savedTable)
   }
-  if (importFormat) {
-    await events.table.imported(savedTable, importFormat)
+  if (isImport) {
+    await events.table.imported(savedTable)
   }
   ctx.status = 200
   ctx.message = `Table ${table.name} saved successfully.`
@@ -96,19 +101,43 @@ export async function bulkImport(ctx: BBContext) {
   ctx.body = { message: `Bulk rows created.` }
 }
 
-export async function validateCSVSchema(ctx: BBContext) {
-  // tableId being specified means its an import to an existing table
-  const { csvString, schema = {}, tableId } = ctx.request.body
-  let existingTable
-  if (tableId) {
-    existingTable = await sdk.tables.getTable(tableId)
-  }
-  let result: Record<string, any> | undefined = await csvParser.parse(
-    csvString,
-    schema
-  )
-  if (existingTable) {
-    result = csvParser.updateSchema({ schema: result, existingTable })
-  }
-  ctx.body = { schema: result }
+export async function csvToJson(ctx: BBContext) {
+  const { csvString } = ctx.request.body
+
+  const result = await csv().fromString(csvString)
+
+  ctx.status = 200
+  ctx.body = result
+}
+
+export async function validateNewTableImport(ctx: BBContext) {
+  const { rows, schema }: { rows: unknown; schema: unknown } = ctx.request.body
+
+  if (isRows(rows) && isSchema(schema)) {
+    ctx.status = 200
+    ctx.body = validateSchema(rows, schema)
+  } else {
+    ctx.status = 422
+  }
+}
+
+export async function validateExistingTableImport(ctx: BBContext) {
+  const { rows, tableId }: { rows: unknown; tableId: unknown } =
+    ctx.request.body
+
+  let schema = null
+  if (tableId) {
+    const table = await sdk.tables.getTable(tableId)
+    schema = table.schema
+  } else {
+    ctx.status = 422
+    return
+  }
+
+  if (tableId && isRows(rows) && isSchema(schema)) {
+    ctx.status = 200
+    ctx.body = validateSchema(rows, schema)
+  } else {
+    ctx.status = 422
+  }
 }

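Note: the controllers above answer 200 with the validateSchema() result for usable input and a bare 422 otherwise. A hypothetical request against the new conversion route (authentication omitted):

    // csvtojson turns the header row plus one data row into row objects
    const res = await fetch("/api/convert/csvToJson", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ csvString: "a,b\n1,2" }),
    })
    const rows = await res.json() // -> [{ a: "1", b: "2" }]
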
@@ -35,7 +35,7 @@ function checkAutoColumns(table: Table, oldTable: Table) {
 
 export async function save(ctx: any) {
   const db = context.getAppDB()
-  const { dataImport, ...rest } = ctx.request.body
+  const { rows, ...rest } = ctx.request.body
   let tableToSave = {
     type: "table",
     _id: generateTableID(),
@@ -61,7 +61,7 @@ export async function save(ctx: any) {
   const tableSaveFunctions = new TableSaveFunctions({
     user: ctx.user,
     oldTable,
-    dataImport,
+    importRows: rows,
   })
   tableToSave = await tableSaveFunctions.before(tableToSave)
 
@@ -185,7 +185,7 @@ export async function destroy(ctx: any) {
 
 export async function bulkImport(ctx: any) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { dataImport } = ctx.request.body
-  await handleDataImport(ctx.user, table, dataImport)
+  const { rows } = ctx.request.body
+  await handleDataImport(ctx.user, table, rows)
   return table
 }

@@ -1,4 +1,4 @@
-import { transform } from "../../../utilities/csvParser"
+import { parse, isSchema, isRows } from "../../../utilities/schema"
 import { getRowParams, generateRowID, InternalTables } from "../../../db/utils"
 import { isEqual } from "lodash"
 import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
@@ -128,24 +128,23 @@ export function importToRows(data: any, table: any, user: any = {}) {
   return finalData
 }
 
-export async function handleDataImport(user: any, table: any, dataImport: any) {
-  if (!dataImport || !dataImport.csvString) {
+export async function handleDataImport(user: any, table: any, rows: any) {
+  const schema: unknown = table.schema
+
+  if (!rows || !isRows(rows) || !isSchema(schema)) {
     return table
   }
 
   const db = context.getAppDB()
-  // Populate the table with rows imported from CSV in a bulk update
-  const data = await transform({
-    ...dataImport,
-    existingTable: table,
-  })
+  const data = parse(rows, schema)
 
   let finalData: any = importToRows(data, table, user)
 
   await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
-  await events.rows.imported(table, "csv", finalData.length)
+
+  await events.rows.imported(table, finalData.length)
   return table
 }
 
@@ -210,14 +209,14 @@ class TableSaveFunctions {
   db: any
   user: any
   oldTable: any
-  dataImport: any
+  importRows: any
   rows: any
 
-  constructor({ user, oldTable, dataImport }: any) {
+  constructor({ user, oldTable, importRows }: any) {
     this.db = context.getAppDB()
     this.user = user
     this.oldTable = oldTable
-    this.dataImport = dataImport
+    this.importRows = importRows
     // any rows that need updated
     this.rows = []
   }
@@ -241,7 +240,7 @@
   // after saving
   async after(table: any) {
     table = await handleSearchIndexes(table)
-    table = await handleDataImport(this.user, table, this.dataImport)
+    table = await handleDataImport(this.user, table, this.importRows)
     return table
   }
 

@@ -4,19 +4,21 @@ import { getGlobalUsers, getRawGlobalUser } from "../../utilities/global"
 import { getFullUser } from "../../utilities/users"
 import {
   context,
-  constants,
   roles as rolesCore,
   db as dbCore,
 } from "@budibase/backend-core"
-import { BBContext, User } from "@budibase/types"
+import { BBContext, Ctx, SyncUserRequest, User } from "@budibase/types"
 import sdk from "../../sdk"
 
-export async function syncUser(ctx: BBContext) {
+export async function syncUser(ctx: Ctx<SyncUserRequest>) {
   let deleting = false,
     user: User | any
   const userId = ctx.params.id
+
+  const previousUser = ctx.request.body?.previousUser
+
   try {
-    user = await getRawGlobalUser(userId)
+    user = (await getRawGlobalUser(userId)) as User
   } catch (err: any) {
     if (err && err.status === 404) {
       user = {}
@@ -25,6 +27,11 @@ export async function syncUser(ctx: BBContext) {
       throw err
     }
   }
+
+  let previousApps = previousUser
+    ? Object.keys(previousUser.roles).map(appId => appId)
+    : []
+
   const roles = deleting ? {} : user.roles
   // remove props which aren't useful to metadata
   delete user.password
@@ -40,8 +47,9 @@ export async function syncUser(ctx: BBContext) {
       .filter(entry => entry[1] !== rolesCore.BUILTIN_ROLE_IDS.PUBLIC)
       .map(([appId]) => appId)
   }
-  for (let prodAppId of prodAppIds) {
+  for (let prodAppId of new Set([...prodAppIds, ...previousApps])) {
     const roleId = roles[prodAppId]
+    const deleteFromApp = !roleId
     const devAppId = dbCore.getDevelopmentAppID(prodAppId)
     for (let appId of [prodAppId, devAppId]) {
       if (!(await dbCore.dbExists(appId))) {
@@ -54,24 +62,24 @@ export async function syncUser(ctx: BBContext) {
       try {
         metadata = await db.get(metadataId)
       } catch (err) {
-        if (deleting) {
+        if (deleteFromApp) {
           return
         }
         metadata = {
           tableId: InternalTables.USER_METADATA,
         }
       }
 
+      if (deleteFromApp) {
+        await db.remove(metadata)
+        return
+      }
+
       // assign the roleId for the metadata doc
       if (roleId) {
         metadata.roleId = roleId
       }
-      let combined = !deleting
-        ? sdk.users.combineMetadataAndUser(user, metadata)
-        : {
-            ...metadata,
-            status: constants.UserStatus.INACTIVE,
-            metadata: rolesCore.BUILTIN_ROLE_IDS.PUBLIC,
-          }
+      let combined = sdk.users.combineMetadataAndUser(user, metadata)
       // if its null then there was no updates required
       if (combined) {
         await db.put(combined)

@@ -1,4 +1,4 @@
-import { Row } from "@budibase/types"
+import { Row, TableSchema } from "@budibase/types"
 
 export function csv(headers: string[], rows: Row[]) {
   let csv = headers.map(key => `"${key}"`).join(",")
@@ -18,11 +18,26 @@ export function csv(headers: string[], rows: Row[]) {
   return csv
 }
 
-export function json(headers: string[], rows: Row[]) {
+export function json(rows: Row[]) {
   return JSON.stringify(rows, undefined, 2)
 }
 
-export const ExportFormats = {
-  CSV: "csv",
-  JSON: "json",
+export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
+  const newSchema: TableSchema = {}
+  Object.values(schema).forEach(column => {
+    if (!column.autocolumn) {
+      newSchema[column.name] = column
+    }
+  })
+  return JSON.stringify({ schema: newSchema, rows }, undefined, 2)
+}
+
+export enum Format {
+  CSV = "csv",
+  JSON = "json",
+  JSON_WITH_SCHEMA = "jsonWithSchema",
+}
+
+export function isFormat(format: any): format is Format {
+  return Object.values(Format).includes(format as Format)
 }

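Note: isFormat() is a TypeScript type guard, so a successful check narrows a query value from unknown to the Format enum. A sketch of the narrowing as the export handlers use it:

    const format = ctx.query.format as unknown

    if (!isFormat(format)) {
      ctx.throw(400, "Format must be specified, either csv, json or jsonWithSchema")
    }
    // format is now typed as Format, so comparisons against Format.CSV,
    // Format.JSON and Format.JSON_WITH_SCHEMA type-check
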
@@ -1,6 +1,6 @@
 import viewTemplate from "./viewBuilder"
 import { apiFileReturn } from "../../../utilities/fileSystem"
-import * as exporters from "./exporters"
+import { csv, json, jsonWithSchema, Format, isFormat } from "./exporters"
 import { deleteView, getView, getViews, saveView } from "./utils"
 import { fetchView } from "../row"
 import { FieldTypes } from "../../../constants"
@@ -127,9 +127,13 @@ export async function exportView(ctx: BBContext) {
   const viewName = decodeURIComponent(ctx.query.view as string)
   const view = await getView(viewName)
 
-  const format = ctx.query.format as string
-  if (!format || !Object.values(exporters.ExportFormats).includes(format)) {
-    ctx.throw(400, "Format must be specified, either csv or json")
+  const format = ctx.query.format as unknown
+
+  if (!isFormat(format)) {
+    ctx.throw(
+      400,
+      "Format must be specified, either csv, json or jsonWithSchema"
+    )
   }
 
   if (view) {
@@ -171,7 +175,7 @@ export async function exportView(ctx: BBContext) {
   })
 
   // make sure no "undefined" entries appear in the CSV
-  if (format === exporters.ExportFormats.CSV) {
+  if (format === Format.CSV) {
     const schemaKeys = Object.keys(schema)
     for (let key of schemaKeys) {
       for (let row of rows) {
@@ -182,13 +186,18 @@ export async function exportView(ctx: BBContext) {
     }
   }
 
-  // Export part
-  let headers = Object.keys(schema)
-  const exporter = format === "csv" ? exporters.csv : exporters.json
-  const filename = `${viewName}.${format}`
-  // send down the file
-  ctx.attachment(filename)
-  ctx.body = apiFileReturn(exporter(headers, rows))
+  if (format === Format.CSV) {
+    ctx.attachment(`${viewName}.csv`)
+    ctx.body = apiFileReturn(csv(Object.keys(schema), rows))
+  } else if (format === Format.JSON) {
+    ctx.attachment(`${viewName}.json`)
+    ctx.body = apiFileReturn(json(rows))
+  } else if (format === Format.JSON_WITH_SCHEMA) {
+    ctx.attachment(`${viewName}.json`)
+    ctx.body = apiFileReturn(jsonWithSchema(schema, rows))
+  } else {
+    throw "Format not recognised"
+  }
 
   if (viewName.startsWith(DocumentType.TABLE)) {
     await events.table.exported(table, format as TableExportFormat)

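Note: the jsonWithSchema exporter bundles a cleaned schema with the rows, stripping auto-columns first. The emitted document, sketched with illustrative values:

    // Output shape of jsonWithSchema(schema, rows)
    const exported = JSON.stringify(
      {
        schema: { name: { name: "name", type: "string" } },
        rows: [{ name: "test-name" }],
      },
      undefined,
      2
    )
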
@@ -67,10 +67,7 @@ router
    * structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field
    * lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix
    * the rows in the table. This functionality is only available for internal tables.
-   * @apiParam (Body) {object} [dataImport] When creating an internal table it can be built from a CSV, by using the
-   * CSV validation endpoint. Send the CSV data to the validation endpoint, then put the results of that call
-   * into this property, along with the CSV and a table/rows will be built from it. This is not supported when updating
-   * or for external tables.
+   * @apiParam (Body) {object[]} [rows] When creating a table using a compatible data source, an array of objects to be imported into the new table can be provided.
    *
    * @apiParamExample {json} Example:
    * {
@@ -99,15 +96,7 @@ router
    *       "old": "columnName",
    *       "updated": "newColumnName",
    *     },
-   *     "dataImport": {
-   *       "csvString": "column\nvalue",
-   *       "primaryDisplay": "column",
-   *       "schema": {
-   *         "column": {
-   *           "type": "string"
-   *         }
-   *       }
-   *     }
+   *     "rows": []
    *   }
    *
    * @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
@@ -121,30 +110,20 @@ router
     tableValidator(),
     tableController.save
   )
-  /**
-   * @api {post} /api/tables/csv/validate Validate a CSV for a table
-   * @apiName Validate a CSV for a table
-   * @apiGroup tables
-   * @apiPermission builder
-   * @apiDescription When creating a new table, or importing a CSV to an existing table the CSV must be validated and
-   * converted into a Budibase schema; this endpoint does this.
-   *
-   * @apiParam (Body) {string} csvString The CSV which is to be validated as a string.
-   * @apiParam (Body) {object} [schema] When a CSV has been validated it is possible to re-validate after changing the
-   * type of a field, by default everything will be strings as there is no way to infer types. The returned schema can
-   * be updated and then returned to the endpoint to re-validate and check if the type will work for the CSV, e.g.
-   * using a number instead of strings.
-   * @apiParam (Body) {string} [tableId] If importing data to an existing table this will pull the current table and
-   * remove any fields from the CSV schema which do not exist on the table/don't match the type of the table. When
-   * importing a CSV to an existing table only fields that are present on the table can be imported.
-   *
-   * @apiSuccess {object} schema The response body will contain a "schema" object that represents the schema found for
-   * the CSV - this will be in the same format used for table schema.s
-   */
   .post(
-    "/api/tables/csv/validate",
+    "/api/convert/csvToJson",
     authorized(BUILDER),
-    tableController.validateCSVSchema
+    tableController.csvToJson
+  )
+  .post(
+    "/api/tables/validateNewTableImport",
+    authorized(BUILDER),
+    tableController.validateNewTableImport
+  )
+  .post(
+    "/api/tables/validateExistingTableImport",
+    authorized(BUILDER),
+    tableController.validateExistingTableImport
   )
   /**
    * @api {post} /api/tables/:tableId/:revId Delete a table
@@ -177,9 +156,7 @@ router
    *
    * @apiParam {string} tableId The ID of the table which the data should be imported to.
    *
-   * @apiParam (Body) {object} dataImport This is the same as the structure used when creating an internal table with
-   * a CSV, it will have the "schema" returned from the CSV validation endpoint and the "csvString" which is to be
-   * turned into rows.
+   * @apiParam (Body) {object[]} rows An array of objects representing the rows to be imported, key-value pairs not matching the table schema will be ignored.
    *
    * @apiSuccess {string} message A message stating that the data was imported successfully.
    */

@@ -42,7 +42,7 @@ describe("run misc tests", () => {
   })
 
   describe("test table utilities", () => {
-    it("should be able to import a CSV", async () => {
+    it("should be able to import data", async () => {
       return config.doInContext(null, async () => {
         const table = await config.createTable({
           name: "table",
@@ -75,17 +75,11 @@ describe("run misc tests", () => {
           },
         },
       })
-      const dataImport = {
-        csvString: "a,b,c,d\n1,2,3,4",
-        schema: {},
-      }
-      for (let col of ["a", "b", "c", "d"]) {
-        dataImport.schema[col] = { type: "string" }
-      }
-
       await tableUtils.handleDataImport(
         { userId: "test" },
         table,
-        dataImport
+        [{ a: '1', b: '2', c: '3', d: '4'}]
       )
       const rows = await config.getRows()
       expect(rows[0].a).toEqual("1")
@@ -94,4 +88,4 @@ describe("run misc tests", () => {
       })
     })
   })
 })

@@ -43,21 +43,18 @@ describe("/tables", () => {
       expect(events.table.created).toBeCalledWith(res.body)
     })
 
-    it("creates a table via data import CSV", async () => {
+    it("creates a table via data import", async () => {
       const table = basicTable()
-      table.dataImport = {
-        csvString: "\"name\",\"description\"\n\"test-name\",\"test-desc\"",
-      }
-      table.dataImport.schema = table.schema
+      table.rows = [{ name: 'test-name', description: 'test-desc' }]
 
       const res = await createTable(table)
 
       expect(events.table.created).toBeCalledTimes(1)
       expect(events.table.created).toBeCalledWith(res.body)
       expect(events.table.imported).toBeCalledTimes(1)
-      expect(events.table.imported).toBeCalledWith(res.body, "csv")
+      expect(events.table.imported).toBeCalledWith(res.body)
       expect(events.rows.imported).toBeCalledTimes(1)
-      expect(events.rows.imported).toBeCalledWith(res.body, "csv", 1)
+      expect(events.rows.imported).toBeCalledWith(res.body, 1)
     })
 
     it("should apply authorization to endpoint", async () => {
@@ -155,11 +152,10 @@ describe("/tables", () => {
     it("imports rows successfully", async () => {
       const table = await config.createTable()
       const importRequest = {
-        dataImport: {
-          csvString: "\"name\",\"description\"\n\"test-name\",\"test-desc\"",
-          schema: table.schema
-        }
+        schema: table.schema,
+        rows: [{ name: 'test-name', description: 'test-desc' }]
       }
 
       jest.clearAllMocks()
 
       await request
@@ -171,7 +167,7 @@ describe("/tables", () => {
 
       expect(events.table.created).not.toHaveBeenCalled()
       expect(events.rows.imported).toBeCalledTimes(1)
-      expect(events.rows.imported).toBeCalledWith(table, "csv", 1)
+      expect(events.rows.imported).toBeCalledWith(table, 1)
     })
   })
 
@@ -206,24 +202,6 @@ describe("/tables", () => {
     })
   })
 
-  describe("validate csv", () => {
-    it("should be able to validate a CSV layout", async () => {
-      const res = await request
-        .post(`/api/tables/csv/validate`)
-        .send({
-          csvString: "a,b,c,d\n1,2,3,4"
-        })
-        .set(config.defaultHeaders())
-        .expect('Content-Type', /json/)
-        .expect(200)
-      expect(res.body.schema).toBeDefined()
-      expect(res.body.schema.a).toEqual({
-        type: "string",
-        success: true,
-      })
-    })
-  })
-
   describe("indexing", () => {
     it("should be able to create a table with indexes", async () => {
       await context.doInAppContext(appId, async () => {

@@ -171,9 +171,28 @@ describe("/users", () => {
         .expect("Content-Type", /json/)
       expect(res.body.message).toEqual('User synced.')
     })
+
+    it("should sync the user when a previous user is specified", async () => {
+      const app1 = await config.createApp('App 1')
+      const app2 = await config.createApp('App 2')
+
+      let user = await config.createUser(
+        undefined,
+        undefined,
+        undefined,
+        undefined,
+        false,
+        true,
+        { [app1.appId]: 'ADMIN' })
+      let res = await request
+        .post(`/api/users/metadata/sync/${user._id}`)
+        .set(config.defaultHeaders())
+        .send({ previousUser: { ...user, roles: { ...user.roles, [app2.appId]: 'BASIC' } } })
+        .expect(200)
+        .expect("Content-Type", /json/)
+
+      expect(res.body.message).toEqual('User synced.')
+    })
   })
 
 
 })

@@ -18,7 +18,7 @@ export function tableValidator() {
     schema: Joi.object().required(),
     name: Joi.string().required(),
     views: Joi.object(),
-    dataImport: Joi.object(),
+    rows: Joi.array(),
   }).unknown(true))
 }
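For reference, an illustrative payload that the updated validator should accept. The field values here are made up; the point is that `rows` is now an array of row objects where the old `dataImport` object used to sit.

    // Hypothetical table payload matching the updated Joi schema above.
    const tablePayload = {
      name: "people",                       // required string
      schema: { name: { type: "string" } }, // required object
      views: {},                            // optional object
      rows: [{ name: "test-name" }],        // optional array, replaces dataImport
    }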
@@ -25,6 +25,7 @@ export default async (ctx: BBContext, next: any) => {
   if (!appCookie && !requestAppId) {
     return next()
   }

+  // check the app exists referenced in cookie
   if (appCookie) {
     const appId = appCookie.appId

@@ -51,7 +52,7 @@ export default async (ctx: BBContext, next: any) => {

   let appId: string | undefined,
     roleId = roles.BUILTIN_ROLE_IDS.PUBLIC
-  if (!ctx.user) {
+  if (!ctx.user?._id) {
     // not logged in, try to set a cookie for public apps
     appId = requestAppId
   } else if (requestAppId != null) {

@@ -96,7 +97,7 @@ export default async (ctx: BBContext, next: any) => {
   // need to judge this only based on the request app ID,
   if (
     env.MULTI_TENANCY &&
-    ctx.user &&
+    ctx.user?._id &&
     requestAppId &&
     !tenancy.isUserInAppTenant(requestAppId, ctx.user)
   ) {
@@ -1,161 +0,0 @@
-import { FieldSchema, Table } from "@budibase/types"
-import csv from "csvtojson"
-import { FieldTypes } from "../constants"
-
-type CsvParseOpts = {
-  schema?: { [key: string]: any }
-  existingTable: Table
-  csvString?: string
-}
-
-const VALIDATORS: any = {
-  [FieldTypes.STRING]: () => true,
-  [FieldTypes.OPTIONS]: () => true,
-  [FieldTypes.BARCODEQR]: () => true,
-  [FieldTypes.NUMBER]: (attribute?: string) => {
-    // allow not to be present
-    if (!attribute) {
-      return true
-    }
-    return !isNaN(Number(attribute))
-  },
-  [FieldTypes.DATETIME]: (attribute?: string) => {
-    // allow not to be present
-    if (!attribute) {
-      return true
-    }
-    return !isNaN(new Date(attribute).getTime())
-  },
-}
-
-const PARSERS: any = {
-  [FieldTypes.NUMBER]: (attribute?: string) => {
-    if (!attribute) {
-      return attribute
-    }
-    return Number(attribute)
-  },
-  [FieldTypes.DATETIME]: (attribute?: string) => {
-    if (!attribute) {
-      return attribute
-    }
-    return new Date(attribute).toISOString()
-  },
-}
-
-export function parse(csvString: string, parsers: any): Record<string, any> {
-  const result = csv().fromString(csvString)
-
-  const schema: Record<string, any> = {}
-
-  return new Promise((resolve, reject) => {
-    result.on("header", headers => {
-      for (let header of headers) {
-        schema[header] = {
-          type: parsers[header] ? parsers[header].type : "string",
-          success: true,
-        }
-      }
-    })
-    result.subscribe(row => {
-      // For each CSV row parse all the columns that need parsed
-      for (let key of Object.keys(parsers)) {
-        if (!schema[key] || schema[key].success) {
-          // get the validator for the column type
-          const validator = VALIDATORS[parsers[key].type]
-
-          try {
-            // allow null/undefined values
-            schema[key].success = !row[key] || validator(row[key])
-          } catch (err) {
-            schema[key].success = false
-          }
-        }
-      }
-    })
-    result.on("done", error => {
-      if (error) {
-        console.error(error)
-        reject(error)
-      }
-
-      resolve(schema)
-    })
-  })
-}
-
-export function updateSchema({
-  schema,
-  existingTable,
-}: {
-  schema?: Record<string, any>
-  existingTable?: Table
-}) {
-  if (!schema) {
-    return schema
-  }
-  const finalSchema: Record<string, FieldSchema> = {}
-  const schemaKeyMap: Record<string, any> = {}
-  Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
-  for (let [key, field] of Object.entries(existingTable?.schema || {})) {
-    const lcKey = key.toLowerCase()
-    const foundKey: string = schemaKeyMap[lcKey]
-    if (foundKey) {
-      finalSchema[key] = schema[foundKey]
-      finalSchema[key].type = field.type
-    }
-  }
-  return finalSchema
-}
-
-export async function transform({
-  schema,
-  csvString,
-  existingTable,
-}: CsvParseOpts) {
-  if (!schema || !csvString) {
-    throw new Error("Unable to transform CSV without schema")
-  }
-  const colParser: any = {}
-
-  // make sure the table has all the columns required for import
-  if (existingTable) {
-    schema = updateSchema({ schema, existingTable })
-  }
-
-  for (let [key, field] of Object.entries(schema || {})) {
-    // don't import data to auto columns
-    if (!field.autocolumn) {
-      colParser[key] = PARSERS[field.type] || field.type
-    }
-  }
-
-  try {
-    const data = await csv({ colParser }).fromString(csvString)
-    const schemaKeyMap: any = {}
-    Object.keys(schema || {}).forEach(
-      key => (schemaKeyMap[key.toLowerCase()] = key)
-    )
-    for (let element of data) {
-      if (!data) {
-        continue
-      }
-      for (let key of Object.keys(element)) {
-        const mappedKey = schemaKeyMap[key.toLowerCase()]
-        // isn't a column in the table, remove it
-        if (mappedKey == null) {
-          delete element[key]
-        }
-        // casing is different, fix it in row
-        else if (key !== mappedKey) {
-          element[mappedKey] = element[key]
-          delete element[key]
-        }
-      }
-    }
-    return data
-  } catch (err) {
-    console.error(`Error transforming CSV to JSON for data import`, err)
-    throw err
-  }
-}
@@ -0,0 +1,141 @@
+import { FieldTypes } from "../constants"
+
+interface SchemaColumn {
+  readonly name: string
+  readonly type: FieldTypes
+  readonly autocolumn?: boolean
+}
+
+interface Schema {
+  readonly [index: string]: SchemaColumn
+}
+
+interface Row {
+  [index: string]: any
+}
+
+type Rows = Array<Row>
+
+interface SchemaValidation {
+  [index: string]: boolean
+}
+
+interface ValidationResults {
+  schemaValidation: SchemaValidation
+  allValid: boolean
+  invalidColumns: Array<string>
+}
+
+const PARSERS: any = {
+  [FieldTypes.NUMBER]: (attribute?: string) => {
+    if (!attribute) {
+      return attribute
+    }
+    return Number(attribute)
+  },
+  [FieldTypes.DATETIME]: (attribute?: string) => {
+    if (!attribute) {
+      return attribute
+    }
+    return new Date(attribute).toISOString()
+  },
+}
+
+export function isSchema(schema: any): schema is Schema {
+  return (
+    typeof schema === "object" &&
+    Object.values(schema).every(rawColumn => {
+      const column = rawColumn as SchemaColumn
+
+      return (
+        column !== null &&
+        typeof column === "object" &&
+        typeof column.type === "string" &&
+        Object.values(FieldTypes).includes(column.type as FieldTypes)
+      )
+    })
+  )
+}
+
+export function isRows(rows: any): rows is Rows {
+  return Array.isArray(rows) && rows.every(row => typeof row === "object")
+}
+
+export function validate(rows: Rows, schema: Schema): ValidationResults {
+  const results: ValidationResults = {
+    schemaValidation: {},
+    allValid: false,
+    invalidColumns: [],
+  }
+
+  rows.forEach(row => {
+    Object.entries(row).forEach(([columnName, columnData]) => {
+      const columnType = schema[columnName]?.type
+      const isAutoColumn = schema[columnName]?.autocolumn
+
+      // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
+      if (typeof columnType !== "string") {
+        results.invalidColumns.push(columnName)
+      } else if (
+        // If there's no data for this field don't bother with further checks
+        // If the field is already marked as invalid there's no need for further checks
+        results.schemaValidation[columnName] === false ||
+        columnData == null ||
+        isAutoColumn
+      ) {
+        return
+      } else if (
+        columnType === FieldTypes.NUMBER &&
+        isNaN(Number(columnData))
+      ) {
+        // If provided must be a valid number
+        results.schemaValidation[columnName] = false
+      } else if (
+        // If provided must be a valid date
+        columnType === FieldTypes.DATETIME &&
+        isNaN(new Date(columnData).getTime())
+      ) {
+        results.schemaValidation[columnName] = false
+      } else {
+        results.schemaValidation[columnName] = true
+      }
+    })
+  })
+
+  results.allValid =
+    Object.values(results.schemaValidation).length > 0 &&
+    Object.values(results.schemaValidation).every(column => column)
+
+  // Select unique values
+  results.invalidColumns = [...new Set(results.invalidColumns)]
+  return results
+}
+
+export function parse(rows: Rows, schema: Schema): Rows {
+  return rows.map(row => {
+    const parsedRow: Row = {}
+
+    Object.entries(row).forEach(([columnName, columnData]) => {
+      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+        // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
+        return
+      }
+
+      const columnType = schema[columnName].type
+
+      if (columnType === FieldTypes.NUMBER) {
+        // If provided must be a valid number
+        parsedRow[columnName] = columnData ? Number(columnData) : columnData
+      } else if (columnType === FieldTypes.DATETIME) {
+        // If provided must be a valid date
+        parsedRow[columnName] = columnData
+          ? new Date(columnData).toISOString()
+          : columnData
+      } else {
+        parsedRow[columnName] = columnData
+      }
+    })
+
+    return parsedRow
+  })
+}
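A short usage sketch of the module added above. The function names and result shapes are as defined in the new file; the import path and the exact FieldTypes string values ("string", "number") are assumptions for illustration.

    import { isSchema, isRows, validate, parse } from "./schema" // hypothetical path

    const schema: any = {
      name: { name: "name", type: "string" },
      age: { name: "age", type: "number" },
    }
    const rows: any = [{ name: "Berta", age: "23" }]

    // Type guards first, then per-column validation, then coercion.
    if (isSchema(schema) && isRows(rows)) {
      const results = validate(rows, schema)
      if (results.allValid) {
        // parse() coerces "23" to the number 23 via the NUMBER branch
        const parsed = parse(rows, schema)
      }
    }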
@@ -1,15 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`CSV Parser transformation transforms a CSV file into JSON 1`] = `
-Array [
-  Object {
-    "Age": 4324,
-  },
-  Object {
-    "Age": 34,
-  },
-  Object {
-    "Age": 23423,
-  },
-]
-`;
@@ -1,112 +0,0 @@
-const { readFileSync } = require("../fileSystem")
-const csvParser = require("../csvParser")
-
-const CSV_PATH = __dirname + "/test.csv"
-
-const SCHEMAS = {
-  VALID: {
-    Age: {
-      type: "number",
-    },
-  },
-  INVALID: {
-    Address: {
-      type: "number",
-    },
-    Age: {
-      type: "number",
-    },
-  },
-  IGNORE: {
-    Address: {
-      type: "omit",
-    },
-    Age: {
-      type: "omit",
-    },
-    Name: {
-      type: "string",
-    },
-  },
-  BROKEN: {
-    Address: {
-      type: "datetime",
-    },
-  },
-}
-
-describe("CSV Parser", () => {
-  const csvString = readFileSync(CSV_PATH, "utf8")
-
-  describe("parsing", () => {
-    it("returns status and types for a valid CSV transformation", async () => {
-      expect(await csvParser.parse(csvString, SCHEMAS.VALID)).toEqual({
-        Address: {
-          success: true,
-          type: "string",
-        },
-        Age: {
-          success: true,
-          type: "number",
-        },
-        Name: {
-          success: true,
-          type: "string",
-        },
-      })
-    })
-
-    it("returns status and types for an invalid CSV transformation", async () => {
-      expect(await csvParser.parse(csvString, SCHEMAS.INVALID)).toEqual({
-        Address: {
-          success: false,
-          type: "number",
-        },
-        Age: {
-          success: true,
-          type: "number",
-        },
-        Name: {
-          success: true,
-          type: "string",
-        },
-      })
-    })
-  })
-
-  describe("transformation", () => {
-    it("transforms a CSV file into JSON", async () => {
-      expect(
-        await csvParser.transform({
-          schema: SCHEMAS.VALID,
-          csvString,
-        })
-      ).toMatchSnapshot()
-    })
-
-    it("transforms a CSV file into JSON ignoring certain fields", async () => {
-      expect(
-        await csvParser.transform({
-          schema: SCHEMAS.IGNORE,
-          csvString,
-        })
-      ).toEqual([
-        {
-          Name: "Bertå",
-        },
-        {
-          Name: "Ernie",
-        },
-        {
-          Name: "Big Bird",
-        },
-      ])
-    })
-
-    it("throws an error on invalid schema", async () => {
-      await expect(
-        csvParser.transform({ schema: SCHEMAS.BROKEN, csvString })
-      ).rejects.toThrow()
-    })
-  })
-})
@@ -1273,13 +1273,13 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@2.2.12-alpha.16":
-  version "2.2.12-alpha.16"
-  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12-alpha.16.tgz#9ebfa7308fc97b34d6a076e4300fbcf996160d66"
-  integrity sha512-rHMryIOb71U7W5jZtn39vuBI7xSZ6XA4l6P7lc2bBT1lI10G/zQRoQWjsWaUWo+RVBQ5zki3Ok05tFS9Yx/7fA==
+"@budibase/backend-core@2.2.12-alpha.21":
+  version "2.2.12-alpha.21"
+  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12-alpha.21.tgz#99844f641cddb99ca6b6abd6a8af7d990e6b92e1"
+  integrity sha512-4ZFcLTRtApF1aCE2CJrgMO44O0rkQq9bmyBxXxMUMjmv83lsue8rpWV00G5Z7fCg2C/Z2VdzeOJU97OumT3dmQ==
   dependencies:
     "@budibase/nano" "10.1.1"
-    "@budibase/types" "2.2.12-alpha.16"
+    "@budibase/types" "2.2.12-alpha.21"
     "@shopify/jest-koa-mocks" "5.0.1"
     "@techpass/passport-openidconnect" "0.3.2"
     aws-cloudfront-sign "2.2.0"

@@ -1374,13 +1374,13 @@
   qs "^6.11.0"
   tough-cookie "^4.1.2"

-"@budibase/pro@2.2.12-alpha.16":
-  version "2.2.12-alpha.16"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12-alpha.16.tgz#db5a345b072e725765cd01edcad4a930ae875eba"
-  integrity sha512-GBXdOQMIbxU0TGgGQ4+npNGtuFvanNVFrZBqwB7+3x6rIku313WkbgJJji5uemtU6B8XFh/QqS6AA0R0PS2Kmg==
+"@budibase/pro@2.2.12-alpha.21":
+  version "2.2.12-alpha.21"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12-alpha.21.tgz#5419fd78ac68ed9feddc4af52da8a79ff874cbfc"
+  integrity sha512-Fvm4ruWP9V514PJMqO9n2T4CQ2DGJpXDbK/hRSRooGyrAfR3lDJC6TB1boa4WYC+1YACF+f757adLPcNvUNCnA==
   dependencies:
-    "@budibase/backend-core" "2.2.12-alpha.16"
-    "@budibase/types" "2.2.12-alpha.16"
+    "@budibase/backend-core" "2.2.12-alpha.21"
+    "@budibase/types" "2.2.12-alpha.21"
     "@koa/router" "8.0.8"
     bull "4.10.1"
     joi "17.6.0"

@@ -1405,10 +1405,10 @@
   svelte-apexcharts "^1.0.2"
   svelte-flatpickr "^3.1.0"

-"@budibase/types@2.2.12-alpha.16":
-  version "2.2.12-alpha.16"
-  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12-alpha.16.tgz#6fb42d4be88fbd8054a0a3264cf9c4b4a7248893"
-  integrity sha512-pXn/r3tA0A30f2dJVJfzldMGXAEhpObBfqbONn8AStiD6Qm8Hu9H6aFaCPqS8DDaWBuwY/tMqSry2E0saRaSwg==
+"@budibase/types@2.2.12-alpha.21":
+  version "2.2.12-alpha.21"
+  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12-alpha.21.tgz#9344fae4504cf5d9a3a92ffb94434f988389c9a9"
+  integrity sha512-F+UMqKvrYqHYtJUcmvT9kRBFtPRPGBhbktJgYHa8D3X+CK/nf93UYN8j9nTeXme7iOA+i/cmg+zALFuHyZi45Q==

 "@bull-board/api@3.7.0":
   version "3.7.0"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/types",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase types",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -57,3 +57,7 @@ export interface CreateAdminUserRequest {
   password: string
   tenantId: string
 }
+
+export interface SyncUserRequest {
+  previousUser?: User
+}
@@ -69,7 +69,7 @@ export interface Table extends Document {
   constrained?: string[]
   sql?: boolean
   indexes?: { [key: string]: any }
-  dataImport?: { [key: string]: any }
+  rows?: { [key: string]: any }
 }

 export interface TableRequest extends Table {
@@ -69,3 +69,7 @@ export interface AdminUser extends User {
     global: boolean
   }
 }
+
+export function isUser(user: object): user is User {
+  return !!(user as User).roles
+}
@@ -185,6 +185,4 @@ export interface BaseEvent {
   hosting?: Hosting
 }

-export type RowImportFormat = "csv"
 export type TableExportFormat = "json" | "csv"
-export type TableImportFormat = "csv"
@ -1,8 +1,7 @@
|
|||
import { BaseEvent, RowImportFormat } from "./event"
|
||||
import { BaseEvent } from "./event"
|
||||
|
||||
export interface RowsImportedEvent extends BaseEvent {
|
||||
tableId: string
|
||||
format: RowImportFormat
|
||||
count: number
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
-import { BaseEvent, TableExportFormat, TableImportFormat } from "./event"
+import { BaseEvent, TableExportFormat } from "./event"

 export interface TableCreatedEvent extends BaseEvent {
   tableId: string

@@ -19,5 +19,4 @@ export interface TableExportedEvent extends BaseEvent {

 export interface TableImportedEvent extends BaseEvent {
   tableId: string
-  format: TableImportFormat
 }
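With the format fields removed, the import events carry only the table id (plus a row count for row imports). A minimal sketch of the publisher calls, matching the expectations in the /tables tests earlier in this diff; `events`, `table`, and the count value are assumed from that test context.

    // Import events no longer take a format argument.
    await events.table.imported(table)      // publishes TableImportedEvent { tableId }
    await events.rows.imported(table, 100)  // publishes RowsImportedEvent { tableId, count }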
@@ -41,7 +41,7 @@ export interface UserCtx<RequestBody = any, ResponseBody = any>
 }

 /**
- * Deprecated: Use UserCtx / Ctx appropriately
+ * @deprecated: Use UserCtx / Ctx appropriately
  * Authenticated context.
  */
 export interface BBContext extends Ctx {
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "2.2.12-alpha.16",
+  "version": "2.2.12-alpha.21",
   "description": "Budibase background service",
   "main": "src/index.ts",
   "repository": {

@@ -36,10 +36,10 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "dependencies": {
-    "@budibase/backend-core": "2.2.12-alpha.16",
-    "@budibase/pro": "2.2.12-alpha.16",
-    "@budibase/string-templates": "2.2.12-alpha.16",
-    "@budibase/types": "2.2.12-alpha.16",
+    "@budibase/backend-core": "2.2.12-alpha.21",
+    "@budibase/pro": "2.2.12-alpha.21",
+    "@budibase/string-templates": "2.2.12-alpha.21",
+    "@budibase/types": "2.2.12-alpha.21",
     "@koa/router": "8.0.8",
     "@sentry/node": "6.17.7",
     "@techpass/passport-openidconnect": "0.3.2",
@@ -31,6 +31,7 @@ import {
   SearchUsersRequest,
   User,
   ThirdPartyUser,
+  isUser,
 } from "@budibase/types"
 import { sendEmail } from "../../utilities/email"
 import { EmailTemplatePurpose } from "../../constants"

@@ -265,8 +266,9 @@ export const save = async (
   await eventHelpers.handleSaveEvents(builtUser, dbUser)
   await addTenant(tenantId, _id, email)
   await cache.user.invalidateUser(response.id)

   // let server know to sync user
-  await apps.syncUserInApps(_id)
+  await apps.syncUserInApps(_id, dbUser)

   await Promise.all(groupPromises)

@@ -572,7 +574,7 @@ export const destroy = async (id: string, currentUser: any) => {
   await cache.user.invalidateUser(userId)
   await sessions.invalidateSessions(userId, { reason: "deletion" })
   // let server know to sync user
-  await apps.syncUserInApps(userId)
+  await apps.syncUserInApps(userId, dbUser)
 }

 const bulkDeleteProcessing = async (dbUser: User) => {

@@ -582,7 +584,7 @@ const bulkDeleteProcessing = async (dbUser: User) => {
   await cache.user.invalidateUser(userId)
   await sessions.invalidateSessions(userId, { reason: "bulk-deletion" })
   // let server know to sync user
-  await apps.syncUserInApps(userId)
+  await apps.syncUserInApps(userId, dbUser)
 }

 export const invite = async (
@@ -2,6 +2,7 @@ import fetch from "node-fetch"
 import { constants, tenancy, logging } from "@budibase/backend-core"
 import { checkSlashesInUrl } from "../utilities"
 import env from "../environment"
+import { SyncUserRequest, User } from "@budibase/types"

 async function makeAppRequest(url: string, method: string, body: any) {
   if (env.isTest()) {

@@ -24,11 +25,15 @@ async function makeAppRequest(url: string, method: string, body: any) {
   return fetch(checkSlashesInUrl(env.APPS_URL + url), request)
 }

-export async function syncUserInApps(userId: string) {
+export async function syncUserInApps(userId: string, previousUser?: User) {
+  const body: SyncUserRequest = {
+    previousUser,
+  }
+
   const response = await makeAppRequest(
     `/api/users/metadata/sync/${userId}`,
     "POST",
-    {}
+    body
   )
   if (response && response.status !== 200) {
     throw "Unable to sync user."
|
|||
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
|
||||
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
|
||||
|
||||
"@budibase/backend-core@2.2.12-alpha.16":
|
||||
version "2.2.12-alpha.16"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12-alpha.16.tgz#9ebfa7308fc97b34d6a076e4300fbcf996160d66"
|
||||
integrity sha512-rHMryIOb71U7W5jZtn39vuBI7xSZ6XA4l6P7lc2bBT1lI10G/zQRoQWjsWaUWo+RVBQ5zki3Ok05tFS9Yx/7fA==
|
||||
"@budibase/backend-core@2.2.12-alpha.21":
|
||||
version "2.2.12-alpha.21"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12-alpha.21.tgz#99844f641cddb99ca6b6abd6a8af7d990e6b92e1"
|
||||
integrity sha512-4ZFcLTRtApF1aCE2CJrgMO44O0rkQq9bmyBxXxMUMjmv83lsue8rpWV00G5Z7fCg2C/Z2VdzeOJU97OumT3dmQ==
|
||||
dependencies:
|
||||
"@budibase/nano" "10.1.1"
|
||||
"@budibase/types" "2.2.12-alpha.16"
|
||||
"@budibase/types" "2.2.12-alpha.21"
|
||||
"@shopify/jest-koa-mocks" "5.0.1"
|
||||
"@techpass/passport-openidconnect" "0.3.2"
|
||||
aws-cloudfront-sign "2.2.0"
|
||||
|
@ -521,23 +521,23 @@
|
|||
qs "^6.11.0"
|
||||
tough-cookie "^4.1.2"
|
||||
|
||||
"@budibase/pro@2.2.12-alpha.16":
|
||||
version "2.2.12-alpha.16"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12-alpha.16.tgz#db5a345b072e725765cd01edcad4a930ae875eba"
|
||||
integrity sha512-GBXdOQMIbxU0TGgGQ4+npNGtuFvanNVFrZBqwB7+3x6rIku313WkbgJJji5uemtU6B8XFh/QqS6AA0R0PS2Kmg==
|
||||
"@budibase/pro@2.2.12-alpha.21":
|
||||
version "2.2.12-alpha.21"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12-alpha.21.tgz#5419fd78ac68ed9feddc4af52da8a79ff874cbfc"
|
||||
integrity sha512-Fvm4ruWP9V514PJMqO9n2T4CQ2DGJpXDbK/hRSRooGyrAfR3lDJC6TB1boa4WYC+1YACF+f757adLPcNvUNCnA==
|
||||
dependencies:
|
||||
"@budibase/backend-core" "2.2.12-alpha.16"
|
||||
"@budibase/types" "2.2.12-alpha.16"
|
||||
"@budibase/backend-core" "2.2.12-alpha.21"
|
||||
"@budibase/types" "2.2.12-alpha.21"
|
||||
"@koa/router" "8.0.8"
|
||||
bull "4.10.1"
|
||||
joi "17.6.0"
|
||||
jsonwebtoken "8.5.1"
|
||||
node-fetch "^2.6.1"
|
||||
|
||||
"@budibase/types@2.2.12-alpha.16":
|
||||
version "2.2.12-alpha.16"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12-alpha.16.tgz#6fb42d4be88fbd8054a0a3264cf9c4b4a7248893"
|
||||
integrity sha512-pXn/r3tA0A30f2dJVJfzldMGXAEhpObBfqbONn8AStiD6Qm8Hu9H6aFaCPqS8DDaWBuwY/tMqSry2E0saRaSwg==
|
||||
"@budibase/types@2.2.12-alpha.21":
|
||||
version "2.2.12-alpha.21"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12-alpha.21.tgz#9344fae4504cf5d9a3a92ffb94434f988389c9a9"
|
||||
integrity sha512-F+UMqKvrYqHYtJUcmvT9kRBFtPRPGBhbktJgYHa8D3X+CK/nf93UYN8j9nTeXme7iOA+i/cmg+zALFuHyZi45Q==
|
||||
|
||||
"@cspotcode/source-map-support@^0.8.0":
|
||||
version "0.8.1"
|
||||
|
|
|
@@ -117,7 +117,7 @@ export default class AppApi {
     return [response, json]
   }

-  async update(
+  async rename(
     appId: string,
     oldName: string,
     body: any

@@ -153,4 +153,27 @@ export default class AppApi {
     expect(response).toHaveStatusCode(204)
     return [response]
   }
+
+  async unlock(appId: string): Promise<[Response, responseMessage]> {
+    const response = await this.api.del(`/dev/${appId}/lock`)
+    const json = await response.json()
+    expect(response).toHaveStatusCode(200)
+    expect(json.message).toEqual("Lock released successfully.")
+    return [response, json]
+  }
+
+  async updateIcon(appId: string): Promise<[Response, Application]> {
+    const body = {
+      icon: {
+        name: "ConversionFunnel",
+        color: "var(--spectrum-global-color-red-400)",
+      },
+    }
+    const response = await this.api.put(`/applications/${appId}`, { body })
+    const json = await response.json()
+    expect(response).toHaveStatusCode(200)
+    expect(json.icon.name).toEqual(body.icon.name)
+    expect(json.icon.color).toEqual(body.icon.color)
+    return [response, json]
+  }
 }
@@ -15,7 +15,7 @@ export default class RowsApi {
     const json = await response.json()
     if (this.rowAdded) {
       expect(response).toHaveStatusCode(200)
-      expect(json.length).toEqual(1)
+      expect(json.length).toBeGreaterThanOrEqual(1)
     }
     return [response, json]
   }

@@ -36,4 +36,27 @@ export default class RowsApi {
     expect(response).toHaveStatusCode(200)
     return [response, json]
   }
+
+  async searchNoPagination(
+    tableId: string,
+    body: any
+  ): Promise<[Response, Row[]]> {
+    const response = await this.api.post(`/${tableId}/search`, { body })
+    const json = await response.json()
+    expect(response).toHaveStatusCode(200)
+    expect(json.hasNextPage).toEqual(false)
+    return [response, json.rows]
+  }
+
+  async searchWithPagination(
+    tableId: string,
+    body: any
+  ): Promise<[Response, Row[]]> {
+    const response = await this.api.post(`/${tableId}/search`, { body })
+    const json = await response.json()
+    expect(response).toHaveStatusCode(200)
+    expect(json.hasNextPage).toEqual(true)
+    expect(json.rows.length).toEqual(10)
+    return [response, json.rows]
+  }
 }
@@ -6,3 +6,27 @@ export const generateNewRowForTable = (tableId: string): Row => {
     tableId: tableId,
   }
 }
+
+export const searchBody = (primaryDisplay: string): any => {
+  return {
+    bookmark: null,
+    limit: 10,
+    paginate: true,
+    query: {
+      contains: {},
+      containsAny: {},
+      empty: {},
+      equal: {},
+      fuzzy: {},
+      notContains: {},
+      notEmpty: {},
+      notEqual: {},
+      oneOf: {},
+      range: {},
+      string: {},
+    },
+    sort: primaryDisplay,
+    sortOrder: "ascending",
+    sortType: "string",
+  }
+}
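A short sketch of how this fixture pairs with the new RowsApi helpers added above. The helper names come from this diff; `tableId` and `config` are assumed from the internal-API test harness.

    // Build a 10-per-page search payload sorted on the table's primary display column.
    const body = searchBody("name")

    // Expects a single page of results: hasNextPage === false.
    await config.rows.searchNoPagination(tableId, body)

    // Expects a full page of 10 rows with hasNextPage === true.
    await config.rows.searchWithPagination(tableId, body)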
@@ -6,10 +6,6 @@ export const generateTable = (): Table => {
     schema: {},
     sourceId: "bb_internal",
     type: "internal",
-    dataImport: {
-      valid: true,
-      schema: {},
-    },
   }
 }
@@ -69,7 +69,7 @@ describe("Internal API - Application creation, update, publish and delete", () => {
     await config.applications.unpublish(<string>app.appId)
   })

-  it("POST - Sync application before deployment", async () => {
+  it("Sync application before deployment", async () => {
     const app = await config.applications.create(generateApp())
     config.applications.api.appId = app.appId

@@ -81,7 +81,7 @@ describe("Internal API - Application creation, update, publish and delete", () => {
     })
   })

-  it("POST - Sync application after deployment", async () => {
+  it("Sync application after deployment", async () => {
     const app = await config.applications.create(generateApp())
     config.applications.api.appId = app.appId

@@ -96,24 +96,32 @@ describe("Internal API - Application creation, update, publish and delete", () => {
     })
   })

-  it("PUT - Update an application", async () => {
+  it("Rename an application", async () => {
     const app = await config.applications.create(generateApp())

     config.applications.api.appId = app.appId

-    await config.applications.update(<string>app.appId, <string>app.name, {
+    await config.applications.rename(<string>app.appId, <string>app.name, {
       name: generator.word(),
     })
   })

-  it("POST - Revert Changes without changes", async () => {
+  it("Update the icon and color of an application", async () => {
+    const app = await config.applications.create(generateApp())
+
+    config.applications.api.appId = app.appId
+
+    await config.applications.updateIcon(<string>app.appId)
+  })
+
+  it("Revert Changes without changes", async () => {
     const app = await config.applications.create(generateApp())
     config.applications.api.appId = app.appId

     await config.applications.revertUnpublished(<string>app.appId)
   })

-  it("POST - Revert Changes", async () => {
+  it("Revert Changes", async () => {
     const app = await config.applications.create(generateApp())
     config.applications.api.appId = app.appId

@@ -126,11 +134,12 @@ describe("Internal API - Application creation, update, publish and delete", () => {
     // // Revert the app to published state
     await config.applications.revertPublished(<string>app.appId)

+    await config.applications.unlock(<string>app.appId)
     // Check screen is removed
     await config.applications.getRoutes()
   })

-  it("DELETE - Delete an application", async () => {
+  it("Delete an application", async () => {
     const app = await config.applications.create(generateApp())

     await config.applications.delete(<string>app.appId)
@@ -21,7 +21,7 @@ describe("Internal API - /screens endpoints", () => {
     await config.afterAll()
   })

-  it("POST - Create a screen with each role type", async () => {
+  it("Create a screen with each role type", async () => {
     // Create app
     const app = await appConfig.applications.create(generateApp())

@@ -35,7 +35,7 @@ describe("Internal API - /screens endpoints", () => {
     }
   })

-  it("GET - Fetch screens", async () => {
+  it("Get screens", async () => {
     // Create app
     const app = await appConfig.applications.create(generateApp())

@@ -47,7 +47,7 @@ describe("Internal API - /screens endpoints", () => {
     await appConfig.applications.getRoutes(true)
   })

-  it("DELETE - Delete a screen", async () => {
+  it("Delete a screen", async () => {
     // Create app
     const app = await appConfig.applications.create(generateApp())
@@ -6,9 +6,12 @@ import {
   generateTable,
   generateNewColumnForTable,
 } from "../../../config/internal-api/fixtures/table"
-import { generateNewRowForTable } from "../../../config/internal-api/fixtures/rows"
+import {
+  generateNewRowForTable,
+  searchBody,
+} from "../../../config/internal-api/fixtures/rows"

-describe("Internal API - Application creation, update, publish and delete", () => {
+describe("Internal API - Table Operations", () => {
   const api = new InternalAPIClient()
   const config = new TestConfiguration<Application>(api)

@@ -31,7 +34,7 @@ describe("Internal API - Application creation, update, publish and delete", () => {
       })
     }

-  it("Operations on Tables", async () => {
+  it("Create and delete table, columns and rows", async () => {
     // create the app
     const appName = generator.word()
     const app = await createAppFromTemplate()

@@ -86,4 +89,70 @@ describe("Internal API - Application creation, update, publish and delete", () => {
     //Table was deleted
     await config.tables.getAll(2)
   })
+
+  it("Search and pagination", async () => {
+    // create the app
+    const appName = generator.word()
+    const app = await createAppFromTemplate()
+    config.applications.api.appId = app.appId
+
+    // Get current tables: expect 2 in this template
+    await config.tables.getAll(2)
+
+    // Add new table
+    const [createdTableResponse, createdTableData] = await config.tables.save(
+      generateTable()
+    )
+
+    //Table was added
+    await config.tables.getAll(3)
+
+    //Get information about the table
+    await config.tables.getTableById(<string>createdTableData._id)
+
+    //Add Column to table
+    const newColumn = generateNewColumnForTable(createdTableData)
+    const [addColumnResponse, addColumnData] = await config.tables.save(
+      newColumn,
+      true
+    )
+
+    //Add Row to table
+    let newRow = generateNewRowForTable(<string>addColumnData._id)
+    await config.rows.add(<string>addColumnData._id, newRow)
+
+    //Search single row
+    await config.rows.searchNoPagination(
+      <string>createdTableData._id,
+      searchBody(<string>createdTableData.primaryDisplay)
+    )
+
+    //Add 10 more rows
+    for (let i = 0; i < 10; i++) {
+      let newRow = generateNewRowForTable(<string>addColumnData._id)
+      await config.rows.add(<string>addColumnData._id, newRow)
+    }
+
+    //Search rows with pagination
+    const [allRowsResponse, allRowsJson] =
+      await config.rows.searchWithPagination(
+        <string>createdTableData._id,
+        searchBody(<string>createdTableData.primaryDisplay)
+      )
+
+    //Delete Rows from table
+    const rowToDelete = {
+      rows: [allRowsJson],
+    }
+    const [deleteRowResponse, deleteRowData] = await config.rows.delete(
+      <string>createdTableData._id,
+      rowToDelete
+    )
+
+    //Search single row
+    await config.rows.searchWithPagination(
+      <string>createdTableData._id,
+      searchBody(<string>createdTableData.primaryDisplay)
+    )
+  })
 })
@@ -28,9 +28,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
   })

   it("Add BASIC user to app", async () => {
+    // Create a user with BASIC role and check if it was created successfully
     const appUser = generateUser()
     expect(appUser[0].builder?.global).toEqual(false)
     expect(appUser[0].admin?.global).toEqual(false)

+    // Add the user to the tenant.
     const [createUserResponse, createUserJson] = await config.users.addMultiple(
       appUser
     )

@@ -38,9 +41,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     const app = await config.applications.create(appFromTemplate())
     config.applications.api.appId = app.appId

+    // Get all the information from the create user
     const [userInfoResponse, userInfoJson] = await config.users.getInfo(
       createUserJson.created.successful[0]._id
     )

+    // Create the body with the information from the user and add the role to the app
     const body: User = {
       ...userInfoJson,
       roles: {

@@ -49,6 +55,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     }
     await config.users.updateInfo(body)

+    // Get the user information again and check if the role was added
     const [changedUserInfoResponse, changedUserInfoJson] =
       await config.users.getInfo(createUserJson.created.successful[0]._id)
     expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()

@@ -56,6 +63,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
   })

   it("Add ADMIN user to app", async () => {
+    // Create a user with ADMIN role and check if it was created successfully
     const adminUser = generateUser(1, "admin")
     expect(adminUser[0].builder?.global).toEqual(true)
     expect(adminUser[0].admin?.global).toEqual(true)

@@ -63,15 +71,15 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       adminUser
     )

-    //const app = await config.applications.create(generateApp())
-    //config.applications.api.appId = app.appId
-
     const app = await config.applications.create(appFromTemplate())
     config.applications.api.appId = app.appId

+    // Get all the information from the create user
     const [userInfoResponse, userInfoJson] = await config.users.getInfo(
       createUserJson.created.successful[0]._id
     )

+    // Create the body with the information from the user and add the role to the app
     const body: User = {
       ...userInfoJson,
       roles: {

@@ -80,6 +88,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     }
     await config.users.updateInfo(body)

+    // Get the user information again and check if the role was added
     const [changedUserInfoResponse, changedUserInfoJson] =
       await config.users.getInfo(createUserJson.created.successful[0]._id)
     expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()

@@ -93,9 +102,9 @@ describe("Internal API - App Specific Roles & Permissions", () => {
   })

   it("Add POWER user to app", async () => {
+    // Create a user with POWER role and check if it was created successfully
     const powerUser = generateUser(1, "developer")
     expect(powerUser[0].builder?.global).toEqual(true)

     const [createUserResponse, createUserJson] = await config.users.addMultiple(
       powerUser
     )

@@ -103,9 +112,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     const app = await config.applications.create(generateApp())
     config.applications.api.appId = app.appId

+    // Get all the information from the create user
     const [userInfoResponse, userInfoJson] = await config.users.getInfo(
       createUserJson.created.successful[0]._id
     )

+    // Create the body with the information from the user and add the role to the app
     const body: User = {
       ...userInfoJson,
       roles: {

@@ -114,6 +126,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     }
     await config.users.updateInfo(body)

+    // Get the user information again and check if the role was added
     const [changedUserInfoResponse, changedUserInfoJson] =
       await config.users.getInfo(createUserJson.created.successful[0]._id)
     expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()

@@ -122,6 +135,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {

   describe("Check Access for default roles", () => {
     it("Check Table access for app user", async () => {
+      // Create a user with BASIC role and check if it was created successfully
       const appUser = generateUser()
       expect(appUser[0].builder?.global).toEqual(false)
       expect(appUser[0].admin?.global).toEqual(false)

@@ -131,9 +145,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       const app = await config.applications.create(generateApp())
       config.applications.api.appId = app.appId

+      // Get all the information from the create user
       const [userInfoResponse, userInfoJson] = await config.users.getInfo(
         createUserJson.created.successful[0]._id
       )

+      // Create the body with the information from the user and add the role to the app
       const body: User = {
         ...userInfoJson,
         roles: {

@@ -142,14 +159,18 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       }
       await config.users.updateInfo(body)

+      // Get the user information again and check if the role was added
       const [changedUserInfoResponse, changedUserInfoJson] =
         await config.users.getInfo(createUserJson.created.successful[0]._id)
       expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()
       expect(changedUserInfoJson.roles[<string>app.appId]).toEqual("BASIC")

+      // Create a table
       const [createdTableResponse, createdTableData] = await config.tables.save(
         generateTable()
       )

+      // Login with the user created and try to create a column
       await config.login(<string>appUser[0].email, <string>appUser[0].password)
       const newColumn = generateNewColumnForTable(createdTableData)
       await config.tables.forbiddenSave(newColumn)

@@ -157,6 +178,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     })

     it("Check Table access for developer", async () => {
+      // Create a user with POWER role and check if it was created successfully
       const developer = generateUser(1, "developer")
       expect(developer[0].builder?.global).toEqual(true)

@@ -166,9 +188,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       const app = await config.applications.create(generateApp())
       config.applications.api.appId = app.appId

+      // Get all the information from the create user
       const [userInfoResponse, userInfoJson] = await config.users.getInfo(
         createUserJson.created.successful[0]._id
       )

+      // Create the body with the information from the user and add the role to the app
       const body: User = {
         ...userInfoJson,
         roles: {

@@ -177,14 +202,18 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       }
       await config.users.updateInfo(body)

+      // Get the user information again and check if the role was added
       const [changedUserInfoResponse, changedUserInfoJson] =
         await config.users.getInfo(createUserJson.created.successful[0]._id)
       expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()
       expect(changedUserInfoJson.roles[<string>app.appId]).toEqual("POWER")

+      // Create a table
       const [createdTableResponse, createdTableData] = await config.tables.save(
         generateTable()
       )

+      // Login with the user created and try to create a column
       await config.login(
         <string>developer[0].email,
         <string>developer[0].password

@@ -197,6 +226,7 @@ describe("Internal API - App Specific Roles & Permissions", () => {
     })

     it("Check Table access for admin", async () => {
+      // Create a user with ADMIN role and check if it was created successfully
       const adminUser = generateUser(1, "admin")
       expect(adminUser[0].builder?.global).toEqual(true)
       expect(adminUser[0].admin?.global).toEqual(true)

@@ -206,9 +236,12 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       const app = await config.applications.create(generateApp())
       config.applications.api.appId = app.appId

+      // Get all the information from the create user
       const [userInfoResponse, userInfoJson] = await config.users.getInfo(
         createUserJson.created.successful[0]._id
       )

+      // Create the body with the information from the user and add the role to the app
       const body: User = {
         ...userInfoJson,
         roles: {

@@ -217,11 +250,13 @@ describe("Internal API - App Specific Roles & Permissions", () => {
       }
       await config.users.updateInfo(body)

+      // Get the user information again and check if the role was added
       const [changedUserInfoResponse, changedUserInfoJson] =
         await config.users.getInfo(createUserJson.created.successful[0]._id)
       expect(changedUserInfoJson.roles[<string>app.appId]).toBeDefined()
       expect(changedUserInfoJson.roles[<string>app.appId]).toEqual("ADMIN")

+      // Login with the created user and create a table
       await config.login(
         <string>adminUser[0].email,
         <string>adminUser[0].password
@@ -18,9 +18,13 @@ describe("Internal API - User Management & Permissions", () => {
   })

   it("Add Users with different roles", async () => {
+    // Get all users
     await config.users.search()
+
+    // Get all roles
     await config.users.getRoles()

+    // Add users with each role
     const admin = generateUser(1, "admin")
     expect(admin[0].builder?.global).toEqual(true)
     expect(admin[0].admin?.global).toEqual(true)

@@ -34,6 +38,7 @@ describe("Internal API - User Management & Permissions", () => {

     await config.users.addMultiple(userList)

+    // Check users are added
     const [allUsersResponse, allUsersJson] = await config.users.getAll()
     expect(allUsersJson.length).toBeGreaterThan(0)
   })