Merge branch 'master' of github.com:Budibase/budibase into develop
This commit is contained in:
commit 14806809e8
@@ -55,7 +55,7 @@ http {
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
-set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
+set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
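The per-directive nginx variables above are typically joined into one Content-Security-Policy header value elsewhere in this config. A minimal TypeScript sketch of that composition, useful for eyeballing the final header; the directive strings are abbreviated and the join logic is an assumption about the surrounding config, not taken from this hunk:

// Assemble a Content-Security-Policy value from per-directive parts,
// mirroring how the nginx config composes its $csp_* variables.
const cspParts: string[] = [
  "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net",
  "object-src 'none'",
  "base-uri 'self'",
  // the change above widens connect-src with the region-less wildcard:
  "connect-src 'self' https://*.budibase.net https://*.amazonaws.com",
  "img-src http: https: data: blob:",
]

// Directives are separated by "; " in the final header value.
const cspHeader = cspParts.join("; ")
console.log(cspHeader)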
@@ -82,6 +82,12 @@ http {
set $couchdb ${COUCHDB_UPSTREAM_URL};
set $watchtower ${WATCHTOWER_UPSTREAM_URL};

+location /health {
+  access_log off;
+  add_header 'Content-Type' 'application/json';
+  return 200 '{ "status": "OK" }';
+}
+
location /app {
  proxy_pass $apps;
}
@@ -1,5 +1,5 @@
{
-  "version": "2.6.8-alpha.14",
+  "version": "2.6.15",
  "npmClient": "yarn",
  "packages": [
    "packages/backend-core",
@@ -31,4 +31,4 @@
"loadEnvFiles": false
}
}
}
}
@@ -69,10 +69,10 @@ function findVersion() {
  try {
    const packageJsonFile = findFileInAncestors("package.json", process.cwd())
    const content = readFileSync(packageJsonFile!, "utf-8")
-    const version = JSON.parse(content).version
-    return version
+    return JSON.parse(content).version
  } catch {
-    throw new Error("Cannot find a valid version in its package.json")
+    // throwing an error here is confusing/causes backend-core to be hard to import
+    return undefined
  }
}
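The lookup above leans on a findFileInAncestors helper that walks up the directory tree until it finds a package.json. A minimal sketch of such a helper, assuming typical Node path semantics; the name comes from the diff but the body here is illustrative, not the repo's implementation:

import { existsSync } from "fs"
import { dirname, join } from "path"

// Walk up from `start` until a directory containing `fileName` is found.
function findFileInAncestors(fileName: string, start: string): string | undefined {
  let dir = start
  while (true) {
    const candidate = join(dir, fileName)
    if (existsSync(candidate)) {
      return candidate
    }
    const parent = dirname(dir)
    if (parent === dir) {
      return undefined // reached the filesystem root without a match
    }
    dir = parent
  }
}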
@@ -95,7 +95,7 @@ const environment = {
  GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,
  SALT_ROUNDS: process.env.SALT_ROUNDS,
  REDIS_URL: process.env.REDIS_URL || "localhost:6379",
-  REDIS_PASSWORD: process.env.REDIS_PASSWORD || "budibase",
+  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,
  MOCK_REDIS: process.env.MOCK_REDIS,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -23,6 +23,7 @@
  export let disableSorting = false
+  export let customPlaceholder = false
  export let allowClickRows
  export let allowEditing = true

  const dispatch = createEventDispatcher()

@@ -110,6 +111,7 @@
  {rowCount}
  {disableSorting}
+  {customPlaceholder}
  allowEditRows={allowEditing}
  showAutoColumns={!hideAutocolumns}
  {allowClickRows}
  on:clickrelationship={e => selectRelationship(e.detail)}
@@ -58,6 +58,7 @@
  {loading}
  {type}
  rowCount={10}
  allowEditing={false}
  bind:hideAutocolumns
>
  <ViewFilterButton {view} />
@@ -107,8 +107,9 @@
    useSampleData,
    isGoogle,
  }) => {
+    let app
    try {
-      const app = await createApp(useSampleData)
+      app = await createApp(useSampleData)

      let datasource
      if (datasourceConfig) {
@@ -134,6 +135,17 @@
      console.log(e)
      creationLoading = false
      notifications.error("There was a problem creating your app")
+
+      // Reset the store so that we don't send up stale headers
+      store.actions.reset()
+
+      // If we successfully created an app, delete it again so that we
+      // can try again once the error has been corrected.
+      // This also ensures onboarding can't be skipped by entering invalid
+      // data credentials.
+      if (app?.appId) {
+        await API.deleteApp(app.appId)
+      }
    }
  }
</script>
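The catch block now follows a create-then-roll-back pattern: the created resource lives in a variable scoped outside the try, so the error path can delete a half-created app and the flow can be retried cleanly. A condensed sketch of the pattern with hypothetical createApp/deleteApp stand-ins (not the real API surface):

// Hypothetical stand-ins for the API calls used in this diff.
declare function createApp(useSampleData: boolean): Promise<{ appId: string }>
declare function deleteApp(appId: string): Promise<void>

async function handleCreate(useSampleData: boolean) {
  // Declared outside try so the catch block can still see it.
  let app: { appId: string } | undefined
  try {
    app = await createApp(useSampleData)
    // ... further setup that may throw ...
  } catch (e) {
    // Roll back the partially created app so the flow can be retried.
    if (app?.appId) {
      await deleteApp(app.appId)
    }
    throw e
  }
}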
@@ -146,80 +158,87 @@
/>
</Modal>

-<SplitPage>
-{#if stage === "name"}
-<NamePanel bind:name bind:url onNext={() => (stage = "data")} />
-{:else if googleComplete}
-<div class="centered">
-<Body
->Please login to your Google account in the new tab which as opened to
-continue.</Body
->
-</div>
-{:else if integrationsLoading || creationLoading}
-<div class="centered">
-<Spinner />
-</div>
-{:else if stage === "data"}
-<DataPanel onBack={() => (stage = "name")}>
-<div class="dataButton">
-<FancyButton on:click={() => handleCreateApp({ useSampleData: true })}>
-<div class="dataButtonContent">
-<div class="dataButtonIcon">
-<img
-alt="Budibase Logo"
-class="budibaseLogo"
-src={"https://i.imgur.com/Xhdt1YP.png"}
-/>
-</div>
-Budibase Sample data
-</div>
-</FancyButton>
+<div class="full-width">
+<SplitPage>
+{#if stage === "name"}
+<NamePanel bind:name bind:url onNext={() => (stage = "data")} />
+{:else if googleComplete}
+<div class="centered">
+<Body
+>Please login to your Google account in the new tab which as opened to
+continue.</Body
+>
+</div>
-<div class="dataButton">
-<FancyButton on:click={uploadModal.show}>
-<div class="dataButtonContent">
-<div class="dataButtonIcon">
-<FontAwesomeIcon name="fa-solid fa-file-arrow-up" />
-</div>
-Upload data (CSV or JSON)
-</div>
-</FancyButton>
+{:else if integrationsLoading || creationLoading}
+<div class="centered">
+<Spinner />
+</div>
-{#each Object.entries(plusIntegrations) as [integrationType, schema]}
+{:else if stage === "data"}
+<DataPanel onBack={() => (stage = "name")}>
+<div class="dataButton">
-<FancyButton on:click={() => (stage = integrationType)}>
+<FancyButton
+on:click={() => handleCreateApp({ useSampleData: true })}
+>
+<div class="dataButtonContent">
+<div class="dataButtonIcon">
-<IntegrationIcon {integrationType} {schema} />
+<img
+alt="Budibase Logo"
+class="budibaseLogo"
+src={"https://i.imgur.com/Xhdt1YP.png"}
+/>
+</div>
-{schema.friendlyName}
+Budibase Sample data
</div>
</FancyButton>
</div>
-{/each}
-</DataPanel>
-{:else if stage in plusIntegrations}
-<DatasourceConfigPanel
-title={plusIntegrations[stage].friendlyName}
-fields={plusIntegrations[stage].datasource}
-type={stage}
-onBack={() => (stage = "data")}
-onNext={data => {
-const isGoogle = data.isGoogle
-delete data.isGoogle
-return handleCreateApp({ datasourceConfig: data, isGoogle })
-}}
-/>
-{:else}
-<p>There was an problem. Please refresh the page and try again.</p>
-{/if}
-<div slot="right">
-<ExampleApp {name} showData={stage !== "name"} />
-</div>
-</SplitPage>
+<div class="dataButton">
+<FancyButton on:click={uploadModal.show}>
+<div class="dataButtonContent">
+<div class="dataButtonIcon">
+<FontAwesomeIcon name="fa-solid fa-file-arrow-up" />
+</div>
+Upload data (CSV or JSON)
+</div>
+</FancyButton>
+</div>
+{#each Object.entries(plusIntegrations) as [integrationType, schema]}
+<div class="dataButton">
+<FancyButton on:click={() => (stage = integrationType)}>
+<div class="dataButtonContent">
+<div class="dataButtonIcon">
+<IntegrationIcon {integrationType} {schema} />
+</div>
+{schema.friendlyName}
+</div>
+</FancyButton>
+</div>
+{/each}
+</DataPanel>
+{:else if stage in plusIntegrations}
+<DatasourceConfigPanel
+title={plusIntegrations[stage].friendlyName}
+fields={plusIntegrations[stage].datasource}
+type={stage}
+onBack={() => (stage = "data")}
+onNext={data => {
+const isGoogle = data.isGoogle
+delete data.isGoogle
+return handleCreateApp({ datasourceConfig: data, isGoogle })
+}}
+/>
+{:else}
+<p>There was an problem. Please refresh the page and try again.</p>
+{/if}
+<div slot="right">
+<ExampleApp {name} showData={stage !== "name"} />
+</div>
+</SplitPage>
+</div>

<style>
+.full-width {
+width: 100%;
+}
.centered {
display: flex;
justify-content: center;
@@ -2,9 +2,9 @@
  "name": "@budibase/cli",
  "version": "0.0.1",
  "description": "Budibase CLI, for developers, self hosting and migrations.",
-  "main": "dist/index.js",
+  "main": "dist/src/index.js",
  "bin": {
-    "budi": "dist/index.js"
+    "budi": "dist/src/index.js"
  },
  "author": "Budibase",
  "license": "GPL-3.0",
@@ -2,17 +2,17 @@
process.env.DISABLE_PINO_LOGGER = "1"
import "./prebuilds"
import "./environment"
-import { env } from "@budibase/backend-core"
import { getCommands } from "./options"
import { Command } from "commander"
import { getHelpDescription } from "./utils"
+import { version } from "../package.json"

// add hosting config
async function init() {
  const program = new Command()
    .addHelpCommand("help", getHelpDescription("Help with Budibase commands."))
    .helpOption(false)
-    .version(env.VERSION)
+    .version(version)
  // add commands
  for (let command of getCommands()) {
    command.configure(program)
@@ -13,7 +13,7 @@ if (!process.argv[0].includes("node")) {
}

function checkForBinaries() {
-  const readDir = join(__filename, "..", "..", PREBUILDS, ARCH)
+  const readDir = join(__filename, "..", "..", "..", PREBUILDS, ARCH)
  if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
    return
  }
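The extra ".." tracks the build output moving from dist/index.js to dist/src/index.js (see the package.json hunk above): the compiled file now sits one directory deeper, so reaching the prebuilds directory needs one more step up. A small sketch of how join(__filename, "..") resolves, using Node's path semantics and a hypothetical /app install location:

import { join } from "path"

// join(__filename, "..") is the file's directory; each extra ".." climbs one level.
// Old layout, compiled file at dist/index.js:
//   join("/app/dist/index.js", "..", "..")             -> "/app"
// New layout, compiled file at dist/src/index.js:
//   join("/app/dist/src/index.js", "..", "..", "..")   -> "/app"
console.log(join("/app/dist/src/index.js", "..", "..", "..")) // "/app"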
@@ -5,6 +5,7 @@
    "declaration": true,
    "sourceMap": true,
    "baseUrl": ".",
+    "resolveJsonModule": true,
    "paths": {
      "@budibase/types": ["../types/src"],
      "@budibase/backend-core": ["../backend-core/src"],
@@ -16,6 +17,6 @@
    "swc": true
  },
  "references": [{ "path": "../types" }, { "path": "../backend-core" }],
-  "include": ["src/**/*"],
+  "include": ["src/**/*", "package.json"],
  "exclude": ["node_modules", "dist"]
}
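These two tsconfig changes work together: resolveJsonModule lets TypeScript type-check JSON imports, and adding package.json to "include" makes it part of the compilation, which is what allows the earlier cli hunk to read its version straight from the manifest. Minimal example of the enabled import:

// Requires "resolveJsonModule": true and package.json covered by "include".
import { version } from "../package.json"

console.log(`budi ${version}`)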
@@ -118,8 +118,11 @@ export async function patch(ctx: UserCtx) {
    combinedRow[key] = inputs[key]
  }

+  // need to copy the table so it can be differenced on way out
+  const tableClone = cloneDeep(dbTable)
+
  // this returns the table and row incase they have been updated
-  let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow)
+  let { table, row } = inputProcessing(ctx.user, tableClone, combinedRow)
  const validateResult = await utils.validate({
    row,
    table,
@@ -163,7 +166,12 @@ export async function save(ctx: UserCtx) {

  // this returns the table and row incase they have been updated
  const dbTable = await db.get(inputs.tableId)
-  let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
+
+  // need to copy the table so it can be differenced on way out
+  const tableClone = cloneDeep(dbTable)
+
+  let { table, row } = inputProcessing(ctx.user, tableClone, inputs)

  const validateResult = await utils.validate({
    row,
    table,
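Both patch and save now hand inputProcessing a clone rather than the document fetched from the DB, so the original stays pristine and can be compared ("differenced") against the possibly mutated copy afterwards. A minimal sketch of the idea with a simplified table shape; the processAutoColumns body here is illustrative, not the real implementation:

// Simplified stand-in for a table document whose schema a processor may mutate.
type Table = { _id: string; schema: Record<string, { lastID?: number }> }

function processAutoColumns(table: Table) {
  // e.g. incrementing an auto ID counter mutates the schema in place
  table.schema["Row ID"] = { lastID: (table.schema["Row ID"]?.lastID ?? 0) + 1 }
  return table
}

const dbTable: Table = { _id: "ta_1", schema: {} }

// Clone first, so dbTable still reflects what is stored in the database.
const tableClone = structuredClone(dbTable)
const updated = processAutoColumns(tableClone)

// The untouched original can now be differenced against the result
// to decide whether the table document needs to be written back.
const changed = JSON.stringify(updated) !== JSON.stringify(dbTable)
console.log(changed) // true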
@@ -97,6 +97,7 @@ export async function bulkImport(ctx: UserCtx) {
  // right now we don't trigger anything for bulk import because it
  // can only be done in the builder, but in the future we may need to
  // think about events for bulk items

+  ctx.status = 200
  ctx.body = { message: `Bulk rows created.` }
}
@@ -184,8 +184,13 @@ export async function destroy(ctx: any) {
}

export async function bulkImport(ctx: any) {
+  const db = context.getAppDB()
  const table = await sdk.tables.getTable(ctx.params.tableId)
  const { rows } = ctx.request.body
  await handleDataImport(ctx.user, table, rows)
+
+  // Ensure auto id and other table updates are persisted
+  await db.put(table)
+
  return table
}
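handleDataImport bumps auto-column state (such as the lastID counter) on the table object it is given, so the new db.put(table) writes that updated document back; without it, the counter would be stale on the next import. A sketch under the assumption that the import mutates table.schema in place; the shapes are hypothetical stand-ins for the real @budibase/types:

// Hypothetical minimal shapes for illustration only.
type AutoColumn = { autocolumn: true; lastID: number }
type Table = { _id: string; _rev?: string; schema: Record<string, AutoColumn> }

declare const db: { put(doc: Table): Promise<{ rev: string }> }

async function importRows(table: Table, rows: object[]) {
  for (const _row of rows) {
    // each imported row consumes the next auto ID
    table.schema["Auto ID"].lastID += 1
  }
  // Persist the incremented counter, otherwise the next import
  // would start numbering from the stale stored value again.
  await db.put(table)
  return table
}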
@@ -129,17 +129,17 @@ export function importToRows(
  // the real schema of the table passed in, not the clone used for
  // incrementing auto IDs
  for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
+    const rowVal = Array.isArray(row[fieldName])
+      ? row[fieldName]
+      : [row[fieldName]]
    if (
      (schema.type === FieldTypes.OPTIONS ||
        schema.type === FieldTypes.ARRAY) &&
-      row[fieldName] &&
-      (!schema.constraints!.inclusion ||
-        schema.constraints!.inclusion.indexOf(row[fieldName]) === -1)
+      row[fieldName]
    ) {
-      schema.constraints!.inclusion = [
-        ...schema.constraints!.inclusion!,
-        row[fieldName],
-      ]
+      let merged = [...schema.constraints!.inclusion!, ...rowVal]
+      let superSet = new Set(merged)
+      schema.constraints!.inclusion = Array.from(superSet)
+      schema.constraints!.inclusion.sort()
    }
  }
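The new inclusion logic accepts both scalar and array cell values, merges them into the existing inclusion list, and deduplicates with a Set before sorting. A standalone sketch of that merge, matching the expectation in the test hunk further down:

// Merge incoming option values into a constraint's inclusion list,
// deduplicating via a Set, as the new importToRows logic does.
function mergeInclusion(inclusion: string[], cellValue: string | string[]): string[] {
  const rowVal = Array.isArray(cellValue) ? cellValue : [cellValue]
  const merged = [...inclusion, ...rowVal]
  const result = Array.from(new Set(merged))
  result.sort()
  return result
}

console.log(mergeInclusion(["One", "Two", "Three"], ["Two", "Four"]))
// -> ["Four", "One", "Three", "Two"]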
@@ -42,13 +42,17 @@ if (!env.isTest()) {
      host: REDIS_OPTS.host,
      port: REDIS_OPTS.port,
    },
-    password:
-      REDIS_OPTS.opts.password || REDIS_OPTS.opts.redisOptions.password,
  }

+  if (REDIS_OPTS.opts?.password || REDIS_OPTS.opts.redisOptions?.password) {
+    // @ts-ignore
+    options.password =
+      REDIS_OPTS.opts.password || REDIS_OPTS.opts.redisOptions.password
+  }
+
  if (!env.REDIS_CLUSTERED) {
-    // Can't set direct redis db in clustered env
    // @ts-ignore
+    // Can't set direct redis db in clustered env
    options.database = 1
  }
}
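Assigning password unconditionally meant an undefined value still landed in the client options (and, with the environment hunk above dropping the "budibase" fallback, it now can be undefined). Building the options first and attaching optional fields only when present avoids sending an empty AUTH. A condensed sketch of the guard, with simplified option types:

// Attach optional fields only when configured, so `password: undefined`
// never reaches the redis client.
type RedisOptions = { host: string; port: number; password?: string; database?: number }

function buildOptions(host: string, port: number, password?: string, clustered = false) {
  const options: RedisOptions = { host, port }
  if (password) {
    options.password = password
  }
  if (!clustered) {
    // can't select a specific db in a clustered deployment
    options.database = 1
  }
  return options
}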
@@ -73,18 +73,97 @@ describe("run misc tests", () => {
          type: "string",
        },
      },
+      e: {
+        name: "Auto ID",
+        type: "number",
+        subtype: "autoID",
+        icon: "ri-magic-line",
+        autocolumn: true,
+        constraints: {
+          type: "number",
+          presence: false,
+          numericality: {
+            greaterThanOrEqualTo: "",
+            lessThanOrEqualTo: "",
+          },
+        },
+      },
+      f: {
+        type: "array",
+        constraints: {
+          type: "array",
+          presence: {
+            "allowEmpty": true
+          },
+          inclusion: [
+            "One",
+            "Two",
+            "Three",
+          ]
+        },
+        name: "Sample Tags",
+        sortable: false
+      },
+      g: {
+        type: "options",
+        constraints: {
+          type: "string",
+          presence: false,
+          inclusion: [
+            "Alpha",
+            "Beta",
+            "Gamma"
+          ]
+        },
+        name: "Sample Opts"
+      }
    },
  })

  // Shift specific row tests to the row spec
  await tableUtils.handleDataImport(
    { userId: "test" },
    table,
-    [{ a: '1', b: '2', c: '3', d: '4'}]
+    [
+      { a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" },
+      { a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined},
+      { a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""},
+      { a: '13', b: '14', c: '15', d: '16', g: "Omega"}
+    ]
  )

+  // 4 rows imported, the auto ID starts at 1
+  // We expect the handleDataImport function to update the lastID
+  expect(table.schema.e.lastID).toEqual(4);
+
+  // Array/Multi - should have added a new value to the inclusion.
+  expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']);
+
+  // Options - should have a new value in the inclusion
+  expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']);
+
  const rows = await config.getRows()
-  expect(rows[0].a).toEqual("1")
-  expect(rows[0].b).toEqual("2")
-  expect(rows[0].c).toEqual("3")
+  expect(rows.length).toEqual(4);
+
+  const rowOne = rows.find(row => row.e === 1)
+  expect(rowOne.a).toEqual("1")
+  expect(rowOne.f).toEqual(['One'])
+  expect(rowOne.g).toEqual('Alpha')
+
+  const rowTwo = rows.find(row => row.e === 2)
+  expect(rowTwo.a).toEqual("5")
+  expect(rowTwo.f).toEqual([])
+  expect(rowTwo.g).toEqual(undefined)
+
+  const rowThree = rows.find(row => row.e === 3)
+  expect(rowThree.a).toEqual("9")
+  expect(rowThree.f).toEqual(['Two','Four'])
+  expect(rowThree.g).toEqual(null)
+
+  const rowFour = rows.find(row => row.e === 4)
+  expect(rowFour.a).toEqual("13")
+  expect(rowFour.f).toEqual(undefined)
+  expect(rowFour.g).toEqual('Omega')
})
})
})
@@ -34,9 +34,9 @@ describe("/rows", () => {
    row = basicRow(table._id)
  })

-  const loadRow = async (id, status = 200) =>
+  const loadRow = async (id, tbl_Id, status = 200) =>
    await request
-      .get(`/api/${table._id}/rows/${id}`)
+      .get(`/api/${tbl_Id}/rows/${id}`)
      .set(config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(status)
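loadRow now takes the table id explicitly instead of closing over the suite's shared table variable, so tests that create their own tables can fetch rows from them. Usage under the new signature, with declared stand-ins so the snippet is self-contained:

declare function loadRow(id: string, tblId: string, status?: number): Promise<{ body: any }>
declare const table: { _id: string }

async function example(rowId: string) {
  // status defaults to 200
  const saved = (await loadRow(rowId, table._id)).body
  // explicit status, e.g. asserting a deleted row now 404s
  await loadRow(rowId, table._id, 404)
  return saved
}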
@@ -79,6 +79,60 @@ describe("/rows", () => {
    await assertQueryUsage(queryUsage + 1)
  })

+  it("Increment row autoId per create row request", async () => {
+    const rowUsage = await getRowUsage()
+    const queryUsage = await getQueryUsage()
+
+    const newTable = await config.createTable({
+      name: "TestTableAuto",
+      type: "table",
+      key: "name",
+      schema: {
+        ...table.schema,
+        "Row ID": {
+          name: "Row ID",
+          type: "number",
+          subtype: "autoID",
+          icon: "ri-magic-line",
+          autocolumn: true,
+          constraints: {
+            type: "number",
+            presence: false,
+            numericality: {
+              greaterThanOrEqualTo: "",
+              lessThanOrEqualTo: "",
+            },
+          },
+        },
+      }
+    })
+
+    const ids = [1,2,3]
+
+    // Performing several create row requests should increment the autoID fields accordingly
+    const createRow = async (id) => {
+      const res = await request
+        .post(`/api/${newTable._id}/rows`)
+        .send({
+          name: "row_" + id
+        })
+        .set(config.defaultHeaders())
+        .expect('Content-Type', /json/)
+        .expect(200)
+      expect(res.res.statusMessage).toEqual(`${newTable.name} saved successfully`)
+      expect(res.body.name).toEqual("row_" + id)
+      expect(res.body._rev).toBeDefined()
+      expect(res.body["Row ID"]).toEqual(id)
+    }
+
+    for (let i=0; i<ids.length; i++ ){
+      await createRow(ids[i])
+    }
+
+    await assertRowUsage(rowUsage + ids.length)
+    await assertQueryUsage(queryUsage + ids.length)
+  })
+
  it("updates a row successfully", async () => {
    const existing = await config.createRow()
    const rowUsage = await getRowUsage()
@@ -182,8 +236,32 @@ describe("/rows", () => {
        type: "string",
        presence: false,
        datetime: { earliest: "", latest: "" },
      },
    }
+    const arrayField = {
+      type: "array",
+      constraints: {
+        type: "array",
+        presence: false,
+        inclusion: [
+          "One",
+          "Two",
+          "Three",
+        ]
+      },
+      name: "Sample Tags",
+      sortable: false
+    }
+    const optsField = {
+      fieldName: "Sample Opts",
+      name: "Sample Opts",
+      type: "options",
+      constraints: {
+        type: "string",
+        presence: false,
+        inclusion: [ "Alpha", "Beta", "Gamma" ]
+      },
+    },

    table = await config.createTable({
      name: "TestTable2",
@@ -212,7 +290,15 @@ describe("/rows", () => {
        attachmentNull: attachment,
        attachmentUndefined: attachment,
        attachmentEmpty: attachment,
-        attachmentEmptyArrayStr: attachment
+        attachmentEmptyArrayStr: attachment,
+        arrayFieldEmptyArrayStr: arrayField,
+        arrayFieldArrayStrKnown: arrayField,
+        arrayFieldNull: arrayField,
+        arrayFieldUndefined: arrayField,
+        optsFieldEmptyStr: optsField,
+        optsFieldUndefined: optsField,
+        optsFieldNull: optsField,
+        optsFieldStrKnown: optsField
      },
    })

@@ -241,11 +327,20 @@ describe("/rows", () => {
      attachmentUndefined: undefined,
      attachmentEmpty: "",
      attachmentEmptyArrayStr: "[]",
+      arrayFieldEmptyArrayStr: "[]",
+      arrayFieldUndefined: undefined,
+      arrayFieldNull: null,
+      arrayFieldArrayStrKnown: "['One']",
+      optsFieldEmptyStr: "",
+      optsFieldUndefined: undefined,
+      optsFieldNull: null,
+      optsFieldStrKnown: 'Alpha'
    }

-    const id = (await config.createRow(row))._id
+    const createdRow = await config.createRow(row);
+    const id = createdRow._id

-    const saved = (await loadRow(id)).body
+    const saved = (await loadRow(id, table._id)).body

    expect(saved.stringUndefined).toBe(undefined)
    expect(saved.stringNull).toBe("")
@@ -270,7 +365,15 @@ describe("/rows", () => {
    expect(saved.attachmentNull).toEqual([])
    expect(saved.attachmentUndefined).toBe(undefined)
    expect(saved.attachmentEmpty).toEqual([])
    expect(saved.attachmentEmptyArrayStr).toEqual([])
+    expect(saved.arrayFieldEmptyArrayStr).toEqual([])
+    expect(saved.arrayFieldNull).toEqual([])
+    expect(saved.arrayFieldUndefined).toEqual(undefined)
+    expect(saved.optsFieldEmptyStr).toEqual(null)
+    expect(saved.optsFieldUndefined).toEqual(undefined)
+    expect(saved.optsFieldNull).toEqual(null)
+    expect(saved.arrayFieldArrayStrKnown).toEqual(['One'])
+    expect(saved.optsFieldStrKnown).toEqual('Alpha')
  })
})

@@ -299,7 +402,7 @@ describe("/rows", () => {
    expect(res.body.name).toEqual("Updated Name")
    expect(res.body.description).toEqual(existing.description)

-    const savedRow = await loadRow(res.body._id)
+    const savedRow = await loadRow(res.body._id, table._id)

    expect(savedRow.body.description).toEqual(existing.description)
    expect(savedRow.body.name).toEqual("Updated Name")
@@ -401,7 +504,7 @@ describe("/rows", () => {
      .expect(200)

    expect(res.body.length).toEqual(2)
-    await loadRow(row1._id, 404)
+    await loadRow(row1._id, table._id, 404)
    await assertRowUsage(rowUsage - 2)
    await assertQueryUsage(queryUsage + 1)
  })
@@ -167,7 +167,10 @@ describe("/tables", () => {

    expect(events.table.created).not.toHaveBeenCalled()
    expect(events.rows.imported).toBeCalledTimes(1)
-    expect(events.rows.imported).toBeCalledWith(table, 1)
+    expect(events.rows.imported).toBeCalledWith(expect.objectContaining({
+      name: "TestTable",
+      _id: table._id
+    }), 1)
  })
})

@@ -137,8 +137,7 @@ export function inputProcessing(
  opts?: AutoColumnProcessingOpts
) {
  let clonedRow = cloneDeep(row)
-  // need to copy the table so it can be differenced on way out
-  const copiedTable = cloneDeep(table)

  const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
  for (let [key, value] of Object.entries(clonedRow)) {
    const field = table.schema[key]
@@ -175,7 +174,7 @@ export function inputProcessing(
  }

  // handle auto columns - this returns an object like {table, row}
-  return processAutoColumn(user, copiedTable, clonedRow, opts)
+  return processAutoColumn(user, table, clonedRow, opts)
}

/**
@@ -2,6 +2,22 @@
import { FieldTypes } from "../../constants"
import { logging } from "@budibase/backend-core"

+const parseArrayString = value => {
+  if (typeof value === "string") {
+    if (value === "") {
+      return []
+    }
+    let result
+    try {
+      result = JSON.parse(value.replace(/'/g, '"'))
+      return result
+    } catch (e) {
+      logging.logAlert("Could not parse row value", e)
+    }
+  }
+  return value
+}
+
/**
 * A map of how we convert various properties in rows to each other based on the row type.
 */
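parseArrayString normalises string-encoded arrays coming from CSV/JSON imports: an empty string becomes an empty array, and single-quoted JSON-ish lists are coerced to valid JSON before parsing. Expected behaviour, by example (assuming the implementation in the hunk above):

declare function parseArrayString(value: any): any // defined in the hunk above

parseArrayString("")                    // -> []
parseArrayString("[]")                  // -> []
parseArrayString("['One']")             // -> ["One"]  (single quotes coerced to valid JSON)
parseArrayString("['Two','Four']")      // -> ["Two", "Four"]
parseArrayString(["already", "array"])  // non-string input passes through unchanged
// caveat: a value containing a literal apostrophe would break the quote
// replacement and fall through to the logged parse failure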
@@ -26,9 +42,9 @@ export const TYPE_TRANSFORM_MAP: any = {
    [undefined]: undefined,
  },
  [FieldTypes.ARRAY]: {
-    "": [],
    [null]: [],
    [undefined]: undefined,
+    parse: parseArrayString,
  },
  [FieldTypes.STRING]: {
    "": "",
@@ -70,21 +86,7 @@ export const TYPE_TRANSFORM_MAP: any = {
  [FieldTypes.ATTACHMENT]: {
    [null]: [],
    [undefined]: undefined,
-    parse: attachments => {
-      if (typeof attachments === "string") {
-        if (attachments === "") {
-          return []
-        }
-        let result
-        try {
-          result = JSON.parse(attachments)
-        } catch (e) {
-          logging.logAlert("Could not parse attachments", e)
-        }
-        return result
-      }
-      return attachments
-    },
+    parse: parseArrayString,
  },
  [FieldTypes.BOOLEAN]: {
    "": null,
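A TYPE_TRANSFORM_MAP entry maps specific incoming values ("", null, undefined) to canonical ones and optionally supplies a parse function for everything else. A simplified sketch of how such a map could be applied per field; this is an illustration of the mechanism, not the exact Budibase implementation:

type Transform = Record<string, any> & { parse?: (v: any) => any }

// Apply a transform entry the way row input processing consumes it:
// exact-match keys first, then the parse fallback.
function applyTransform(transform: Transform, value: any) {
  const key = String(value)
  if (Object.prototype.hasOwnProperty.call(transform, key) && key !== "parse") {
    return transform[key]
  }
  return transform.parse ? transform.parse(value) : value
}

const arrayTransform: Transform = {
  [String(null)]: [],
  parse: (v: any) => (typeof v === "string" ? JSON.parse(v.replace(/'/g, '"')) : v),
}

console.log(applyTransform(arrayTransform, null))      // []
console.log(applyTransform(arrayTransform, "['One']")) // ["One"]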