Merge branch 'master' into BUDI-8588

Mike Sealey 2024-09-25 15:54:12 +01:00 committed by GitHub
commit a21a096e6c
19 changed files with 455 additions and 228 deletions

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.6",
"version": "2.32.7",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@ -11,6 +11,7 @@ export interface DeletedApp {
}
const EXPIRY_SECONDS = 3600
const INVALID_EXPIRY_SECONDS = 60
/**
* The default populate app metadata function
@ -48,9 +49,8 @@ export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
// app DB left around, but no metadata, it is invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
// don't expire the reference to an invalid app, it'll only be
// updated if a metadata doc actually gets stored (app is remade/reverted)
expiry = undefined
// expire invalid apps regularly, in-case it was only briefly invalid
expiry = INVALID_EXPIRY_SECONDS
} else {
throw err
}
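
A minimal sketch of the negative-caching change this hunk makes, not the Budibase implementation itself: an app whose metadata doc is missing is cached as invalid with a short TTL, so the cache heals on its own once the doc reappears. The cache interface and helper names below are illustrative.

const EXPIRY_SECONDS = 3600
const INVALID_EXPIRY_SECONDS = 60

interface CacheLike {
  get(key: string): Promise<unknown | undefined>
  store(key: string, value: unknown, ttlSeconds?: number): Promise<void>
}

async function getCachedMetadata(
  cache: CacheLike,
  appId: string,
  fetchMetadata: (appId: string) => Promise<{ state: string }>
): Promise<unknown> {
  const cached = await cache.get(appId)
  if (cached) {
    return cached
  }
  let metadata: { state: string }
  let expiry = EXPIRY_SECONDS
  try {
    metadata = await fetchMetadata(appId)
  } catch (err: any) {
    if (err && err.status === 404) {
      metadata = { state: "invalid" }
      // expire invalid apps quickly, in case the app was only briefly invalid
      expiry = INVALID_EXPIRY_SECONDS
    } else {
      throw err
    }
  }
  await cache.store(appId, metadata, expiry)
  return metadata
}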

View File

@ -43,6 +43,9 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
}
type DBCall<T> = () => Promise<T>
type DBCallback<T> = (
db: Nano.DocumentScope<any>
) => Promise<DBCall<T>> | DBCall<T>
class CouchDBError extends Error implements DBError {
status: number
@ -171,8 +174,8 @@ export class DatabaseImpl implements Database {
}
// this function fetches the DB and handles if DB creation is needed
private async performCall<T>(
call: (db: Nano.DocumentScope<any>) => Promise<DBCall<T>> | DBCall<T>
private async performCallWithDBCreation<T>(
call: DBCallback<T>
): Promise<any> {
const db = this.getDb()
const fnc = await call(db)
@ -181,13 +184,24 @@ export class DatabaseImpl implements Database {
} catch (err: any) {
if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {
await this.checkAndCreateDb()
return await this.performCall(call)
return await this.performCallWithDBCreation(call)
}
// stripping the error down the props which are safe/useful, drop everything else
throw new CouchDBError(`CouchDB error: ${err.message}`, err)
}
}
private async performCall<T>(call: DBCallback<T>): Promise<any> {
const db = this.getDb()
const fnc = await call(db)
try {
return await fnc()
} catch (err: any) {
// stripping the error down the props which are safe/useful, drop everything else
throw new CouchDBError(`CouchDB error: ${err.message}`, err)
}
}
async get<T extends Document>(id?: string): Promise<T> {
return this.performCall(db => {
if (!id) {
@ -227,6 +241,7 @@ export class DatabaseImpl implements Database {
}
async remove(idOrDoc: string | Document, rev?: string) {
// not a read call - but don't create a DB to delete a document
return this.performCall(db => {
let _id: string
let _rev: string
@ -286,7 +301,7 @@ export class DatabaseImpl implements Database {
if (!document._id) {
throw new Error("Cannot store document without _id field.")
}
return this.performCall(async db => {
return this.performCallWithDBCreation(async db => {
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
@ -309,7 +324,7 @@ export class DatabaseImpl implements Database {
async bulkDocs(documents: AnyDocument[]) {
const now = new Date().toISOString()
return this.performCall(db => {
return this.performCallWithDBCreation(db => {
return () =>
db.bulk({
docs: documents.map(d => ({ createdAt: now, ...d, updatedAt: now })),
@ -321,7 +336,21 @@ export class DatabaseImpl implements Database {
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return this.performCall(db => {
return () => db.list(params)
return async () => {
try {
return (await db.list(params)) as AllDocsResponse<T>
} catch (err: any) {
if (err.reason === DATABASE_NOT_FOUND) {
return {
offset: 0,
total_rows: 0,
rows: [],
}
} else {
throw err
}
}
}
})
}
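
The refactor above splits the call wrapper in two: write paths keep the create-on-404 retry, while read paths never create a database as a side effect (remove and allDocs now handle the missing-DB case themselves). A stripped-down sketch of that shape, with the nano document scope reduced to a generic parameter; the 404 reason string stands in for the DATABASE_NOT_FOUND constant used above.

type DBCall<T> = () => Promise<T>
type DBCallback<T, Db> = (db: Db) => Promise<DBCall<T>> | DBCall<T>

const DATABASE_NOT_FOUND = "Database does not exist."

abstract class DatabaseSketch<Db> {
  protected abstract getDb(): Db
  protected abstract checkAndCreateDb(): Promise<void>

  // write path: on "database does not exist", create it and retry the call
  protected async performCallWithDBCreation<T>(call: DBCallback<T, Db>): Promise<T> {
    const fnc = await call(this.getDb())
    try {
      return await fnc()
    } catch (err: any) {
      if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {
        await this.checkAndCreateDb()
        return this.performCallWithDBCreation(call)
      }
      throw err
    }
  }

  // read path: never create a database just to answer a read
  protected async performCall<T>(call: DBCallback<T, Db>): Promise<T> {
    const fnc = await call(this.getDb())
    return fnc()
  }
}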

View File

@ -3,6 +3,7 @@ import * as context from "../context"
import { PostHog, PostHogOptions } from "posthog-node"
import { FeatureFlag, IdentityType, UserCtx } from "@budibase/types"
import tracer from "dd-trace"
import { Duration } from "../utils"
let posthog: PostHog | undefined
export function init(opts?: PostHogOptions) {
@ -16,6 +17,7 @@ export function init(opts?: PostHogOptions) {
posthog = new PostHog(env.POSTHOG_TOKEN, {
host: env.POSTHOG_API_HOST,
personalApiKey: env.POSTHOG_PERSONAL_TOKEN,
featureFlagsPollingInterval: Duration.fromMinutes(3).toMs(),
...opts,
})
} else {
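
The new featureFlagsPollingInterval uses the Duration helper from the utils import added above. A minimal sketch of what that helper is assumed to look like (a plain unit-conversion wrapper; the real utility may carry more methods):

class Duration {
  private constructor(private readonly ms: number) {}

  static fromMinutes(minutes: number): Duration {
    return new Duration(minutes * 60 * 1000)
  }

  static fromSeconds(seconds: number): Duration {
    return new Duration(seconds * 1000)
  }

  toMs(): number {
    return this.ms
  }
}

// poll PostHog feature flags every 3 minutes instead of the library default
const featureFlagsPollingInterval = Duration.fromMinutes(3).toMs() // 180000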

View File

@ -150,6 +150,7 @@ class InternalBuilder {
return `"${str}"`
case SqlClient.MS_SQL:
return `[${str}]`
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
return `\`${str}\``
}
@ -559,7 +560,10 @@ class InternalBuilder {
)}${wrap}, FALSE)`
)
})
} else if (this.client === SqlClient.MY_SQL) {
} else if (
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, (q, key, value) => {
return q[rawFnc](
@ -930,7 +934,8 @@ class InternalBuilder {
}
const relatedTable = meta.tables?.[toTable]
const toAlias = aliases?.[toTable] || toTable,
fromAlias = aliases?.[fromTable] || fromTable
fromAlias = aliases?.[fromTable] || fromTable,
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
@ -957,38 +962,36 @@ class InternalBuilder {
const primaryKey = `${toAlias}.${toPrimary || toKey}`
let subQuery: Knex.QueryBuilder = knex
.from(toTableWithSchema)
.limit(getRelationshipLimit())
// add sorting to get consistent order
.orderBy(primaryKey)
// many-to-many relationship with junction table
if (throughTable && toPrimary && fromPrimary) {
const throughAlias = aliases?.[throughTable] || throughTable
const isManyToMany = throughTable && toPrimary && fromPrimary
let correlatedTo = isManyToMany
? `${throughAlias}.${fromKey}`
: `${toAlias}.${toKey}`,
correlatedFrom = isManyToMany
? `${fromAlias}.${fromPrimary}`
: `${fromAlias}.${fromKey}`
// many-to-many relationship needs junction table join
if (isManyToMany) {
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
})
subQuery = subQuery
.join(throughTableWithSchema, function () {
subQuery = subQuery.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
})
.where(
`${throughAlias}.${fromKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromPrimary}`))
)
}
// one-to-many relationship with foreign key
else {
subQuery = subQuery.where(
`${toAlias}.${toKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromKey}`))
)
}
// add the correlation to the overall query
subQuery = subQuery.where(
correlatedTo,
"=",
knex.raw(this.quotedIdentifier(correlatedFrom))
)
const standardWrap = (select: string): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`)
subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
// @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({
[toAlias]: subQuery,
@ -1008,11 +1011,15 @@ class InternalBuilder {
`json_agg(json_build_object(${fieldList}))`
)
break
case SqlClient.MY_SQL:
case SqlClient.MARIADB:
// can't use the standard wrap due to correlated sub-query limitations in MariaDB
wrapperQuery = subQuery.select(
knex.raw(`json_arrayagg(json_object(${fieldList}))`)
knex.raw(
`json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
)
)
break
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))`
@ -1024,7 +1031,9 @@ class InternalBuilder {
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery.select(`${toAlias}.*`),
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})} FOR JSON PATH))`
)
break
@ -1179,7 +1188,8 @@ class InternalBuilder {
if (
this.client === SqlClient.POSTGRES ||
this.client === SqlClient.SQL_LITE ||
this.client === SqlClient.MY_SQL
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const primary = this.table.primary
if (!primary) {
@ -1326,12 +1336,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
_query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
const sqlClient = this.getSqlClient()
const config: Knex.Config = {
client: sqlClient,
client: this.getBaseSqlClient(),
}
if (sqlClient === SqlClient.SQL_LITE || sqlClient === SqlClient.ORACLE) {
config.useNullAsDefault = true
}
const client = knex(config)
let query: Knex.QueryBuilder
const builder = new InternalBuilder(sqlClient, client, json)
@ -1440,7 +1449,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
let id
if (sqlClient === SqlClient.MS_SQL) {
id = results?.[0].id
} else if (sqlClient === SqlClient.MY_SQL) {
} else if (
sqlClient === SqlClient.MY_SQL ||
sqlClient === SqlClient.MARIADB
) {
id = results?.insertId
}
row = processFn(
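
Most of the MariaDB handling in this file piggybacks on the existing MySQL branches, as in the identifier-quoting hunk at the top of this section. A reduced sketch of that dispatch; the enum values are copied from the types change further down, the helper itself is only illustrative:

enum SqlClient {
  MS_SQL = "mssql",
  POSTGRES = "pg",
  MY_SQL = "mysql2",
  MARIADB = "mariadb",
  ORACLE = "oracledb",
  SQL_LITE = "sqlite3",
}

// quote an identifier in the dialect's preferred style
function quoteIdentifier(client: SqlClient, str: string): string {
  switch (client) {
    case SqlClient.MS_SQL:
      return `[${str}]`
    case SqlClient.MARIADB:
    case SqlClient.MY_SQL:
      return `\`${str}\``
    default:
      // Postgres, Oracle and SQLite take double quotes here
      return `"${str}"`
  }
}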

View File

@ -210,16 +210,27 @@ function buildDeleteTable(knex: SchemaBuilder, table: Table): SchemaBuilder {
class SqlTableQueryBuilder {
private readonly sqlClient: SqlClient
private extendedSqlClient: SqlClient | undefined
// pass through client to get flavour of SQL
constructor(client: SqlClient) {
this.sqlClient = client
}
getSqlClient(): SqlClient {
getBaseSqlClient(): SqlClient {
return this.sqlClient
}
getSqlClient(): SqlClient {
return this.extendedSqlClient || this.sqlClient
}
// if working in a database like MySQL with many variants (MariaDB)
// we can set another client which overrides the base one
setExtendedSqlClient(client: SqlClient) {
this.extendedSqlClient = client
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return the operation that was found in the JSON.
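
The base/extended split lets a connector keep using its real Knex driver name while SQL generation branches on a more specific dialect, for example MariaDB detected over a mysql2 connection (see the MySQL integration change further down). A self-contained sketch of the pattern:

type SqlClientName = "mssql" | "pg" | "mysql2" | "mariadb" | "oracledb" | "sqlite3"

class DialectAwareBuilder {
  private readonly baseClient: SqlClientName
  private extendedClient: SqlClientName | undefined

  constructor(client: SqlClientName) {
    this.baseClient = client
  }

  // what Knex should be configured with: always a real driver name
  getBaseSqlClient(): SqlClientName {
    return this.baseClient
  }

  // what query-building logic should branch on: may be a variant like MariaDB
  getSqlClient(): SqlClientName {
    return this.extendedClient || this.baseClient
  }

  setExtendedSqlClient(client: SqlClientName) {
    this.extendedClient = client
  }
}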

View File

@ -16,9 +16,11 @@
export let enableNaming = true
let validRegex = /^[A-Za-z0-9_\s]+$/
let typing = false
let editing = false
const dispatch = createEventDispatcher()
$: stepNames = $selectedAutomation?.definition.stepNames
$: allSteps = $selectedAutomation?.definition.steps || []
$: automationName = stepNames?.[block.id] || block?.name || ""
$: automationNameError = getAutomationNameError(automationName)
$: status = updateStatus(testResult)
@ -56,10 +58,18 @@
}
}
const getAutomationNameError = name => {
if (stepNames) {
const duplicateError =
"This name already exists, please enter a unique name"
if (stepNames && editing) {
for (const [key, value] of Object.entries(stepNames)) {
if (name === value && key !== block.id) {
return "This name already exists, please enter a unique name"
if (name !== block.name && name === value && key !== block.id) {
return duplicateError
}
}
for (const step of allSteps) {
if (step.id !== block.id && name === step.name) {
return duplicateError
}
}
}
@ -67,16 +77,12 @@
if (name !== block.name && name?.length > 0) {
let invalidRoleName = !validRegex.test(name)
if (invalidRoleName) {
return "Please enter a role name consisting of only alphanumeric symbols and underscores"
return "Please enter a name consisting of only alphanumeric symbols and underscores"
}
}
return null
}
}
const startTyping = async () => {
typing = true
}
const saveName = async () => {
if (automationNameError || block.name === automationName) {
@ -89,13 +95,28 @@
await automationStore.actions.saveAutomationName(block.id, automationName)
}
}
const startEditing = () => {
editing = true
typing = true
}
const stopEditing = async () => {
editing = false
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
}
}
</script>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
class:typing={typing && !automationNameError}
class:typing-error={automationNameError}
class:typing={typing && !automationNameError && editing}
class:typing-error={automationNameError && editing}
class="blockSection"
on:click={() => dispatch("toggle")}
>
@ -132,7 +153,7 @@
<input
class="input-text"
disabled={!enableNaming}
placeholder="Enter some text"
placeholder="Enter step name"
name="name"
autocomplete="off"
value={automationName}
@ -141,26 +162,14 @@
}}
on:click={e => {
e.stopPropagation()
startTyping()
startEditing()
}}
on:keydown={async e => {
if (e.key === "Enter") {
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
}
}
}}
on:blur={async () => {
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
await stopEditing()
}
}}
on:blur={stopEditing}
/>
{:else}
<div class="input-text">
@ -222,7 +231,7 @@
/>
{/if}
</div>
{#if automationNameError}
{#if automationNameError && editing}
<div class="error-container">
<AbsTooltip type="negative" text={automationNameError}>
<div class="error-icon">
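
The validation above now only fires while the field is actually being edited, and checks the name against both the renamed steps map and the default names of untouched steps. A plain-TypeScript sketch of that check, with the Svelte stores reduced to simple arguments:

interface StepLike {
  id: string
  name?: string
}

const NAME_REGEX = /^[A-Za-z0-9_\s]+$/
const DUPLICATE_ERROR = "This name already exists, please enter a unique name"

function getStepNameError(
  name: string,
  block: StepLike,
  stepNames: Record<string, string>,
  allSteps: StepLike[]
): string | null {
  // duplicate against other steps that have been renamed...
  for (const [id, existing] of Object.entries(stepNames)) {
    if (name !== block.name && name === existing && id !== block.id) {
      return DUPLICATE_ERROR
    }
  }
  // ...and against default step names that were never renamed
  for (const step of allSteps) {
    if (step.id !== block.id && name === step.name) {
      return DUPLICATE_ERROR
    }
  }
  if (name !== block.name && name.length > 0 && !NAME_REGEX.test(name)) {
    return "Please enter a name consisting of only alphanumeric symbols and underscores"
  }
  return null
}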

View File

@ -643,8 +643,8 @@
runtimeName = `loop.${name}`
} else if (block.name.startsWith("JS")) {
runtimeName = hasUserDefinedName
? `stepsByName[${bindingName}].${name}`
: `steps[${idx - loopBlockCount}].${name}`
? `stepsByName["${bindingName}"].${name}`
: `steps["${idx - loopBlockCount}"].${name}`
} else {
runtimeName = hasUserDefinedName
? `stepsByName.${bindingName}.${name}`
@ -752,13 +752,21 @@
: allSteps[idx].icon
if (wasLoopBlock) {
loopBlockCount++
schema = cloneDeep(allSteps[idx - 1]?.schema?.outputs?.properties)
}
Object.entries(schema).forEach(([name, value]) => {
addBinding(name, value, icon, idx, isLoopBlock, bindingName)
})
}
if (
allSteps[blockIdx - 1]?.stepId !== ActionStepID.LOOP &&
allSteps
.slice(0, blockIdx)
.some(step => step.stepId === ActionStepID.LOOP)
) {
bindings = bindings.filter(x => !x.readableBinding.includes("loop"))
}
return bindings
}
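
Quoting the key matters because user-defined step names can contain spaces, which would break dot access inside a JS binding. A small illustration of the runtime-name construction this hunk fixes; the helper and argument names are made up for the example:

function jsRuntimeName(
  hasUserDefinedName: boolean,
  bindingName: string,
  stepIndex: number,
  field: string
): string {
  // bracket-and-quote the key so names like "Send Email" stay valid JS
  return hasUserDefinedName
    ? `stepsByName["${bindingName}"].${field}`
    : `steps["${stepIndex}"].${field}`
}

// jsRuntimeName(true, "Send Email", 2, "success") -> 'stepsByName["Send Email"].success'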

View File

@ -28,7 +28,7 @@
"inquirer": "8.0.0",
"lookpath": "1.1.0",
"node-fetch": "2.6.7",
"posthog-node": "1.3.0",
"posthog-node": "4.0.1",
"pouchdb": "7.3.0",
"@budibase/pouchdb-replication-stream": "1.2.11",
"randomstring": "1.1.5",

View File

@ -1,4 +1,4 @@
import PostHog from "posthog-node"
import { PostHog } from "posthog-node"
import { POSTHOG_TOKEN, AnalyticsEvent } from "../constants"
import { ConfigManager } from "../structures/ConfigManager"
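
posthog-node 4.x exports PostHog as a named export rather than a default, hence the import change. A minimal usage sketch under that assumption; the token, host and event name are placeholders:

import { PostHog } from "posthog-node"

async function trackOptIn(userId: string) {
  const client = new PostHog("phc_example_token", {
    host: "https://app.posthog.com",
  })
  client.capture({ distinctId: userId, event: "analytics_opt_in" })
  // flush any queued events before the CLI process exits
  await client.shutdown()
}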

View File

@ -39,9 +39,10 @@ import tk from "timekeeper"
import { encodeJSBinding } from "@budibase/string-templates"
import { dataFilters } from "@budibase/shared-core"
import { Knex } from "knex"
import { structures } from "@budibase/backend-core/tests"
import { generator, structures } from "@budibase/backend-core/tests"
import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"
import { generateRowIdField } from "../../../integrations/utils"
import { cloneDeep } from "lodash/fp"
describe.each([
["in-memory", undefined],
@ -66,6 +67,36 @@ describe.each([
let table: Table
let rows: Row[]
async function basicRelationshipTables(type: RelationshipType) {
const relatedTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
},
generator.guid().substring(0, 10)
)
table = await createTable(
{
name: { name: "name", type: FieldType.STRING },
//@ts-ignore - API accepts this structure, will build out rest of definition
productCat: {
type: FieldType.LINK,
relationshipType: type,
name: "productCat",
fieldName: "product",
tableId: relatedTable._id!,
constraints: {
type: "array",
},
},
},
generator.guid().substring(0, 10)
)
return {
relatedTable: await config.api.table.get(relatedTable._id!),
table,
}
}
beforeAll(async () => {
await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, () => config.init())
if (isLucene) {
@ -201,6 +232,7 @@ describe.each([
// rows returned by the query will also cause the assertion to fail.
async toMatchExactly(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -211,7 +243,7 @@ describe.each([
expect.objectContaining(this.popRow(expectedRow, foundRows))
)
)
return response
return cloned
}
// Asserts that the query returns rows matching exactly the set of rows
@ -219,6 +251,7 @@ describe.each([
// cause the assertion to fail.
async toContainExactly(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -231,7 +264,7 @@ describe.each([
)
)
)
return response
return cloned
}
// Asserts that the query returns some property values - this cannot be used
@ -239,6 +272,7 @@ describe.each([
// typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...)
async toMatch(properties: Record<string, any>) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const keys = Object.keys(properties) as Array<keyof SearchResponse<Row>>
for (let key of keys) {
// eslint-disable-next-line jest/no-standalone-expect
@ -248,17 +282,18 @@ describe.each([
expect(response[key]).toEqual(properties[key])
}
}
return response
return cloned
}
// Asserts that the query doesn't return a property, e.g. pagination parameters.
async toNotHaveProperty(properties: (keyof SearchResponse<Row>)[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
for (let property of properties) {
// eslint-disable-next-line jest/no-standalone-expect
expect(response[property]).toBeUndefined()
}
return response
return cloned
}
// Asserts that the query returns rows matching the set of rows passed in.
@ -266,6 +301,7 @@ describe.each([
// assertion to fail.
async toContain(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -276,7 +312,7 @@ describe.each([
)
)
)
return response
return cloned
}
async toFindNothing() {
@ -2196,28 +2232,10 @@ describe.each([
let productCategoryTable: Table, productCatRows: Row[]
beforeAll(async () => {
productCategoryTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
},
"productCategory"
)
table = await createTable(
{
name: { name: "name", type: FieldType.STRING },
productCat: {
type: FieldType.LINK,
relationshipType: RelationshipType.ONE_TO_MANY,
name: "productCat",
fieldName: "product",
tableId: productCategoryTable._id!,
constraints: {
type: "array",
},
},
},
"product"
const { relatedTable } = await basicRelationshipTables(
RelationshipType.ONE_TO_MANY
)
productCategoryTable = relatedTable
productCatRows = await Promise.all([
config.api.row.save(productCategoryTable._id!, { name: "foo" }),
@ -2250,7 +2268,7 @@ describe.each([
it("should be able to filter by relationship using table name", async () => {
await expectQuery({
equal: { ["productCategory.name"]: "foo" },
equal: { [`${productCategoryTable.name}.name`]: "foo" },
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
@ -2262,6 +2280,36 @@ describe.each([
}).toContainExactly([{ name: "baz", productCat: undefined }])
})
})
isSql &&
describe("big relations", () => {
beforeAll(async () => {
const { relatedTable } = await basicRelationshipTables(
RelationshipType.MANY_TO_ONE
)
const mainRow = await config.api.row.save(table._id!, {
name: "foo",
})
for (let i = 0; i < 11; i++) {
await config.api.row.save(relatedTable._id!, {
name: i,
product: [mainRow._id!],
})
}
})
it("can only pull 10 related rows", async () => {
await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => {
const response = await expectQuery({}).toContain([{ name: "foo" }])
expect(response.rows[0].productCat).toBeArrayOfSize(10)
})
})
it("can pull max rows when env not set (defaults to 500)", async () => {
const response = await expectQuery({}).toContain([{ name: "foo" }])
expect(response.rows[0].productCat).toBeArrayOfSize(11)
})
})
;(isSqs || isLucene) &&
describe("relations to same table", () => {
let relatedTable: Table, relatedRows: Row[]
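
Each matcher above now snapshots the response with cloneDeep before asserting, because popRow mutates foundRows while pairing up expected and actual rows; the caller gets the untouched copy back. A sketch of that assert-then-return pattern:

import { cloneDeep } from "lodash/fp"

async function assertAndReturn<T extends { rows: unknown[] }>(
  perform: () => Promise<T>,
  assert: (response: T) => void
): Promise<T> {
  const response = await perform()
  const cloned = cloneDeep(response)
  assert(response) // the matcher may splice rows out of response.rows
  return cloned // callers still see the unmodified search result
}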

View File

@ -22,6 +22,8 @@ import {
TableSchema,
ViewFieldMetadata,
RenameColumn,
FeatureFlag,
BBReferenceFieldSubType,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -32,6 +34,7 @@ import {
roles,
withEnv as withCoreEnv,
setEnv as setCoreEnv,
env,
} from "@budibase/backend-core"
import sdk from "../../../sdk"
@ -694,6 +697,7 @@ describe.each([
)
})
isInternal &&
it("cannot update views v1", async () => {
const viewV1 = await config.api.legacyView.save({
tableId: table._id!,
@ -2213,6 +2217,171 @@ describe.each([
})
)
})
describe("foreign relationship columns", () => {
let envCleanup: () => void
beforeAll(() => {
const flags = [`*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`]
if (env.TENANT_FEATURE_FLAGS) {
flags.push(...env.TENANT_FEATURE_FLAGS.split(","))
}
envCleanup = setCoreEnv({
TENANT_FEATURE_FLAGS: flags.join(","),
})
})
afterAll(() => {
envCleanup?.()
})
const createMainTable = async (
links: {
name: string
tableId: string
fk: string
}[]
) => {
const table = await config.api.table.save(
saveTableRequest({
schema: { title: { name: "title", type: FieldType.STRING } },
})
)
await config.api.table.save({
...table,
schema: {
...table.schema,
...links.reduce<TableSchema>((acc, c) => {
acc[c.name] = {
name: c.name,
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: c.tableId,
fieldName: c.fk,
constraints: { type: "array" },
}
return acc
}, {}),
},
})
return table
}
const createAuxTable = (schema: TableSchema) =>
config.api.table.save(
saveTableRequest({
primaryDisplay: "name",
schema: {
...schema,
name: { name: "name", type: FieldType.STRING },
},
})
)
it("returns squashed fields respecting the view config", async () => {
const auxTable = await createAuxTable({
age: { name: "age", type: FieldType.NUMBER },
})
const auxRow = await config.api.row.save(auxTable._id!, {
name: generator.name(),
age: generator.age(),
})
const table = await createMainTable([
{ name: "aux", tableId: auxTable._id!, fk: "fk_aux" },
])
await config.api.row.save(table._id!, {
title: generator.word(),
aux: [auxRow],
})
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
title: { visible: true },
aux: {
visible: true,
columns: {
name: { visible: false, readonly: false },
age: { visible: true, readonly: true },
},
},
},
})
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toEqual([
expect.objectContaining({
aux: [
{
_id: auxRow._id,
primaryDisplay: auxRow.name,
age: auxRow.age,
},
],
}),
])
})
it("enriches squashed fields", async () => {
const auxTable = await createAuxTable({
user: {
name: "user",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
constraints: { presence: true },
},
})
const table = await createMainTable([
{ name: "aux", tableId: auxTable._id!, fk: "fk_aux" },
])
const user = config.getUser()
const auxRow = await config.api.row.save(auxTable._id!, {
name: generator.name(),
user: user._id,
})
await config.api.row.save(table._id!, {
title: generator.word(),
aux: [auxRow],
})
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
title: { visible: true },
aux: {
visible: true,
columns: {
name: { visible: true, readonly: true },
user: { visible: true, readonly: true },
},
},
},
})
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toEqual([
expect.objectContaining({
aux: [
{
_id: auxRow._id,
primaryDisplay: auxRow.name,
name: auxRow.name,
user: {
_id: user._id,
email: user.email,
firstName: user.firstName,
lastName: user.lastName,
primaryDisplay: user.email,
},
},
],
}),
])
})
})
})
describe("permissions", () => {
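
The suite above enables the ENRICHED_RELATIONSHIPS flag by appending it to whatever TENANT_FEATURE_FLAGS is already set, then restores the environment afterwards. A sketch of that setup helper, assuming setEnv returns a cleanup callback as the setCoreEnv usage suggests:

function withFeatureFlag(
  env: { TENANT_FEATURE_FLAGS?: string },
  flag: string,
  setEnv: (patch: { TENANT_FEATURE_FLAGS: string }) => () => void
): () => void {
  const flags = [`*:${flag}`]
  if (env.TENANT_FEATURE_FLAGS) {
    flags.push(...env.TENANT_FEATURE_FLAGS.split(","))
  }
  // returns the cleanup callback that puts the previous value back
  return setEnv({ TENANT_FEATURE_FLAGS: flags.join(",") })
}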

View File

@ -10,7 +10,7 @@ import flatten from "lodash/flatten"
import { USER_METDATA_PREFIX } from "../utils"
import partition from "lodash/partition"
import { getGlobalUsersFromMetadata } from "../../utilities/global"
import { processFormulas } from "../../utilities/rowProcessor"
import { outputProcessing, processFormulas } from "../../utilities/rowProcessor"
import { context, features } from "@budibase/backend-core"
import {
ContextUser,
@ -275,7 +275,7 @@ export async function squashLinks<T = Row[] | Row>(
// will populate this as we find them
const linkedTables = [table]
const isArray = Array.isArray(enriched)
const enrichedArray = !isArray ? [enriched] : enriched
const enrichedArray = !isArray ? [enriched as Row] : (enriched as Row[])
for (const row of enrichedArray) {
// this only fetches the table if its not already in array
const rowTable = await getLinkedTable(row.tableId!, linkedTables)
@ -292,6 +292,9 @@ export async function squashLinks<T = Row[] | Row>(
obj.primaryDisplay = getPrimaryDisplayValue(link, linkedTable)
if (viewSchema[column]?.columns) {
const enrichedLink = await outputProcessing(linkedTable, link, {
squash: false,
})
const squashFields = Object.entries(viewSchema[column].columns)
.filter(([columnName, viewColumnConfig]) => {
const tableColumn = linkedTable.schema[columnName]
@ -312,7 +315,7 @@ export async function squashLinks<T = Row[] | Row>(
.map(([columnName]) => columnName)
for (const relField of squashFields) {
obj[relField] = link[relField]
obj[relField] = enrichedLink[relField]
}
}
@ -321,5 +324,5 @@ export async function squashLinks<T = Row[] | Row>(
row[column] = newLinks
}
}
return isArray ? enrichedArray : enrichedArray[0]
return (isArray ? enrichedArray : enrichedArray[0]) as T
}
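
squashLinks normally reduces each linked row to _id plus primaryDisplay; with view-configured relationship columns it now runs the linked row through outputProcessing (squash disabled) first, so enriched values such as user references are copied rather than the raw stored ones. A rough sketch of that step, with the enrichment passed in as a callback:

async function squashLink(
  link: Record<string, any>,
  primaryDisplayColumn: string,
  visibleColumns: string[],
  enrich: (row: Record<string, any>) => Promise<Record<string, any>>
): Promise<Record<string, any>> {
  const squashed: Record<string, any> = {
    _id: link._id,
    primaryDisplay: link[primaryDisplayColumn],
  }
  if (visibleColumns.length) {
    // stand-in for outputProcessing(linkedTable, link, { squash: false })
    const enriched = await enrich(link)
    for (const column of visibleColumns) {
      squashed[column] = enriched[column]
    }
  }
  return squashed
}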

View File

@ -241,6 +241,16 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async connect() {
this.client = await mysql.createConnection(this.config)
const res = await this.internalQuery(
{
sql: "SELECT VERSION();",
},
{ connect: false }
)
const version = res?.[0]?.["VERSION()"]
if (version?.toLowerCase().includes("mariadb")) {
this.setExtendedSqlClient(SqlClient.MARIADB)
}
}
async disconnect() {
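
MariaDB is detected by running SELECT VERSION() on connect and inspecting the version string; the "VERSION()" column name below follows the hunk above, the standalone helper is only a sketch:

async function detectDialect(
  query: (sql: string) => Promise<Array<Record<string, string>>>
): Promise<"mariadb" | "mysql2"> {
  const res = await query("SELECT VERSION();")
  const version = res?.[0]?.["VERSION()"]
  return version?.toLowerCase().includes("mariadb") ? "mariadb" : "mysql2"
}

// "10.11.6-MariaDB-1:10.11.6+maria~ubu2204" -> "mariadb"
// "8.0.36" -> "mysql2"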

View File

@ -198,12 +198,15 @@ export async function save(
}
}
generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
tables[relatedTable.name] = relatedTable
schema.main = true
}
// add in the new table for relationship purposes
tables[tableToSave.name] = tableToSave
cleanupRelationships(tableToSave, tables, oldTable)
if (oldTable) {
cleanupRelationships(tableToSave, tables, { oldTable })
}
const operation = tableId ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(
@ -231,7 +234,10 @@ export async function save(
// remove the rename prop
delete tableToSave._rename
datasource.entities[tableToSave.name] = tableToSave
datasource.entities = {
...datasource.entities,
...tables,
}
// store it into couch now for budibase reference
await db.put(populateExternalTableSchemas(datasource))
@ -255,7 +261,7 @@ export async function destroy(datasourceId: string, table: Table) {
const operation = Operation.DELETE_TABLE
if (tables) {
await makeTableRequest(datasource, operation, table, tables)
cleanupRelationships(table, tables)
cleanupRelationships(table, tables, { deleting: true })
delete tables[table.name]
datasource.entities = tables
}
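
The save path now folds every table it touched back into the datasource, rather than overwriting only the table being saved, so related tables whose schemas picked up new link columns are persisted too. A small sketch of that merge, with the types trimmed down:

interface DatasourceLike {
  entities?: Record<string, { name: string }>
}

function mergeEntities(
  datasource: DatasourceLike,
  updatedTables: Record<string, { name: string }>
): DatasourceLike {
  // spread order keeps existing entities and overlays every updated table
  datasource.entities = {
    ...datasource.entities,
    ...updatedTables,
  }
  return datasource
}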

View File

@ -20,14 +20,26 @@ import { cloneDeep } from "lodash/fp"
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
oldTable?: Table
) {
opts: { oldTable: Table }
): void
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
opts: { deleting: boolean }
): void
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
opts?: { oldTable?: Table; deleting?: boolean }
): void {
const oldTable = opts?.oldTable
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
if (
schema.type === FieldType.LINK &&
(!oldTable || table.schema[key] == null)
(opts?.deleting || oldTable?.schema[key] != null) &&
table.schema[key] == null
) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
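
The overloads above force callers to state their intent: an update passes the previous table definition, a delete says so explicitly, and the implementation signature accepts either. A simplified sketch of the same pattern; the table shapes, the "link" type string and keying tables by id are reduced stand-ins for the real schema types:

interface TableLike {
  name: string
  schema: Record<string, { type: string; tableId?: string; fieldName?: string }>
}

function cleanupSketch(
  table: TableLike,
  tables: Record<string, TableLike>,
  opts: { oldTable: TableLike }
): void
function cleanupSketch(
  table: TableLike,
  tables: Record<string, TableLike>,
  opts: { deleting: boolean }
): void
function cleanupSketch(
  table: TableLike,
  tables: Record<string, TableLike>,
  opts?: { oldTable?: TableLike; deleting?: boolean }
): void {
  // iterate the old schema on update, the current one on delete
  const source = opts?.oldTable ?? table
  for (const [key, field] of Object.entries(source.schema)) {
    const stale =
      field.type === "link" && (opts?.deleting || table.schema[key] == null)
    if (!stale || !field.tableId || !field.fieldName) {
      continue
    }
    // drop the reverse link column from the related table, if it is still known
    const related = tables[field.tableId]
    if (related) {
      delete related.schema[field.fieldName]
    }
  }
}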

View File

@ -600,10 +600,10 @@ export function fullSchemaWithoutLinks({
allRequired,
}: {
allRequired?: boolean
}) {
const schema: {
}): {
[type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
} = {
} {
return {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
@ -741,8 +741,6 @@ export function fullSchemaWithoutLinks({
},
},
}
return schema
}
export function basicAttachment() {
return {

View File

@ -195,6 +195,7 @@ export enum SqlClient {
MS_SQL = "mssql",
POSTGRES = "pg",
MY_SQL = "mysql2",
MARIADB = "mariadb",
ORACLE = "oracledb",
SQL_LITE = "sqlite3",
}

yarn.lock (101 changed lines)
View File

@ -7535,15 +7535,7 @@ aws4@^1.8.0:
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
axios-retry@^3.1.9:
version "3.4.0"
resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.4.0.tgz#f464dbe9408e5aa78fa319afd38bb69b533d8854"
integrity sha512-VdgaP+gHH4iQYCCNUWF2pcqeciVOdGrBBAYUfTY+wPcO5Ltvp/37MLFNCmJKo7Gj3SHvCSdL8ouI1qLYJN3liA==
dependencies:
"@babel/runtime" "^7.15.4"
is-retry-allowed "^2.2.0"
axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
version "1.6.3"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@ -8396,11 +8388,6 @@ chardet@^0.7.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
charenc@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==
cheap-watch@^1.0.2, cheap-watch@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/cheap-watch/-/cheap-watch-1.0.4.tgz#0bcb4a3a8fbd9d5327936493f6b56baa668d8fef"
@ -8787,11 +8774,6 @@ component-emitter@^1.3.0:
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
component-type@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
integrity sha512-Kgy+2+Uwr75vAi6ChWXgHuLvd+QLD7ssgpaRq2zCvt80ptvAfMc/hijcJxXkBa2wMlEZcJvC2H8Ubo+A9ATHIg==
compress-commons@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.2.tgz#6542e59cb63e1f46a8b21b0e06f9a32e4c8b06df"
@ -9210,11 +9192,6 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
shebang-command "^2.0.0"
which "^2.0.1"
crypt@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
crypto-browserify@^3.11.0:
version "3.12.0"
resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
@ -13257,11 +13234,6 @@ is-boolean-object@^1.1.0:
call-bind "^1.0.2"
has-tostringtag "^1.0.0"
is-buffer@~1.1.6:
version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
is-builtin-module@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169"
@ -13546,11 +13518,6 @@ is-retry-allowed@^1.1.0:
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4"
integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==
is-retry-allowed@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
is-self-closing@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-self-closing/-/is-self-closing-1.0.1.tgz#5f406b527c7b12610176320338af0fa3896416e4"
@ -14307,11 +14274,6 @@ joi@^17.13.1:
"@sideway/formula" "^3.0.1"
"@sideway/pinpoint" "^2.0.0"
join-component@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
integrity sha512-bF7vcQxbODoGK1imE2P9GS9aw4zD0Sd+Hni68IMZLj7zRnquH7dXUmMw9hDI5S/Jzt7q+IyTXN0rSg2GI0IKhQ==
joycon@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03"
@ -15801,15 +15763,6 @@ md5.js@^1.3.4:
inherits "^2.0.1"
safe-buffer "^5.1.2"
md5@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
dependencies:
charenc "0.0.2"
crypt "0.0.2"
is-buffer "~1.1.6"
mdn-data@2.0.14:
version "2.0.14"
resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50"
@ -16271,7 +16224,7 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
ms@^2.0.0, ms@^2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@ -18440,20 +18393,6 @@ posthog-js@^1.13.4:
preact "^10.19.3"
web-vitals "^4.0.1"
posthog-node@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7"
integrity sha512-2+VhqiY/rKIqKIXyvemBFHbeijHE25sP7eKltnqcFqAssUE6+sX6vusN9A4luzToOqHQkUZexiCKxvuGagh7JA==
dependencies:
axios "0.24.0"
axios-retry "^3.1.9"
component-type "^1.2.1"
join-component "^1.1.0"
md5 "^2.3.0"
ms "^2.1.3"
remove-trailing-slash "^0.1.1"
uuid "^8.3.2"
posthog-node@4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-4.0.1.tgz#eb8b6cdf68c3fdd0dc2b75e8aab2e0ec3727fb2a"
@ -19494,11 +19433,6 @@ remixicon@2.5.0:
resolved "https://registry.yarnpkg.com/remixicon/-/remixicon-2.5.0.tgz#b5e245894a1550aa23793f95daceadbf96ad1a41"
integrity sha512-q54ra2QutYDZpuSnFjmeagmEiN9IMo56/zz5dDNitzKD23oFRw77cWo4TsrAdmdkPiEn8mxlrTqxnkujDbEGww==
remove-trailing-slash@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
request@^2.88.0:
version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
@ -20786,16 +20720,7 @@ string-similarity@^4.0.4:
resolved "https://registry.yarnpkg.com/string-similarity/-/string-similarity-4.0.4.tgz#42d01ab0b34660ea8a018da8f56a3309bb8b2a5b"
integrity sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==
"string-width-cjs@npm:string-width@^4.2.0":
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
dependencies:
emoji-regex "^8.0.0"
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1"
"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3:
"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@ -20886,7 +20811,7 @@ stringify-object@^3.2.1:
is-obj "^1.0.1"
is-regexp "^1.0.0"
"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@ -20900,13 +20825,6 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
dependencies:
ansi-regex "^4.1.0"
strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
dependencies:
ansi-regex "^5.0.1"
strip-ansi@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2"
@ -22862,7 +22780,7 @@ worker-farm@1.7.0:
dependencies:
errno "~0.1.7"
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
@ -22880,15 +22798,6 @@ wrap-ansi@^5.1.0:
string-width "^3.0.0"
strip-ansi "^5.0.0"
wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"