Merge branch 'budi-7710-user-groups-do-not-fully-support-custom-roles-4' of github.com:budibase/budibase into budi-7710-user-groups-do-not-fully-support-custom-roles-5

Sam Rose 2024-03-05 09:25:19 +00:00
commit 4c6745ad20
21 changed files with 280 additions and 136 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.21.0",
+  "version": "2.21.2",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -184,7 +184,7 @@ export async function getRole(
     return cloneDeep(BUILTIN_ROLES.PUBLIC)
   }
   // only throw an error if there is no role at all
-  if (!role || Object.keys(role).length === 0) {
+  if (Object.keys(role || {}).length === 0) {
     throw err
   }
 }

View File

@@ -1,7 +1,7 @@
 <script>
   import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
   import { fetchData, Utils } from "@budibase/frontend-core"
-  import { getContext } from "svelte"
+  import { getContext, onMount } from "svelte"
   import Field from "./Field.svelte"
   import { FieldTypes } from "../../../constants"
@@ -28,6 +28,7 @@
   let tableDefinition
   let searchTerm
   let open
+  let initialValue

   $: type =
     datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
@@ -109,7 +110,11 @@
   }

   $: forceFetchRows(filter)
-  $: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
+  $: debouncedFetchRows(
+    searchTerm,
+    primaryDisplay,
+    initialValue || defaultValue
+  )

   const forceFetchRows = async () => {
     // if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
@@ -127,9 +132,13 @@
     if (allRowsFetched || !primaryDisplay) {
       return
     }
-    if (defaultVal && !optionsObj[defaultVal]) {
+    // must be an array
+    if (defaultVal && !Array.isArray(defaultVal)) {
+      defaultVal = defaultVal.split(",")
+    }
+    if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
       await fetch.update({
-        query: { equal: { _id: defaultVal } },
+        query: { oneOf: { _id: defaultVal } },
       })
     }
@@ -202,6 +211,16 @@
       fetch.nextPage()
     }
   }
+
+  onMount(() => {
+    // if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
+    if (fieldState?.value) {
+      initialValue =
+        fieldSchema?.relationshipType !== "one-to-many"
+          ? flatten(fieldState?.value) ?? []
+          : flatten(fieldState?.value)?.[0]
+    }
+  })
 </script>

 <Field
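Review note on the default-value handling above: relationship defaults can arrive either as an array of row ids or as a single comma-separated string, and any ids that are not already loaded are now fetched with a `oneOf` filter rather than `equal`. A minimal standalone sketch of that normalisation; `buildMissingIdQuery` and the `LoadedRows` shape are illustrative names, not part of the component:

```typescript
// Sketch only: mirrors the reactive default-value handling outside Svelte.
type LoadedRows = Record<string, unknown> // rows already fetched, keyed by _id

function buildMissingIdQuery(
  defaultVal: string | string[] | undefined,
  loaded: LoadedRows
) {
  if (!defaultVal) {
    return undefined
  }
  // must be an array - a comma-separated default becomes ["id1", "id2"]
  const ids = Array.isArray(defaultVal) ? defaultVal : defaultVal.split(",")
  // only hit the API again if at least one default id has not been loaded yet
  if (ids.some(id => !loaded[id])) {
    return { oneOf: { _id: ids } }
  }
  return undefined
}

// buildMissingIdQuery("ro_1,ro_2", {})        -> { oneOf: { _id: ["ro_1", "ro_2"] } }
// buildMissingIdQuery(["ro_1"], { ro_1: {} }) -> undefined
```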

View File

@@ -10,6 +10,11 @@ CREATE TABLE Persons (
   City varchar(255),
   PRIMARY KEY (PersonID)
 );
+CREATE TABLE Person (
+  PersonID int NOT NULL AUTO_INCREMENT,
+  Name varchar(255),
+  PRIMARY KEY (PersonID)
+);
 CREATE TABLE Tasks (
   TaskID int NOT NULL AUTO_INCREMENT,
   PersonID INT,
@@ -27,6 +32,7 @@ CREATE TABLE Products (
 );
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
+INSERT INTO Person (Name) VALUES ('Elf');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
 INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View File

@@ -35,20 +35,21 @@ async function updateRolesOnUserTable(
 ) {
   const table = await sdk.tables.getTable(InternalTables.USER_METADATA)
   const constraints = table.schema.roleId?.constraints
-  if (constraints) {
-    const updatedRoleId =
-      roleVersion === roles.RoleIDVersion.NAME
-        ? roles.getExternalRoleID(roleId, roleVersion)
-        : roleId
-    const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
-    const remove = updateOption === UpdateRolesOptions.REMOVED
-    if (remove && indexOfRoleId !== -1) {
-      constraints.inclusion!.splice(indexOfRoleId, 1)
-    } else if (!remove && indexOfRoleId === -1) {
-      constraints.inclusion!.push(updatedRoleId)
-    }
-    await db.put(table)
+  if (!constraints) {
+    return
   }
+  const updatedRoleId =
+    roleVersion === roles.RoleIDVersion.NAME
+      ? roles.getExternalRoleID(roleId, roleVersion)
+      : roleId
+  const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
+  const remove = updateOption === UpdateRolesOptions.REMOVED
+  if (remove && indexOfRoleId !== -1) {
+    constraints.inclusion!.splice(indexOfRoleId, 1)
+  } else if (!remove && indexOfRoleId === -1) {
+    constraints.inclusion!.push(updatedRoleId)
+  }
+  await db.put(table)
 }

 export async function fetch(ctx: UserCtx<void, FetchRolesResponse>) {
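For context on the refactor above (behaviour unchanged): the user-metadata table's `roleId` column carries an `inclusion` constraint listing the role ids users may hold, and saving or deleting a custom role adds or removes its id from that list. A hedged, standalone sketch of just the list update, with an illustrative helper name:

```typescript
// Sketch only: the inclusion-list update in isolation, not the controller itself.
function updateInclusion(
  inclusion: string[],
  updatedRoleId: string,
  remove: boolean
): string[] {
  const indexOfRoleId = inclusion.indexOf(updatedRoleId)
  if (remove && indexOfRoleId !== -1) {
    // role deleted: stop allowing it as a roleId value
    inclusion.splice(indexOfRoleId, 1)
  } else if (!remove && indexOfRoleId === -1) {
    // new custom role: allow it on the user table
    inclusion.push(updatedRoleId)
  }
  return inclusion
}

// updateInclusion(["ADMIN", "POWER", "BASIC", "PUBLIC"], "my_custom_role", false)
//   -> ["ADMIN", "POWER", "BASIC", "PUBLIC", "my_custom_role"]
```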

View File

@@ -62,7 +62,11 @@ export default class AliasTables {
       if (idx === -1 || idx > 1) {
         return
       }
-      return Math.abs(tableName.length - name.length) <= 2
+      // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
+      // then the idx of the table name will be 1, and we should allow for it ending in a closing
+      // character - otherwise it should be the full length if the index is zero
+      const allowedCharacterDiff = idx * 2
+      return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
     })
     if (foundTableName) {
       const aliasedTableName = tableName.replace(
@@ -107,57 +111,55 @@
   async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
-    json = cloneDeep(json)
-    const aliasTable = (table: Table) => ({
-      ...table,
-      name: this.getAlias(table.name),
-    })
-    // run through the query json to update anywhere a table may be used
-    if (json.resource?.fields) {
-      json.resource.fields = json.resource.fields.map(field =>
-        this.aliasField(field)
-      )
-    }
-    if (json.filters) {
-      for (let [filterKey, filter] of Object.entries(json.filters)) {
-        if (typeof filter !== "object") {
-          continue
-        }
-        const aliasedFilters: typeof filter = {}
-        for (let key of Object.keys(filter)) {
-          aliasedFilters[this.aliasField(key)] = filter[key]
-        }
-        json.filters[filterKey as keyof SearchFilters] = aliasedFilters
-      }
-    }
-    if (json.relationships) {
-      json.relationships = json.relationships.map(relationship => ({
-        ...relationship,
-        aliases: this.aliasMap([
-          relationship.through,
-          relationship.tableName,
-          json.endpoint.entityId,
-        ]),
-      }))
-    }
-    if (json.meta?.table) {
-      json.meta.table = aliasTable(json.meta.table)
-    }
-    if (json.meta?.tables) {
-      const aliasedTables: Record<string, Table> = {}
-      for (let [tableName, table] of Object.entries(json.meta.tables)) {
-        aliasedTables[this.getAlias(tableName)] = aliasTable(table)
-      }
-      json.meta.tables = aliasedTables
-    }
-    // invert and return
-    const invertedTableAliases: Record<string, string> = {}
-    for (let [key, value] of Object.entries(this.tableAliases)) {
-      invertedTableAliases[value] = key
-    }
-    json.tableAliases = invertedTableAliases
+    const fieldLength = json.resource?.fields?.length
+    const aliasingEnabled = fieldLength && fieldLength > 0
+    if (aliasingEnabled) {
+      json = cloneDeep(json)
+      // run through the query json to update anywhere a table may be used
+      if (json.resource?.fields) {
+        json.resource.fields = json.resource.fields.map(field =>
+          this.aliasField(field)
+        )
+      }
+      if (json.filters) {
+        for (let [filterKey, filter] of Object.entries(json.filters)) {
+          if (typeof filter !== "object") {
+            continue
+          }
+          const aliasedFilters: typeof filter = {}
+          for (let key of Object.keys(filter)) {
+            aliasedFilters[this.aliasField(key)] = filter[key]
+          }
+          json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+        }
+      }
+      if (json.meta?.table) {
+        this.getAlias(json.meta.table.name)
+      }
+      if (json.meta?.tables) {
+        Object.keys(json.meta.tables).forEach(tableName =>
+          this.getAlias(tableName)
+        )
+      }
+      if (json.relationships) {
+        json.relationships = json.relationships.map(relationship => ({
+          ...relationship,
+          aliases: this.aliasMap([
+            relationship.through,
+            relationship.tableName,
+            json.endpoint.entityId,
+          ]),
+        }))
+      }
+      // invert and return
+      const invertedTableAliases: Record<string, string> = {}
+      for (let [key, value] of Object.entries(this.tableAliases)) {
+        invertedTableAliases[value] = key
+      }
+      json.tableAliases = invertedTableAliases
+    }
     const response = await getDatasourceAndQuery(json)
-    if (Array.isArray(response)) {
+    if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
       return response
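Two things worth calling out in this file. The length check now allows a difference of `idx * 2`, so a table name wrapped in `[]`, quotes or backticks (where it starts at index 1) can be up to two characters longer and still match, while an unwrapped name must match exactly. And aliasing is only applied, and the response only reverse-mapped, when the query actually selects fields. A rough illustration of the length allowance; `matchesTable` and the way `name` is derived here are illustrative, not the class's real code:

```typescript
// Illustration only: the same allowance as `allowedCharacterDiff = idx * 2` above.
function matchesTable(field: string, tableName: string): boolean {
  const idx = field.indexOf(tableName)
  if (idx === -1 || idx > 1) {
    return false
  }
  // assume the part before the "." is the (possibly wrapped) table name
  const name = field.split(".")[0]
  // wrapped names ("tasks", `tasks`, [tasks]) start at index 1 and may be up to
  // two characters longer; unwrapped names must match the length exactly
  const allowedCharacterDiff = idx * 2
  return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
}

// matchesTable("[tasks].taskid", "tasks") -> true
// matchesTable("tasks.taskid", "tasks")   -> true
// matchesTable("mytasks.taskid", "tasks") -> false (name does not start within index 1)
```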

View File

@@ -12,6 +12,8 @@ import {
 } from "@budibase/types"
 import environment from "../../environment"

+type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
+
 const envLimit = environment.SQL_MAX_ROWS
   ? parseInt(environment.SQL_MAX_ROWS)
   : null
@@ -322,15 +324,18 @@
   addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
     let { sort, paginate } = json
     const table = json.meta?.table
+    const aliases = json.tableAliases
+    const aliased =
+      table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
           value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-        query = query.orderBy(`${table?.name}.${key}`, direction)
+        query = query.orderBy(`${aliased}.${key}`, direction)
       }
     } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
       // @ts-ignore
-      query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
+      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
     }
     return query
   }
@@ -605,7 +610,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     return query.toSQL().toNative()
   }

-  async getReturningRow(queryFn: Function, json: QueryJson) {
+  async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
     if (!json.extra || !json.extra.idFilter) {
       return {}
     }
@@ -617,7 +622,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       resource: {
         fields: [],
       },
-      filters: json.extra.idFilter,
+      filters: json.extra?.idFilter,
       paginate: {
         limit: 1,
       },
@@ -646,7 +651,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   // this function recreates the returning functionality of postgres
   async queryWithReturning(
     json: QueryJson,
-    queryFn: Function,
+    queryFn: QueryFunction,
     processFn: Function = (result: any) => result
   ) {
     const sqlClient = this.getSqlClient()
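The sorting change above relies on `json.tableAliases`, which (as the `createSimple.json` fixture further down shows) maps a table name to its short alias, e.g. `{ "people": "a" }`. A small sketch of the lookup, with an illustrative helper name rather than the builder's real method:

```typescript
// Sketch only: how the ORDER BY column is qualified once aliases are present.
type TableAliases = Record<string, string> // table name -> alias, e.g. { people: "a" }

function qualifySortColumn(
  tableName: string | undefined,
  column: string,
  aliases?: TableAliases
): string {
  const aliased =
    tableName && aliases?.[tableName] ? aliases[tableName] : tableName
  return `${aliased}.${column}`
}

// qualifySortColumn("people", "name", { people: "a" }) -> "a.name"
// qualifySortColumn("people", "name")                  -> "people.name"
```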

View File

@@ -4,6 +4,7 @@ import Sql from "../base/sql"
 import { SqlClient } from "../utils"
 import AliasTables from "../../api/controllers/row/alias"
 import { generator } from "@budibase/backend-core/tests"
+import { Knex } from "knex"

 function multiline(sql: string) {
   return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
     })
   })

+  describe("returning (everything bar Postgres)", () => {
+    it("should be able to handle row returning", () => {
+      const queryJson = getJson("createSimple.json")
+      const SQL = new Sql(SqlClient.MS_SQL, limit)
+      let query = SQL._query(queryJson, { disableReturning: true })
+      expect(query).toEqual({
+        sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
+        bindings: [22, "Test"],
+      })
+      // now check returning
+      let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
+      SQL.getReturningRow((input: Knex.SqlNative) => {
+        returningQuery = input
+      }, queryJson)
+      expect(returningQuery).toEqual({
+        sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
+        bindings: [1, "Test", 22],
+      })
+    })
+  })
+
   describe("check max character aliasing", () => {
     it("should handle over 'z' max character alias", () => {
       const tableNames = []

View File

@@ -68,7 +68,7 @@
     "primary": [
       "personid"
     ],
-    "name": "a",
+    "name": "persons",
    "schema": {
      "year": {
        "type": "number",

View File

@@ -0,0 +1,64 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"resource": {
"fields": [
"a.name",
"a.age"
]
},
"filters": {},
"relationships": [],
"body": {
"name": "Test",
"age": 22
},
"extra": {
"idFilter": {
"equal": {
"name": "Test",
"age": 22
}
}
},
"meta": {
"table": {
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
"type": "table",
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"sourceType": "external",
"primary": [
"name",
"age"
],
"name": "people",
"schema": {
"name": {
"type": "string",
"externalType": "varchar",
"autocolumn": false,
"name": "name",
"constraints": {
"presence": true
}
},
"age": {
"type": "number",
"externalType": "int",
"autocolumn": false,
"name": "age",
"constraints": {
"presence": false
}
}
},
"primaryDisplay": "name"
}
},
"tableAliases": {
"people": "a"
}
}

View File

@@ -58,7 +58,7 @@
     "primary": [
       "personid"
     ],
-    "name": "a",
+    "name": "persons",
    "schema": {
      "year": {
        "type": "number",

View File

@@ -34,7 +34,7 @@
      "keypartone",
      "keyparttwo"
    ],
-    "name": "a",
+    "name": "compositetable",
    "schema": {
      "keyparttwo": {
        "type": "string",

View File

@@ -49,7 +49,7 @@
     "primary": [
       "taskid"
     ],
-    "name": "a",
+    "name": "tasks",
    "schema": {
      "executorid": {
        "type": "number",

View File

@@ -63,7 +63,7 @@
     "primary": [
       "productid"
     ],
-    "name": "a",
+    "name": "products",
    "schema": {
      "productname": {
        "type": "string",

View File

@@ -53,7 +53,7 @@
     "primary": [
       "productid"
     ],
-    "name": "a",
+    "name": "products",
    "schema": {
      "productname": {
        "type": "string",

View File

@@ -109,7 +109,7 @@
     "primary": [
       "taskid"
     ],
-    "name": "a",
+    "name": "tasks",
    "schema": {
      "executorid": {
        "type": "number",

View File

@@ -66,7 +66,7 @@
     "primary": [
       "personid"
     ],
-    "name": "a",
+    "name": "persons",
    "schema": {
      "year": {
        "type": "number",

View File

@@ -66,7 +66,7 @@
     "primary": [
       "personid"
     ],
-    "name": "a",
+    "name": "persons",
    "schema": {
      "year": {
        "type": "number",

View File

@@ -11,7 +11,10 @@ import {
 import * as exporters from "../../../../api/controllers/view/exporters"
 import sdk from "../../../../sdk"
 import { handleRequest } from "../../../../api/controllers/row/external"
-import { breakExternalTableId } from "../../../../integrations/utils"
+import {
+  breakExternalTableId,
+  breakRowIdField,
+} from "../../../../integrations/utils"
 import { cleanExportRows } from "../utils"
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "../search"
@@ -52,6 +55,15 @@ export async function search(options: SearchParams) {
     }
   }

+  // Make sure oneOf _id queries decode the Row IDs
+  if (query?.oneOf?._id) {
+    const rowIds = query.oneOf._id
+    query.oneOf._id = rowIds.map((row: string) => {
+      const ids = breakRowIdField(row)
+      return ids[0]
+    })
+  }
+
   try {
     const table = await sdk.tables.getTable(tableId)
     options = searchInputMapping(table, options)
@@ -119,9 +131,7 @@ export async function exportRows(
     requestQuery = {
       oneOf: {
         _id: rowIds.map((row: string) => {
-          const ids = JSON.parse(
-            decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
-          )
+          const ids = breakRowIdField(row)
           if (ids.length > 1) {
             throw new HTTPError(
               "Export data does not support composite keys.",

View File

@@ -21,10 +21,11 @@ jest.unmock("mysql2/promise")

 jest.setTimeout(30000)

-describe.skip("external", () => {
+describe("external search", () => {
   const config = new TestConfiguration()
   let externalDatasource: Datasource, tableData: Table
+  const rows: Row[] = []

   beforeAll(async () => {
     const container = await new GenericContainer("mysql")
@@ -89,67 +90,81 @@
         },
       },
     }
+
+    const table = await config.createExternalTable({
+      ...tableData,
+      sourceId: externalDatasource._id,
+    })
+    for (let i = 0; i < 10; i++) {
+      rows.push(
+        await config.createRow({
+          tableId: table._id,
+          name: generator.first(),
+          surname: generator.last(),
+          age: generator.age(),
+          address: generator.address(),
+        })
+      )
+    }
   })

-  describe("search", () => {
-    const rows: Row[] = []
-    beforeAll(async () => {
-      const table = await config.createExternalTable({
-        ...tableData,
-        sourceId: externalDatasource._id,
-      })
-      for (let i = 0; i < 10; i++) {
-        rows.push(
-          await config.createRow({
-            tableId: table._id,
-            name: generator.first(),
-            surname: generator.last(),
-            age: generator.age(),
-            address: generator.address(),
-          })
-        )
-      }
-    })
-
-    it("default search returns all the data", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
-        )
-      })
-    })
-
-    it("querying by fields will always return data attribute columns", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-          fields: ["name", "age"],
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(
-            rows.map(r => ({
-              ...expectAnyExternalColsAttributes,
-              name: r.name,
-              age: r.age,
-            }))
-          )
-        )
-      })
-    })
-  })
+  it("default search returns all the data", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
+      )
+    })
+  })
+
+  it("querying by fields will always return data attribute columns", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+        fields: ["name", "age"],
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(
+          rows.map(r => ({
+            ...expectAnyExternalColsAttributes,
+            name: r.name,
+            age: r.age,
+          }))
+        )
+      )
+    })
+  })
+
+  it("will decode _id in oneOf query", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {
+          oneOf: {
+            _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
+          },
+        },
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(3)
+      expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
+    })
+  })
 })

View File

@@ -1,6 +1,5 @@
 import {
   FieldType,
-  FieldTypeSubtypes,
   SearchParams,
   Table,
   DocumentType,