Merge branch 'master' into develop

Rory Powell 2023-06-05 23:19:43 +01:00
commit abbd8c5c8b
7 changed files with 45 additions and 31 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.6.19-alpha.54",
+  "version": "2.6.23",
   "npmClient": "yarn",
   "packages": [
     "packages/backend-core",

View File

@@ -86,6 +86,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
       installationId,
       tenantId,
       environment,
+      realTenantId: context.getTenantId(),
       hostInfo: userContext.hostInfo,
     }
   } else {
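The only change in this hunk is the extra realTenantId field, read from context.getTenantId() when the identity is assembled; the reasoning below is inferred rather than stated in the diff. context.getTenantId() returns the tenant bound to the request currently being handled, so the identity now records that alongside whatever tenantId it already carried. A minimal sketch of the request-context pattern this relies on, assuming an AsyncLocalStorage-style store; illustrative only, not the @budibase/backend-core implementation:

// Hedged sketch of the request-context pattern behind context.getTenantId();
// illustrative only, not the @budibase/backend-core implementation.
import { AsyncLocalStorage } from "async_hooks"

const tenantStorage = new AsyncLocalStorage<{ tenantId: string }>()

// Run a task with a tenant bound to the current async context.
function doInTenant<T>(tenantId: string, task: () => T): T {
  return tenantStorage.run({ tenantId }, task)
}

// Read the tenant of whatever request/task is currently executing.
function getTenantId(): string | undefined {
  return tenantStorage.getStore()?.tenantId
}

// The identity built in the hunk above can therefore record the tenant of the
// request that is actually running when it is assembled.
doInTenant("tenant-a", () => {
  console.log(getTenantId()) // "tenant-a"
})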

View File

@@ -1 +1 @@
-Subproject commit cd06642b860111aa1bd3443ee10076ca3abf03c3
+Subproject commit 65a626155bbb4702b8dd4dbe06e99f2c47d47698

View File

@@ -14,6 +14,7 @@ import {
   SearchFilters,
   Table,
 } from "@budibase/types"
+import { db as dbCore } from "@budibase/backend-core"
 
 enum SortOrder {
   ASCENDING = "ascending",
@@ -121,7 +122,11 @@ function typeCoercion(filters: SearchFilters, table: Table) {
     const searchParam = filters[key]
     if (typeof searchParam === "object") {
       for (let [property, value] of Object.entries(searchParam)) {
-        const column = table.schema[property]
+        // We need to strip numerical prefixes here, so that we can look up
+        // the correct field name in the schema
+        const columnName = dbCore.removeKeyNumbering(property)
+        const column = table.schema[columnName]
         // convert string inputs
         if (!column || typeof value !== "string") {
           continue
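For background on the hunk above: the keys of a SearchFilters clause can arrive with a numeric prefix (a format like "1:age" is assumed here), and the new dbCore.removeKeyNumbering(property) call strips that prefix so the schema lookup uses the bare column name. A rough sketch of the idea, using a stand-in stripKeyNumbering helper rather than the real backend-core function:

// Hedged sketch: a stand-in for dbCore.removeKeyNumbering, assuming filter keys
// may carry a numeric prefix such as "1:age". Not the backend-core source.
function stripKeyNumbering(key: string): string {
  // "1:age" -> "age"; keys without a prefix pass through unchanged
  return key.replace(/^[0-9]+:/, "")
}

// Why the schema lookup needed it (illustrative schema, not a Budibase Table):
const schema: Record<string, { type: string }> = { age: { type: "number" } }
const property = "1:age"
console.log(schema[property])                    // undefined, column missed
console.log(schema[stripKeyNumbering(property)]) // { type: "number" }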

View File

@@ -38,6 +38,9 @@ const SCHEMA: Integration = {
       type: "password",
       required: true,
     },
+    role: {
+      type: "string",
+    },
     warehouse: {
       type: "string",
       required: true,
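The schema above gains an optional role connection option (the warehouse field suggests this is the Snowflake datasource, though the file name is not shown in this extract). A hedged sketch of how an optional field like this is typically carried into the connection config; every name here apart from role and warehouse is assumed for illustration:

// Hedged sketch: illustrative config shape only, not the actual integration code.
// Only `role` (new) and `warehouse` appear in the hunk; the other fields are assumed.
interface WarehouseConfigSketch {
  account: string
  username: string
  password: string
  role?: string // optional, per the schema change above
  warehouse: string
  database: string
}

// Drop undefined optional fields so the client library only sees what was set.
function buildConnectionOptions(config: WarehouseConfigSketch) {
  return Object.fromEntries(
    Object.entries(config).filter(([, value]) => value !== undefined)
  )
}

console.log(
  buildConnectionOptions({
    account: "my-account",
    username: "svc_budibase",
    password: "secret",
    warehouse: "COMPUTE_WH",
    database: "ANALYTICS",
  })
)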

View File

@@ -7,7 +7,7 @@ import {
   InternalTables,
 } from "../../db/utils"
 import { isEqual } from "lodash"
-import { ContextUser, UserMetadata, User } from "@budibase/types"
+import { ContextUser, UserMetadata, User, Database } from "@budibase/types"
 
 export function combineMetadataAndUser(
   user: ContextUser,
@@ -51,8 +51,10 @@ export function combineMetadataAndUser(
   return null
 }
 
-export async function rawUserMetadata() {
-  const db = context.getAppDB()
+export async function rawUserMetadata(db?: Database) {
+  if (!db) {
+    db = context.getAppDB()
+  }
   return (
     await db.allDocs(
       getUserMetadataParams(null, {
@@ -64,30 +66,36 @@ export async function rawUserMetadata() {
 
 export async function syncGlobalUsers() {
   // sync user metadata
-  const db = context.getAppDB()
-  const resp = await Promise.all([getGlobalUsers(), rawUserMetadata()])
-  const users = resp[0] as User[]
-  const metadata = resp[1] as UserMetadata[]
-  const toWrite = []
-  for (let user of users) {
-    const combined = combineMetadataAndUser(user, metadata)
-    if (combined) {
-      toWrite.push(combined)
-    }
-  }
-  let foundEmails: string[] = []
-  for (let data of metadata) {
-    if (!data._id) {
-      continue
-    }
-    const alreadyExisting = data.email && foundEmails.indexOf(data.email) !== -1
-    const globalId = getGlobalIDFromUserMetadataID(data._id)
-    if (!users.find(user => user._id === globalId) || alreadyExisting) {
-      toWrite.push({ ...data, _deleted: true })
-    }
-    if (data.email) {
-      foundEmails.push(data.email)
-    }
-  }
-  await db.bulkDocs(toWrite)
+  const dbs = [context.getDevAppDB(), context.getProdAppDB()]
+  for (let db of dbs) {
+    if (!(await db.exists())) {
+      continue
+    }
+    const resp = await Promise.all([getGlobalUsers(), rawUserMetadata(db)])
+    const users = resp[0] as User[]
+    const metadata = resp[1] as UserMetadata[]
+    const toWrite = []
+    for (let user of users) {
+      const combined = combineMetadataAndUser(user, metadata)
+      if (combined) {
+        toWrite.push(combined)
+      }
+    }
+    let foundEmails: string[] = []
+    for (let data of metadata) {
+      if (!data._id) {
+        continue
+      }
+      const alreadyExisting =
+        data.email && foundEmails.indexOf(data.email) !== -1
+      const globalId = getGlobalIDFromUserMetadataID(data._id)
+      if (!users.find(user => user._id === globalId) || alreadyExisting) {
+        toWrite.push({ ...data, _deleted: true })
+      }
+      if (data.email) {
+        foundEmails.push(data.email)
+      }
+    }
+    await db.bulkDocs(toWrite)
+  }
 }
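Two things change in this hunk: rawUserMetadata now accepts an optional Database and falls back to context.getAppDB() when none is given, so existing callers keep working, and syncGlobalUsers writes user metadata to both the dev and prod app databases, skipping any database that does not exist. A hedged usage sketch with stand-in types (DatabaseSketch and these helpers are invented for illustration, not the Budibase APIs):

// Hedged sketch with stand-in types; DatabaseSketch and these helpers are
// invented for illustration and are not the Budibase APIs.
interface DatabaseSketch {
  name: string
  exists(): Promise<boolean>
}

// Same optional-parameter pattern as the new rawUserMetadata(db?): callers
// that pass nothing keep the old single-DB behaviour.
async function rawUserMetadataSketch(db?: DatabaseSketch) {
  if (!db) {
    db = { name: "current app DB (context.getAppDB())", exists: async () => true }
  }
  return `metadata read from ${db.name}`
}

// Same loop shape as the new syncGlobalUsers: try dev and prod, skip missing DBs
// (for example, an app that has never been published has no prod database).
async function syncSketch(dbs: DatabaseSketch[]) {
  for (let db of dbs) {
    if (!(await db.exists())) {
      continue
    }
    console.log(await rawUserMetadataSketch(db))
  }
}

syncSketch([
  { name: "app_dev_123", exists: async () => true },
  { name: "app_123", exists: async () => false },
])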

View File

@@ -122,11 +122,8 @@ export async function getGlobalUsers(
     delete user.forceResetPassword
     return user
   })
 
-  if (!appId) {
-    return globalUsers
-  }
-  if (opts?.noProcessing) {
+  if (opts?.noProcessing || !appId) {
     return globalUsers
   } else {
     // pass in the groups, meaning we don't actually need to retrieve them for