Merge branch 'master' into drawer-eval-improvements

commit 3acf6f37df by Andrew Kingston, 2024-05-30 09:06:36 +01:00 (committed by GitHub)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
10 changed files with 640 additions and 158 deletions

View File

@@ -40,7 +40,7 @@
     "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
     "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
     "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
-    "check:types": "lerna run check:types",
+    "check:types": "lerna run --concurrency 2 check:types",
     "build:sdk": "lerna run --stream build:sdk",
     "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
     "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",

@@ -1 +1 @@
-Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964
+Subproject commit 39acfff42a063e5a8a7d58d36721ec3103e16348

View File

@@ -1,6 +1,11 @@
 import { Knex, knex } from "knex"
 import * as dbCore from "../db"
-import { isIsoDateString, isValidFilter, getNativeSql } from "./utils"
+import {
+  isIsoDateString,
+  isValidFilter,
+  getNativeSql,
+  isExternalTable,
+} from "./utils"
 import { SqlStatements } from "./sqlStatements"
 import SqlTableQueryBuilder from "./sqlTable"
 import {
@@ -21,6 +26,7 @@ import {
   SqlClient,
   QueryOptions,
   JsonTypes,
+  prefixed,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -391,6 +397,16 @@ class InternalBuilder {
       contains(filters.containsAny, true)
     }
+
+    // when searching internal tables, make sure we are only looking for rows
+    if (filters.documentType && !isExternalTable(table)) {
+      const tableRef = opts?.aliases?.[table._id!] || table._id
+      // has to be its own option, must always be ANDed onto the search
+      query.andWhereLike(
+        `${tableRef}._id`,
+        `${prefixed(filters.documentType)}%`
+      )
+    }
     return query
   }
@@ -592,6 +608,7 @@ class InternalBuilder {
     query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
     // add sorting to pre-query
     query = this.addSorting(query, json)
     const alias = tableAliases?.[tableName] || tableName
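The net effect of the documentType filter above is an extra LIKE clause on the internal row ID prefix. A minimal sketch of the query shape, assuming a Knex version that provides andWhereLike and assuming prefixed(DocumentType.ROW) resolves to the "ro_" ID prefix used for internal rows (the prefix value, table name, and alias here are illustrative, not taken from the diff):

import { knex } from "knex"

// stand-alone Knex instance purely to show the shape of the query the builder produces
const client = knex({ client: "sqlite3", useNullAsDefault: true })

const query = client("ta_users as a")
  .select("*")
  .where("a.name", "like", "%smith%") // whatever the user's filters produced
  // the clause added when filters.documentType is set and the table is internal:
  .andWhereLike("a._id", "ro_%") // "ro_" is an assumed stand-in for prefixed(DocumentType.ROW)

// query.toString() comes out roughly as:
// select * from `ta_users` as `a` where `a`.`name` like '%smith%' and `a`.`_id` like 'ro_%'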

View File

@@ -32,8 +32,14 @@
     onboarding = true
     try {
       const { password, firstName, lastName } = formData
-      await users.acceptInvite(inviteCode, password, firstName, lastName)
+      const user = await users.acceptInvite(
+        inviteCode,
+        password,
+        firstName,
+        lastName
+      )
       notifications.success("Invitation accepted successfully")
+      auth.setOrg(user.tenantId)
       await login()
     } catch (error) {
       notifications.error(error.message)
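This client-side change pairs with the tenantId now returned from the accept-invite endpoint (see the AcceptUserInviteResponse and controller changes further down): the page can pin the session to the invited user's tenant before calling login(). A rough sketch of that flow, with the store and helper signatures declared here only for illustration (the real users and auth stores are imported by the Svelte page):

interface AcceptUserInviteResponse {
  _id: string
  _rev: string
  email: string
  tenantId: string
}

// illustrative signatures only; the actual stores live in the builder frontend
declare const users: {
  acceptInvite(
    code: string,
    password: string,
    firstName: string,
    lastName?: string
  ): Promise<AcceptUserInviteResponse>
}
declare const auth: { setOrg(tenantId: string): void }
declare function login(): Promise<void>

async function acceptInviteFlow(
  inviteCode: string,
  formData: { password: string; firstName: string; lastName?: string }
) {
  const user = await users.acceptInvite(
    inviteCode,
    formData.password,
    formData.firstName,
    formData.lastName
  )
  // switch onto the invited user's tenant before attempting the login request
  auth.setOrg(user.tenantId)
  await login()
}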
@@ -66,7 +72,7 @@
       notifications.success("Logged in successfully")
       $goto("../portal")
     } catch (err) {
-      notifications.error(err.message ? err.message : "Invalid credentials") //not likely, considering.
+      notifications.error(err.message ? err.message : "Something went wrong")
     }
   }
@@ -141,12 +147,19 @@
           password: e.detail,
         }
       }}
+      validateOn="blur"
       validate={() => {
         let fieldError = {}
-        fieldError["password"] = !formData.password
-          ? "Please enter a password"
-          : undefined
+        function validatePassword() {
+          if (!formData.password) {
+            return "Please enter a password"
+          } else if (formData.password.length < 8) {
+            return "Please enter at least 8 characters"
+          }
+          return undefined
+        }
+        fieldError["password"] = validatePassword()
         fieldError["confirmationPassword"] =
           !passwordsMatch(

View File

@@ -1,4 +1,5 @@
 import {
+  DocumentType,
   FieldType,
   Operation,
   QueryJson,
@@ -148,7 +149,10 @@ export async function search(
       entityId: table._id!,
       operation: Operation.READ,
     },
-    filters: cleanupFilters(query, allTables),
+    filters: {
+      ...cleanupFilters(query, allTables),
+      documentType: DocumentType.ROW,
+    },
     table,
     meta: {
       table,
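Note the ordering in the spread above: documentType is applied after the cleaned-up user filters, so nothing in the incoming query can override it. A trimmed illustration, with cleanupFilters stubbed and the DocumentType.ROW value assumed:

type SketchFilters = { equal?: Record<string, any>; documentType?: string }

// stand-in for the real cleanupFilters helper, which normalises the incoming query
function cleanupFilters(query: Record<string, any>): SketchFilters {
  return { equal: query.equal, documentType: query.documentType }
}

const incoming = { equal: { name: "smith" }, documentType: "table" } // caller-supplied, possibly wrong
const filters: SketchFilters = {
  ...cleanupFilters(incoming),
  documentType: "row", // assumed value of DocumentType.ROW; always wins because it is spread last
}
// filters.documentType === "row"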

View File

@@ -92,6 +92,7 @@ export interface AcceptUserInviteResponse {
   _id: string
   _rev: string
   email: string
+  tenantId: string
 }

 export interface SyncUserRequest {

View File

@@ -1,5 +1,5 @@
 import { Operation, SortDirection } from "./datasources"
-import { Row, Table } from "../documents"
+import { Row, Table, DocumentType } from "../documents"
 import { SortType } from "../api"
 import { Knex } from "knex"
@@ -62,11 +62,15 @@ export interface SearchFilters {
   [SearchFilterOperator.CONTAINS_ANY]?: {
     [key: string]: any[]
   }
+  // specific to SQS/SQLite search on internal tables, this can be used
+  // to make sure the documents returned are always filtered down to a
+  // specific document type (such as just rows)
+  documentType?: DocumentType
 }

 export type SearchFilterKey = keyof Omit<
   SearchFilters,
-  "allOr" | "onEmptyFilter" | "fuzzyOr"
+  "allOr" | "onEmptyFilter" | "fuzzyOr" | "documentType"
 >

 export type SearchQueryFields = Omit<SearchFilters, "allOr" | "onEmptyFilter">
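Adding "documentType" to the Omit keeps the new field out of SearchFilterKey, the union used for user-facing filter keys, while server code can still set it directly on SearchFilters. A small self-contained illustration of the pattern (the types below are trimmed stand-ins, not the real definitions):

type DocumentTypeSketch = "row" | "table" // stand-in for the DocumentType enum

interface SearchFiltersSketch {
  equal?: { [key: string]: any }
  allOr?: boolean
  documentType?: DocumentTypeSketch
}

// mirrors the diff: control fields are stripped from the key union
type SearchFilterKeySketch = keyof Omit<SearchFiltersSketch, "allOr" | "documentType">

const ok: SearchFilterKeySketch = "equal"
// const bad: SearchFilterKeySketch = "documentType" // compile error, as intended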

View File

@@ -35,6 +35,7 @@ import {
 } from "@budibase/backend-core"
 import { checkAnyUserExists } from "../../../utilities/users"
 import { isEmailConfigured } from "../../../utilities/email"
+import { BpmStatusKey, BpmStatusValue } from "@budibase/shared-core"

 const MAX_USERS_UPLOAD_LIMIT = 1000
@@ -444,10 +445,16 @@ export const inviteAccept = async (
       await cache.invite.deleteCode(inviteCode)

+      // make sure onboarding flow is cleared
+      ctx.cookies.set(BpmStatusKey.ONBOARDING, BpmStatusValue.COMPLETED, {
+        expires: new Date(0),
+      })
+
       ctx.body = {
         _id: user._id!,
         _rev: user._rev!,
         email: user.email,
+        tenantId: user.tenantId,
       }
     }
   )
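Setting a cookie with expires: new Date(0) is the usual Koa way to make the browser drop it immediately, which is how the onboarding flag gets cleared once the invite is accepted. A minimal sketch of the pattern on a bare Koa app, with a hypothetical cookie name standing in for BpmStatusKey.ONBOARDING:

import Koa from "koa"

const app = new Koa()

app.use(async ctx => {
  if (ctx.path === "/invite/accept" && ctx.method === "POST") {
    // an already-expired date instructs the browser to discard the cookie right away
    ctx.cookies.set("bpm_onboarding", "completed", { expires: new Date(0) })
    ctx.body = { accepted: true }
  }
})

app.listen(3000)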

View File

@@ -105,11 +105,6 @@ const NO_TENANCY_ENDPOINTS = [
     route: "/api/admin/auth/oidc/callback",
     method: "GET",
   },
-  // tenant is determined from code in redis
-  {
-    route: "/api/global/users/invite/accept",
-    method: "POST",
-  },
   // global user search - no tenancy
   // :id is user id
   // TODO: this should really be `/api/system/users/:id`
@@ -117,6 +112,15 @@ const NO_TENANCY_ENDPOINTS = [
     route: "/api/global/users/tenant/:id",
     method: "GET",
   },
+  // tenant is determined from code in redis
+  {
+    route: "/api/global/users/invite/accept",
+    method: "POST",
+  },
+  {
+    route: "/api/global/users/invite/:code",
+    method: "GET",
+  },
 ]

 // most public endpoints are gets, but some are posts
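The list above is consulted by the tenancy middleware to decide which routes skip tenant resolution; the invite endpoints can live here because the tenant is recovered from the invite code stored in redis rather than from the request. A rough sketch of the kind of route/method matching such a list implies (this matcher is a simplification, not the real middleware):

interface EndpointSpec {
  route: string
  method: string
}

const NO_TENANCY: EndpointSpec[] = [
  { route: "/api/global/users/invite/accept", method: "POST" },
  { route: "/api/global/users/invite/:code", method: "GET" },
]

// naive matcher: ":param" route segments are treated as single-segment wildcards
function isNoTenancyEndpoint(path: string, method: string): boolean {
  return NO_TENANCY.some(endpoint => {
    if (endpoint.method !== method) return false
    const pattern = new RegExp("^" + endpoint.route.replace(/:[^/]+/g, "[^/]+") + "$")
    return pattern.test(path)
  })
}

// isNoTenancyEndpoint("/api/global/users/invite/abc123", "GET") === true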

yarn.lock: 716 lines changed (diff suppressed because it is too large)