Merge branch 'master' into chore/refactor-finalizerow
commit ff2c06bed4
@@ -281,6 +281,7 @@ jobs:
  check-lockfile:
    runs-on: ubuntu-latest
    if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

@@ -6,6 +6,26 @@ import {
import { ContainerInfo } from "dockerode"
import path from "path"
import lockfile from "proper-lockfile"
import { execSync } from "child_process"

interface DockerContext {
  Name: string
  Description: string
  DockerEndpoint: string
  ContextType: string
  Error: string
}

function getCurrentDockerContext(): DockerContext {
  const out = execSync("docker context ls --format json")
  for (const line of out.toString().split("\n")) {
    const parsed = JSON.parse(line)
    if (parsed.Current) {
      return parsed as DockerContext
    }
  }
  throw new Error("No current Docker context")
}

async function getBudibaseContainers() {
  const client = await getContainerRuntimeClient()

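For context, `docker context ls --format json` emits one JSON object per line rather than a single array, which is why getCurrentDockerContext splits on newlines before parsing. A sketch with an illustrative output line (the field values are made up for the example):

// Illustrative: one line of `docker context ls --format json` output. The
// parser above returns the first object whose Current field is true.
const sampleLine =
  '{"Current":true,"Name":"default","Description":"Current DOCKER_HOST based configuration","DockerEndpoint":"unix:///var/run/docker.sock","ContextType":"moby","Error":""}'
const sampleContext = JSON.parse(sampleLine)
console.log(sampleContext.DockerEndpoint) // unix:///var/run/docker.sock
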
@@ -27,6 +47,14 @@ async function killContainers(containers: ContainerInfo[]) {
}

export default async function setup() {
  // For whatever reason, testcontainers doesn't always use the correct current
  // docker context. This bit of code forces the issue by finding the current
  // context and setting it as the DOCKER_HOST environment
  if (!process.env.DOCKER_HOST) {
    const dockerContext = getCurrentDockerContext()
    process.env.DOCKER_HOST = dockerContext.DockerEndpoint
  }

  const lockPath = path.resolve(__dirname, "globalSetup.ts")
  // If you run multiple tests at the same time, it's possible for the CouchDB
  // shared container to get started multiple times despite having an

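The truncated comment above refers to guarding the shared CouchDB container with the proper-lockfile dependency added in this change. A minimal sketch of that serialisation pattern — the wrapper and retry options are assumptions for illustration, not the code from this commit:

import path from "path"
import lockfile from "proper-lockfile"

// Sketch: use the globalSetup.ts file itself as the lock target so only one
// Jest worker performs container startup; the others block on the lock and
// then reuse the already-running container.
async function withGlobalSetupLock(fn: () => Promise<void>) {
  const lockPath = path.resolve(__dirname, "globalSetup.ts")
  // retries lets waiting workers acquire the lock once the first worker
  // finishes, instead of failing immediately.
  const release = await lockfile.lock(lockPath, {
    retries: { retries: 10, minTimeout: 1000 },
  })
  try {
    await fn()
  } finally {
    await release()
  }
}
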
@@ -1,6 +1,6 @@
{
  "$schema": "node_modules/lerna/schemas/lerna-schema.json",
  "version": "3.2.14",
  "version": "3.2.16",
  "npmClient": "yarn",
  "concurrency": 20,
  "command": {

@@ -9,6 +9,7 @@
    "@types/node": "20.10.0",
    "@types/proper-lockfile": "^4.1.4",
    "@typescript-eslint/parser": "6.9.0",
    "cross-spawn": "7.0.6",
    "depcheck": "^1.4.7",
    "esbuild": "^0.18.17",
    "esbuild-node-externals": "^1.14.0",

@@ -29,8 +30,7 @@
    "svelte-eslint-parser": "^0.33.1",
    "typescript": "5.5.2",
    "typescript-eslint": "^7.3.1",
    "yargs": "^17.7.2",
    "cross-spawn": "7.0.6"
    "yargs": "^17.7.2"
  },
  "scripts": {
    "get-past-client-version": "node scripts/getPastClientVersion.js",

@@ -76,7 +76,6 @@
    "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 -t budibase/couchdb:v3.3.3-sqs-v2.1.1 --push ./hosting/couchdb",
    "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
    "release:helm": "node scripts/releaseHelmChart",
    "env:multi:enable": "lerna run --stream env:multi:enable",
    "env:multi:disable": "lerna run --stream env:multi:disable",
    "env:selfhost:enable": "lerna run --stream env:selfhost:enable",

@@ -121,7 +121,7 @@ const identifyInstallationGroup = async (

const identifyTenantGroup = async (
  tenantId: string,
  account: Account | undefined,
  hosting: Hosting,
  timestamp?: string | number
): Promise<void> => {
  const id = await getEventTenantId(tenantId)

@@ -129,26 +129,12 @@ const identifyTenantGroup = async (
  const installationId = await getInstallationId()
  const environment = getDeploymentEnvironment()

  let hosting: Hosting
  let profession: string | undefined
  let companySize: string | undefined

  if (account) {
    profession = account.profession
    companySize = account.size
    hosting = account.hosting
  } else {
    hosting = getHostingFromEnv()
  }

  const group: TenantGroup = {
    id,
    type,
    hosting,
    environment,
    installationId,
    profession,
    companySize,
  }

  await identifyGroup(group, timestamp)

@@ -266,12 +266,14 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// new flag, add it here and use the `fetch` and `get` functions to access it.
// All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system.
export const flags = new FlagSet({
const flagsConfig: Record<FeatureFlag, Flag<any>> = {
  [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
  [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
  [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
  [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
})
  [FeatureFlag.USE_ZOD_VALIDATOR]: Flag.boolean(env.isDev()),
}
export const flags = new FlagSet(flagsConfig)

type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>

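Elsewhere in this change the new flag is read through the backend-core features API (see the zod-validator middleware further down). A minimal usage sketch; the surrounding function is invented for illustration:

import { features } from "@budibase/backend-core"
import { FeatureFlag } from "@budibase/types"

// Sketch: flags declared in flagsConfig above are queried by key at runtime,
// exactly as the new middleware does before validating a request body.
async function shouldValidateWithZod(): Promise<boolean> {
  return await features.flags.isEnabled(FeatureFlag.USE_ZOD_VALIDATOR)
}
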
@@ -284,13 +284,13 @@ class InternalBuilder {
  }

  private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
    const { endpoint, resource } = this.query
    const { table, resource } = this.query

    if (!resource || !resource.fields || resource.fields.length === 0) {
      return "*"
    }

    const alias = this.getTableName(endpoint.entityId)
    const alias = this.getTableName(table)
    const schema = this.table.schema
    if (!this.isFullSelectStatementRequired()) {
      return [this.knex.raw("??", [`${alias}.*`])]

@@ -496,7 +496,7 @@ class InternalBuilder {
    filterKey: string,
    whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
  ): Knex.QueryBuilder {
    const { relationships, endpoint, tableAliases: aliases, table } = this.query
    const { relationships, schema, tableAliases: aliases, table } = this.query
    const fromAlias = aliases?.[table.name] || table.name
    const matches = (value: string) =>
      filterKey.match(new RegExp(`^${value}\\.`))

@@ -537,7 +537,7 @@ class InternalBuilder {
        aliases?.[manyToMany.through] || relationship.through
      let throughTable = this.tableNameWithSchema(manyToMany.through, {
        alias: throughAlias,
        schema: endpoint.schema,
        schema,
      })
      subQuery = subQuery
        // add a join through the junction table

@@ -1010,28 +1010,10 @@ class InternalBuilder {
    return isSqs(this.table)
  }

  getTableName(tableOrName?: Table | string): string {
    let table: Table
    if (typeof tableOrName === "string") {
      const name = tableOrName
      if (this.query.table?.name === name) {
        table = this.query.table
      } else if (this.query.table.name === name) {
        table = this.query.table
      } else if (!this.query.tables[name]) {
        // This can legitimately happen in custom queries, where the user is
        // querying against a table that may not have been imported into
        // Budibase.
        return name
      } else {
        table = this.query.tables[name]
      }
    } else if (tableOrName) {
      table = tableOrName
    } else {
  getTableName(table?: Table): string {
    if (!table) {
      table = this.table
    }

    let name = table.name
    if (isSqs(table) && table._id) {
      // SQS uses the table ID rather than the table name

@@ -1242,7 +1224,7 @@ class InternalBuilder {
  ): Knex.QueryBuilder {
    const sqlClient = this.client
    const knex = this.knex
    const { resource, tableAliases: aliases, endpoint, tables } = this.query
    const { resource, tableAliases: aliases, schema, tables } = this.query
    const fields = resource?.fields || []
    for (let relationship of relationships) {
      const {

@@ -1266,7 +1248,7 @@ class InternalBuilder {
      throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
      let toTableWithSchema = this.tableNameWithSchema(toTable, {
        alias: toAlias,
        schema: endpoint.schema,
        schema,
      })
      const requiredFields = [
        ...(relatedTable?.primary || []),

@@ -1310,7 +1292,7 @@ class InternalBuilder {
      if (isManyToMany) {
        let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
          alias: throughAlias,
          schema: endpoint.schema,
          schema,
        })
        subQuery = subQuery.join(throughTableWithSchema, function () {
          this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)

@@ -1401,8 +1383,7 @@ class InternalBuilder {
      toPrimary?: string
    }[]
  ): Knex.QueryBuilder {
    const { tableAliases: aliases, endpoint } = this.query
    const schema = endpoint.schema
    const { tableAliases: aliases, schema } = this.query
    const toTable = tables.to,
      fromTable = tables.from,
      throughTable = tables.through

@@ -1462,7 +1443,7 @@ class InternalBuilder {
    return this.knex(
      this.tableNameWithSchema(this.query.table.name, {
        alias,
        schema: this.query.endpoint.schema,
        schema: this.query.schema,
      })
    )
  }

@@ -1556,9 +1537,8 @@ class InternalBuilder {
      limits?: { base: number; query: number }
    } = {}
  ): Knex.QueryBuilder {
    let { endpoint, filters, paginate, relationships, table } = this.query
    let { operation, filters, paginate, relationships, table } = this.query
    const { limits } = opts
    const counting = endpoint.operation === Operation.COUNT

    // start building the query
    let query = this.qualifiedKnex()

@@ -1578,7 +1558,7 @@ class InternalBuilder {
      foundLimit = paginate.limit
    }
    // counting should not sort, limit or offset
    if (!counting) {
    if (operation !== Operation.COUNT) {
      // add the found limit if supplied
      if (foundLimit != null) {
        query = query.limit(foundLimit)

@@ -1590,7 +1570,7 @@ class InternalBuilder {
    }

    const aggregations = this.query.resource?.aggregations || []
    if (counting) {
    if (operation === Operation.COUNT) {
      query = this.addDistinctCount(query)
    } else if (aggregations.length > 0) {
      query = this.addAggregations(query, aggregations)

@@ -1599,7 +1579,7 @@ class InternalBuilder {
    }

    // have to add after as well (this breaks MS-SQL)
    if (!counting) {
    if (operation !== Operation.COUNT) {
      query = this.addSorting(query)
    }

@@ -1738,13 +1718,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
      return {}
    }
    const input = this._query({
      operation: Operation.READ,
      datasource: json.datasource,
      schema: json.schema,
      table: json.table,
      tables: json.tables,
      endpoint: {
        ...json.endpoint,
        operation: Operation.READ,
      },
      resource: { fields: [] },
      filters: json.extra?.idFilter,
      paginate: { limit: 1 },

@@ -239,14 +239,13 @@ class SqlTableQueryBuilder {
   * @return the operation that was found in the JSON.
   */
  _operation(json: EnrichedQueryJson): Operation {
    return json.endpoint.operation
    return json.operation
  }

  _tableQuery(json: EnrichedQueryJson): SqlQuery | SqlQuery[] {
    let client = knex({ client: this.sqlClient }).schema
    let schemaName = json?.endpoint?.schema
    if (schemaName) {
      client = client.withSchema(schemaName)
    if (json?.schema) {
      client = client.withSchema(json.schema)
    }

    let query: Knex.SchemaBuilder

@@ -268,8 +267,8 @@ class SqlTableQueryBuilder {
    // renameColumn does not work for MySQL, so return a raw query
    if (this.sqlClient === SqlClient.MY_SQL && json.meta?.renamed) {
      const updatedColumn = json.meta.renamed.updated
      const tableName = schemaName
        ? `\`${schemaName}\`.\`${json.table.name}\``
      const tableName = json?.schema
        ? `\`${json.schema}\`.\`${json.table.name}\``
        : `\`${json.table.name}\``
      return {
        sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,

@@ -290,8 +289,8 @@ class SqlTableQueryBuilder {
    if (this.sqlClient === SqlClient.MS_SQL && json.meta?.renamed) {
      const oldColumn = json.meta.renamed.old
      const updatedColumn = json.meta.renamed.updated
      const tableName = schemaName
        ? `${schemaName}.${json.table.name}`
      const tableName = json?.schema
        ? `${json.schema}.${json.table.name}`
        : `${json.table.name}`
      const sql = getNativeSql(query)
      if (Array.isArray(sql)) {

@@ -25,7 +25,7 @@ function getTestcontainers(): ContainerInfo[] {
  // We use --format json to make sure the output is nice and machine-readable,
  // and we use --no-trunc so that the command returns full container IDs so we
  // can filter on them correctly.
  return execSync("docker ps --format json --no-trunc")
  return execSync("docker ps --all --format json --no-trunc")
    .toString()
    .split("\n")
    .filter(x => x.length > 0)

@@ -37,6 +37,10 @@ function getTestcontainers(): ContainerInfo[] {
    )
}

function removeContainer(container: ContainerInfo) {
  execSync(`docker rm ${container.ID}`)
}

export function getContainerByImage(image: string) {
  const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
  if (containers.length > 1) {

@@ -49,6 +53,10 @@ export function getContainerByImage(image: string) {
  return containers[0]
}

function getContainerByName(name: string) {
  return getTestcontainers().find(x => x.Names === name)
}

export function getContainerById(id: string) {
  return getTestcontainers().find(x => x.ID === id)
}

@@ -70,7 +78,34 @@ export function getExposedV4Port(container: ContainerInfo, port: number) {
  return getExposedV4Ports(container).find(x => x.container === port)?.host
}

interface DockerContext {
  Name: string
  Description: string
  DockerEndpoint: string
  ContextType: string
  Error: string
}

function getCurrentDockerContext(): DockerContext {
  const out = execSync("docker context ls --format json")
  for (const line of out.toString().split("\n")) {
    const parsed = JSON.parse(line)
    if (parsed.Current) {
      return parsed as DockerContext
    }
  }
  throw new Error("No current Docker context")
}

export function setupEnv(...envs: any[]) {
  // For whatever reason, testcontainers doesn't always use the correct current
  // docker context. This bit of code forces the issue by finding the current
  // context and setting it as the DOCKER_HOST environment
  if (!process.env.DOCKER_HOST) {
    const dockerContext = getCurrentDockerContext()
    process.env.DOCKER_HOST = dockerContext.DockerEndpoint
  }

  // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
  // should be relatively safe to look for it by its image name.
  const couch = getContainerByImage("budibase/couchdb")

@@ -116,6 +151,16 @@ export async function startContainer(container: GenericContainer) {
    key = imageName.split("@")[0]
  }
  key = key.replace(/\//g, "-").replace(/:/g, "-")
  const name = `${key}_testcontainer`

  // If a container has died it hangs around and future attempts to start a
  // container with the same name will fail. What we do here is if we find a
  // matching container and it has exited, we remove it before carrying on. This
  // removes the need to do this removal manually.
  const existingContainer = getContainerByName(name)
  if (existingContainer?.State === "exited") {
    removeContainer(existingContainer)
  }

  container = container
    .withReuse()

@@ -4,27 +4,21 @@
  "version": "0.0.0",
  "license": "MPL-2.0",
  "svelte": "src/index.js",
  "module": "dist/bbui.es.js",
  "module": "dist/bbui.mjs",
  "exports": {
    ".": {
      "import": "./dist/bbui.es.js"
      "import": "./dist/bbui.mjs"
    },
    "./package.json": "./package.json",
    "./spectrum-icons-rollup.js": "./src/spectrum-icons-rollup.js",
    "./spectrum-icons-vite.js": "./src/spectrum-icons-vite.js"
  },
  "scripts": {
    "build": "rollup -c"
    "build": "vite build"
  },
  "devDependencies": {
    "@rollup/plugin-commonjs": "^16.0.0",
    "@rollup/plugin-json": "^4.1.0",
    "@rollup/plugin-node-resolve": "^11.2.1",
    "postcss": "^8.2.9",
    "rollup": "^2.45.2",
    "rollup-plugin-postcss": "^4.0.0",
    "rollup-plugin-svelte": "^7.1.0",
    "rollup-plugin-terser": "^7.0.2"
    "@sveltejs/vite-plugin-svelte": "1.4.0",
    "vite-plugin-css-injected-by-js": "3.5.2"
  },
  "keywords": [
    "svelte"

@@ -96,8 +90,7 @@
      "dependsOn": [
        {
          "projects": [
            "@budibase/string-templates",
            "@budibase/shared-core"
            "@budibase/string-templates"
          ],
          "target": "build"
        }

@@ -1,32 +0,0 @@
import svelte from "rollup-plugin-svelte"
import resolve from "@rollup/plugin-node-resolve"
import commonjs from "@rollup/plugin-commonjs"
import json from "@rollup/plugin-json"
import { terser } from "rollup-plugin-terser"
import postcss from "rollup-plugin-postcss"

export default {
  input: "src/index.js",
  output: {
    sourcemap: true,
    format: "esm",
    file: "dist/bbui.es.js",
  },
  onwarn(warning, warn) {
    // suppress eval warnings
    if (warning.code === "EVAL") {
      return
    }
    warn(warning)
  },
  plugins: [
    resolve(),
    commonjs(),
    svelte({
      emitCss: true,
    }),
    postcss(),
    terser(),
    json(),
  ],
}

@@ -0,0 +1,29 @@
import { defineConfig } from "vite"
import { svelte } from "@sveltejs/vite-plugin-svelte"
import path from "path"
import cssInjectedByJsPlugin from "vite-plugin-css-injected-by-js"

export default defineConfig(({ mode }) => {
  const isProduction = mode === "production"
  return {
    build: {
      sourcemap: !isProduction,
      lib: {
        entry: "src/index.js",
        formats: ["es"],
      },
    },
    plugins: [
      svelte({
        emitCss: true,
      }),
      cssInjectedByJsPlugin(),
    ],
    resolve: {
      alias: {
        "@budibase/shared-core": path.resolve(__dirname, "../shared-core/src"),
        "@budibase/types": path.resolve(__dirname, "../types/src"),
      },
    },
  }
})

@@ -1 +1 @@
Subproject commit e2252498ddfade3c2592b1ec78f7bee4e3cf0d2f
Subproject commit d9245f3d6d0b41ec2e6b3406b791f9e7448882cb

@@ -129,7 +129,8 @@
    "uuid": "^8.3.2",
    "validate.js": "0.13.1",
    "worker-farm": "1.7.0",
    "xml2js": "0.6.2"
    "xml2js": "0.6.2",
    "zod-validation-error": "^3.4.0"
  },
  "devDependencies": {
    "@babel/core": "^7.22.5",

@@ -175,7 +176,8 @@
    "tsconfig-paths": "4.0.0",
    "typescript": "5.5.2",
    "update-dotenv": "1.1.1",
    "yargs": "13.2.4"
    "yargs": "^13.2.4",
    "zod": "^3.23.8"
  },
  "nx": {
    "targets": {

@@ -19,6 +19,7 @@ import {
  isRelationshipField,
  PatchRowRequest,
  PatchRowResponse,
  RequiredKeys,
  Row,
  RowAttachment,
  RowSearchParams,

@@ -239,7 +240,8 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {

  await context.ensureSnippetContext(true)

  let { query } = ctx.request.body
  const searchRequest = ctx.request.body
  let { query } = searchRequest
  if (query) {
    const allTables = await sdk.tables.getAllTables()
    query = replaceTableNamesInFilters(tableId, query, allTables)

@@ -249,11 +251,22 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
    user: sdk.users.getUserContextBindings(ctx.user),
  })

  const searchParams: RowSearchParams = {
    ...ctx.request.body,
  const searchParams: RequiredKeys<RowSearchParams> = {
    query: enrichedQuery,
    tableId,
    viewId,
    bookmark: searchRequest.bookmark ?? undefined,
    paginate: searchRequest.paginate,
    limit: searchRequest.limit,
    sort: searchRequest.sort ?? undefined,
    sortOrder: searchRequest.sortOrder,
    sortType: searchRequest.sortType ?? undefined,
    countRows: searchRequest.countRows,
    version: searchRequest.version,
    disableEscaping: searchRequest.disableEscaping,
    fields: undefined,
    indexer: undefined,
    rows: undefined,
  }

  ctx.status = 200

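The spread of ctx.request.body is replaced here by an exhaustive RequiredKeys<RowSearchParams> literal, so every property must be written out, with explicit undefined where absent. A sketch of how such a utility type behaves — this definition is an assumption about @budibase/types, shown for illustration only:

// Sketch: a mapped type that makes every key of T required while keeping its
// value types. Adding a field to RowSearchParams then breaks compilation at
// each RequiredKeys literal until the new field is handled explicitly.
type RequiredKeys<T> = {
  [K in keyof Required<T>]: T[K]
}

interface Example {
  limit?: number
  bookmark?: string
}
// Both keys must now appear, even if their value is undefined:
const e: RequiredKeys<Example> = { limit: 10, bookmark: undefined }
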
@@ -175,7 +175,7 @@ export async function enrichArrayContext(
}

export async function enrichSearchContext(
  fields: Record<string, any>,
  fields: Record<string, any> | undefined,
  inputs = {},
  helpers = true
): Promise<Record<string, any>> {

@@ -29,19 +29,20 @@ export async function searchView(

  await context.ensureSnippetContext(true)

  const searchOptions: RequiredKeys<SearchViewRowRequest> &
    RequiredKeys<
      Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
    > = {
  const searchOptions: RequiredKeys<RowSearchParams> = {
    tableId: view.tableId,
    viewId: view.id,
    query: body.query,
    query: body.query || {},
    fields: viewFields,
    ...getSortOptions(body, view),
    limit: body.limit,
    bookmark: body.bookmark,
    bookmark: body.bookmark ?? undefined,
    paginate: body.paginate,
    countRows: body.countRows,
    version: undefined,
    disableEscaping: undefined,
    indexer: undefined,
    rows: undefined,
  }

  const result = await sdk.rows.search(searchOptions, {

@@ -56,7 +57,7 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
    return {
      sort: request.sort,
      sortOrder: request.sortOrder,
      sortType: request.sortType,
      sortType: request.sortType ?? undefined,
    }
  }
  if (view.sort) {

@@ -5,6 +5,8 @@ import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators"
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import { validateBody } from "../../middleware/zod-validator"
import { searchRowRequestValidator } from "@budibase/types"

const { PermissionType, PermissionLevel } = permissions

@@ -32,6 +34,7 @@ router
  .post(
    "/api/:sourceId/search",
    internalSearchValidator(),
    validateBody(searchRowRequestValidator),
    paramResource("sourceId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.search

@@ -87,6 +90,7 @@ router
router.post(
  "/api/v2/views/:viewId/search",
  internalSearchValidator(),
  validateBody(searchRowRequestValidator),
  authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
  rowController.views.searchView
)

@@ -24,6 +24,7 @@ import {
  JsonFieldSubType,
  LogicalOperator,
  RelationshipType,
  RequiredKeys,
  Row,
  RowSearchParams,
  SearchFilters,

@@ -208,9 +209,25 @@ if (descriptions.length) {

  private async performSearch(): Promise<SearchResponse<Row>> {
    if (isInMemory) {
      return dataFilters.search(_.cloneDeep(rows), {
        ...this.query,
      })
      const inMemoryQuery: RequiredKeys<
        Omit<RowSearchParams, "tableId">
      > = {
        sort: this.query.sort ?? undefined,
        query: { ...this.query.query },
        paginate: this.query.paginate,
        bookmark: this.query.bookmark ?? undefined,
        limit: this.query.limit,
        sortOrder: this.query.sortOrder,
        sortType: this.query.sortType ?? undefined,
        version: this.query.version,
        disableEscaping: this.query.disableEscaping,
        countRows: this.query.countRows,
        viewId: undefined,
        fields: undefined,
        indexer: undefined,
        rows: undefined,
      }
      return dataFilters.search(_.cloneDeep(rows), inMemoryQuery)
    } else {
      return config.api.row.search(tableOrViewId, this.query)
    }

@@ -152,6 +152,44 @@ describe("Loop automations", () => {
    )
  })

  it("ensure the loop stops if the max iterations are reached", async () => {
    const builder = createAutomationBuilder({
      name: "Test Loop max iterations",
    })

    const results = await builder
      .appAction({ fields: {} })
      .loop({
        option: LoopStepType.ARRAY,
        binding: ["test", "test2", "test3"],
        iterations: 2,
      })
      .serverLog({ text: "{{loop.currentItem}}" })
      .serverLog({ text: "{{steps.1.iterations}}" })
      .run()

    expect(results.steps[0].outputs.iterations).toBe(2)
  })

  it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
    const builder = createAutomationBuilder({
      name: "Test context down tree with Loop and max iterations",
    })

    const results = await builder
      .appAction({ fields: {} })
      .loop({
        option: LoopStepType.ARRAY,
        binding: ["test", "test2", "test3"],
        iterations: 2,
      })
      .serverLog({ text: "{{loop.currentItem}}" })
      .serverLog({ text: "{{steps.1.iterations}}" })
      .run()

    expect(results.steps[1].outputs.message).toContain("- 2")
  })

  it("should run an automation where a loop is successfully run twice", async () => {
    const builder = createAutomationBuilder({
      name: "Test Trigger with Loop and Create Row",

@@ -137,7 +137,6 @@ export enum InvalidColumns {

export enum AutomationErrors {
  INCORRECT_TYPE = "INCORRECT_TYPE",
  MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
  FAILURE_CONDITION = "FAILURE_CONDITION_MET",
}

@@ -1,27 +1,39 @@
import { DatasourcePlusQueryResponse, QueryJson } from "@budibase/types"
import {
  DatasourcePlusQueryResponse,
  EnrichedQueryJson,
  QueryJson,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"
import { enrichQueryJson } from "../../sdk/app/rows/utils"

function isEnriched(
  json: QueryJson | EnrichedQueryJson
): json is EnrichedQueryJson {
  return "datasource" in json
}

export async function makeExternalQuery(
  json: QueryJson
  json: QueryJson | EnrichedQueryJson
): Promise<DatasourcePlusQueryResponse> {
  const enrichedJson = await enrichQueryJson(json)
  if (!enrichedJson.datasource) {
  if (!isEnriched(json)) {
    json = await enrichQueryJson(json)
    if (json.datasource) {
      json.datasource = await sdk.datasources.enrich(json.datasource)
    }
  }

  if (!json.datasource) {
    throw new Error("No datasource provided for external query")
  }

  enrichedJson.datasource = await sdk.datasources.enrich(
    enrichedJson.datasource
  )

  const Integration = await getIntegration(enrichedJson.datasource.source)
  const Integration = await getIntegration(json.datasource.source)

  // query is the opinionated function
  if (!Integration.prototype.query) {
    throw "Datasource does not support query."
  }

  const integration = new Integration(enrichedJson.datasource.config)
  return integration.query(enrichedJson)
  const integration = new Integration(json.datasource.config)
  return integration.query(json)
}

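The isEnriched guard relies on standard TypeScript narrowing: after the `in` check, json is typed as EnrichedQueryJson for the remainder of the function. A toy reduction of the same pattern, with simplified stand-in types:

// Sketch: a user-defined type guard built on an `in` check. Within each
// branch the compiler knows the concrete shape, so no casts are needed.
interface Plain {
  endpoint: { operation: string }
}
interface Enriched {
  operation: string
  datasource: { source: string }
}

function isEnrichedExample(json: Plain | Enriched): json is Enriched {
  return "datasource" in json
}

function operationOf(json: Plain | Enriched): string {
  return isEnrichedExample(json) ? json.operation : json.endpoint.operation
}
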
@@ -383,7 +383,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {

  async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
    const sheet = json.table.name
    switch (json.endpoint.operation) {
    switch (json.operation) {
      case Operation.CREATE:
        return this.create({ sheet, row: json.body as Row })
      case Operation.BULK_CREATE:

@@ -426,7 +426,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
        return this.deleteTable(json?.table?.name)
      default:
        throw new Error(
          `GSheets integration does not support "${json.endpoint.operation}".`
          `GSheets integration does not support "${json.operation}".`
        )
    }
  }

@@ -509,8 +509,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
  async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
    const schema = this.config.schema
    await this.connect()
    if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
      json.endpoint.schema = schema
    if (schema && schema !== DEFAULT_SCHEMA) {
      json.schema = schema
    }
    const operation = this._operation(json)
    const queryFn = (query: any, op: string) => this.internalQuery(query, op)

@@ -572,11 +572,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      return response.rows as Row[]
    } else {
      // get the last row that was updated
      if (
        response.lastRowid &&
        json.endpoint?.entityId &&
        operation !== Operation.DELETE
      ) {
      if (response.lastRowid && operation !== Operation.DELETE) {
        const lastRow = await this.internalQuery({
          sql: `SELECT * FROM "${json.table.name}" WHERE ROWID = '${response.lastRowid}'`,
        })

@@ -269,7 +269,7 @@ describe("Captures of real examples", () => {
    fields: string[] = ["a"]
  ): EnrichedQueryJson {
    return {
      endpoint: { datasourceId: "", entityId: "", operation: op },
      operation: op,
      resource: {
        fields,
      },

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "persons",
    "operation": "READ"
  },
  "operation": "READ",
  "resource": {
    "fields": [
      "a.year",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
    "entityId": "people",
    "operation": "CREATE"
  },
  "operation": "CREATE",
  "resource": {
    "fields": ["a.name", "a.age"]
  },

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "persons",
    "operation": "CREATE"
  },
  "operation": "CREATE",
  "resource": {
    "fields": [
      "a.year",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "compositetable",
    "operation": "DELETE"
  },
  "operation": "DELETE",
  "resource": {
    "fields": ["a.keyparttwo", "a.keypartone", "a.name"]
  },

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
    "entityId": "tasks",
    "operation": "READ"
  },
  "operation": "READ",
  "resource": {
    "fields": [
      "a.executorid",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
    "entityId": "products",
    "operation": "READ"
  },
  "operation": "READ",
  "resource": {
    "fields": [
      "a.productname",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "products",
    "operation": "READ"
  },
  "operation": "READ",
  "resource": {
    "fields": [
      "a.productname",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
    "entityId": "tasks",
    "operation": "READ"
  },
  "operation": "READ",
  "resource": {
    "fields": [
      "a.executorid",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "persons",
    "operation": "UPDATE"
  },
  "operation": "UPDATE",
  "resource": {
    "fields": [
      "a.year",

@@ -1,9 +1,5 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
    "entityId": "persons",
    "operation": "UPDATE"
  },
  "operation": "UPDATE",
  "resource": {
    "fields": [
      "a.year",

@@ -0,0 +1,43 @@
import { features } from "@budibase/backend-core"
import { Ctx, FeatureFlag } from "@budibase/types"

import { AnyZodObject } from "zod"
import { fromZodError } from "zod-validation-error"

function validate(schema: AnyZodObject, property: "body" | "params") {
  // Return a Koa middleware function
  return async (ctx: Ctx, next: any) => {
    if (!(await features.flags.isEnabled(FeatureFlag.USE_ZOD_VALIDATOR))) {
      return next()
    }

    if (!schema) {
      return next()
    }
    let params = null
    let setClean: ((data: any) => void) | undefined
    if (ctx[property] != null) {
      params = ctx[property]
      setClean = data => (ctx[property] = data)
    } else if (property === "body" && ctx.request[property] != null) {
      params = ctx.request[property]
      setClean = data => (ctx.request[property] = data)
    } else if (property === "params") {
      params = ctx.request.query
      setClean = data => (ctx.request.query = data)
    }

    const result = schema.safeParse(params)
    if (!result.success) {
      ctx.throw(400, fromZodError(result.error))
    } else {
      setClean?.(result.data)
    }

    return next()
  }
}

export function validateBody(schema: AnyZodObject) {
  return validate(schema, "body")
}

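To illustrate what this middleware sends back on a failed parse — the schema and payload below are invented for the example, not taken from this change:

import { z } from "zod"
import { fromZodError } from "zod-validation-error"

const schema = z.object({ limit: z.number().optional() })
const result = schema.safeParse({ limit: "ten" })
if (!result.success) {
  // Produces a readable message along the lines of:
  //   Validation error: Expected number, received string at "limit"
  // which validate() above passes to ctx.throw(400, ...).
  console.log(fromZodError(result.error).message)
}
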
@@ -9,11 +9,12 @@ import {
  db as dbUtils,
} from "@budibase/backend-core"
import {
  QuotaUsage,
  CloudAccount,
  App,
  TenantBackfillSucceededEvent,
  CloudAccount,
  Event,
  Hosting,
  QuotaUsage,
  TenantBackfillSucceededEvent,
  User,
} from "@budibase/types"
import env from "../../../environment"

@@ -125,7 +126,7 @@ export const run = async (db: any) => {
    try {
      await events.identification.identifyTenantGroup(
        tenantId,
        account,
        env.SELF_HOSTED ? Hosting.SELF : Hosting.CLOUD,
        timestamp
      )
    } catch (e) {

@@ -231,7 +231,7 @@ async function runSqlQuery(
    tables.map(table => table._id!).concat(relationshipJunctionTableIds)
  )
  if (opts?.countTotalRows) {
    json.endpoint.operation = Operation.COUNT
    json.operation = Operation.COUNT
  }
  const processSQLQuery = async (json: EnrichedQueryJson) => {
    const query = builder._query(json, {

@@ -3,7 +3,6 @@ import {
  DatasourcePlusQueryResponse,
  EnrichedQueryJson,
  Operation,
  QueryJson,
  Row,
  SearchFilters,
  SqlClient,

@@ -69,13 +68,12 @@ export default class AliasTables {
    this.charSeq = new CharSequence()
  }

  isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
    const operation = json.endpoint.operation
  isAliasingEnabled(json: EnrichedQueryJson, datasource?: Datasource) {
    const fieldLength = json.resource?.fields?.length
    if (
      !fieldLength ||
      fieldLength <= 0 ||
      DISABLED_OPERATIONS.includes(operation)
      DISABLED_OPERATIONS.includes(json.operation)
    ) {
      return false
    }

@@ -85,7 +83,7 @@ export default class AliasTables {
    }
    try {
      const sqlClient = getSQLClient(datasource)
      const isWrite = WRITE_OPERATIONS.includes(operation)
      const isWrite = WRITE_OPERATIONS.includes(json.operation)
      const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
      if (isWrite && isDisabledClient) {
        return false

@@ -118,9 +118,11 @@ export async function enrichQueryJson(
  }

  return {
    operation: json.endpoint.operation,
    table,
    tables,
    datasource,
    schema: json.endpoint.schema,
    ...json,
  }
}

@@ -392,6 +392,7 @@ class Orchestrator {

    let iterationCount = 0
    let shouldCleanup = true
    let reachedMaxIterations = false

    for (let loopStepIndex = 0; loopStepIndex < iterations; loopStepIndex++) {
      try {

@@ -419,19 +420,8 @@ class Orchestrator {
          loopStepIndex === env.AUTOMATION_MAX_ITERATIONS ||
          (loopStep.inputs.iterations && loopStepIndex === maxIterations)
        ) {
          this.updateContextAndOutput(
            pathStepIdx + 1,
            steps[stepToLoopIndex],
            {
              items: this.loopStepOutputs,
              iterations: loopStepIndex,
            },
            {
              status: AutomationErrors.MAX_ITERATIONS,
              success: true,
            }
          )
          shouldCleanup = false
          reachedMaxIterations = true
          shouldCleanup = true
          break
        }

@@ -485,6 +475,10 @@ class Orchestrator {
      iterations: iterationCount,
    }

    if (reachedMaxIterations && iterations !== 0) {
      tempOutput.status = AutomationStepStatus.MAX_ITERATIONS
    }

    // Loop Step clean up
    this.executionOutput.steps.splice(pathStepIdx, 0, {
      id: steps[stepToLoopIndex].id,

@@ -20,7 +20,8 @@
    "@types/redlock": "4.0.7",
    "rimraf": "3.0.2",
    "typescript": "5.5.2",
    "koa-useragent": "^4.1.0"
    "koa-useragent": "^4.1.0",
    "zod": "^3.23.8"
  },
  "dependencies": {
    "scim-patch": "^0.8.1"

@@ -1,49 +0,0 @@
import { SearchFilters, RowSearchParams } from "../../../sdk"
import { Row } from "../../../documents"
import { PaginationResponse, SortOrder } from "../../../api"
import { ReadStream } from "fs"

export interface SaveRowRequest extends Row {}

export interface PatchRowRequest extends Row {
  _id: string
  _rev: string
  tableId: string
}

export interface PatchRowResponse extends Row {}

export interface SearchRowRequest extends Omit<RowSearchParams, "tableId"> {}

export interface SearchViewRowRequest
  extends Pick<
    SearchRowRequest,
    | "sort"
    | "sortOrder"
    | "sortType"
    | "limit"
    | "bookmark"
    | "paginate"
    | "query"
    | "countRows"
  > {}

export interface SearchRowResponse {
  rows: any[]
}

export interface PaginatedSearchRowResponse
  extends SearchRowResponse,
    PaginationResponse {}

export interface ExportRowsRequest {
  rows?: string[]
  columns?: string[]
  query?: SearchFilters
  sort?: string
  sortOrder?: SortOrder
  delimiter?: string
  customHeaders?: { [key: string]: string }
}

export type ExportRowsResponse = ReadStream

@@ -0,0 +1,28 @@
import { SearchFilters } from "../../../../sdk"
import { Row } from "../../../../documents"
import { SortOrder } from "../../../../api/web/pagination"
import { ReadStream } from "fs"

export * from "./search"

export interface SaveRowRequest extends Row {}

export interface PatchRowRequest extends Row {
  _id: string
  _rev: string
  tableId: string
}

export interface PatchRowResponse extends Row {}

export interface ExportRowsRequest {
  rows?: string[]
  columns?: string[]
  query?: SearchFilters
  sort?: string
  sortOrder?: SortOrder
  delimiter?: string
  customHeaders?: { [key: string]: string }
}

export type ExportRowsResponse = ReadStream

@@ -0,0 +1,100 @@
import {
  ArrayOperator,
  BasicOperator,
  EmptyFilterOption,
  InternalSearchFilterOperator,
  LogicalOperator,
  RangeOperator,
  SearchFilterKey,
} from "../../../../sdk"
import { Row } from "../../../../documents"
import {
  PaginationResponse,
  SortOrder,
  SortType,
} from "../../../../api/web/pagination"
import { z } from "zod"

const fieldKey = z
  .string()
  .refine(s => s !== InternalSearchFilterOperator.COMPLEX_ID_OPERATOR, {
    message: `Key '${InternalSearchFilterOperator.COMPLEX_ID_OPERATOR}' is not allowed`,
  })

const stringBasicFilter = z.record(fieldKey, z.string())
const basicFilter = z.record(fieldKey, z.any())
const arrayFilter = z.record(fieldKey, z.union([z.any().array(), z.string()]))
const logicFilter = z.lazy(() =>
  z.object({
    conditions: z.array(z.object(queryFilterValidation)),
  })
)

const stringOrNumber = z.union([z.string(), z.number()])

const queryFilterValidation: Record<SearchFilterKey, z.ZodTypeAny> = {
  [BasicOperator.STRING]: stringBasicFilter.optional(),
  [BasicOperator.FUZZY]: stringBasicFilter.optional(),
  [RangeOperator.RANGE]: z
    .record(
      fieldKey,
      z.union([
        z.object({ high: stringOrNumber, low: stringOrNumber }),
        z.object({ high: stringOrNumber }),
        z.object({ low: stringOrNumber }),
      ])
    )
    .optional(),
  [BasicOperator.EQUAL]: basicFilter.optional(),
  [BasicOperator.NOT_EQUAL]: basicFilter.optional(),
  [BasicOperator.EMPTY]: basicFilter.optional(),
  [BasicOperator.NOT_EMPTY]: basicFilter.optional(),
  [ArrayOperator.ONE_OF]: arrayFilter.optional(),
  [ArrayOperator.CONTAINS]: arrayFilter.optional(),
  [ArrayOperator.NOT_CONTAINS]: arrayFilter.optional(),
  [ArrayOperator.CONTAINS_ANY]: arrayFilter.optional(),
  [LogicalOperator.AND]: logicFilter.optional(),
  [LogicalOperator.OR]: logicFilter.optional(),
}

const searchRowRequest = z.object({
  query: z
    .object({
      allOr: z.boolean().optional(),
      onEmptyFilter: z.nativeEnum(EmptyFilterOption).optional(),
      ...queryFilterValidation,
    })
    .optional(),
  paginate: z.boolean().optional(),
  bookmark: z.union([z.string(), z.number()]).nullish(),
  limit: z.number().optional(),
  sort: z.string().nullish(),
  sortOrder: z.nativeEnum(SortOrder).optional(),
  sortType: z.nativeEnum(SortType).nullish(),
  version: z.string().optional(),
  disableEscaping: z.boolean().optional(),
  countRows: z.boolean().optional(),
})

export const searchRowRequestValidator = searchRowRequest

export type SearchRowRequest = z.infer<typeof searchRowRequest>
export type SearchViewRowRequest = Pick<
  SearchRowRequest,
  | "sort"
  | "sortOrder"
  | "sortType"
  | "limit"
  | "bookmark"
  | "paginate"
  | "query"
  | "countRows"
>

export interface SearchRowResponse {
  rows: Row[]
}

export interface PaginatedSearchRowResponse
  extends SearchRowResponse,
    PaginationResponse {}

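For context, a sketch of payloads this validator accepts and rejects. The column name is illustrative, and the "string" filter key assumes BasicOperator.STRING's enum value is the literal "string":

// Accepted: a typical search request.
searchRowRequestValidator.safeParse({
  query: { string: { name: "Bud" } },
  limit: 10,
  paginate: true,
}) // -> success: true

// Rejected: limit must be a number, so this fails with a 400 once it goes
// through the validateBody middleware.
searchRowRequestValidator.safeParse({ limit: "10" }) // -> success: false
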
@@ -174,6 +174,7 @@ export enum AutomationFeature {

export enum AutomationStepStatus {
  NO_ITERATIONS = "no_iterations",
  MAX_ITERATIONS = "max_iterations_reached",
}

export enum AutomationStatus {

@@ -1,11 +1,9 @@
export enum FeatureFlag {
  PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
  PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
  AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
  AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
  DEFAULT_VALUES = "DEFAULT_VALUES",

  BUDIBASE_AI = "BUDIBASE_AI",
  USE_ZOD_VALIDATOR = "USE_ZOD_VALIDATOR",
}

export interface TenantFeatureFlags {

@@ -184,10 +184,12 @@ export interface QueryJson {
  tableAliases?: Record<string, string>
}

export interface EnrichedQueryJson extends QueryJson {
export interface EnrichedQueryJson extends Omit<QueryJson, "endpoint"> {
  operation: Operation
  table: Table
  tables: Record<string, Table>
  datasource?: Datasource
  schema?: string
}

export interface QueryOptions {

@@ -6,12 +6,12 @@ import {
  AddSSoUserRequest,
  BulkUserRequest,
  BulkUserResponse,
  CloudAccount,
  CreateAdminUserRequest,
  CreateAdminUserResponse,
  Ctx,
  DeleteInviteUserRequest,
  DeleteInviteUsersRequest,
  Hosting,
  InviteUserRequest,
  InviteUsersRequest,
  InviteUsersResponse,

@@ -26,7 +26,6 @@ import {
  UserIdentifier,
} from "@budibase/types"
import {
  accounts,
  users,
  cache,
  ErrorCode,

@@ -192,12 +191,10 @@ export const adminUser = async (
    lastName: familyName,
  })

  // events
  let account: CloudAccount | undefined
  if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
    account = await accounts.getAccountByTenantId(tenantId)
  }
  await events.identification.identifyTenantGroup(tenantId, account)
  await events.identification.identifyTenantGroup(
    tenantId,
    env.SELF_HOSTED ? Hosting.SELF : Hosting.CLOUD
  )

  ctx.body = {
    _id: finalUser._id!,

@@ -1,4 +0,0 @@
#!/bin/bash
sudo apt-get install -y qemu qemu-user-static
docker buildx create --name budibase
docker buildx use budibase

@@ -1,46 +0,0 @@
const fs = require("fs")
const path = require("path")

const MONOREPO_ROOT = "packages"

const packages = getPackages()

function getPackages() {
  if (fs.existsSync(MONOREPO_ROOT)) {
    return fs.readdirSync(MONOREPO_ROOT).map(pkg => path.join(MONOREPO_ROOT, pkg))
  } else {
    return ["./"]
  }
}

function pinDeps(dependencies) {
  for (let dependency in dependencies) {
    if (dependency.startsWith("@budibase")) {
      dependencies[dependency] = dependencies[dependency].replace("^", "")
    }
  }
  return dependencies
}

// iterate over the monorepo packages
for (let pkgPath of packages) {
  // only directories
  if (fs.statSync(pkgPath).isDirectory()) {
    // get the package JSON file
    const pkgJsonPath = path.join(pkgPath, "package.json")
    if (!fs.existsSync(pkgJsonPath)) {
      continue
    }
    const pkgJson = JSON.parse(fs.readFileSync(pkgJsonPath))

    // find any budibase dependencies, and pin them
    pkgJson.dependencies = pinDeps(pkgJson.dependencies)
    pkgJson.devDependencies = pinDeps(pkgJson.devDependencies)

    // update the package JSON files
    fs.writeFileSync(pkgJsonPath, JSON.stringify(pkgJson, null, 2))
  }
}

console.log("Pinned dev versions for budibase packages successfully.")

@@ -1,28 +0,0 @@
const yaml = require("js-yaml")
const fs = require("fs")
const path = require("path")

const CHART_PATH = path.join(__dirname, "../", "charts", "budibase", "Chart.yaml")
const UPGRADE_VERSION = process.env.BUDIBASE_RELEASE_VERSION

if (!UPGRADE_VERSION) {
  throw new Error("BUDIBASE_RELEASE_VERSION env var must be set.")
}

try {
  const chartFile = fs.readFileSync(CHART_PATH, "utf-8")
  const chart = yaml.load(chartFile)

  // Upgrade app version in chart to match budibase release version
  chart.appVersion = UPGRADE_VERSION

  // semantically version the chart
  const [major, minor, patch] = chart.version.split(".")
  const newPatch = parseInt(patch) + 1
  chart.version = [major, minor, newPatch].join(".")
  const updatedChartYaml = yaml.dump(chart)
  fs.writeFileSync(CHART_PATH, updatedChartYaml)
} catch (err) {
  console.error("Error releasing helm chart")
  throw err
}

@@ -1,7 +0,0 @@
#!/bin/bash
echo "Resetting package versions"
yarn lerna exec "yarn version --no-git-tag-version --new-version=0.0.0"
echo "Updating dependencies"
node scripts/syncLocalDependencies.js "0.0.0"
git checkout package.json
echo "Package versions reset!"

@@ -1,8 +0,0 @@
#!/bin/bash
version=$(./scripts/getCurrentVersion.sh)
echo "Setting version $version"
yarn lerna exec "yarn version --no-git-tag-version --new-version=$version"
echo "Updating dependencies"
node scripts/syncLocalDependencies.js $version
echo "Syncing yarn workspace"
yarn