Merge branch 'master' into automation-branching-ux-updates

deanhannigan 2024-11-29 11:31:09 +00:00 committed by GitHub
commit 37e4ac6a2a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
51 changed files with 1344 additions and 2357 deletions

View File

@ -281,6 +281,7 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo
uses: actions/checkout@v4

View File

@ -6,6 +6,26 @@ import {
import { ContainerInfo } from "dockerode"
import path from "path"
import lockfile from "proper-lockfile"
import { execSync } from "child_process"
interface DockerContext {
Name: string
Description: string
DockerEndpoint: string
ContextType: string
Error: string
}
function getCurrentDockerContext(): DockerContext {
const out = execSync("docker context ls --format json")
for (const line of out.toString().split("\n")) {
// skip blank lines - execSync output typically ends with a trailing newline
if (!line.trim()) {
continue
}
const parsed = JSON.parse(line)
if (parsed.Current) {
return parsed as DockerContext
}
}
throw new Error("No current Docker context")
}
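// Example of the JSON-lines output this parses - `docker context ls --format
// json` emits one JSON object per line (the values below are illustrative):
//   {"Current":true,"Name":"desktop-linux","DockerEndpoint":"unix:///home/user/.docker/desktop/docker.sock"}
//   {"Current":false,"Name":"default","DockerEndpoint":"unix:///var/run/docker.sock"}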
async function getBudibaseContainers() {
const client = await getContainerRuntimeClient()
@ -27,6 +47,14 @@ async function killContainers(containers: ContainerInfo[]) {
}
export default async function setup() {
// For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment variable
if (!process.env.DOCKER_HOST) {
const dockerContext = getCurrentDockerContext()
process.env.DOCKER_HOST = dockerContext.DockerEndpoint
}
const lockPath = path.resolve(__dirname, "globalSetup.ts")
// If you run multiple tests at the same time, it's possible for the CouchDB
// shared container to get started multiple times despite having an

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.2.13",
"version": "3.2.16",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@ -9,6 +9,7 @@
"@types/node": "20.10.0",
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"cross-spawn": "7.0.6",
"depcheck": "^1.4.7",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.14.0",
@ -29,8 +30,7 @@
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.5.2",
"typescript-eslint": "^7.3.1",
"yargs": "^17.7.2",
"cross-spawn": "7.0.6"
"yargs": "^17.7.2"
},
"scripts": {
"get-past-client-version": "node scripts/getPastClientVersion.js",
@ -76,7 +76,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 -t budibase/couchdb:v3.3.3-sqs-v2.1.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",
"env:selfhost:enable": "lerna run --stream env:selfhost:enable",

View File

@ -18,6 +18,7 @@ import {
BasicOperator,
BBReferenceFieldMetadata,
CalculationType,
EnrichedQueryJson,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
@ -27,7 +28,6 @@ import {
LogicalOperator,
Operation,
prefixed,
QueryJson,
QueryOptions,
RangeOperator,
RelationshipsJson,
@ -134,18 +134,18 @@ const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
class InternalBuilder {
private readonly client: SqlClient
private readonly query: QueryJson
private readonly query: EnrichedQueryJson
private readonly splitter: dataFilters.ColumnSplitter
private readonly knex: Knex
constructor(client: SqlClient, knex: Knex, query: QueryJson) {
constructor(client: SqlClient, knex: Knex, query: EnrichedQueryJson) {
this.client = client
this.query = query
this.knex = knex
this.splitter = new dataFilters.ColumnSplitter([this.table], {
aliases: this.query.tableAliases,
columnPrefix: this.query.meta.columnPrefix,
columnPrefix: this.query.meta?.columnPrefix,
})
}
@ -167,7 +167,7 @@ class InternalBuilder {
}
get table(): Table {
return this.query.meta.table
return this.query.table
}
get knexClient(): Knex.Client {
@ -273,8 +273,7 @@ class InternalBuilder {
}
private isFullSelectStatementRequired(): boolean {
const { meta } = this.query
for (let column of Object.values(meta.table.schema)) {
for (let column of Object.values(this.table.schema)) {
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(column)) {
return true
} else if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(column)) {
@ -285,14 +284,14 @@ class InternalBuilder {
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { meta, endpoint, resource } = this.query
const { table, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
const alias = this.getTableName(table)
const schema = this.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw("??", [`${alias}.*`])]
}
@ -497,9 +496,8 @@ class InternalBuilder {
filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName
const { relationships, schema, tableAliases: aliases, table } = this.query
const fromAlias = aliases?.[table.name] || table.name
const matches = (value: string) =>
filterKey.match(new RegExp(`^${value}\\.`))
if (!relationships) {
@ -539,7 +537,7 @@ class InternalBuilder {
aliases?.[manyToMany.through] || relationship.through
let throughTable = this.tableNameWithSchema(manyToMany.through, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery
// add a join through the junction table
@ -1012,28 +1010,10 @@ class InternalBuilder {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.meta.table?.name === name) {
table = this.query.meta.table
} else if (!this.query.meta.tables?.[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.meta.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
getTableName(table?: Table): string {
if (!table) {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
@ -1191,8 +1171,9 @@ class InternalBuilder {
return withSchema
}
private buildJsonField(field: string): string {
private buildJsonField(table: Table, field: string): [string, Knex.Raw] {
const parts = field.split(".")
let baseName = parts[parts.length - 1]
let unaliased: string
let tableField: string
@ -1205,10 +1186,19 @@ class InternalBuilder {
tableField = unaliased
}
const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return this.knex
.raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
.toString()
if (this.query.meta?.columnPrefix) {
baseName = baseName.replace(this.query.meta.columnPrefix, "")
}
let identifier = this.rawQuotedIdentifier(tableField)
// Internal tables have special _id, _rev, createdAt, and updatedAt fields
// that do not appear in the schema, meaning schema could actually be
// undefined.
const schema: FieldSchema | undefined = table.schema[baseName]
if (schema && schema.type === FieldType.BIGINT) {
identifier = this.castIntToString(identifier)
}
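// (BIGINT values can exceed Number.MAX_SAFE_INTEGER, so the identifier is
// cast to a string here - presumably to avoid precision loss once the driver
// hands the value to JS.)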
return [unaliased, identifier]
}
maxFunctionParameters() {
@ -1234,7 +1224,7 @@ class InternalBuilder {
): Knex.QueryBuilder {
const sqlClient = this.client
const knex = this.knex
const { resource, tableAliases: aliases, endpoint, meta } = this.query
const { resource, tableAliases: aliases, schema, tables } = this.query
const fields = resource?.fields || []
for (let relationship of relationships) {
const {
@ -1249,13 +1239,16 @@ class InternalBuilder {
if (!toTable || !fromTable) {
continue
}
const relatedTable = meta.tables?.[toTable]
const relatedTable = tables[toTable]
if (!relatedTable) {
throw new Error(`related table "${toTable}" not found in datasource`)
}
const toAlias = aliases?.[toTable] || toTable,
fromAlias = aliases?.[fromTable] || fromTable,
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
schema,
})
const requiredFields = [
...(relatedTable?.primary || []),
@ -1271,8 +1264,14 @@ class InternalBuilder {
0,
Math.floor(this.maxFunctionParameters() / 2)
)
const fieldList: string = relationshipFields
.map(field => this.buildJsonField(field))
const fieldList = relationshipFields.map(field =>
this.buildJsonField(relatedTable, field)
)
const fieldListFormatted = fieldList
.map(f => {
const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return this.knex.raw(`?${separator}??`, [f[0], f[1]]).toString()
})
.join(",")
// SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
// it reduces the result set rather than limiting how much data it filters over
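// e.g. SQL Server: `select top (@p0) * from [tasks]`, where other dialects
// would emit `select * from "tasks" limit $1`.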
@ -1293,7 +1292,7 @@ class InternalBuilder {
if (isManyToMany) {
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
@ -1320,35 +1319,42 @@ class InternalBuilder {
// need to check the junction table document points to the right column - this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap(
this.knex.raw(`json_group_array(json_object(${fieldList}))`)
this.knex.raw(
`json_group_array(json_object(${fieldListFormatted}))`
)
)
break
case SqlClient.POSTGRES:
wrapperQuery = standardWrap(
this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
this.knex.raw(`json_agg(json_build_object(${fieldListFormatted}))`)
)
break
case SqlClient.MARIADB:
// can't use the standard wrap due to correlated sub-query limitations in MariaDB
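// (MariaDB's JSON_ARRAYAGG supports an inline LIMIT clause, which is why the
// limit is embedded in the aggregate below instead of in a wrapped sub-query.)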
wrapperQuery = subQuery.select(
knex.raw(
`json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
`json_arrayagg(json_object(${fieldListFormatted}) LIMIT ${getRelationshipLimit()})`
)
)
break
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
this.knex.raw(`json_arrayagg(json_object(${fieldListFormatted}))`)
)
break
case SqlClient.MS_SQL: {
const comparatorQuery = knex
.select(`${fromAlias}.*`)
.select(`*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.select(
fieldList.map(f => {
// @ts-expect-error raw is fine here, knex types are wrong
return knex.ref(f[1]).as(f[0])
})
)
.limit(getRelationshipLimit()),
})
@ -1377,8 +1383,7 @@ class InternalBuilder {
toPrimary?: string
}[]
): Knex.QueryBuilder {
const { tableAliases: aliases, endpoint } = this.query
const schema = endpoint.schema
const { tableAliases: aliases, schema } = this.query
const toTable = tables.to,
fromTable = tables.from,
throughTable = tables.through
@ -1429,16 +1434,16 @@ class InternalBuilder {
}
qualifiedKnex(opts?: { alias?: string | boolean }): Knex.QueryBuilder {
let alias = this.query.tableAliases?.[this.query.endpoint.entityId]
let alias = this.query.tableAliases?.[this.query.table.name]
if (opts?.alias === false) {
alias = undefined
} else if (typeof opts?.alias === "string") {
alias = opts.alias
}
return this.knex(
this.tableNameWithSchema(this.query.endpoint.entityId, {
this.tableNameWithSchema(this.query.table.name, {
alias,
schema: this.query.endpoint.schema,
schema: this.query.schema,
})
)
}
@ -1455,9 +1460,7 @@ class InternalBuilder {
if (this.client === SqlClient.ORACLE) {
// Oracle doesn't seem to automatically insert nulls
// if we don't specify them, so we need to do that here
for (const [column, schema] of Object.entries(
this.query.meta.table.schema
)) {
for (const [column, schema] of Object.entries(this.query.table.schema)) {
if (
schema.constraints?.presence === true ||
schema.type === FieldType.FORMULA ||
@ -1534,11 +1537,9 @@ class InternalBuilder {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships } = this.query
let { operation, filters, paginate, relationships, table } = this.query
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
const tableName = endpoint.entityId
// start building the query
let query = this.qualifiedKnex()
// handle pagination
@ -1557,7 +1558,7 @@ class InternalBuilder {
foundLimit = paginate.limit
}
// counting should not sort, limit or offset
if (!counting) {
if (operation !== Operation.COUNT) {
// add the found limit if supplied
if (foundLimit != null) {
query = query.limit(foundLimit)
@ -1569,7 +1570,7 @@ class InternalBuilder {
}
const aggregations = this.query.resource?.aggregations || []
if (counting) {
if (operation === Operation.COUNT) {
query = this.addDistinctCount(query)
} else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
@ -1578,7 +1579,7 @@ class InternalBuilder {
}
// have to add after as well (this breaks MS-SQL)
if (!counting) {
if (operation !== Operation.COUNT) {
query = this.addSorting(query)
}
@ -1586,9 +1587,7 @@ class InternalBuilder {
// handle relationships with a CTE for all others
if (relationships?.length && aggregations.length === 0) {
const mainTable =
this.query.tableAliases?.[this.query.endpoint.entityId] ||
this.query.endpoint.entityId
const mainTable = this.query.tableAliases?.[table.name] || table.name
const cte = this.addSorting(
this.knex
.with("paginated", query)
@ -1598,7 +1597,7 @@ class InternalBuilder {
})
)
// add JSON aggregations attached to the CTE
return this.addJsonRelationships(cte, tableName, relationships)
return this.addJsonRelationships(cte, table.name, relationships)
}
return query
@ -1661,7 +1660,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
* @return the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
_query(
json: EnrichedQueryJson,
opts: QueryOptions = {}
): SqlQuery | SqlQuery[] {
const sqlClient = this.getSqlClient()
const config: Knex.Config = {
client: this.getBaseSqlClient(),
@ -1711,34 +1713,30 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return this.convertToNative(query, opts)
}
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
async getReturningRow(queryFn: QueryFunction, json: EnrichedQueryJson) {
if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
resource: {
fields: [],
},
datasource: json.datasource,
schema: json.schema,
table: json.table,
tables: json.tables,
resource: { fields: [] },
filters: json.extra?.idFilter,
paginate: {
limit: 1,
},
meta: json.meta,
paginate: { limit: 1 },
})
return queryFn(input, Operation.READ)
}
// when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) {
if (!id || !json.meta.table || !json.meta.table.primary) {
checkLookupKeys(id: any, json: EnrichedQueryJson) {
if (!id || !json.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
const primaryKey = json.table.primary[0]
json.extra = {
idFilter: {
equal: {
@ -1751,7 +1749,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// this function recreates the returning functionality of postgres
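// (MySQL has no RETURNING clause and SQL Server uses OUTPUT instead, so after
// a write the affected row is re-read via getReturningRow using the id filter.)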
async queryWithReturning(
json: QueryJson,
json: EnrichedQueryJson,
queryFn: QueryFunction,
processFn: Function = (result: any) => result
) {

View File

@ -3,13 +3,13 @@ import {
FieldType,
NumberFieldMetadata,
Operation,
QueryJson,
RelationshipType,
RenameColumn,
SqlQuery,
Table,
TableSourceType,
SqlClient,
EnrichedQueryJson,
} from "@budibase/types"
import { breakExternalTableId, getNativeSql } from "./utils"
import { helpers, utils } from "@budibase/shared-core"
@ -25,7 +25,7 @@ function generateSchema(
schema: CreateTableBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: null | Table = null,
oldTable?: Table,
renamed?: RenameColumn
) {
let primaryKeys = table && table.primary ? table.primary : []
@ -55,7 +55,7 @@ function generateSchema(
)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
const oldColumn = oldTable?.schema[key]
if (
(oldColumn && oldColumn.type) ||
columnTypeSet.includes(key) ||
@ -199,8 +199,8 @@ function buildUpdateTable(
knex: SchemaBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
renamed: RenameColumn
oldTable?: Table,
renamed?: RenameColumn
): SchemaBuilder {
return knex.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable, renamed)
@ -238,19 +238,18 @@ class SqlTableQueryBuilder {
* @param json the input JSON structure from which an SQL query will be built.
* @return the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
_operation(json: EnrichedQueryJson): Operation {
return json.operation
}
_tableQuery(json: QueryJson): SqlQuery | SqlQuery[] {
_tableQuery(json: EnrichedQueryJson): SqlQuery | SqlQuery[] {
let client = knex({ client: this.sqlClient }).schema
let schemaName = json?.endpoint?.schema
if (schemaName) {
client = client.withSchema(schemaName)
if (json?.schema) {
client = client.withSchema(json.schema)
}
let query: Knex.SchemaBuilder
if (!json.table || !json.meta || !json.meta.tables) {
if (!json.table || !json.tables) {
throw new Error("Cannot execute without table being specified")
}
if (json.table.sourceType === TableSourceType.INTERNAL) {
@ -259,17 +258,17 @@ class SqlTableQueryBuilder {
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
query = buildCreateTable(client, json.table, json.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
if (!json.table) {
throw new Error("Must specify old table for update")
}
// renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
if (this.sqlClient === SqlClient.MY_SQL && json.meta?.renamed) {
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `\`${schemaName}\`.\`${json.table.name}\``
const tableName = json?.schema
? `\`${json.schema}\`.\`${json.table.name}\``
: `\`${json.table.name}\``
return {
sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,
@ -280,18 +279,18 @@ class SqlTableQueryBuilder {
query = buildUpdateTable(
client,
json.table,
json.meta.tables,
json.meta.table,
json.meta.renamed!
json.tables,
json.meta?.oldTable,
json.meta?.renamed
)
// renameColumn for SQL Server returns a parameterised `sp_rename` query,
// which SQL Server rejects with a syntax error.
if (this.sqlClient === SqlClient.MS_SQL && json.meta.renamed) {
if (this.sqlClient === SqlClient.MS_SQL && json.meta?.renamed) {
const oldColumn = json.meta.renamed.old
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `${schemaName}.${json.table.name}`
const tableName = json?.schema
? `${json.schema}.${json.table.name}`
: `${json.table.name}`
const sql = getNativeSql(query)
if (Array.isArray(sql)) {

View File

@ -25,7 +25,7 @@ function getTestcontainers(): ContainerInfo[] {
// We use --format json to make sure the output is nice and machine-readable,
// and we use --no-trunc so that the command returns full container IDs,
// letting us filter on them correctly.
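// We also pass --all so that stopped (exited) containers are listed too - the
// exited-container cleanup in startContainer depends on seeing them.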
return execSync("docker ps --format json --no-trunc")
return execSync("docker ps --all --format json --no-trunc")
.toString()
.split("\n")
.filter(x => x.length > 0)
@ -37,6 +37,10 @@ function getTestcontainers(): ContainerInfo[] {
)
}
function removeContainer(container: ContainerInfo) {
execSync(`docker rm ${container.ID}`)
}
export function getContainerByImage(image: string) {
const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
if (containers.length > 1) {
@ -49,6 +53,10 @@ export function getContainerByImage(image: string) {
return containers[0]
}
function getContainerByName(name: string) {
return getTestcontainers().find(x => x.Names === name)
}
export function getContainerById(id: string) {
return getTestcontainers().find(x => x.ID === id)
}
@ -70,7 +78,34 @@ export function getExposedV4Port(container: ContainerInfo, port: number) {
return getExposedV4Ports(container).find(x => x.container === port)?.host
}
interface DockerContext {
Name: string
Description: string
DockerEndpoint: string
ContextType: string
Error: string
}
function getCurrentDockerContext(): DockerContext {
const out = execSync("docker context ls --format json")
for (const line of out.toString().split("\n")) {
// skip blank lines - execSync output typically ends with a trailing newline
if (!line.trim()) {
continue
}
const parsed = JSON.parse(line)
if (parsed.Current) {
return parsed as DockerContext
}
}
throw new Error("No current Docker context")
}
export function setupEnv(...envs: any[]) {
// For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment variable
if (!process.env.DOCKER_HOST) {
const dockerContext = getCurrentDockerContext()
process.env.DOCKER_HOST = dockerContext.DockerEndpoint
}
// We start couchdb in globalSetup.ts, in the root of the monorepo, so it
// should be relatively safe to look for it by its image name.
const couch = getContainerByImage("budibase/couchdb")
@ -116,6 +151,16 @@ export async function startContainer(container: GenericContainer) {
key = imageName.split("@")[0]
}
key = key.replace(/\//g, "-").replace(/:/g, "-")
const name = `${key}_testcontainer`
// If a container has died it hangs around, and future attempts to start a
// container with the same name will fail. To handle this, when we find a
// matching container that has exited, we remove it before carrying on,
// removing the need to do the cleanup manually.
const existingContainer = getContainerByName(name)
if (existingContainer?.State === "exited") {
removeContainer(existingContainer)
}
container = container
.withReuse()

View File

@ -4,27 +4,21 @@
"version": "0.0.0",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
"module": "dist/bbui.mjs",
"exports": {
".": {
"import": "./dist/bbui.es.js"
"import": "./dist/bbui.mjs"
},
"./package.json": "./package.json",
"./spectrum-icons-rollup.js": "./src/spectrum-icons-rollup.js",
"./spectrum-icons-vite.js": "./src/spectrum-icons-vite.js"
},
"scripts": {
"build": "rollup -c"
"build": "vite build"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^16.0.0",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-node-resolve": "^11.2.1",
"postcss": "^8.2.9",
"rollup": "^2.45.2",
"rollup-plugin-postcss": "^4.0.0",
"rollup-plugin-svelte": "^7.1.0",
"rollup-plugin-terser": "^7.0.2"
"@sveltejs/vite-plugin-svelte": "1.4.0",
"vite-plugin-css-injected-by-js": "3.5.2"
},
"keywords": [
"svelte"
@ -96,8 +90,7 @@
"dependsOn": [
{
"projects": [
"@budibase/string-templates",
"@budibase/shared-core"
"@budibase/string-templates"
],
"target": "build"
}

View File

@ -1,32 +0,0 @@
import svelte from "rollup-plugin-svelte"
import resolve from "@rollup/plugin-node-resolve"
import commonjs from "@rollup/plugin-commonjs"
import json from "@rollup/plugin-json"
import { terser } from "rollup-plugin-terser"
import postcss from "rollup-plugin-postcss"
export default {
input: "src/index.js",
output: {
sourcemap: true,
format: "esm",
file: "dist/bbui.es.js",
},
onwarn(warning, warn) {
// suppress eval warnings
if (warning.code === "EVAL") {
return
}
warn(warning)
},
plugins: [
resolve(),
commonjs(),
svelte({
emitCss: true,
}),
postcss(),
terser(),
json(),
],
}

View File

@ -0,0 +1,29 @@
import { defineConfig } from "vite"
import { svelte } from "@sveltejs/vite-plugin-svelte"
import path from "path"
import cssInjectedByJsPlugin from "vite-plugin-css-injected-by-js"
export default defineConfig(({ mode }) => {
const isProduction = mode === "production"
return {
build: {
sourcemap: !isProduction,
lib: {
entry: "src/index.js",
formats: ["es"],
},
},
plugins: [
svelte({
emitCss: true,
}),
cssInjectedByJsPlugin(),
],
resolve: {
alias: {
"@budibase/shared-core": path.resolve(__dirname, "../shared-core/src"),
"@budibase/types": path.resolve(__dirname, "../types/src"),
},
},
}
})

@ -1 +1 @@
Subproject commit e8ef2205de8bca5adcf18d07573096086aa9a606
Subproject commit d9245f3d6d0b41ec2e6b3406b791f9e7448882cb

View File

@ -296,16 +296,6 @@ export async function find(ctx: UserCtx) {
ctx.body = await sdk.datasources.removeSecretSingle(datasource)
}
// dynamic query functionality
export async function query(ctx: UserCtx) {
const queryJson = ctx.request.body
try {
ctx.body = await sdk.rows.utils.getDatasourceAndQuery(queryJson)
} catch (err: any) {
ctx.throw(400, err)
}
}
export async function getExternalSchema(ctx: UserCtx) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(

View File

@ -42,7 +42,7 @@ import {
sqlOutputProcessing,
} from "./utils"
import {
getDatasourceAndQuery,
enrichQueryJson,
processRowCountResponse,
} from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates"
@ -135,16 +135,9 @@ function cleanupConfig(config: RunConfig, table: Table): RunConfig {
return config
}
function getEndpoint(tableId: string | undefined, operation: string) {
if (!tableId) {
throw new Error("Cannot get endpoint information - no table ID specified")
}
function getEndpoint(tableId: string, operation: Operation) {
const { datasourceId, tableName } = breakExternalTableId(tableId)
return {
datasourceId: datasourceId,
entityId: tableName,
operation: operation as Operation,
}
return { datasourceId, entityId: tableName, operation }
}
function isOneSide(
@ -268,12 +261,9 @@ export class ExternalRequest<T extends Operation> {
const filters = this.prepareFilters(rowId, {}, table)
// safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
return makeExternalQuery({
endpoint: getEndpoint(tableId, Operation.DELETE),
filters,
meta: {
table,
},
})
} else {
return []
@ -289,13 +279,10 @@ export class ExternalRequest<T extends Operation> {
const filters = this.prepareFilters(rowId, {}, table)
// safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
return makeExternalQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE),
body: { [colName]: null },
filters,
meta: {
table,
},
})
} else {
return []
@ -311,12 +298,9 @@ export class ExternalRequest<T extends Operation> {
}
async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({
const response = await makeExternalQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
filters: this.prepareFilters(rowId, {}, table),
meta: {
table,
},
})
if (Array.isArray(response) && response.length > 0) {
return response[0]
@ -490,16 +474,13 @@ export class ExternalRequest<T extends Operation> {
if (!relatedTable) {
throw new Error("unable to find related table")
}
const response = await getDatasourceAndQuery({
endpoint: endpoint,
const response = await makeExternalQuery({
endpoint,
filters: {
equal: {
[fieldName]: row[lookupField],
},
},
meta: {
table: relatedTable,
},
})
// this is the response from knex if no rows found
const rows: Row[] =
@ -537,6 +518,11 @@ export class ExternalRequest<T extends Operation> {
for (let relationship of relationships) {
const { key, tableId, isUpdate, id, relationshipType, ...rest } =
relationship
if (!tableId) {
throw new Error("Table ID is unknown, cannot find table")
}
const body: { [key: string]: any } = processObjectSync(rest, row, {})
const linkTable = this.getTable(tableId)
const relationshipPrimary = linkTable?.primary || []
@ -583,14 +569,11 @@ export class ExternalRequest<T extends Operation> {
const operation = isUpdate ? Operation.UPDATE : Operation.CREATE
if (!existingRelationship) {
promises.push(
getDatasourceAndQuery({
makeExternalQuery({
endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing - expect only one response
body,
filters: this.prepareFilters(id, {}, linkTable),
meta: {
table: linkTable,
},
})
)
} else {
@ -723,8 +706,8 @@ export class ExternalRequest<T extends Operation> {
let json: QueryJson = {
endpoint: {
datasourceId: this.datasource._id!,
entityId: table.name,
datasourceId: this.datasource,
entityId: table,
operation,
},
resource: {
@ -749,10 +732,6 @@ export class ExternalRequest<T extends Operation> {
table
),
},
meta: {
table,
tables: this.tables,
},
}
// remove any relationships that could block deletion
@ -773,8 +752,11 @@ export class ExternalRequest<T extends Operation> {
response = [unprocessedRow]
} else {
response = env.SQL_ALIASING_DISABLE
? await getDatasourceAndQuery(json)
: await aliasing.queryWithAliasing(json, makeExternalQuery)
? await makeExternalQuery(json)
: await aliasing.queryWithAliasing(
await enrichQueryJson(json),
makeExternalQuery
)
}
// if it's a counting operation there will be no more processing, just return the number

View File

@ -11,27 +11,24 @@ export async function makeTableRequest(
datasource: Datasource,
operation: Operation,
table: Table,
tables: Record<string, Table>,
oldTable?: Table,
renamed?: RenameColumn
) {
const json: QueryJson = {
endpoint: {
datasourceId: datasource._id!,
entityId: table._id!,
datasourceId: datasource,
entityId: table,
operation,
},
meta: {
table,
tables,
},
table,
}
if (!json.meta) {
json.meta = {}
}
if (oldTable) {
json.meta!.table = oldTable
json.meta.oldTable = oldTable
}
if (renamed) {
json.meta!.renamed = renamed
json.meta.renamed = renamed
}
return makeExternalQuery(datasource, json)
return makeExternalQuery(json)
}

View File

@ -2,10 +2,7 @@ import Router from "@koa/router"
import * as datasourceController from "../controllers/datasource"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
import {
datasourceValidator,
datasourceQueryValidator,
} from "./utils/validators"
import { datasourceValidator } from "./utils/validators"
const router: Router = new Router()
@ -41,15 +38,6 @@ router
),
datasourceController.update
)
.post(
"/api/datasources/query",
authorized(
permissions.PermissionType.TABLE,
permissions.PermissionLevel.READ
),
datasourceQueryValidator(),
datasourceController.query
)
.post(
"/api/datasources/:datasourceId/schema",
authorized(permissions.BUILDER),

View File

@ -1,10 +1,4 @@
import {
Datasource,
Operation,
Query,
QueryPreview,
TableSourceType,
} from "@budibase/types"
import { Datasource, Query, QueryPreview } from "@budibase/types"
import {
DatabaseName,
datasourceDescribe,
@ -817,49 +811,6 @@ if (descriptions.length) {
})
describe("query through datasource", () => {
it("should be able to query the datasource", async () => {
const datasource = await config.api.datasource.create(rawDatasource)
const entityId = tableName
await config.api.datasource.update({
...datasource,
entities: {
[entityId]: {
name: entityId,
schema: {},
type: "table",
primary: ["id"],
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},
},
})
const res = await config.api.datasource.query({
endpoint: {
datasourceId: datasource._id!,
operation: Operation.READ,
entityId,
},
resource: {
fields: ["id", "name"],
},
filters: {
string: {
name: "two",
},
},
})
expect(res).toHaveLength(1)
expect(res[0]).toEqual({
id: 2,
name: "two",
// the use of table.* introduces the possibility of nulls being returned
birthday: null,
number: null,
})
})
// this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => {
let queryParams: Partial<Query>

View File

@ -3268,7 +3268,7 @@ if (descriptions.length) {
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: formula,
formula,
responseType: opts?.responseType,
formulaType: opts?.formulaType || FormulaType.DYNAMIC,
},
@ -3495,6 +3495,72 @@ if (descriptions.length) {
)
})
})
if (!isInternal && !isOracle) {
describe("bigint ids", () => {
let table1: Table, table2: Table
let table1Name: string, table2Name: string
beforeAll(async () => {
table1Name = `table1-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table1Name, table => {
table.bigInteger("table1Id").primary()
})
table2Name = `table2-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table2Name, table => {
table.bigInteger("table2Id").primary()
table
.bigInteger("table1Ref")
.references("table1Id")
.inTable(table1Name)
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const tables = Object.values(resp.datasource.entities || {})
table1 = tables.find(t => t.name === table1Name)!
table2 = tables.find(t => t.name === table2Name)!
await config.api.datasource.addExistingRelationship({
one: {
tableId: table2._id!,
relationshipName: "one",
foreignKey: "table1Ref",
},
many: {
tableId: table1._id!,
relationshipName: "many",
primaryKey: "table1Id",
},
})
})
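// (External-table row _ids are URL-encoded JSON arrays of the primary key
// values, e.g. encodeURIComponent("['1']") === "%5B'1'%5D" - which is what
// the expectations below assert.)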
it("should be able to fetch rows with related bigint ids", async () => {
const row = await config.api.row.save(table1._id!, {
table1Id: "1",
})
await config.api.row.save(table2._id!, {
table2Id: "2",
table1Ref: row.table1Id,
})
let resp = await config.api.row.search(table1._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'1'%5D")
expect(resp.rows[0].many).toHaveLength(1)
expect(resp.rows[0].many[0]._id).toBe("%5B'2'%5D")
resp = await config.api.row.search(table2._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'2'%5D")
expect(resp.rows[0].one).toHaveLength(1)
expect(resp.rows[0].one[0]._id).toBe("%5B'1'%5D")
})
})
}
}
)
}

View File

@ -1,5 +1,4 @@
import { auth, permissions } from "@budibase/backend-core"
import { DataSourceOperation } from "../../../constants"
import {
AutomationActionStepId,
AutomationStep,
@ -231,30 +230,6 @@ export function externalSearchValidator() {
)
}
export function datasourceQueryValidator() {
return auth.joiValidator.body(
Joi.object({
endpoint: Joi.object({
datasourceId: Joi.string().required(),
operation: Joi.string()
.required()
.valid(...Object.values(DataSourceOperation)),
entityId: Joi.string().required(),
}).required(),
resource: Joi.object({
fields: Joi.array().items(Joi.string()).optional(),
}).optional(),
body: Joi.object().optional(),
sort: Joi.object().optional(),
filters: filterObject().optional(),
paginate: Joi.object({
page: Joi.string().alphanum().optional(),
limit: Joi.number().optional(),
}).optional(),
})
)
}
export function webhookValidator() {
return auth.joiValidator.body(
Joi.object({

View File

@ -45,17 +45,6 @@ export enum AuthTypes {
EXTERNAL = "external",
}
export enum DataSourceOperation {
CREATE = "CREATE",
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
BULK_CREATE = "BULK_CREATE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}
export enum DatasourceAuthTypes {
GOOGLE = "google",
}

View File

@ -1,37 +1,39 @@
import {
QueryJson,
Datasource,
DatasourcePlusQueryResponse,
RowOperations,
EnrichedQueryJson,
QueryJson,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"
import { enrichQueryJson } from "../../sdk/app/rows/utils"
function isEnriched(
json: QueryJson | EnrichedQueryJson
): json is EnrichedQueryJson {
return "datasource" in json
}
export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
json: QueryJson | EnrichedQueryJson
): Promise<DatasourcePlusQueryResponse> {
const entityId = json.endpoint.entityId,
tableName = json.meta.table.name,
tableId = json.meta.table._id
// case found during testing - make sure this doesn't happen again
if (
RowOperations.includes(json.endpoint.operation) &&
entityId !== tableId &&
entityId !== tableName
) {
throw new Error("Entity ID and table metadata do not align")
if (!isEnriched(json)) {
json = await enrichQueryJson(json)
if (json.datasource) {
json.datasource = await sdk.datasources.enrich(json.datasource)
}
if (!datasource) {
}
if (!json.datasource) {
throw new Error("No datasource provided for external query")
}
datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source)
const Integration = await getIntegration(json.datasource.source)
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
if (!Integration.prototype.query) {
throw "Datasource does not support query."
}
const integration = new Integration(json.datasource.config)
return integration.query(json)
}
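// Note: callers can now pass either a plain QueryJson (enriched here on
// demand via enrichQueryJson) or an already-enriched EnrichedQueryJson - the
// `"datasource" in json` check above is what tells the two apart.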

View File

@ -7,7 +7,6 @@ import {
Integration,
Operation,
PaginationJson,
QueryJson,
QueryType,
Row,
Schema,
@ -18,6 +17,7 @@ import {
TableSourceType,
DatasourcePlusQueryResponse,
BBReferenceFieldSubType,
EnrichedQueryJson,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import {
@ -381,9 +381,9 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return { tables: externalTables, errors }
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
const sheet = json.endpoint.entityId
switch (json.endpoint.operation) {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const sheet = json.table.name
switch (json.operation) {
case Operation.CREATE:
return this.create({ sheet, row: json.body as Row })
case Operation.BULK_CREATE:
@ -400,7 +400,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
rowIndex: json.extra?.idFilter?.equal?.rowNumber,
sheet,
row: json.body,
table: json.meta.table,
table: json.table,
})
case Operation.DELETE:
return this.delete({
@ -426,7 +426,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return this.deleteTable(json?.table?.name)
default:
throw new Error(
`GSheets integration does not support "${json.endpoint.operation}".`
`GSheets integration does not support "${json.operation}".`
)
}
}

View File

@ -4,9 +4,9 @@ import {
DatasourceFieldType,
DatasourcePlus,
DatasourcePlusQueryResponse,
EnrichedQueryJson,
Integration,
Operation,
QueryJson,
QueryType,
Schema,
SourceName,
@ -342,7 +342,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
: query.sql
this.log(sql, query.bindings)
return await request.query(sql)
const resp = await request.query(sql)
return resp
} catch (err: any) {
let readableMessage = getReadableErrorMessage(
SourceName.SQL_SERVER,
@ -505,23 +506,21 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
return response.recordset || [{ deleted: true }]
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const schema = this.config.schema
await this.connect()
if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
json.endpoint.schema = schema
if (schema && schema !== DEFAULT_SCHEMA) {
json.schema = schema
}
const operation = this._operation(json)
const queryFn = (query: any, op: string) => this.internalQuery(query, op)
const processFn = (result: any) => {
if (json?.meta?.table && result.recordset) {
if (result.recordset) {
return this.convertJsonStringColumns(
json.meta.table,
json.table,
result.recordset,
json.tableAliases
)
} else if (result.recordset) {
return result.recordset
}
return [{ [operation]: true }]
}

View File

@ -2,7 +2,6 @@ import {
Integration,
DatasourceFieldType,
QueryType,
QueryJson,
SqlQuery,
Table,
TableSchema,
@ -15,6 +14,7 @@ import {
DatasourcePlusQueryResponse,
SqlQueryBinding,
SqlClient,
EnrichedQueryJson,
} from "@budibase/types"
import {
getSqlQuery,
@ -390,15 +390,15 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
return results.length ? results : [{ deleted: true }]
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
await this.connect()
try {
const queryFn = (query: any) =>
this.internalQuery(query, { connect: false, disableCoercion: true })
const processFn = (result: any) => {
if (json?.meta?.table && Array.isArray(result)) {
if (Array.isArray(result)) {
return this.convertJsonStringColumns(
json.meta.table,
json.table,
result,
json.tableAliases
)

View File

@ -3,7 +3,6 @@ import {
DatasourceFieldType,
Integration,
Operation,
QueryJson,
QueryType,
SqlQuery,
Table,
@ -15,6 +14,7 @@ import {
Row,
DatasourcePlusQueryResponse,
SqlClient,
EnrichedQueryJson,
} from "@budibase/types"
import {
buildExternalTableId,
@ -545,7 +545,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
: [{ deleted: true }]
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const operation = this._operation(json)
const input = this._query(json, { disableReturning: true }) as SqlQuery
if (Array.isArray(input)) {
@ -572,13 +572,9 @@ class OracleIntegration extends Sql implements DatasourcePlus {
return response.rows as Row[]
} else {
// get the last row that was updated
if (
response.lastRowid &&
json.endpoint?.entityId &&
operation !== Operation.DELETE
) {
if (response.lastRowid && operation !== Operation.DELETE) {
const lastRow = await this.internalQuery({
sql: `SELECT * FROM "${json.endpoint.entityId}" WHERE ROWID = '${response.lastRowid}'`,
sql: `SELECT * FROM "${json.table.name}" WHERE ROWID = '${response.lastRowid}'`,
})
return lastRow.rows as Row[]
} else {

View File

@ -3,7 +3,6 @@ import {
Integration,
DatasourceFieldType,
QueryType,
QueryJson,
SqlQuery,
Table,
DatasourcePlus,
@ -14,6 +13,7 @@ import {
TableSourceType,
DatasourcePlusQueryResponse,
SqlClient,
EnrichedQueryJson,
} from "@budibase/types"
import {
getSqlQuery,
@ -419,7 +419,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
return response.rows.length ? response.rows : [{ deleted: true }]
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const operation = this._operation(json).toLowerCase()
const input = this._query(json) as SqlQuery
if (Array.isArray(input)) {

View File

@ -1,268 +0,0 @@
import {
FieldType,
Operation,
PaginationJson,
QueryJson,
SearchFilters,
SortJson,
SqlClient,
Table,
TableSourceType,
} from "@budibase/types"
import { sql } from "@budibase/backend-core"
import { merge } from "lodash"
const Sql = sql.Sql
const TABLE_NAME = "test"
const TABLE: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {
id: {
name: "id",
type: FieldType.NUMBER,
},
},
name: TABLE_NAME,
primary: ["id"],
}
const ORACLE_TABLE: Partial<Table> = {
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
}
function endpoint(table: string, operation: Operation) {
return {
datasourceId: "Postgres",
operation: operation,
entityId: table || TABLE_NAME,
}
}
function generateReadJson({
table,
fields,
filters,
sort,
paginate,
}: {
table?: Partial<Table>
fields?: string[]
filters?: SearchFilters
sort?: SortJson
paginate?: PaginationJson
} = {}): QueryJson {
let tableObj: Table = { ...TABLE }
if (table) {
tableObj = merge(TABLE, table)
}
return {
endpoint: endpoint(tableObj.name || TABLE_NAME, Operation.READ),
resource: {
fields: fields || [],
},
filters: filters || {},
sort: sort || {},
paginate: paginate || undefined,
meta: {
table: tableObj,
},
}
}
function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
return {
endpoint: {
datasourceId: "Postgres",
entityId: "brands",
operation: Operation.READ,
schema: config.schema,
},
resource: {
fields: [
"brands.brand_id",
"brands.brand_name",
"products.product_id",
"products.product_name",
"products.brand_id",
],
},
filters: {},
sort: {},
relationships: [
{
from: "brand_id",
to: "brand_id",
tableName: "products",
column: "products",
},
],
extra: { idFilter: {} },
meta: {
table: TABLE,
},
}
}
function generateManyRelationshipJson(config: { schema?: string } = {}) {
return {
endpoint: {
datasourceId: "Postgres",
entityId: "stores",
operation: "READ",
schema: config.schema,
},
resource: {
fields: [
"stores.store_id",
"stores.store_name",
"products.product_id",
"products.product_name",
],
},
filters: {},
sort: {},
paginate: {},
relationships: [
{
from: "store_id",
to: "product_id",
tableName: "products",
column: "products",
through: "stocks",
fromPrimary: "store_id",
toPrimary: "product_id",
},
],
extra: { idFilter: {} },
meta: {
table: TABLE,
},
}
}
describe("SQL query builder", () => {
const relationshipLimit = 500
const limit = 500
const client = SqlClient.POSTGRES
let sql: any
beforeEach(() => {
sql = new Sql(client, limit)
})
it("should add the schema to the LEFT JOIN", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({
bindings: [limit, relationshipLimit],
sql: `with "paginated" as (select "brands".* from "production"."brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`,
})
})
it("should handle if the schema is not present when doing a LEFT JOIN", () => {
const query = sql._query(generateRelationshipJson())
expect(query).toEqual({
bindings: [limit, relationshipLimit],
sql: `with "paginated" as (select "brands".* from "brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`,
})
})
it("should add the schema to both the toTable and throughTable in many-to-many join", () => {
const query = sql._query(
generateManyRelationshipJson({ schema: "production" })
)
expect(query).toEqual({
bindings: [limit, relationshipLimit],
sql: `with "paginated" as (select "stores".* from "production"."stores" order by "test"."id" asc limit $1) select "stores".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "production"."products" as "products" inner join "production"."stocks" as "stocks" on "products"."product_id" = "stocks"."product_id" where "stocks"."store_id" = "stores"."store_id" order by "products"."product_id" asc limit $2) as "products") as "products" from "paginated" as "stores" order by "test"."id" asc`,
})
})
it("should lowercase the values for Oracle LIKE statements", () => {
let query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({
filters: {
string: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: ["john%", limit],
sql: `select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({
filters: {
contains: {
age: [20, 25],
name: ["John", "Mary"],
},
},
})
)
const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`]
expect(query).toEqual({
bindings: [...filterSet, limit],
sql: `select * from (select * from "test" where ((COALESCE(LOWER("test"."age"), '') like :1 and COALESCE(LOWER("test"."age"), '') like :2)) and ((COALESCE(LOWER("test"."name"), '') like :3 and COALESCE(LOWER("test"."name"), '') like :4)) order by "test"."id" asc) where rownum <= :5`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({
filters: {
fuzzy: {
name: "Jo",
},
},
})
)
expect(query).toEqual({
bindings: [`%jo%`, limit],
sql: `select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2`,
})
})
it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => {
let query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({
table: ORACLE_TABLE,
filters: {
equal: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
})
})
it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => {
let query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({
table: ORACLE_TABLE,
filters: {
notEqual: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") != :1) or to_char("test"."name") is null order by "test"."id" asc) where rownum <= :2`,
})
})
})

View File

@ -1,12 +1,13 @@
import {
Datasource,
Operation,
QueryJson,
SourceName,
SqlQuery,
SqlClient,
EnrichedQueryJson,
TableSchema,
Table,
TableSourceType,
SqlClient,
} from "@budibase/types"
import { sql } from "@budibase/backend-core"
import { join } from "path"
@ -16,16 +17,20 @@ import sdk from "../../sdk"
const Sql = sql.Sql
// this table doesn't strictly exist - it's a fixture for the tests below
const TABLE: Table = {
const TABLE = buildTable("tableName", {})
const AliasTables = sdk.rows.AliasTables
function buildTable(name: string, schema: TableSchema): Table {
return {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {},
name: "tableName",
schema: schema,
name: name,
primary: ["id"],
}
const AliasTables = sdk.rows.AliasTables
}
function multiline(sql: string) {
return sql.replace(/\n/g, "").replace(/ +/g, " ")
@ -35,8 +40,22 @@ describe("Captures of real examples", () => {
const relationshipLimit = 500
const primaryLimit = 100
function getJson(name: string): QueryJson {
return require(join(__dirname, "sqlQueryJson", name)) as QueryJson
function getJson(name: string): EnrichedQueryJson {
// tables aren't fully specified in the test JSON
const base = require(join(__dirname, "sqlQueryJson", name)) as Omit<
EnrichedQueryJson,
"tables"
>
const tables: Record<string, Table> = { [base.table.name]: base.table }
if (base.relationships) {
for (let { tableName } of base.relationships) {
tables[tableName] = buildTable(tableName, {})
}
}
return {
...base,
tables: tables,
}
}
describe("create", () => {
@ -63,7 +82,7 @@ describe("Captures of real examples", () => {
bindings: [primaryLimit, relationshipLimit, relationshipLimit],
sql: expect.stringContaining(
multiline(
`select json_agg(json_build_object('completed',"b"."completed",'completed',"b"."completed",'executorid',"b"."executorid",'executorid',"b"."executorid",'qaid',"b"."qaid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskid',"b"."taskid",'taskname',"b"."taskname",'taskname',"b"."taskname")`
`select json_agg(json_build_object('executorid',"b"."executorid",'executorid',"b"."executorid",'qaid',"b"."qaid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskid',"b"."taskid",'completed',"b"."completed",'completed',"b"."completed",'taskname',"b"."taskname",'taskname',"b"."taskname"`
)
),
})
@ -94,7 +113,7 @@ describe("Captures of real examples", () => {
sql: expect.stringContaining(
multiline(
`with "paginated" as (select "a".* from "products" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc limit $1)
select "a".*, (select json_agg(json_build_object('completed',"b"."completed",'executorid',"b"."executorid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskname',"b"."taskname"))
select "a".*, (select json_agg(json_build_object('executorid',"b"."executorid",'qaid',"b"."qaid",'taskid',"b"."taskid",'completed',"b"."completed",'taskname',"b"."taskname"))
from (select "b".* from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid" where "c"."productid" = "a"."productid" order by "b"."taskid" asc limit $2) as "b") as "tasks"
from "paginated" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc`
)
@ -212,8 +231,7 @@ describe("Captures of real examples", () => {
}, queryJson)
expect(returningQuery).toEqual({
sql: multiline(
`select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1
and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc`
`select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc`
),
bindings: [1, "Test", 22],
})
@ -246,15 +264,17 @@ describe("Captures of real examples", () => {
}
}
function getQuery(op: Operation, fields: string[] = ["a"]): QueryJson {
function getQuery(
op: Operation,
fields: string[] = ["a"]
): EnrichedQueryJson {
return {
endpoint: { datasourceId: "", entityId: "", operation: op },
operation: op,
resource: {
fields,
},
meta: {
table: TABLE,
},
tables: { [TABLE.name]: TABLE },
}
}

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.year",
@ -61,7 +57,6 @@
"extra": {
"idFilter": {}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
@ -167,7 +162,6 @@
"sourceType": "external",
"primaryDisplay": "firstname",
"views": {}
}
},
"tableAliases": {
"persons": "a",

View File

@ -1,14 +1,7 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"operation": "CREATE",
"resource": {
"fields": [
"a.name",
"a.age"
]
"fields": ["a.name", "a.age"]
},
"filters": {},
"relationships": [],
@ -24,16 +17,12 @@
}
}
},
"meta": {
"table": {
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
"type": "table",
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"sourceType": "external",
"primary": [
"name",
"age"
],
"primary": ["name", "age"],
"name": "people",
"schema": {
"name": {
@ -56,7 +45,6 @@
}
},
"primaryDisplay": "name"
}
},
"tableAliases": {
"people": "a"

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "CREATE"
},
"operation": "CREATE",
"resource": {
"fields": [
"a.year",
@ -51,13 +47,10 @@
"extra": {
"idFilter": {}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
"primary": [
"personid"
],
"primary": ["personid"],
"name": "persons",
"schema": {
"year": {
@ -112,12 +105,7 @@
"name": "type",
"constraints": {
"presence": false,
"inclusion": [
"support",
"designer",
"programmer",
"qa"
]
"inclusion": ["support", "designer", "programmer", "qa"]
}
},
"city": {
@ -164,7 +152,6 @@
"sourceType": "external",
"primaryDisplay": "firstname",
"views": {}
}
},
"tableAliases": {
"persons": "a",

View File

@ -1,15 +1,7 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "compositetable",
"operation": "DELETE"
},
"operation": "DELETE",
"resource": {
"fields": [
"a.keyparttwo",
"a.keypartone",
"a.name"
]
"fields": ["a.keyparttwo", "a.keypartone", "a.name"]
},
"filters": {
"equal": {
@ -26,14 +18,10 @@
}
}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__compositetable",
"primary": [
"keypartone",
"keyparttwo"
],
"primary": ["keypartone", "keyparttwo"],
"name": "compositetable",
"schema": {
"keyparttwo": {
@ -67,7 +55,6 @@
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"sourceType": "external",
"primaryDisplay": "keypartone"
}
},
"tableAliases": {
"compositetable": "a"

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "tasks",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.executorid",
@ -17,10 +13,7 @@
},
"filters": {
"oneOf": {
"taskid": [
1,
2
]
"taskid": [1, 2]
}
},
"relationships": [
@ -42,13 +35,10 @@
"extra": {
"idFilter": {}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
"primary": [
"taskid"
],
"primary": ["taskid"],
"name": "tasks",
"schema": {
"executorid": {
@ -113,7 +103,6 @@
"primaryDisplay": "taskname",
"sql": true,
"views": {}
}
},
"tableAliases": {
"tasks": "a",

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "products",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.productname",
@ -56,7 +52,6 @@
"extra": {
"idFilter": {}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
@ -97,7 +92,6 @@
"sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"sourceType": "external",
"primaryDisplay": "productname"
}
},
"tableAliases": {
"products": "a",

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "products",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.productname",
@ -46,7 +42,6 @@
"tasks": "b",
"products": "a"
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products",
@ -89,4 +84,3 @@
"primaryDisplay": "productname"
}
}
}

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "tasks",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.executorid",
@ -102,7 +98,6 @@
"extra": {
"idFilter": {}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
@ -189,7 +184,6 @@
"primaryDisplay": "taskname",
"sql": true,
"views": {}
}
},
"tableAliases": {
"tasks": "a",

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "UPDATE"
},
"operation": "UPDATE",
"resource": {
"fields": [
"a.year",
@ -59,13 +55,10 @@
}
}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
"primary": [
"personid"
],
"primary": ["personid"],
"name": "persons",
"schema": {
"year": {
@ -120,12 +113,7 @@
"name": "type",
"constraints": {
"presence": false,
"inclusion": [
"support",
"designer",
"programmer",
"qa"
]
"inclusion": ["support", "designer", "programmer", "qa"]
}
},
"city": {
@ -172,7 +160,6 @@
"sourceType": "external",
"primaryDisplay": "firstname",
"views": {}
}
},
"tableAliases": {
"persons": "a",

View File

@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "UPDATE"
},
"operation": "UPDATE",
"resource": {
"fields": [
"a.year",
@ -59,13 +55,10 @@
}
}
},
"meta": {
"table": {
"type": "table",
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
"primary": [
"personid"
],
"primary": ["personid"],
"name": "persons",
"schema": {
"year": {
@ -120,12 +113,7 @@
"name": "type",
"constraints": {
"presence": false,
"inclusion": [
"support",
"designer",
"programmer",
"qa"
]
"inclusion": ["support", "designer", "programmer", "qa"]
}
},
"city": {
@ -172,7 +160,6 @@
"sourceType": "external",
"primaryDisplay": "firstname",
"views": {}
}
},
"tableAliases": {
"persons": "a",

View File

@ -1,8 +1,8 @@
import {
Aggregation,
CalculationType,
Datasource,
DocumentType,
EnrichedQueryJson,
FieldType,
isLogicalSearchOperator,
Operation,
@ -38,7 +38,7 @@ import { generateJunctionTableID } from "../../../../../db/utils"
import AliasTables from "../../sqlAlias"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import { processRowCountResponse } from "../../utils"
import { enrichQueryJson, processRowCountResponse } from "../../utils"
import {
dataFilters,
helpers,
@ -180,19 +180,6 @@ function cleanupFilters(filters: SearchFilters, allTables: Table[]) {
return filters
}
function buildTableMap(tables: Table[]) {
const tableMap: Record<string, Table> = {}
for (let table of tables) {
// update the table name, should never query by name for SQLite
table.originalName = table.name
table.name = table._id!
// need a primary for sorting, lookups etc
table.primary = ["_id"]
tableMap[table._id!] = table
}
return tableMap
}
// table is only needed to handle relationships
function reverseUserColumnMapping(rows: Row[], table?: Table) {
const prefixLength = USER_COLUMN_PREFIX.length
@ -223,30 +210,30 @@ function reverseUserColumnMapping(rows: Row[], table?: Table) {
}
function runSqlQuery(
json: QueryJson,
json: EnrichedQueryJson,
tables: Table[],
relationships: RelationshipsJson[]
): Promise<Row[]>
function runSqlQuery(
json: QueryJson,
json: EnrichedQueryJson,
tables: Table[],
relationships: RelationshipsJson[],
opts: { countTotalRows: true }
): Promise<number>
async function runSqlQuery(
json: QueryJson,
json: EnrichedQueryJson,
tables: Table[],
relationships: RelationshipsJson[],
opts?: { countTotalRows?: boolean }
) {
const relationshipJunctionTableIds = relationships.map(rel => rel.through!)
const alias = new AliasTables(
tables.map(table => table.name).concat(relationshipJunctionTableIds)
tables.map(table => table._id!).concat(relationshipJunctionTableIds)
)
if (opts?.countTotalRows) {
json.endpoint.operation = Operation.COUNT
json.operation = Operation.COUNT
}
const processSQLQuery = async (_: Datasource, json: QueryJson) => {
const processSQLQuery = async (json: EnrichedQueryJson) => {
const query = builder._query(json, {
disableReturning: true,
})
@ -281,7 +268,7 @@ async function runSqlQuery(
if (opts?.countTotalRows) {
return processRowCountResponse(response)
} else if (Array.isArray(response)) {
return reverseUserColumnMapping(response, json.meta.table)
return reverseUserColumnMapping(response, json.table)
}
return response
}
@ -315,7 +302,11 @@ export async function search(
}
const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables)
const allTablesMap = allTables.reduce((acc, table) => {
acc[table._id!] = table
return acc
}, {} as Record<string, Table>)
// make sure we have the mapped/latest table
if (table._id) {
table = allTablesMap[table._id]
@ -372,10 +363,7 @@ export async function search(
operation: Operation.READ,
},
filters: searchFilters,
table,
meta: {
table,
tables: allTablesMap,
columnPrefix: USER_COLUMN_PREFIX,
},
resource: {
@ -427,11 +415,13 @@ export async function search(
}
}
const enrichedRequest = await enrichQueryJson(request)
try {
const [rows, totalRows] = await Promise.all([
runSqlQuery(request, allTables, relationships),
runSqlQuery(enrichedRequest, allTables, relationships),
options.countRows
? runSqlQuery(request, allTables, relationships, {
? runSqlQuery(enrichedRequest, allTables, relationships, {
countTotalRows: true,
})
: Promise.resolve(undefined),
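
As a usage sketch (assumed call sites, not part of the commit), the overload signatures above let TypeScript narrow the result: omitting opts yields rows, while countTotalRows: true yields a number.

// sketch: relies on the runSqlQuery overloads declared in this file
const rows: Row[] = await runSqlQuery(enrichedRequest, allTables, relationships)
const total: number = await runSqlQuery(enrichedRequest, allTables, relationships, {
  countTotalRows: true,
})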

View File

@ -1,22 +1,19 @@
import {
Datasource,
DatasourcePlusQueryResponse,
EnrichedQueryJson,
Operation,
QueryJson,
Row,
SearchFilters,
SqlClient,
Table,
} from "@budibase/types"
import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
import { getSQLClient } from "./utils"
import { cloneDeep } from "lodash"
import datasources from "../datasources"
import { BudibaseInternalDB } from "../../../db/utils"
import { dataFilters } from "@budibase/shared-core"
type PerformQueryFunction = (
datasource: Datasource,
json: QueryJson
json: EnrichedQueryJson
) => Promise<DatasourcePlusQueryResponse>
const WRITE_OPERATIONS: Operation[] = [
@ -71,13 +68,12 @@ export default class AliasTables {
this.charSeq = new CharSequence()
}
isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
const operation = json.endpoint.operation
isAliasingEnabled(json: EnrichedQueryJson, datasource?: Datasource) {
const fieldLength = json.resource?.fields?.length
if (
!fieldLength ||
fieldLength <= 0 ||
DISABLED_OPERATIONS.includes(operation)
DISABLED_OPERATIONS.includes(json.operation)
) {
return false
}
@ -87,7 +83,7 @@ export default class AliasTables {
}
try {
const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(operation)
const isWrite = WRITE_OPERATIONS.includes(json.operation)
const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
if (isWrite && isDisabledClient) {
return false
@ -99,7 +95,11 @@ export default class AliasTables {
return true
}
getAlias(tableName: string) {
getAlias(tableName: string | Table) {
if (typeof tableName === "object") {
tableName = tableName.name
}
if (this.aliases[tableName]) {
return this.aliases[tableName]
}
@ -177,17 +177,15 @@ export default class AliasTables {
}
async queryWithAliasing(
json: QueryJson,
json: EnrichedQueryJson,
queryFn: PerformQueryFunction
): Promise<DatasourcePlusQueryResponse> {
const datasourceId = json.endpoint.datasourceId
const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
let aliasingEnabled: boolean, datasource: Datasource
const datasource = json.datasource
const isSqs = datasource === undefined
let aliasingEnabled: boolean
if (isSqs) {
aliasingEnabled = this.isAliasingEnabled(json)
datasource = BudibaseInternalDB
} else {
datasource = await datasources.get(datasourceId)
aliasingEnabled = this.isAliasingEnabled(json, datasource)
}
@ -215,24 +213,23 @@ export default class AliasTables {
}
json.filters = aliasFilters(json.filters)
}
if (json.meta?.table) {
this.getAlias(json.meta.table.name)
}
if (json.meta?.tables) {
Object.keys(json.meta.tables).forEach(tableName =>
this.getAlias(tableName)
)
}
if (json.relationships) {
json.relationships = json.relationships.map(relationship => ({
...relationship,
aliases: this.aliasMap([
relationship.through,
relationship.tableName,
json.endpoint.entityId,
json.table.name,
]),
}))
}
this.getAlias(json.table)
for (const tableName of Object.keys(json.tables)) {
this.getAlias(tableName)
}
// invert and return
const invertedTableAliases: Record<string, string> = {}
for (let [key, value] of Object.entries(this.tableAliases)) {
@ -241,7 +238,7 @@ export default class AliasTables {
json.tableAliases = invertedTableAliases
}
let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
let response = await queryFn(json)
if (Array.isArray(response) && aliasingEnabled) {
return this.reverse(response)
} else {
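
A short sketch of the widened getAlias (tasksTable is a hypothetical Table whose name is "tasks"): passing a document resolves through table.name, so both calls return the same alias.

// sketch: both lookups hit the same alias map entry
const aliaser = new AliasTables(["tasks", "products"])
const byName = aliaser.getAlias("tasks") // e.g. "a"
const byTable = aliaser.getAlias(tasksTable) // also "a", via tasksTable.name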

View File

@ -14,13 +14,13 @@ import {
SqlClient,
ArrayOperator,
ViewV2,
EnrichedQueryJson,
} from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.."
import { extractViewInfoFromID, isRelationshipColumn } from "../../../db/utils"
import { isSQL } from "../../../integrations/utils"
import { docIds, sql } from "@budibase/backend-core"
import { docIds, sql, SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
import { getTableFromSource } from "../../../api/controllers/row/utils"
import env from "../../../environment"
@ -73,19 +73,59 @@ export function processRowCountResponse(
}
}
export async function getDatasourceAndQuery(
function processInternalTables(tables: Table[]) {
const tableMap: Record<string, Table> = {}
for (let table of tables) {
// update the table name, should never query by name for SQLite
table.originalName = table.name
table.name = table._id!
tableMap[table._id!] = table
}
return tableMap
}
export async function enrichQueryJson(
json: QueryJson
): Promise<DatasourcePlusQueryResponse> {
const datasourceId = json.endpoint.datasourceId
const datasource = await sdk.datasources.get(datasourceId)
const table = datasource.entities?.[json.endpoint.entityId]
if (!json.meta && table) {
json.meta = {
): Promise<EnrichedQueryJson> {
let datasource: Datasource | undefined = undefined
if (typeof json.endpoint.datasourceId === "string") {
if (json.endpoint.datasourceId !== SQS_DATASOURCE_INTERNAL) {
datasource = await sdk.datasources.get(json.endpoint.datasourceId, {
enriched: true,
})
}
} else {
datasource = json.endpoint.datasourceId
}
let tables: Record<string, Table>
if (datasource) {
tables = datasource.entities || {}
} else {
tables = processInternalTables(await sdk.tables.getAllInternalTables())
}
let table: Table
if (typeof json.endpoint.entityId === "string") {
let entityId = json.endpoint.entityId
if (docIds.isDatasourceId(entityId)) {
entityId = sql.utils.breakExternalTableId(entityId).tableName
}
table = tables[entityId]
} else {
table = json.endpoint.entityId
}
return {
operation: json.endpoint.operation,
table,
tables,
datasource,
schema: json.endpoint.schema,
...json,
}
}
return makeExternalQuery(datasource, json)
}
export function cleanExportRows(
rows: Row[],
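
To illustrate the internal branch of enrichQueryJson above (a sketch with a hypothetical table id; SQS_DATASOURCE_INTERNAL is the sentinel imported from @budibase/backend-core): no Datasource is fetched, and the table map is keyed by _id via processInternalTables.

// sketch: internal (SQS) queries leave datasource undefined
const enriched = await enrichQueryJson({
  endpoint: {
    datasourceId: SQS_DATASOURCE_INTERNAL,
    entityId: "ta_users", // hypothetical internal table _id
    operation: Operation.READ,
  },
})
// enriched.datasource === undefined
// enriched.tables["ta_users"] is the renamed internal table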

View File

@ -245,7 +245,6 @@ export async function save(
datasource,
operation,
tableToSave,
tables,
oldTable,
opts?.renaming
)
@ -253,7 +252,7 @@ export async function save(
for (let extraTable of extraTablesToUpdate) {
const oldExtraTable = oldTables[extraTable.name]
let op = oldExtraTable ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
await makeTableRequest(datasource, op, extraTable, oldExtraTable)
}
// make sure the constrained list, all still exist
@ -292,7 +291,7 @@ export async function destroy(datasourceId: string, table: Table) {
const operation = Operation.DELETE_TABLE
if (tables) {
await makeTableRequest(datasource, operation, table, tables)
await makeTableRequest(datasource, operation, table)
cleanupRelationships(table, tables, { deleting: true })
delete tables[table.name]
datasource.entities = tables

View File

@ -46,6 +46,7 @@ export async function processTable(table: Table): Promise<Table> {
const processed: Table = {
...table,
type: "table",
primary: ["_id"], // internal tables must always use _id as primary key
sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
sql: true,

View File

@ -1,14 +1,16 @@
import {
Datasource,
VerifyDatasourceRequest,
CreateDatasourceResponse,
UpdateDatasourceResponse,
UpdateDatasourceRequest,
QueryJson,
BuildSchemaFromSourceResponse,
CreateDatasourceResponse,
Datasource,
FetchDatasourceInfoResponse,
FieldType,
RelationshipType,
UpdateDatasourceRequest,
UpdateDatasourceResponse,
VerifyDatasourceRequest,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import { sql } from "@budibase/backend-core"
export class DatasourceAPI extends TestAPI {
create = async (
@ -66,16 +68,6 @@ export class DatasourceAPI extends TestAPI {
return await this._get<Datasource[]>(`/api/datasources`, { expectations })
}
query = async (
query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
expectations?: Expectations
) => {
return await this._post<any>(`/api/datasources/query`, {
body: query,
expectations,
})
}
fetchSchema = async (
{
datasourceId,
@ -103,4 +95,50 @@ export class DatasourceAPI extends TestAPI {
}
)
}
addExistingRelationship = async (
{
one,
many,
}: {
one: { tableId: string; relationshipName: string; foreignKey: string }
many: { tableId: string; relationshipName: string; primaryKey: string }
},
expectations?: Expectations
) => {
const oneTableInfo = sql.utils.breakExternalTableId(one.tableId),
manyTableInfo = sql.utils.breakExternalTableId(many.tableId)
if (oneTableInfo.datasourceId !== manyTableInfo.datasourceId) {
throw new Error(
"Tables are in different datasources, cannot create relationship."
)
}
const datasource = await this.get(oneTableInfo.datasourceId)
const oneTable = datasource.entities?.[oneTableInfo.tableName],
manyTable = datasource.entities?.[manyTableInfo.tableName]
if (!oneTable || !manyTable) {
throw new Error(
"Both tables not found in datasource, cannot create relationship."
)
}
manyTable.schema[many.relationshipName] = {
type: FieldType.LINK,
name: many.relationshipName,
tableId: oneTable._id!,
relationshipType: RelationshipType.MANY_TO_ONE,
fieldName: one.foreignKey,
foreignKey: many.primaryKey,
main: true,
}
oneTable.schema[one.relationshipName] = {
type: FieldType.LINK,
name: one.relationshipName,
tableId: manyTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: many.primaryKey,
foreignKey: one.foreignKey,
}
return await this.update(datasource, expectations)
}
}
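
A hypothetical test usage of the new helper (table documents and column names invented for illustration); both tables must resolve to the same datasource or the helper throws before updating anything.

// sketch: links an existing foreign key between two external tables
await config.api.datasource.addExistingRelationship({
  one: { tableId: products._id!, relationshipName: "tasks", foreignKey: "productid" },
  many: { tableId: tasks._id!, relationshipName: "product", primaryKey: "productid" },
})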

View File

@ -1,5 +1,5 @@
import { Table, Row } from "../documents"
import { QueryJson } from "./search"
import { EnrichedQueryJson } from "./search"
export const PASSWORD_REPLACEMENT = "--secret-value--"
@ -207,7 +207,7 @@ export interface DatasourcePlus extends IntegrationBase {
// this returns the format of the identifier
getBindingIdentifier(): string
getStringConcat(parts: string[]): string
query(json: QueryJson): Promise<DatasourcePlusQueryResponse>
query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse>
buildSchema(
datasourceId: string,
entities: Record<string, Table>

View File

@ -1,5 +1,5 @@
import { Operation } from "./datasources"
import { Row, Table, DocumentType } from "../documents"
import { Row, DocumentType, Table, Datasource } from "../documents"
import { SortOrder, SortType } from "../api"
import { Knex } from "knex"
import { Aggregation } from "./row"
@ -158,8 +158,8 @@ export interface ManyToManyRelationshipJson {
export interface QueryJson {
endpoint: {
datasourceId: string
entityId: string
datasourceId: string | Datasource
entityId: string | Table
operation: Operation
schema?: string
}
@ -171,11 +171,9 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: Row | Row[]
table?: Table
meta: {
table: Table
tables?: Record<string, Table>
meta?: {
renamed?: RenameColumn
oldTable?: Table
// can specify something that columns could be prefixed with
columnPrefix?: string
}
@ -186,6 +184,14 @@ export interface QueryJson {
tableAliases?: Record<string, string>
}
export interface EnrichedQueryJson extends Omit<QueryJson, "endpoint"> {
operation: Operation
table: Table
tables: Record<string, Table>
datasource?: Datasource
schema?: string
}
export interface QueryOptions {
disableReturning?: boolean
disableBindings?: boolean
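
A sketch of what the widened endpoint union allows (datasourceDoc and personsTable are assumed documents): callers may pass either ids or the already-resolved documents, which enrichQueryJson consumes directly.

// sketch: both forms now satisfy QueryJson["endpoint"]
const byId: QueryJson["endpoint"] = {
  datasourceId: "datasource_plus_abc123", // hypothetical id
  entityId: "persons",
  operation: Operation.READ,
}
const byDocument: QueryJson["endpoint"] = {
  datasourceId: datasourceDoc, // a full Datasource document
  entityId: personsTable, // a full Table document
  operation: Operation.READ,
}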

View File

@ -1,4 +0,0 @@
#!/bin/bash
sudo apt-get install -y qemu qemu-user-static
docker buildx create --name budibase
docker buildx use budibase

View File

@ -1,46 +0,0 @@
const fs = require("fs")
const path = require("path")
const MONOREPO_ROOT = "packages"
const packages = getPackages()
function getPackages() {
if (fs.existsSync(MONOREPO_ROOT)) {
return fs.readdirSync(MONOREPO_ROOT).map(pkg => path.join(MONOREPO_ROOT, pkg))
} else {
return ["./"]
}
}
function pinDeps(dependencies) {
for (let dependency in dependencies) {
if (dependency.startsWith("@budibase")) {
dependencies[dependency] = dependencies[dependency].replace("^", "")
}
}
return dependencies
}
// iterate over the monorepo packages
for (let pkgPath of packages) {
// only directories
if (fs.statSync(pkgPath).isDirectory()) {
// get the package JSON file
const pkgJsonPath = path.join(pkgPath, "package.json")
if (!fs.existsSync(pkgJsonPath)) {
continue
}
const pkgJson = JSON.parse(fs.readFileSync(pkgJsonPath))
// find any budibase dependencies, and pin them
pkgJson.dependencies = pinDeps(pkgJson.dependencies)
pkgJson.devDependencies = pinDeps(pkgJson.devDependencies)
// update the package JSON files
fs.writeFileSync(pkgJsonPath, JSON.stringify(pkgJson, null, 2))
}
}
console.log("Pinned dev versions for budibase packages successfully.")

View File

@ -1,28 +0,0 @@
const yaml = require("js-yaml")
const fs = require("fs")
const path = require("path")
const CHART_PATH = path.join(__dirname, "../", "charts", "budibase", "Chart.yaml")
const UPGRADE_VERSION = process.env.BUDIBASE_RELEASE_VERSION
if (!UPGRADE_VERSION) {
throw new Error("BUDIBASE_RELEASE_VERSION env var must be set.")
}
try {
const chartFile = fs.readFileSync(CHART_PATH, "utf-8")
const chart = yaml.load(chartFile)
// Upgrade app version in chart to match budibase release version
chart.appVersion = UPGRADE_VERSION
// semantically version the chart
const [major, minor, patch] = chart.version.split(".")
const newPatch = parseInt(patch) + 1
chart.version = [major, minor, newPatch].join(".")
const updatedChartYaml = yaml.dump(chart)
fs.writeFileSync(CHART_PATH, updatedChartYaml)
} catch (err) {
console.error("Error releasing helm chart")
throw err
}

View File

@ -1,7 +0,0 @@
#!/bin/bash
echo "Resetting package versions"
yarn lerna exec "yarn version --no-git-tag-version --new-version=0.0.0"
echo "Updating dependencies"
node scripts/syncLocalDependencies.js "0.0.0"
git checkout package.json
echo "Package versions reset!"

View File

@ -1,8 +0,0 @@
#!/bin/bash
version=$(./scripts/getCurrentVersion.sh)
echo "Setting version $version"
yarn lerna exec "yarn version --no-git-tag-version --new-version=$version"
echo "Updating dependencies"
node scripts/syncLocalDependencies.js $version
echo "Syncing yarn workspace"
yarn

View File

yarn.lock: 745 changes (file diff suppressed because it is too large)