Merge pull request #15087 from Budibase/bigint-cleanup

Bigint cleanup
Sam Rose 2024-11-28 17:45:41 +00:00 committed by GitHub
commit 03377e37bb
22 changed files with 73 additions and 126 deletions
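
The change threaded through most of the hunks below is the removal of the nested endpoint object from the query JSON: operation and schema move to the top level of EnrichedQueryJson, and lookups by endpoint.entityId give way to the resolved Table object. As a rough orientation sketch, using simplified stand-in types rather than the real @budibase/types definitions:

type Operation = "CREATE" | "READ" | "UPDATE" | "DELETE" | "COUNT"

// Simplified stand-in for the flattened EnrichedQueryJson shape used below.
interface FlattenedQuery {
  operation: Operation // was json.endpoint.operation
  schema?: string // was json.endpoint.schema
  table: { name: string } // replaces lookups via json.endpoint.entityId
  tables: Record<string, { name: string }>
  datasource?: { source: string } // filled in during enrichment
}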

@@ -284,13 +284,13 @@ class InternalBuilder {
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { endpoint, resource } = this.query
const { table, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = this.getTableName(endpoint.entityId)
const alias = this.getTableName(table)
const schema = this.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw("??", [`${alias}.*`])]
@@ -496,7 +496,7 @@ class InternalBuilder {
filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const { relationships, endpoint, tableAliases: aliases, table } = this.query
const { relationships, schema, tableAliases: aliases, table } = this.query
const fromAlias = aliases?.[table.name] || table.name
const matches = (value: string) =>
filterKey.match(new RegExp(`^${value}\\.`))
@@ -537,7 +537,7 @@ class InternalBuilder {
aliases?.[manyToMany.through] || relationship.through
let throughTable = this.tableNameWithSchema(manyToMany.through, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery
// add a join through the junction table
@@ -1010,28 +1010,10 @@ class InternalBuilder {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.table.name === name) {
table = this.query.table
} else if (!this.query.tables[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
getTableName(table?: Table): string {
if (!table) {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
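
getTableName previously accepted either a table name or a Table and resolved strings against the query's table map; it now takes the Table itself (or falls back to the builder's default). A minimal standalone sketch of the new shape, using a hypothetical ExampleBuilder rather than the real InternalBuilder:

interface Table {
  _id?: string
  name: string
}

class ExampleBuilder {
  constructor(private table: Table) {}

  getTableName(table?: Table): string {
    if (!table) {
      table = this.table
    }
    // The real builder returns the table ID for SQS tables; the name is enough here.
    return table.name
  }
}

// Call sites change from getTableName(endpoint.entityId) to getTableName(table).
new ExampleBuilder({ name: "persons" }).getTableName() // "persons"
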
@@ -1242,7 +1224,7 @@ class InternalBuilder {
): Knex.QueryBuilder {
const sqlClient = this.client
const knex = this.knex
const { resource, tableAliases: aliases, endpoint, tables } = this.query
const { resource, tableAliases: aliases, schema, tables } = this.query
const fields = resource?.fields || []
for (let relationship of relationships) {
const {
@@ -1266,7 +1248,7 @@ class InternalBuilder {
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
schema,
})
const requiredFields = [
...(relatedTable?.primary || []),
@@ -1310,7 +1292,7 @@ class InternalBuilder {
if (isManyToMany) {
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
@@ -1401,8 +1383,7 @@ class InternalBuilder {
toPrimary?: string
}[]
): Knex.QueryBuilder {
const { tableAliases: aliases, endpoint } = this.query
const schema = endpoint.schema
const { tableAliases: aliases, schema } = this.query
const toTable = tables.to,
fromTable = tables.from,
throughTable = tables.through
@@ -1462,7 +1443,7 @@ class InternalBuilder {
return this.knex(
this.tableNameWithSchema(this.query.table.name, {
alias,
schema: this.query.endpoint.schema,
schema: this.query.schema,
})
)
}
@@ -1556,9 +1537,8 @@ class InternalBuilder {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships, table } = this.query
let { operation, filters, paginate, relationships, table } = this.query
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
// start building the query
let query = this.qualifiedKnex()
@@ -1578,7 +1558,7 @@ class InternalBuilder {
foundLimit = paginate.limit
}
// counting should not sort, limit or offset
if (!counting) {
if (operation !== Operation.COUNT) {
// add the found limit if supplied
if (foundLimit != null) {
query = query.limit(foundLimit)
@@ -1590,7 +1570,7 @@ class InternalBuilder {
}
const aggregations = this.query.resource?.aggregations || []
if (counting) {
if (operation === Operation.COUNT) {
query = this.addDistinctCount(query)
} else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
@@ -1599,7 +1579,7 @@ class InternalBuilder {
}
// have to add after as well (this breaks MS-SQL)
if (!counting) {
if (operation !== Operation.COUNT) {
query = this.addSorting(query)
}
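
The counting flag that used to be derived from endpoint.operation is gone; the builder now checks the flattened operation directly, so COUNT queries skip limits and sorting and take the distinct-count path instead. A toy illustration of that branching (placeholder steps, not the real Knex calls):

enum Operation {
  READ = "READ",
  COUNT = "COUNT",
}

function planSteps(operation: Operation, limit?: number): string[] {
  const steps = ["base select"]
  if (operation === Operation.COUNT) {
    steps.push("add distinct count")
  } else {
    if (limit != null) steps.push(`limit ${limit}`)
    steps.push("add sorting")
  }
  return steps
}

planSteps(Operation.COUNT) // ["base select", "add distinct count"]
planSteps(Operation.READ, 10) // ["base select", "limit 10", "add sorting"]
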
@@ -1738,13 +1718,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return {}
}
const input = this._query({
operation: Operation.READ,
datasource: json.datasource,
schema: json.schema,
table: json.table,
tables: json.tables,
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
resource: { fields: [] },
filters: json.extra?.idFilter,
paginate: { limit: 1 },

@@ -239,14 +239,13 @@ class SqlTableQueryBuilder {
* @return the operation that was found in the JSON.
*/
_operation(json: EnrichedQueryJson): Operation {
return json.endpoint.operation
return json.operation
}
_tableQuery(json: EnrichedQueryJson): SqlQuery | SqlQuery[] {
let client = knex({ client: this.sqlClient }).schema
let schemaName = json?.endpoint?.schema
if (schemaName) {
client = client.withSchema(schemaName)
if (json?.schema) {
client = client.withSchema(json.schema)
}
let query: Knex.SchemaBuilder
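
The schema applied to DDL statements now comes straight from json.schema rather than json.endpoint.schema. A small standalone sketch of the withSchema pattern, using a Postgres client purely for illustration (no connection is needed just to generate SQL):

import { knex } from "knex"

function renameColumnSql(schema?: string): string {
  let builder = knex({ client: "pg" }).schema
  if (schema) {
    builder = builder.withSchema(schema)
  }
  return builder
    .alterTable("persons", table => {
      table.renameColumn("year", "year_of_birth")
    })
    .toString()
}

// With a schema the generated statement is qualified, roughly:
//   alter table "analytics"."persons" rename "year" to "year_of_birth"
renameColumnSql("analytics")
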
@@ -268,8 +267,8 @@ class SqlTableQueryBuilder {
// renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta?.renamed) {
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `\`${schemaName}\`.\`${json.table.name}\``
const tableName = json?.schema
? `\`${json.schema}\`.\`${json.table.name}\``
: `\`${json.table.name}\``
return {
sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,
@@ -290,8 +289,8 @@ class SqlTableQueryBuilder {
if (this.sqlClient === SqlClient.MS_SQL && json.meta?.renamed) {
const oldColumn = json.meta.renamed.old
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `${schemaName}.${json.table.name}`
const tableName = json?.schema
? `${json.schema}.${json.table.name}`
: `${json.table.name}`
const sql = getNativeSql(query)
if (Array.isArray(sql)) {

@@ -1 +1 @@
Subproject commit e2252498ddfade3c2592b1ec78f7bee4e3cf0d2f
Subproject commit d9245f3d6d0b41ec2e6b3406b791f9e7448882cb

@@ -1,27 +1,39 @@
import { DatasourcePlusQueryResponse, QueryJson } from "@budibase/types"
import {
DatasourcePlusQueryResponse,
EnrichedQueryJson,
QueryJson,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"
import { enrichQueryJson } from "../../sdk/app/rows/utils"
function isEnriched(
json: QueryJson | EnrichedQueryJson
): json is EnrichedQueryJson {
return "datasource" in json
}
export async function makeExternalQuery(
json: QueryJson
json: QueryJson | EnrichedQueryJson
): Promise<DatasourcePlusQueryResponse> {
const enrichedJson = await enrichQueryJson(json)
if (!enrichedJson.datasource) {
if (!isEnriched(json)) {
json = await enrichQueryJson(json)
if (json.datasource) {
json.datasource = await sdk.datasources.enrich(json.datasource)
}
}
if (!json.datasource) {
throw new Error("No datasource provided for external query")
}
enrichedJson.datasource = await sdk.datasources.enrich(
enrichedJson.datasource
)
const Integration = await getIntegration(enrichedJson.datasource.source)
const Integration = await getIntegration(json.datasource.source)
// query is the opinionated function
if (!Integration.prototype.query) {
throw "Datasource does not support query."
}
const integration = new Integration(enrichedJson.datasource.config)
return integration.query(enrichedJson)
const integration = new Integration(json.datasource.config)
return integration.query(json)
}
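
makeExternalQuery now takes either a plain QueryJson or an already enriched query, and the presence of datasource is what the isEnriched guard keys on, so enrichment and the datasource lookup happen at most once. A stripped-down sketch of the narrowing pattern with stand-in types:

interface PlainQuery {
  endpoint: { entityId: string; operation: string }
}

interface EnrichedQuery {
  operation: string
  datasource?: { source: string; config: Record<string, unknown> }
}

function isEnriched(json: PlainQuery | EnrichedQuery): json is EnrichedQuery {
  return "datasource" in json
}

async function ensureEnriched(
  json: PlainQuery | EnrichedQuery,
  enrich: (plain: PlainQuery) => Promise<EnrichedQuery>
): Promise<EnrichedQuery> {
  if (!isEnriched(json)) {
    json = await enrich(json)
  }
  return json
}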

@@ -383,7 +383,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const sheet = json.table.name
switch (json.endpoint.operation) {
switch (json.operation) {
case Operation.CREATE:
return this.create({ sheet, row: json.body as Row })
case Operation.BULK_CREATE:
@@ -426,7 +426,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return this.deleteTable(json?.table?.name)
default:
throw new Error(
`GSheets integration does not support "${json.endpoint.operation}".`
`GSheets integration does not support "${json.operation}".`
)
}
}
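
Integrations now switch on json.operation rather than json.endpoint.operation, and the unsupported-operation error interpolates the same field. A toy dispatcher showing that shape (not the real GoogleSheetsIntegration):

enum SheetOperation {
  CREATE = "CREATE",
  READ = "READ",
  DELETE_TABLE = "DELETE_TABLE",
}

interface QueryLike {
  operation: SheetOperation
  table: { name: string }
}

function dispatch(json: QueryLike): string {
  switch (json.operation) {
    case SheetOperation.CREATE:
      return `create a row in ${json.table.name}`
    case SheetOperation.READ:
      return `read rows from ${json.table.name}`
    default:
      throw new Error(`Integration does not support "${json.operation}".`)
  }
}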

@@ -509,8 +509,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const schema = this.config.schema
await this.connect()
if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
json.endpoint.schema = schema
if (schema && schema !== DEFAULT_SCHEMA) {
json.schema = schema
}
const operation = this._operation(json)
const queryFn = (query: any, op: string) => this.internalQuery(query, op)

@@ -572,11 +572,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
return response.rows as Row[]
} else {
// get the last row that was updated
if (
response.lastRowid &&
json.endpoint?.entityId &&
operation !== Operation.DELETE
) {
if (response.lastRowid && operation !== Operation.DELETE) {
const lastRow = await this.internalQuery({
sql: `SELECT * FROM "${json.table.name}" WHERE ROWID = '${response.lastRowid}'`,
})

@@ -269,7 +269,7 @@ describe("Captures of real examples", () => {
fields: string[] = ["a"]
): EnrichedQueryJson {
return {
endpoint: { datasourceId: "", entityId: "", operation: op },
operation: op,
resource: {
fields,
},

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.year",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
"entityId": "people",
"operation": "CREATE"
},
"operation": "CREATE",
"resource": {
"fields": ["a.name", "a.age"]
},

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "CREATE"
},
"operation": "CREATE",
"resource": {
"fields": [
"a.year",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "compositetable",
"operation": "DELETE"
},
"operation": "DELETE",
"resource": {
"fields": ["a.keyparttwo", "a.keypartone", "a.name"]
},

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "tasks",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.executorid",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "products",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.productname",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "products",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.productname",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
"entityId": "tasks",
"operation": "READ"
},
"operation": "READ",
"resource": {
"fields": [
"a.executorid",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "UPDATE"
},
"operation": "UPDATE",
"resource": {
"fields": [
"a.year",

@@ -1,9 +1,5 @@
{
"endpoint": {
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
"entityId": "persons",
"operation": "UPDATE"
},
"operation": "UPDATE",
"resource": {
"fields": [
"a.year",

@@ -231,7 +231,7 @@ async function runSqlQuery(
tables.map(table => table._id!).concat(relationshipJunctionTableIds)
)
if (opts?.countTotalRows) {
json.endpoint.operation = Operation.COUNT
json.operation = Operation.COUNT
}
const processSQLQuery = async (json: EnrichedQueryJson) => {
const query = builder._query(json, {

@@ -3,7 +3,6 @@ import {
DatasourcePlusQueryResponse,
EnrichedQueryJson,
Operation,
QueryJson,
Row,
SearchFilters,
SqlClient,
@@ -69,13 +68,12 @@ export default class AliasTables {
this.charSeq = new CharSequence()
}
isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
const operation = json.endpoint.operation
isAliasingEnabled(json: EnrichedQueryJson, datasource?: Datasource) {
const fieldLength = json.resource?.fields?.length
if (
!fieldLength ||
fieldLength <= 0 ||
DISABLED_OPERATIONS.includes(operation)
DISABLED_OPERATIONS.includes(json.operation)
) {
return false
}
@ -85,7 +83,7 @@ export default class AliasTables {
}
try {
const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(operation)
const isWrite = WRITE_OPERATIONS.includes(json.operation)
const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
if (isWrite && isDisabledClient) {
return false

@@ -118,9 +118,11 @@ export async function enrichQueryJson(
}
return {
operation: json.endpoint.operation,
table,
tables,
datasource,
schema: json.endpoint.schema,
...json,
}
}
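
enrichQueryJson is left as the main place that still reads the legacy endpoint block, translating it into the flattened fields consumed everywhere above. A condensed sketch of that translation with simplified types (not the real sdk implementation):

interface LegacyQueryJson {
  endpoint: { datasourceId: string; entityId: string; operation: string; schema?: string }
}

interface FlattenedQueryJson {
  operation: string
  schema?: string
  table: { name: string }
  tables: Record<string, { name: string }>
}

function flatten(
  json: LegacyQueryJson,
  table: { name: string },
  tables: Record<string, { name: string }>
): FlattenedQueryJson {
  return {
    operation: json.endpoint.operation,
    schema: json.endpoint.schema,
    table,
    tables,
  }
}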

@@ -184,10 +184,12 @@ export interface QueryJson {
tableAliases?: Record<string, string>
}
export interface EnrichedQueryJson extends QueryJson {
export interface EnrichedQueryJson extends Omit<QueryJson, "endpoint"> {
operation: Operation
table: Table
tables: Record<string, Table>
datasource?: Datasource
schema?: string
}
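
Since EnrichedQueryJson now extends Omit<QueryJson, "endpoint">, any leftover json.endpoint access on an enriched query is a compile error, which pushes callers onto the flattened fields. A tiny illustration of the Omit pattern with placeholder types:

interface QueryJsonLike {
  endpoint: { operation: string }
  resource?: { fields: string[] }
}

interface EnrichedLike extends Omit<QueryJsonLike, "endpoint"> {
  operation: string
}

declare const enriched: EnrichedLike
enriched.operation // fine
// @ts-expect-error endpoint has been removed from the enriched shape
enriched.endpoint
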
export interface QueryOptions {