Get all bigint tests passing.

parent 1c27ddcb8a
commit 26fa0222a1
@@ -1191,7 +1191,7 @@ class InternalBuilder {
     return withSchema
   }
 
-  private buildJsonField(table: Table, field: string): string {
+  private buildJsonField(table: Table, field: string): [string, Knex.Raw] {
     const parts = field.split(".")
     const baseName = parts[parts.length - 1]
     let unaliased: string
@@ -1208,18 +1208,11 @@ class InternalBuilder {
 
     const schema = table.schema[baseName]
 
-    const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
     let identifier = this.rawQuotedIdentifier(tableField)
-    // if (schema.type === FieldType.BIGINT) {
-    //   identifier = this.castIntToString(identifier)
-    // } else if (schema.type === FieldType.LINK) {
-    //   const otherTable = this.query.meta.tables![schema.tableId]
-    //   const otherField = otherTable.schema[schema.fieldName]
-    //   if (otherField.type === FieldType.BIGINT) {
-    //     identifier = this.castIntToString(identifier)
-    //   }
-    // }
-    return this.knex.raw(`?${separator}??`, [unaliased, identifier]).toString()
+    if (schema.type === FieldType.BIGINT) {
+      identifier = this.castIntToString(identifier)
+    }
+    return [unaliased, identifier]
   }
 
   maxFunctionParameters() {
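The BIGINT-to-string cast above is the core of the fix. A minimal standalone sketch of why it is needed (the value and field name are invented for illustration, not taken from the diff):

```typescript
// JavaScript numbers carry only 53 bits of integer precision, so a 64-bit id
// that arrives as a JSON number can silently change value. Casting to a string
// on the SQL side keeps the id intact end to end.
const id = "9007199254740993" // 2^53 + 1 – a perfectly valid BIGINT key

console.log(Number(id)) // 9007199254740992 – precision already lost
console.log(JSON.parse(`{"table1Id": ${id}}`).table1Id) // 9007199254740992
console.log(JSON.parse(`{"table1Id": "${id}"}`).table1Id) // "9007199254740993" – safe
```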
@@ -1282,8 +1275,14 @@ class InternalBuilder {
       0,
       Math.floor(this.maxFunctionParameters() / 2)
     )
-    const fieldList: string = relationshipFields
-      .map(field => this.buildJsonField(relatedTable!, field))
+    const fieldList = relationshipFields.map(field =>
+      this.buildJsonField(relatedTable!, field)
+    )
+    const fieldListFormatted = fieldList
+      .map(f => {
+        const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
+        return this.knex.raw(`?${separator}??`, [f[0], f[1]]).toString()
+      })
       .join(",")
     // SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
     // it reduces the result set rather than limiting how much data it filters over
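The `?${separator}??` template above is what turns each `[jsonKey, identifier]` pair into a `json_object(...)` argument: Oracle's JSON_OBJECT takes `key VALUE expr` pairs, while the other dialects take `key, expr`. A rough sketch of the fragment it produces (field names and clients are assumed, not taken from the diff):

```typescript
import { knex } from "knex"

// Query-builder only – no connection is needed to render the fragment.
const kx = knex({ client: "pg" })

// `?` binds the JSON key as a string literal, `??` binds the column as an identifier.
const render = (separator: string) =>
  kx.raw(`?${separator}??`, ["city", "b.city"]).toString()

console.log(render(","))       // 'city',"b"."city"       -> json_object('city',"b"."city")
console.log(render(" VALUE ")) // 'city' VALUE "b"."city" -> JSON_OBJECT('city' VALUE "b"."city")
```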
@@ -1331,35 +1330,42 @@ class InternalBuilder {
           // need to check the junction table document is to the right column, this is just for SQS
           subQuery = this.addJoinFieldCheck(subQuery, relationship)
           wrapperQuery = standardWrap(
-            this.knex.raw(`json_group_array(json_object(${fieldList}))`)
+            this.knex.raw(
+              `json_group_array(json_object(${fieldListFormatted}))`
+            )
           )
           break
         case SqlClient.POSTGRES:
           wrapperQuery = standardWrap(
-            this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
+            this.knex.raw(`json_agg(json_build_object(${fieldListFormatted}))`)
           )
           break
         case SqlClient.MARIADB:
           // can't use the standard wrap due to correlated sub-query limitations in MariaDB
           wrapperQuery = subQuery.select(
             knex.raw(
-              `json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
+              `json_arrayagg(json_object(${fieldListFormatted}) LIMIT ${getRelationshipLimit()})`
             )
           )
           break
         case SqlClient.MY_SQL:
         case SqlClient.ORACLE:
           wrapperQuery = standardWrap(
-            this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
+            this.knex.raw(`json_arrayagg(json_object(${fieldListFormatted}))`)
           )
           break
         case SqlClient.MS_SQL: {
           const comparatorQuery = knex
-            .select(`${fromAlias}.*`)
+            .select(`*`)
             // @ts-ignore - from alias syntax not TS supported
             .from({
               [fromAlias]: subQuery
-                .select(`${toAlias}.*`)
+                .select(
+                  fieldList.map(f => {
+                    // @ts-expect-error raw is fine here, knex types are wrong
+                    return knex.ref(f[1]).as(f[0])
+                  })
+                )
                 .limit(getRelationshipLimit()),
             })
 
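For MS SQL the inner sub-query now projects each `[jsonKey, rawIdentifier]` pair as an aliased column instead of `${toAlias}.*`. A small sketch of what `knex.ref(...).as(...)` contributes there (table and column names are invented):

```typescript
import { knex } from "knex"

const kx = knex({ client: "mssql" })

// Hypothetical output of buildJsonField: [jsonKey, column identifier] pairs.
const fieldList: [string, string][] = [
  ["city", "b.city"],
  ["name", "b.name"],
]

// ref().as() exposes each column under its JSON key name in the inner select.
const cols = fieldList.map(([alias, column]) => kx.ref(column).as(alias))
console.log(kx.select(cols).from("b").toString())
// roughly: select [b].[city] as [city], [b].[name] as [name] from [b]
```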
@@ -3496,7 +3496,7 @@ if (descriptions.length) {
         })
       })
 
-    if (!isInternal) {
+    if (!isInternal && !isOracle) {
       describe("bigint ids", () => {
         let table1: Table, table2: Table
         let table1Name: string, table2Name: string
@@ -3504,15 +3504,14 @@ if (descriptions.length) {
         beforeAll(async () => {
           table1Name = `table1-${generator.guid().substring(0, 5)}`
           await client!.schema.createTable(table1Name, table => {
-            table.bigIncrements("table1Id").primary()
+            table.bigInteger("table1Id").primary()
           })
 
           table2Name = `table2-${generator.guid().substring(0, 5)}`
           await client!.schema.createTable(table2Name, table => {
-            table.increments("table2Id").primary()
+            table.bigInteger("table2Id").primary()
             table
               .bigInteger("table1Ref")
-              .unsigned()
               .references("table1Id")
               .inTable(table1Name)
           })
@@ -3540,21 +3539,42 @@ if (descriptions.length) {
         })
 
         it.only("should be able to fetch rows with related bigint ids", async () => {
-          const row = await config.api.row.save(table1._id!, {})
-          await config.api.row.save(table2._id!, { table1Ref: row.table1Id })
+          const row = await config.api.row.save(table1._id!, {
+            table1Id: "1",
+          })
+          await config.api.row.save(table2._id!, {
+            table2Id: "2",
+            table1Ref: row.table1Id,
+          })
 
-          const { rows } = await config.api.row.search(table1._id!)
-          expect(rows).toEqual([
+          let resp = await config.api.row.search(table1._id!)
+          expect(resp.rows).toEqual([
             expect.objectContaining({
               _id: "%5B'1'%5D",
-              _rev: "rev",
               table1Id: "1",
               many: [
                 {
-                  _id: "%5B'1'%5D",
-                  primaryDisplay: 1,
+                  _id: "%5B'2'%5D",
+                  primaryDisplay: "2",
                 },
               ],
+              tableId: table1._id,
+            }),
+          ])
+
+          resp = await config.api.row.search(table2._id!)
+          expect(resp.rows).toEqual([
+            expect.objectContaining({
+              _id: "%5B'2'%5D",
+              table2Id: "2",
+              table1Ref: "1",
+              one: [
+                {
+                  _id: "%5B'1'%5D",
+                  primaryDisplay: "1",
+                },
+              ],
+              tableId: table2._id,
             }),
           ])
         })
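The `_id` values asserted in the updated test look opaque, but they follow the external row id shape: the primary key values as an array, with double quotes swapped for single quotes and the result URI-encoded. A throwaway helper to show the shape (not the server's actual implementation):

```typescript
// encodeURIComponent leaves apostrophes alone but encodes the brackets,
// which is where the %5B ... %5D wrapping comes from.
const externalRowId = (keys: (string | number)[]): string =>
  encodeURIComponent(JSON.stringify(keys).replace(/"/g, "'"))

console.log(externalRowId(["1"])) // %5B'1'%5D
console.log(externalRowId(["2"])) // %5B'2'%5D
```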
@@ -342,7 +342,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
         ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
         : query.sql
       this.log(sql, query.bindings)
-      return await request.query(sql)
+      const resp = await request.query(sql)
+      return resp
     } catch (err: any) {
       let readableMessage = getReadableErrorMessage(
         SourceName.SQL_SERVER,
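For context on the `SELECT SCOPE_IDENTITY() AS id` suffix in the hunk above: the mssql driver returns one recordset per statement, so appending that SELECT lets the integration read back the identity generated by an INSERT. A minimal sketch against the mssql driver (table name and pool setup assumed):

```typescript
import sql from "mssql"

// Runs an INSERT followed by a SELECT in one batch; the INSERT produces no
// recordset, so resp.recordset holds the SCOPE_IDENTITY() row.
async function insertAndReturnId(pool: sql.ConnectionPool): Promise<number> {
  const resp = await pool
    .request()
    .query("INSERT INTO t (name) VALUES ('x'); SELECT SCOPE_IDENTITY() AS id;")
  return resp.recordset[0].id
}
```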