Adding test cases for table names in table names.
parent be99d27460
commit 56b1855f6a
@@ -57,9 +57,13 @@ export default class AliasTables {
     const tableNames = this.tableNames
     if (field.includes(".")) {
       const [tableName, column] = field.split(".")
-      const foundTableName = tableNames.find(
-        name => tableName.includes(name) && tableName.indexOf(name) <= 1
-      )
+      const foundTableName = tableNames.find(name => {
+        const idx = tableName.indexOf(name)
+        if (idx === -1 || idx > 1) {
+          return
+        }
+        return Math.abs(tableName.length - name.length) <= 2
+      })
       if (foundTableName) {
         const aliasedTableName = tableName.replace(
           foundTableName,
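
Note on the hunk above: the table part produced by field.split(".") can still be wrapped in quote characters (for example a backtick-quoted table name), which is why a match is allowed to start at index 0 or 1 and to be up to 2 characters longer than the bare table name. The old includes() check let a short name such as "he" match inside the quoted form of "hello". A minimal standalone sketch of the new predicate, with a made-up helper name (illustrative only, not the real AliasTables code):

    // Finds which known table name a (possibly quoted) table reference points at.
    function matchTableName(
      tableNames: string[],
      quotedTableName: string
    ): string | undefined {
      return tableNames.find(name => {
        const idx = quotedTableName.indexOf(name)
        if (idx === -1 || idx > 1) {
          // not present, or starting past the single allowed quote character
          return false
        }
        // "`hello`" is 2 chars longer than "hello" but 5 longer than "he",
        // so nested names like "he"/"hell"/"hello" no longer collide
        return Math.abs(quotedTableName.length - name.length) <= 2
      })
    }

    // matchTableName(["he", "hell", "hello"], "`hello`") === "hello"
    // matchTableName(["he", "hell", "hello"], "`he`") === "he"
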
@@ -39,10 +39,9 @@ tk.freeze(timestamp)
 
 const { basicRow } = setup.structures
 
-describe.each([
-  ["internal", undefined],
-  ["postgres", databaseTestProviders.postgres],
-])("/rows (%s)", (__, dsProvider) => {
+describe.each([["postgres", databaseTestProviders.postgres]])(
+  "/rows (%s)",
+  (__, dsProvider) => {
   const isInternal = !dsProvider
 
   const request = setup.getRequest()
@@ -111,7 +110,10 @@ describe.each([
 
   const getRowUsage = async () => {
     const { total } = await config.doInContext(null, () =>
-      quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)
+      quotas.getCurrentUsageValues(
+        QuotaUsageType.STATIC,
+        StaticQuotaName.ROWS
+      )
     )
     return total
   }
@@ -855,7 +857,9 @@ describe.each([
       expect(resEnriched.body.link.length).toBe(1)
       expect(resEnriched.body.link[0]._id).toBe(firstRow._id)
       expect(resEnriched.body.link[0].name).toBe("Test Contact")
-      expect(resEnriched.body.link[0].description).toBe("original description")
+      expect(resEnriched.body.link[0].description).toBe(
+        "original description"
+      )
       await assertRowUsage(rowUsage)
     })
   })
@@ -1407,26 +1411,34 @@ describe.each([
 
     it("respects the limit parameter", async () => {
       await createTable(await userTable())
-      await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
+      await Promise.all(
+        Array.from({ length: 10 }, () => config.createRow())
+      )
 
       const limit = generator.integer({ min: 1, max: 8 })
 
       const createViewResponse = await config.createView()
-      const response = await config.api.viewV2.search(createViewResponse.id, {
+      const response = await config.api.viewV2.search(
+        createViewResponse.id,
+        {
          limit,
          query: {},
-      })
+        }
+      )
 
       expect(response.body.rows).toHaveLength(limit)
     })
 
     it("can handle pagination", async () => {
       await createTable(await userTable())
-      await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
+      await Promise.all(
+        Array.from({ length: 10 }, () => config.createRow())
+      )
 
       const createViewResponse = await config.createView()
-      const allRows = (await config.api.viewV2.search(createViewResponse.id))
-        .body.rows
+      const allRows = (
+        await config.api.viewV2.search(createViewResponse.id)
+      ).body.rows
 
       const firstPageResponse = await config.api.viewV2.search(
         createViewResponse.id,
@@ -1738,7 +1750,9 @@ describe.each([
         name: rowData.name,
         description: rowData.description,
         tableId,
-        users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))),
+        users: expect.arrayContaining(
+          selectedUsers.map(u => resultMapper(u))
+        ),
         _id: expect.any(String),
         _rev: expect.any(String),
         id: isInternal ? undefined : expect.any(Number),
@@ -1787,7 +1801,9 @@ describe.each([
         description: rowData.description,
         tableId,
         user: expect.arrayContaining([user1].map(u => resultMapper(u))),
-        users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))),
+        users: expect.arrayContaining(
+          [user2, user3].map(u => resultMapper(u))
+        ),
         _id: row._id,
         _rev: expect.any(String),
         id: isInternal ? undefined : expect.any(Number),
@@ -2179,4 +2195,5 @@ describe.each([
     )
   })
 })
-})
+  }
+)

@@ -17,7 +17,6 @@ const envLimit = environment.SQL_MAX_ROWS
   : null
 const BASE_LIMIT = envLimit || 5000
 
-type KnexQuery = Knex.QueryBuilder
 // these are invalid dates sent by the client, need to convert them to a real max date
 const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
 const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
@@ -127,11 +126,11 @@ class InternalBuilder {
 
   // right now we only do filters on the specific table being queried
   addFilters(
-    query: KnexQuery,
+    query: Knex.QueryBuilder,
     filters: SearchFilters | undefined,
     tableName: string,
     opts: { aliases?: Record<string, string>; relationship?: boolean }
-  ): KnexQuery {
+  ): Knex.QueryBuilder {
     function getTableName(name: string) {
       const alias = opts.aliases?.[name]
       return alias || name
@@ -320,7 +319,7 @@ class InternalBuilder {
     return query
   }
 
-  addSorting(query: KnexQuery, json: QueryJson): KnexQuery {
+  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
     let { sort, paginate } = json
     const table = json.meta?.table
     if (sort && Object.keys(sort || {}).length > 0) {
@@ -348,12 +347,12 @@ class InternalBuilder {
   }
 
   addRelationships(
-    query: KnexQuery,
+    query: Knex.QueryBuilder,
     fromTable: string,
     relationships: RelationshipsJson[] | undefined,
     schema: string | undefined,
     aliases?: Record<string, string>
-  ): KnexQuery {
+  ): Knex.QueryBuilder {
     if (!relationships) {
       return query
     }
@@ -429,7 +428,7 @@ class InternalBuilder {
     knex: Knex,
     endpoint: QueryJson["endpoint"],
     aliases?: QueryJson["tableAliases"]
-  ): KnexQuery {
+  ): Knex.QueryBuilder {
     const tableName = endpoint.entityId
     const tableAliased = aliases?.[tableName]
       ? `${tableName} as ${aliases?.[tableName]}`
@@ -441,7 +440,7 @@ class InternalBuilder {
     return query
   }
 
-  create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, body } = json
     let query = this.knexWithAlias(knex, endpoint)
     const parsedBody = parseBody(body)
@@ -460,7 +459,7 @@ class InternalBuilder {
     }
   }
 
-  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+  bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder {
     const { endpoint, body } = json
     let query = this.knexWithAlias(knex, endpoint)
     if (!Array.isArray(body)) {
@@ -470,7 +469,7 @@ class InternalBuilder {
     return query.insert(parsedBody)
   }
 
-  read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
+  read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
     let { endpoint, resource, filters, paginate, relationships, tableAliases } =
       json
 
@@ -531,7 +530,7 @@ class InternalBuilder {
     })
   }
 
-  update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, body, filters, tableAliases } = json
     let query = this.knexWithAlias(knex, endpoint, tableAliases)
     const parsedBody = parseBody(body)
@@ -546,7 +545,7 @@ class InternalBuilder {
     }
   }
 
-  delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, filters, tableAliases } = json
     let query = this.knexWithAlias(knex, endpoint, tableAliases)
     query = this.addFilters(query, filters, endpoint.entityId, {
@@ -578,7 +577,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   _query(json: QueryJson, opts: QueryOptions = {}): Knex.SqlNative | Knex.Sql {
     const sqlClient = this.getSqlClient()
     const client = knex({ client: sqlClient })
-    let query: KnexQuery
+    let query: Knex.QueryBuilder
     const builder = new InternalBuilder(sqlClient)
     switch (this._operation(json)) {
       case Operation.CREATE:

@@ -189,5 +189,16 @@ describe("Captures of real examples", () => {
       const aliased = aliasing.aliasField("`hello`.`world`")
       expect(aliased).toEqual("`a`.`world`")
     })
+
+    it("should handle table names in table names correctly", () => {
+      const tableNames = ["he", "hell", "hello"]
+      const aliasing = new AliasTables(tableNames)
+      const aliased1 = aliasing.aliasField("`he`.`world`")
+      const aliased2 = aliasing.aliasField("`hell`.`world`")
+      const aliased3 = aliasing.aliasField("`hello`.`world`")
+      expect(aliased1).toEqual("`a`.`world`")
+      expect(aliased2).toEqual("`b`.`world`")
+      expect(aliased3).toEqual("`c`.`world`")
+    })
   })
 })
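
For reference, a rough sketch of the full aliasField path these tests exercise, reusing the matchTableName helper sketched after the first hunk (names here are made up; the real class keeps its table-to-alias map internally):

    // Rewrite the (possibly quoted) table part of a dotted field to its alias.
    function aliasFieldSketch(field: string, aliases: Record<string, string>): string {
      if (!field.includes(".")) {
        return field
      }
      const [tableName, column] = field.split(".")
      const found = matchTableName(Object.keys(aliases), tableName)
      return found
        ? `${tableName.replace(found, aliases[found])}.${column}`
        : field
    }

    // aliasFieldSketch("`hell`.`world`", { he: "a", hell: "b", hello: "c" }) === "`b`.`world`"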