Fix row.spec.ts.

Sam Rose 2024-09-24 17:46:38 +01:00
parent 0eb90cfbea
commit e3256cb005
10 changed files with 36 additions and 33 deletions

@@ -612,7 +612,6 @@ async function runQuery<T>(
  * limit {number} The number of results to fetch
  * bookmark {string|null} Current bookmark in the recursive search
  * rows {array|null} Current results in the recursive search
- * @returns {Promise<*[]|*>}
  */
 async function recursiveSearch<T>(
   dbName: string,

@@ -76,11 +76,11 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   })
   const [enrichedRow, oldRow] = await Promise.all([
-    outputProcessing(table, row, {
+    outputProcessing(source, row, {
       squash: true,
       preserveLinks: true,
     }),
-    outputProcessing(table, beforeRow, {
+    outputProcessing(source, beforeRow, {
       squash: true,
       preserveLinks: true,
     }),

@@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
 }
 export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
-  const { tableId } = utils.getSourceId(ctx)
+  const { tableId, viewId } = utils.getSourceId(ctx)
   await context.ensureSnippetContext(true)
@@ -221,7 +221,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
   const searchParams: RowSearchParams = {
     ...ctx.request.body,
     query: enrichedQuery,
-    sourceId: tableId,
+    sourceId: viewId || tableId,
   }
   ctx.status = 200
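
For context on the hunk above: utils.getSourceId can now yield both a table ID and, when the request targets a view, a view ID, and the search prefers the view. A minimal sketch of that preference, where the return shape of getSourceId is an assumption inferred from the destructuring in the diff:

// Sketch only, not the actual utils implementation: a request may target
// either a table or a view, and the view ID wins when present.
interface SourceIds {
  tableId: string
  viewId?: string // assumption: only set when the request targets a view
}

function resolveSearchSourceId({ tableId, viewId }: SourceIds): string {
  // Mirrors the `sourceId: viewId || tableId` line in the hunk above, so a
  // view search honours the view's filters rather than the raw table.
  return viewId || tableId
}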

@@ -112,7 +112,7 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
 export async function sqlOutputProcessing(
   rows: DatasourcePlusQueryResponse,
-  table: Table,
+  source: Table | ViewV2,
   tables: Record<string, Table>,
   relationships: RelationshipsJson[],
   opts?: { sqs?: boolean; aggregations?: Aggregation[] }
@@ -120,6 +120,12 @@ export async function sqlOutputProcessing(
   if (isKnexEmptyReadResponse(rows)) {
     return []
   }
+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }
   let finalRows: { [key: string]: Row } = {}
   for (let row of rows as Row[]) {
     let rowId = row._id
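
The lines added above resolve the Table | ViewV2 union back to the underlying Table, since a view only references a parent table. The same logic, extracted into a standalone helper purely for illustration (a sketch that assumes the sdk, Table, and ViewV2 imports of the surrounding module, and that sdk.views.isView and sdk.views.getTable behave as the hunk implies):

// Sketch only: the hunk's view-to-table resolution as a reusable helper.
async function resolveTable(source: Table | ViewV2): Promise<Table> {
  if (sdk.views.isView(source)) {
    // A view carries only a subset of metadata; fetch its backing table so
    // downstream schema lookups keep working.
    return sdk.views.getTable(source.id)
  }
  return source
}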

@@ -33,7 +33,7 @@ import {
 import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
-import { cloneDeep, isEqual } from "lodash"
+import { cloneDeep } from "lodash"
 import {
   helpers,
   PROTECTED_EXTERNAL_COLUMNS,
@@ -149,12 +149,7 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   const tableId = ctx.params.tableId
-  let tableBefore = await sdk.tables.getTable(tableId)
-  let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
-  if (!isEqual(tableBefore, tableAfter)) {
-    await sdk.tables.saveTable(tableAfter)
-  }
+  await pickApi({ tableId }).bulkImport(ctx)
   // right now we don't trigger anything for bulk import because it
   // can only be done in the builder, but in the future we may need to

@@ -76,7 +76,7 @@ async function waitForEvent(
 }
 describe.each([
-  ["internal", undefined],
+  ["lucene", undefined],
   ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@@ -2453,9 +2453,15 @@ describe.each([
   let flagCleanup: (() => void) | undefined
   beforeAll(async () => {
-    flagCleanup = setCoreEnv({
+    const env = {
       TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
-    })
+    }
+    if (isSqs) {
+      env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
+    } else {
+      env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
+    }
+    flagCleanup = setCoreEnv(env)
     const aux2Table = await config.api.table.save(saveTableRequest())
     const aux2Data = await config.api.row.save(aux2Table._id!, {})
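
The flag string built above uses comma-separated tenant:FLAG pairs, with * targeting all tenants and a ! prefix negating a flag (here forcing SQS off for the non-SQS runs). A hedged sketch of how such a string could be interpreted; the real parser in the core package may well differ in detail:

// Illustrative parser only, not the actual backend-core implementation.
function flagEnabled(flagString: string, tenantId: string, flag: string): boolean {
  let enabled = false
  for (const entry of flagString.split(",")) {
    const [scope, name] = entry.split(":")
    if (scope !== "*" && scope !== tenantId) continue // other tenant's entry
    if (name === flag) enabled = true // e.g. "*:SQS"
    if (name === `!${flag}`) enabled = false // e.g. "*:!SQS"
  }
  return enabled
}

// e.g. flagEnabled("*:ENRICHED_RELATIONSHIPS,*:!SQS", "t1", "SQS") === false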

@@ -248,10 +248,11 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
 export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
 /**
- * This function will take the given enriched rows and squash the links to only contain the primary display field.
- * @param table The table from which the rows originated.
+ * This function will take the given enriched rows and squash the links to only
+ * contain the primary display field.
+ *
+ * @param source The table or view from which the rows originated.
  * @param enriched The pre-enriched rows (full docs) which are to be squashed.
- * @param squashFields Per link column (key) define which columns are allowed while squashing.
  * @returns The rows after having their links squashed to only contain the ID and primary display.
  */
 export async function squashLinks<T = Row[] | Row>(
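
To make the reworded JSDoc concrete, a before/after sketch of squashing with hypothetical data (the row shapes and field names here are invented for illustration):

// Before squashing: the link column embeds the full related documents.
const enriched = {
  _id: "row_1",
  name: "Invoice 42",
  customer: [
    { _id: "row_2", name: "Acme Ltd", address: "1 Main St", vip: true },
  ],
}
// After squashing: each link keeps only the ID and the primary display value.
const squashed = {
  _id: "row_1",
  name: "Invoice 42",
  customer: [{ _id: "row_2", primaryDisplay: "Acme Ltd" }],
}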

@@ -56,7 +56,7 @@ export async function save(
     table,
   })) as Row
-  return finaliseRow(table, row, { updateFormula: true })
+  return finaliseRow(source, row, { updateFormula: true })
 }
 export async function find(sourceId: string, rowId: string): Promise<Row> {

@@ -31,7 +31,7 @@ export async function search(
   const { paginate, query } = options
   const params: RowSearchParams = {
-    sourceId: options.sourceId,
+    sourceId: table._id!,
     sort: options.sort,
     sortOrder: options.sortOrder,
     sortType: options.sortType,

@@ -308,8 +308,8 @@ export async function search(
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
   // make sure we have the mapped/latest table
-  if (table?._id) {
-    table = allTablesMap[table?._id]
+  if (table._id) {
+    table = allTablesMap[table._id]
   }
   if (!table) {
     throw new Error("Unable to find table")
@@ -322,13 +322,6 @@ export async function search(
     documentType: DocumentType.ROW,
   }
-  let fields = options.fields
-  if (fields === undefined) {
-    fields = buildInternalFieldList(table, allTables, { relationships })
-  } else {
-    fields = fields.map(f => mapToUserColumn(f))
-  }
   if (options.aggregations) {
     options.aggregations = options.aggregations.map(a => {
       a.field = mapToUserColumn(a.field)
@@ -350,7 +343,10 @@ export async function search(
       tables: allTablesMap,
       columnPrefix: USER_COLUMN_PREFIX,
     },
-    resource: { fields, aggregations: options.aggregations },
+    resource: {
+      fields: buildInternalFieldList(table, allTables, { relationships }),
+      aggregations: options.aggregations,
+    },
     relationships,
   }
@@ -394,7 +390,7 @@ export async function search(
   // make sure JSON columns corrected
   const processed = builder.convertJsonStringColumns<Row>(
     table,
-    await sqlOutputProcessing(rows, table, allTablesMap, relationships, {
+    await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
       sqs: true,
       aggregations: options.aggregations,
     })
@@ -411,7 +407,7 @@ export async function search(
   }
   // get the rows
-  let finalRows = await outputProcessing(table, processed, {
+  let finalRows = await outputProcessing(source, processed, {
     preserveLinks: true,
     squash: true,
     aggregations: options.aggregations,