Refactoring, moving SQS around, re-introducing old internal search.
This commit is contained in:
parent 37b4b756ea
commit 69f0c06171

@@ -560,7 +560,7 @@ export class ExternalRequest<T extends Operation> {
           break
         case FieldType.NUMBER:
           if (sort && sort[sortColumn]) {
-            sort[sortColumn].type = SortType.number
+            sort[sortColumn].type = SortType.NUMBER
           }
           break
       }

@@ -7,14 +7,9 @@ import {
   outputProcessing,
 } from "../../../utilities/rowProcessor"
 import * as utils from "./utils"
-// import { fullSearch, paginatedSearch } from "./internalSearch"
-// import { getGlobalUsersFromMetadata } from "../../../utilities/global"
 import { cloneDeep } from "lodash/fp"
 import { context } from "@budibase/backend-core"
 import { finaliseRow, updateRelatedFormula } from "./staticFormula"
-import { csv, json, jsonWithSchema, Format } from "../view/exporters"
-import { apiFileReturn } from "../../../utilities/fileSystem"
-import { sqlSearch } from "./internalSql"
 import {
   FieldType,
   LinkDocumentValue,

@@ -28,12 +23,6 @@ import sdk from "../../../sdk"
 import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
 import { flatten } from "lodash"
 
-// const CALCULATION_TYPES = {
-//   SUM: "sum",
-//   COUNT: "count",
-//   STATS: "stats",
-// }
-
 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const tableId = utils.getTableId(ctx)
   const inputs = ctx.request.body

@@ -194,102 +183,6 @@ export async function bulkDestroy(ctx: UserCtx)
   return { response: { ok: true }, rows: processedRows }
 }
 
-export async function search(ctx: UserCtx) {
-  return await sqlSearch(ctx)
-  // // Fetch the whole table when running in cypress, as search doesn't work
-  // if (!env.COUCH_DB_URL && env.isCypress()) {
-  //   return { rows: await fetch(ctx) }
-  // }
-  //
-  // const { tableId } = ctx.params
-  // const db = context.getAppDB()
-  // const { paginate, query, ...params } = ctx.request.body
-  // params.version = ctx.version
-  // params.tableId = tableId
-  //
-  // let table
-  // if (params.sort && !params.sortType) {
-  //   table = await db.get(tableId)
-  //   const schema = table.schema
-  //   const sortField = schema[params.sort]
-  //   params.sortType = sortField.type == "number" ? "number" : "string"
-  // }
-  //
-  // let response
-  // if (paginate) {
-  //   response = await paginatedSearch(query, params)
-  // } else {
-  //   response = await fullSearch(query, params)
-  // }
-  //
-  // // Enrich search results with relationships
-  // if (response.rows && response.rows.length) {
-  //   // enrich with global users if from users table
-  //   if (tableId === InternalTables.USER_METADATA) {
-  //     response.rows = await getGlobalUsersFromMetadata(response.rows)
-  //   }
-  //   table = table || (await db.get(tableId))
-  //   response.rows = await outputProcessing(table, response.rows)
-  // }
-  //
-  // return response
-}
-
-export async function exportRows(ctx: UserCtx) {
-  const db = context.getAppDB()
-  const table = (await db.get(ctx.params.tableId)) as Table
-  const rowIds = ctx.request.body.rows
-  let format = ctx.query.format
-  if (typeof format !== "string") {
-    ctx.throw(400, "Format parameter is not valid")
-  }
-  const { columns, query } = ctx.request.body
-
-  let result: Row[] = []
-  if (rowIds) {
-    let response = (
-      await db.allDocs({
-        include_docs: true,
-        keys: rowIds,
-      })
-    ).rows.map(row => row.doc)
-
-    result = (await outputProcessing(table, response)) as Row[]
-  } else if (query) {
-    let searchResponse = await search(ctx)
-    result = searchResponse.rows
-  }
-
-  let rows: Row[] = []
-  let schema = table.schema
-
-  // Filter data to only specified columns if required
-  if (columns && columns.length) {
-    for (let i = 0; i < result.length; i++) {
-      rows[i] = {}
-      for (let column of columns) {
-        rows[i][column] = result[i][column]
-      }
-    }
-  } else {
-    rows = result
-  }
-
-  let exportRows = sdk.rows.utils.cleanExportRows(rows, schema, format, columns)
-  if (format === Format.CSV) {
-    ctx.attachment("export.csv")
-    return apiFileReturn(csv(Object.keys(rows[0]), exportRows))
-  } else if (format === Format.JSON) {
-    ctx.attachment("export.json")
-    return apiFileReturn(json(exportRows))
-  } else if (format === Format.JSON_WITH_SCHEMA) {
-    ctx.attachment("export.json")
-    return apiFileReturn(jsonWithSchema(schema, exportRows))
-  } else {
-    throw "Format not recognised"
-  }
-}
-
 export async function fetchEnrichedRow(ctx: UserCtx) {
   const fieldName = ctx.request.query.field as string | undefined
   const db = context.getAppDB()

@@ -343,4 +236,4 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     }
   }
   return row
-}
+}

@@ -818,7 +818,7 @@ describe.each([
         {
           field: "age",
           order: SortOrder.ASCENDING,
-          type: SortType.number,
+          type: SortType.NUMBER,
         },
         ["Danny", "Alice", "Charly", "Bob"],
       ],

@@ -840,7 +840,7 @@ describe.each([
         {
           field: "age",
           order: SortOrder.DESCENDING,
-          type: SortType.number,
+          type: SortType.NUMBER,
         },
         ["Bob", "Charly", "Alice", "Danny"],
       ],

@@ -86,6 +86,7 @@ const environment = {
   SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
   SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
   SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
+  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
   // flags
   ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
   DISABLE_THREADING: process.env.DISABLE_THREADING,

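Note on the new SQS_SEARCH_ENABLE entry above: the environment module just mirrors process.env, so the value is a string (or undefined) and truthiness is what matters. A minimal sketch of how server code can read it; the import path and the helper name are illustrative and not part of this diff:

// Minimal sketch, assuming the environment object above is the default
// export of "../environment". `isSqsSearchEnabled` is an illustrative helper.
import env from "../environment"

export function isSqsSearchEnabled(): boolean {
  // process.env values are strings (or undefined), so coerce to boolean.
  return !!env.SQS_SEARCH_ENABLE
}

Setting SQS_SEARCH_ENABLE to any non-empty value on the server process flips the search dispatch shown in the next hunk.
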
@@ -34,8 +34,13 @@ import { ExportRowsParams, ExportRowsResult } from "../search"
 import { searchInputMapping } from "./utils"
 import pick from "lodash/pick"
 import { breakRowIdField } from "../../../../integrations/utils"
+import * as sqs from "./sqs"
 
 export async function search(options: SearchParams) {
+  if (env.SQS_SEARCH_ENABLE) {
+    return sqs.search(options)
+  }
+
   const { tableId } = options
 
   const { paginate, query } = options

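The hunk above is the crux of the refactor: the sdk-level search now short-circuits into the SQLite-backed sqs implementation when the flag is set, and otherwise continues into the pre-existing internal search path. A hedged usage sketch; the table ID, query values, import paths and the sdk.rows.search entry point are assumptions for illustration only:

// Illustrative only: the tableId and query are invented, and this assumes the
// search() above is reachable as sdk.rows.search with the SearchParams shape
// used elsewhere in this diff.
import { SortOrder } from "@budibase/types"
import sdk from "../../../sdk"

async function exampleSearch() {
  // With SQS_SEARCH_ENABLE set, this resolves via sqs.search(options);
  // otherwise it falls through to the existing internal search path.
  return await sdk.rows.search({
    tableId: "ta_users", // hypothetical internal table ID
    query: { equal: { name: "Bob" } },
    sort: "age",
    sortOrder: SortOrder.ASCENDING,
  })
}
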
@@ -4,17 +4,19 @@ import {
   QueryJson,
   RelationshipFieldMetadata,
   Row,
-  SearchFilters,
+  SearchFilters, SearchParams,
   SortType,
   Table,
-  UserCtx,
+  SortJson,
+  SortOrder,
+  SortDirection,
 } from "@budibase/types"
-import SqlQueryBuilder from "../../../integrations/base/sql"
-import { SqlClient } from "../../../integrations/utils"
-import { buildInternalRelationships, sqlOutputProcessing } from "./utils"
-import sdk from "../../../sdk"
+import SqlQueryBuilder from "../../../../integrations/base/sql"
+import { SqlClient } from "../../../../integrations/utils"
+import { buildInternalRelationships, sqlOutputProcessing } from "../../../../api/controllers/row/utils"
+import sdk from "../../../index"
 import { context } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../db/utils"
+import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
 
 function buildInternalFieldList(
   table: Table,

@@ -87,16 +89,15 @@ function buildTableMap(tables: Table[]) {
   return tableMap
 }
 
-export async function sqlSearch(ctx: UserCtx) {
-  const { tableId } = ctx.params
-  const { paginate, query, ...params } = ctx.request.body
+export async function search(options: SearchParams) {
+  const { tableId, paginate, query, ...params } = options
 
   const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
   const table = allTables.find(table => table._id === tableId)
   if (!table) {
-    ctx.throw(400, "Unable to find table")
+    throw new Error("Unable to find table")
   }
 
   const relationships = buildInternalRelationships(table)

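With the move from sqlSearch(ctx) to search(options), the function no longer receives a Koa context, so it throws a plain Error instead of calling ctx.throw. A hedged sketch of what a controller-side wrapper could look like; the handler name, import paths and the 400 mapping are assumptions, not part of this diff:

// Sketch only: assumes the sdk-level search() above is reachable as
// sdk.rows.search and that the route still exposes a Koa-style UserCtx.
import { UserCtx } from "@budibase/types"
import sdk from "../../../sdk"

export async function searchHandler(ctx: UserCtx) {
  try {
    ctx.body = await sdk.rows.search({
      tableId: ctx.params.tableId,
      ...ctx.request.body,
    })
  } catch (err: any) {
    // The sdk function now throws plain Errors (e.g. "Unable to find table"),
    // so translating them into HTTP status codes happens at the edge.
    ctx.throw(400, err.message)
  }
}
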
@@ -127,13 +128,15 @@ export async function sqlSearch(ctx: UserCtx) {
 
   if (params.sort && !params.sortType) {
     const sortField = table.schema[params.sort]
-    const sortType = sortField.type == "number" ? "number" : "string"
-    request.sort = {
+    const sortType = sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
+    const sortDirection = params.sortOrder === SortOrder.ASCENDING ? SortDirection.ASCENDING : SortDirection.DESCENDING
+    const sortObj: SortJson = {
       [sortField.name]: {
-        direction: params.sortOrder,
+        direction: sortDirection,
         type: sortType as SortType,
       },
     }
+    request.sort = sortObj
   }
   if (paginate) {
     request.paginate = {

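The sort handling above swaps loose string comparisons for the typed enums and derives a SortDirection from the requested SortOrder before building the SortJson. The same mapping pulled out as a standalone sketch; the helper name is illustrative and the enum members match the types change in the next hunk:

// Standalone sketch of the mapping introduced above; buildSortJson is an
// illustrative name, not a function added by this commit.
import {
  FieldSchema,
  FieldType,
  SortDirection,
  SortJson,
  SortOrder,
  SortType,
} from "@budibase/types"

function buildSortJson(sortField: FieldSchema, sortOrder?: SortOrder): SortJson {
  const type =
    sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
  const direction =
    sortOrder === SortOrder.ASCENDING
      ? SortDirection.ASCENDING
      : SortDirection.DESCENDING
  return {
    [sortField.name]: { direction, type },
  }
}
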
@@ -5,7 +5,7 @@ export enum SortOrder {
 
 export enum SortType {
   STRING = "string",
-  number = "number",
+  NUMBER = "number",
 }
 
 export interface BasicPaginationRequest {
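
One practical note on the SortType change above: only the TypeScript member name moves from number to NUMBER; the underlying string value is unchanged, so anything already persisted or sent over the wire as "number" keeps matching. A tiny check to illustrate:

// The enum value itself is untouched by the rename.
import { SortType } from "@budibase/types"

const sortType: string = SortType.NUMBER
console.log(sortType) // "number"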