Removing the use of the addQuery quota functionality from the row API — we don't make use of these statistics anymore (and haven't in a while); counting them is pointless and could be causing some loss in performance in these heavily used endpoints.

This commit is contained in:
mike12345567 2024-01-04 17:38:54 +00:00
parent 3e6848fda5
commit bbeaed48a7
6 changed files with 80 additions and 855 deletions

View File

@ -2,7 +2,6 @@ import Redlock from "redlock"
import { getLockClient } from "./init" import { getLockClient } from "./init"
import { LockOptions, LockType } from "@budibase/types" import { LockOptions, LockType } from "@budibase/types"
import * as context from "../context" import * as context from "../context"
import { logWarn } from "../logging"
import { utils } from "@budibase/shared-core" import { utils } from "@budibase/shared-core"
import { Duration } from "../utils" import { Duration } from "../utils"

@ -1 +1 @@
Subproject commit dc2b1b22e7f9bac705746bf1fb72c817db043fa3 Subproject commit ac87f51b9bd9d4427f0dc6473b6e136b436f2ba1

View File

@ -161,11 +161,8 @@ export async function preview(ctx: UserCtx) {
auth: { ...authConfigCtx }, auth: { ...authConfigCtx },
}, },
} }
const runFn = () => Runner.run(inputs)
const { rows, keys, info, extra } = await quotas.addQuery<any>(runFn, { const { rows, keys, info, extra } = (await Runner.run(inputs)) as any
datasourceId: datasource._id,
})
const schemaFields: any = {} const schemaFields: any = {}
if (rows?.length > 0) { if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) { for (let key of [...new Set(keys)] as string[]) {
@ -259,14 +256,8 @@ async function execute(
}, },
schema: query.schema, schema: query.schema,
} }
const runFn = () => Runner.run(inputs)
const { rows, pagination, extra, info } = await quotas.addQuery<any>( const { rows, pagination, extra, info } = (await Runner.run(inputs)) as any
runFn,
{
datasourceId: datasource._id,
}
)
// remove the raw from execution incase transformer being used to hide data // remove the raw from execution incase transformer being used to hide data
if (extra?.raw) { if (extra?.raw) {
delete extra.raw delete extra.raw

View File

@ -4,20 +4,20 @@ import * as external from "./external"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { import {
Ctx, Ctx,
UserCtx,
DeleteRowRequest,
DeleteRow, DeleteRow,
DeleteRowRequest,
DeleteRows, DeleteRows,
Row,
PatchRowRequest,
PatchRowResponse,
SearchRowResponse,
SearchRowRequest,
SearchParams,
GetRowResponse,
ValidateResponse,
ExportRowsRequest, ExportRowsRequest,
ExportRowsResponse, ExportRowsResponse,
GetRowResponse,
PatchRowRequest,
PatchRowResponse,
Row,
SearchParams,
SearchRowRequest,
SearchRowResponse,
UserCtx,
ValidateResponse,
} from "@budibase/types" } from "@budibase/types"
import * as utils from "./utils" import * as utils from "./utils"
import { gridSocket } from "../../../websockets" import { gridSocket } from "../../../websockets"
@ -25,8 +25,8 @@ import { addRev } from "../public/utils"
import { fixRow } from "../public/rows" import { fixRow } from "../public/rows"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as exporters from "../view/exporters" import * as exporters from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem"
import { Format } from "../view/exporters" import { Format } from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem"
export * as views from "./views" export * as views from "./views"
@ -49,12 +49,7 @@ export async function patch(
return save(ctx) return save(ctx)
} }
try { try {
const { row, table } = await quotas.addQuery( const { row, table } = await pickApi(tableId).patch(ctx)
() => pickApi(tableId).patch(ctx),
{
datasourceId: tableId,
}
)
if (!row) { if (!row) {
ctx.throw(404, "Row not found") ctx.throw(404, "Row not found")
} }
@ -84,12 +79,7 @@ export const save = async (ctx: UserCtx<Row, Row>) => {
return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>) return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>)
} }
const { row, table, squashed } = await quotas.addRow(() => const { row, table, squashed } = await quotas.addRow(() =>
quotas.addQuery( sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
() => sdk.rows.save(tableId, ctx.request.body, ctx.user?._id),
{
datasourceId: tableId,
}
)
) )
ctx.status = 200 ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table) ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
@ -105,31 +95,21 @@ export async function fetchView(ctx: any) {
const { calculation, group, field } = ctx.query const { calculation, group, field } = ctx.query
ctx.body = await quotas.addQuery( ctx.body = await sdk.rows.fetchView(tableId, viewName, {
() => calculation,
sdk.rows.fetchView(tableId, viewName, { group: calculation ? group : null,
calculation, field,
group: calculation ? group : null, })
field,
}),
{
datasourceId: tableId,
}
)
} }
export async function fetch(ctx: any) { export async function fetch(ctx: any) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
ctx.body = await quotas.addQuery(() => sdk.rows.fetch(tableId), { ctx.body = await sdk.rows.fetch(tableId)
datasourceId: tableId,
})
} }
export async function find(ctx: UserCtx<void, GetRowResponse>) { export async function find(ctx: UserCtx<void, GetRowResponse>) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
ctx.body = await quotas.addQuery(() => pickApi(tableId).find(ctx), { ctx.body = await pickApi(tableId).find(ctx)
datasourceId: tableId,
})
} }
function isDeleteRows(input: any): input is DeleteRows { function isDeleteRows(input: any): input is DeleteRows {
@ -160,15 +140,9 @@ async function deleteRows(ctx: UserCtx<DeleteRowRequest>) {
let deleteRequest = ctx.request.body as DeleteRows let deleteRequest = ctx.request.body as DeleteRows
const rowDeletes: Row[] = await processDeleteRowsRequest(ctx) deleteRequest.rows = await processDeleteRowsRequest(ctx)
deleteRequest.rows = rowDeletes
const { rows } = await quotas.addQuery( const { rows } = await pickApi(tableId).bulkDestroy(ctx)
() => pickApi(tableId).bulkDestroy(ctx),
{
datasourceId: tableId,
}
)
await quotas.removeRows(rows.length) await quotas.removeRows(rows.length)
for (let row of rows) { for (let row of rows) {
@ -183,9 +157,7 @@ async function deleteRow(ctx: UserCtx<DeleteRowRequest>) {
const appId = ctx.appId const appId = ctx.appId
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
const resp = await quotas.addQuery(() => pickApi(tableId).destroy(ctx), { const resp = await pickApi(tableId).destroy(ctx)
datasourceId: tableId,
})
await quotas.removeRow() await quotas.removeRow()
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, resp.row) ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, resp.row)
@ -223,9 +195,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
} }
ctx.status = 200 ctx.status = 200
ctx.body = await quotas.addQuery(() => sdk.rows.search(searchParams), { ctx.body = await sdk.rows.search(searchParams)
datasourceId: tableId,
})
} }
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(ctx: Ctx<Row, ValidateResponse>) {
@ -243,12 +213,7 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
export async function fetchEnrichedRow(ctx: any) { export async function fetchEnrichedRow(ctx: any) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
ctx.body = await quotas.addQuery( ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
() => pickApi(tableId).fetchEnrichedRow(ctx),
{
datasourceId: tableId,
}
)
} }
export const exportRows = async ( export const exportRows = async (
@ -268,22 +233,15 @@ export const exportRows = async (
) )
} }
ctx.body = await quotas.addQuery( const { fileName, content } = await sdk.rows.exportRows({
async () => { tableId,
const { fileName, content } = await sdk.rows.exportRows({ format: format as Format,
tableId, rowIds: rows,
format: format as Format, columns,
rowIds: rows, query,
columns, sort,
query, sortOrder,
sort, })
sortOrder, ctx.attachment(fileName)
}) ctx.body = apiFileReturn(content)
ctx.attachment(fileName)
return apiFileReturn(content)
},
{
datasourceId: tableId,
}
)
} }

View File

@ -68,10 +68,7 @@ export async function searchView(
paginate: body.paginate, paginate: body.paginate,
} }
const result = await quotas.addQuery(() => sdk.rows.search(searchOptions), { const result = await sdk.rows.search(searchOptions)
datasourceId: view.tableId,
})
result.rows.forEach(r => (r._viewId = view.id)) result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result ctx.body = result
} }

800
yarn.lock

File diff suppressed because it is too large Load Diff