Merge branch 'master' of github.com:budibase/budibase into logged-out-search-fix
Commit 871925bcef

@@ -27,6 +27,7 @@ import {
 } from "../../../utilities/rowProcessor"
 import { cloneDeep } from "lodash"
 import { generateIdForRow } from "./utils"
+import { helpers } from "@budibase/shared-core"

 export async function handleRequest<T extends Operation>(
   operation: T,

@@ -42,6 +43,11 @@ export async function handleRequest<T extends Operation>(

 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const source = await utils.getSource(ctx)
+
+  if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
+    ctx.throw(400, "Cannot update rows through a calculation view")
+  }
+
   const table = await utils.getTableFromSource(source)
   const { _id, ...rowData } = ctx.request.body
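
The guard added in this hunk pairs two checks: sdk.views.isView() to tell a v2 view apart from a table, and helpers.views.isCalculationView() to reject writes aimed at calculation views. A minimal self-contained sketch of the pattern, with simplified stand-in types (the real helpers live in @budibase/shared-core and the server sdk; the bodies below are assumptions for illustration, not the actual implementations):

    // Stand-in types, simplified from @budibase/types.
    enum ViewV2Type {
      CALCULATION = "calculation",
    }
    interface ViewV2 {
      id: string
      version: 2
      type?: ViewV2Type
    }
    interface Table {
      _id: string
      name: string
    }
    type Source = Table | ViewV2

    // Sketch: a v2 view carries a version marker; tables do not.
    function isView(source: Source): source is ViewV2 {
      return "version" in source && source.version === 2
    }

    // Sketch: a calculation view is a v2 view whose type is CALCULATION.
    function isCalculationView(view: ViewV2): boolean {
      return view.type === ViewV2Type.CALCULATION
    }

    // The controller pattern from the diff: refuse row writes against calculation views.
    function assertWritable(source: Source): void {
      if (isView(source) && isCalculationView(source)) {
        // In the controllers this becomes ctx.throw(400, ...) or new HTTPError(..., 400).
        throw new Error("Cannot update rows through a calculation view")
      }
    }

    assertWritable({ _id: "ta_people", name: "people" }) // ok: plain table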

@@ -22,13 +22,20 @@ import sdk from "../../../sdk"
 import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
 import { flatten } from "lodash"
 import { findRow } from "../../../sdk/app/rows/internal"
+import { helpers } from "@budibase/shared-core"

 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const { tableId } = utils.getSourceId(ctx)
   const source = await utils.getSource(ctx)
+
+  if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
+    ctx.throw(400, "Cannot update rows through a calculation view")
+  }
+
   const table = sdk.views.isView(source)
     ? await sdk.views.getTable(source.id)
     : source

   const inputs = ctx.request.body
   const isUserTable = tableId === InternalTables.USER_METADATA
   let oldRow

@@ -31,7 +31,7 @@ function getDatasourceId(table: Table)
   return breakExternalTableId(table._id).datasourceId
 }

-export async function save(
+export async function updateTable(
   ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
   renaming?: RenameColumn
 ) {

@@ -102,18 +102,22 @@ export async function find(ctx: UserCtx<void, TableResponse>) {

 export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const appId = ctx.appId
-  const table = ctx.request.body
-  const isImport = table.rows
+  const { rows, ...table } = ctx.request.body
+  const isImport = rows
   const renaming = ctx.request.body._rename

+  const isCreate = !table._id
+
   checkDefaultFields(table)

-  const api = pickApi({ table })
-  let savedTable = await api.save(ctx, renaming)
-  if (!table._id) {
+  let savedTable: Table
+  if (isCreate) {
+    savedTable = await sdk.tables.create(table, rows, ctx.user._id)
     savedTable = await sdk.tables.enrichViewSchemas(savedTable)
     await events.table.created(savedTable)
   } else {
+    const api = pickApi({ table })
+    savedTable = await api.updateTable(ctx, renaming)
     await events.table.updated(savedTable)
   }
   if (renaming) {
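
This hunk splits the old single save controller into an explicit create path and an update path, keyed off whether the request body already carries an _id. A condensed sketch of that dispatch (TableLike and the stub functions below are illustrative assumptions, not the real sdk.tables.create or pickApi signatures):

    interface TableLike {
      _id?: string
      name: string
      rows?: Record<string, unknown>[]
    }

    // Stand-ins for sdk.tables.create and the per-datasource updateTable API.
    async function createTable(table: TableLike, rows?: Record<string, unknown>[]): Promise<TableLike> {
      return { ...table, _id: "ta_new" }
    }
    async function updateExistingTable(table: TableLike): Promise<TableLike> {
      return table
    }

    // Mirrors the controller: no _id means create (optionally seeding imported rows),
    // an existing _id means update through the renamed updateTable path.
    async function saveTable(body: TableLike): Promise<TableLike> {
      const { rows, ...table } = body
      const isCreate = !table._id
      return isCreate ? createTable(table, rows) : updateExistingTable(table)
    }

    saveTable({ name: "people", rows: [{ name: "Alice" }] }).then(t => console.log(t._id))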

@@ -12,7 +12,7 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"

-export async function save(
+export async function updateTable(
   ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
   renaming?: RenameColumn
 ) {

@@ -25,19 +25,16 @@ export async function save(
     sourceType: rest.sourceType || TableSourceType.INTERNAL,
   }

   const isImport = !!rows

   if (!tableToSave.views) {
     tableToSave.views = {}
   }

   try {
     const { table } = await sdk.tables.internal.save(tableToSave, {
-      user: ctx.user,
+      userId: ctx.user._id,
       rowsToImport: rows,
       tableId: ctx.request.body._id,
       renaming,
       isImport,
     })

     return table

@@ -72,7 +69,7 @@ export async function bulkImport(
   await handleDataImport(table, {
     importRows: rows,
     identifierFields,
-    user: ctx.user,
+    userId: ctx.user._id,
   })
   return table
 }

@@ -41,7 +41,7 @@ describe("utils", () => {

     const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]

-    const result = await importToRows(data, table, config.user)
+    const result = await importToRows(data, table, config.user?._id)
     expect(result).toEqual([
       expect.objectContaining({
         autoId: 1,

@@ -18,7 +18,6 @@ import { quotas } from "@budibase/pro"
 import { events, context, features } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
-  ContextUser,
   Datasource,
   Row,
   SourceName,

@@ -122,7 +121,7 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser,
+  userId?: string,
   opts?: { keepCouchId: boolean }
 ) {
   const originalTable = table

@@ -136,7 +135,7 @@ export async function importToRows(

     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = await inputProcessing(user?._id, table, row, {
+    const processed = await inputProcessing(userId, table, row, {
       noAutoRelationships: true,
     })
     row = processed

@@ -167,11 +166,10 @@ export async function importToRows(

 export async function handleDataImport(
   table: Table,
-  opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
+  opts?: { identifierFields?: string[]; userId?: string; importRows?: Row[] }
 ) {
   const schema = table.schema
   const identifierFields = opts?.identifierFields || []
-  const user = opts?.user
   const importRows = opts?.importRows

   if (!importRows || !isRows(importRows) || !isSchema(schema)) {

@@ -181,7 +179,7 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)

-  const finalData = await importToRows(data, table, user, {
+  const finalData = await importToRows(data, table, opts?.userId, {
     keepCouchId: identifierFields.includes("_id"),
   })

@@ -282,22 +280,22 @@ export function checkStaticTables(table: Table) {

 class TableSaveFunctions {
   db: Database
-  user?: ContextUser
+  userId?: string
   oldTable?: Table
   importRows?: Row[]
   rows: Row[]

   constructor({
-    user,
+    userId,
     oldTable,
     importRows,
   }: {
-    user?: ContextUser
+    userId?: string
     oldTable?: Table
     importRows?: Row[]
   }) {
     this.db = context.getAppDB()
-    this.user = user
+    this.userId = userId
     this.oldTable = oldTable
     this.importRows = importRows
     // any rows that need updated

@@ -329,7 +327,7 @@ class TableSaveFunctions {
     table = await handleSearchIndexes(table)
     table = await handleDataImport(table, {
       importRows: this.importRows,
-      user: this.user,
+      userId: this.userId,
     })
     if (await features.flags.isEnabled("SQS")) {
       await sdk.tables.sqs.addTable(table)
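
Taken together, these hunks replace the ContextUser object that used to be threaded through the table-save and import utilities with a plain user ID string, resolved once at the controller boundary. A rough sketch of what that means for a caller (the function shapes below are simplified assumptions, not the real signatures):

    interface Row {
      [key: string]: unknown
    }

    // After the change, the import helpers receive the ID directly instead of
    // reaching into a ContextUser themselves (previously user?._id).
    async function importToRows(data: Row[], userId?: string): Promise<Row[]> {
      // inputProcessing(userId, table, row, ...) is called with the same string.
      return data.map(row => ({ ...row, createdBy: userId }))
    }

    async function handleDataImport(opts: { importRows?: Row[]; userId?: string }): Promise<Row[]> {
      const { importRows = [], userId } = opts
      return importToRows(importRows, userId)
    }

    // Controllers now pass ctx.user._id rather than the whole ctx.user object.
    handleDataImport({ importRows: [{ name: "Alice" }], userId: "us_123" })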

@@ -26,6 +26,7 @@ import {
   NumericCalculationFieldMetadata,
   ViewV2Schema,
   ViewV2Type,
+  JsonTypes,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"

@@ -736,6 +737,69 @@ describe.each([
         },
       })
     })
+
+    !isLucene &&
+      it("does not get confused when a calculation field shadows a basic one", async () => {
+        const table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              age: {
+                name: "age",
+                type: FieldType.NUMBER,
+              },
+            },
+          })
+        )
+
+        await config.api.row.bulkImport(table._id!, {
+          rows: [{ age: 1 }, { age: 2 }, { age: 3 }],
+        })
+
+        const view = await config.api.viewV2.create({
+          tableId: table._id!,
+          name: generator.guid(),
+          type: ViewV2Type.CALCULATION,
+          schema: {
+            age: {
+              visible: true,
+              calculationType: CalculationType.SUM,
+              field: "age",
+            },
+          },
+        })
+
+        const { rows } = await config.api.row.search(view.id)
+        expect(rows).toHaveLength(1)
+        expect(rows[0].age).toEqual(6)
+      })
+
+    // We don't allow the creation of tables with most JsonTypes when using
+    // external datasources.
+    isInternal &&
+      it("cannot use complex types as group-by fields", async () => {
+        for (const type of JsonTypes) {
+          const field = { name: "field", type } as FieldSchema
+          const table = await config.api.table.save(
+            saveTableRequest({ schema: { field } })
+          )
+          await config.api.viewV2.create(
+            {
+              tableId: table._id!,
+              name: generator.guid(),
+              type: ViewV2Type.CALCULATION,
+              schema: {
+                field: { visible: true },
+              },
+            },
+            {
+              status: 400,
+              body: {
+                message: `Grouping by fields of type "${type}" is not supported`,
+              },
+            }
+          )
+        }
+      })
   })

   describe("update", () => {

@@ -1914,6 +1978,30 @@ describe.each([
       expect(newRow.one).toBeUndefined()
       expect(newRow.two).toEqual("bar")
     })
+
+    it("should not be possible to create a row in a calculation view", async () => {
+      const view = await config.api.viewV2.create({
+        tableId: table._id!,
+        name: generator.guid(),
+        type: ViewV2Type.CALCULATION,
+        schema: {
+          id: { visible: true },
+          one: { visible: true },
+        },
+      })
+
+      await config.api.row.save(
+        view.id,
+        { one: "foo" },
+        {
+          status: 400,
+          body: {
+            message: "Cannot insert rows through a calculation view",
+            status: 400,
+          },
+        }
+      )
+    })
   })

   describe("patch", () => {

@@ -1978,6 +2066,40 @@ describe.each([
       expect(row.one).toEqual("foo")
       expect(row.two).toEqual("newBar")
     })
+
+    it("should not be possible to modify a row in a calculation view", async () => {
+      const view = await config.api.viewV2.create({
+        tableId: table._id!,
+        name: generator.guid(),
+        type: ViewV2Type.CALCULATION,
+        schema: {
+          id: { visible: true },
+          one: { visible: true },
+        },
+      })
+
+      const newRow = await config.api.row.save(table._id!, {
+        one: "foo",
+        two: "bar",
+      })
+
+      await config.api.row.patch(
+        view.id,
+        {
+          tableId: table._id!,
+          _id: newRow._id!,
+          _rev: newRow._rev!,
+          one: "newFoo",
+          two: "newBar",
+        },
+        {
+          status: 400,
+          body: {
+            message: "Cannot update rows through a calculation view",
+          },
+        }
+      )
+    })
   })

   describe("destroy", () => {

@@ -15,6 +15,7 @@ import {
 } from "../../../utilities/rowProcessor"
 import cloneDeep from "lodash/fp/cloneDeep"
 import { tryExtractingTableAndViewId } from "./utils"
+import { helpers } from "@budibase/shared-core"

 export async function getRow(
   sourceId: string | Table | ViewV2,

@@ -54,6 +55,10 @@ export async function save(
     source = await sdk.tables.getTable(tableId)
   }

+  if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
+    throw new HTTPError("Cannot insert rows through a calculation view", 400)
+  }
+
   const row = await inputProcessing(userId, cloneDeep(source), inputs)

   const validateResult = await sdk.rows.utils.validate({

@@ -1,4 +1,4 @@
-import { context, db } from "@budibase/backend-core"
+import { context, db, HTTPError } from "@budibase/backend-core"
 import { Row, Table, ViewV2 } from "@budibase/types"
 import sdk from "../../../sdk"
 import { finaliseRow } from "../../../api/controllers/row/staticFormula"

@@ -10,6 +10,7 @@ import * as linkRows from "../../../db/linkedRows"
 import { InternalTables } from "../../../db/utils"
 import { getFullUser } from "../../../utilities/users"
 import { getSource, tryExtractingTableAndViewId } from "./utils"
+import { helpers } from "@budibase/shared-core"

 export async function save(
   tableOrViewId: string,

@@ -29,6 +30,10 @@ export async function save(
     table = source
   }

+  if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
+    throw new HTTPError("Cannot insert rows through a calculation view", 400)
+  }
+
   if (!inputs._rev && !inputs._id) {
     inputs._id = db.generateRowID(inputs.tableId)
   }

@@ -0,0 +1,19 @@
+import { Row, Table } from "@budibase/types"
+
+import * as external from "./external"
+import * as internal from "./internal"
+import { isExternal } from "./utils"
+
+export async function create(
+  table: Omit<Table, "_id" | "_rev">,
+  rows?: Row[],
+  userId?: string
+): Promise<Table> {
+  let createdTable: Table
+  if (isExternal({ table })) {
+    createdTable = await external.create(table)
+  } else {
+    createdTable = await internal.create(table, rows, userId)
+  }
+  return createdTable
+}
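
This new module is the entry point the controller's create branch calls: it routes external tables to the external implementation (which takes neither rows nor a user ID) and internal tables to the internal one (which can seed imported rows). A hedged usage sketch with local stand-ins (only the create(table, rows, userId) shape comes from the diff; the stub implementations and the sourceType check are assumptions):

    interface Table {
      _id?: string
      name: string
      sourceType?: "internal" | "external"
    }
    interface Row {
      [key: string]: unknown
    }

    // Stand-ins for the wired-up implementations.
    const external = {
      create: async (table: Omit<Table, "_id">): Promise<Table> => ({ ...table, _id: "ta_external" }),
    }
    const internal = {
      create: async (table: Omit<Table, "_id">, rows?: Row[], userId?: string): Promise<Table> =>
        ({ ...table, _id: "ta_internal" }),
    }

    // Assumption: external tables are identified by their sourceType.
    function isExternal(table: Omit<Table, "_id">): boolean {
      return table.sourceType === "external"
    }

    async function create(table: Omit<Table, "_id">, rows?: Row[], userId?: string): Promise<Table> {
      // External datasources manage their own rows, so the import payload only
      // applies to the internal (CouchDB-backed) path.
      return isExternal(table) ? external.create(table) : internal.create(table, rows, userId)
    }

    // e.g. from the table controller's create branch:
    // const savedTable = await create({ name: "people" }, [{ name: "Alice" }], ctx.user._id)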

@@ -8,8 +8,11 @@ import {
   ViewV2,
   AutoFieldSubType,
 } from "@budibase/types"
-import { context } from "@budibase/backend-core"
-import { buildExternalTableId } from "../../../../integrations/utils"
+import { context, HTTPError } from "@budibase/backend-core"
+import {
+  breakExternalTableId,
+  buildExternalTableId,
+} from "../../../../integrations/utils"
 import {
   foreignKeyStructure,
   hasTypeChanged,

@@ -86,6 +89,35 @@ function validate(table: Table, oldTable?: Table) {
   }
 }

+function getDatasourceId(table: Table) {
+  if (!table) {
+    throw new Error("No table supplied")
+  }
+  if (table.sourceId) {
+    return table.sourceId
+  }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
+  return breakExternalTableId(table._id).datasourceId
+}
+
+export async function create(table: Omit<Table, "_id" | "_rev">) {
+  const datasourceId = getDatasourceId(table)
+
+  const tableToCreate = { ...table, created: true }
+  try {
+    const result = await save(datasourceId!, tableToCreate)
+    return result.table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      throw new HTTPError(err.message, 400)
+    } else {
+      throw new HTTPError(err?.message || err, err.status || 500)
+    }
+  }
+}
+
 export async function save(
   datasourceId: string,
   update: Table,
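
getDatasourceId prefers the table's explicit sourceId and only falls back to parsing the table _id via breakExternalTableId. A self-contained sketch of that fallback, under the assumption that an external table _id embeds the datasource ID as a prefix (the exact ID format is an assumption here, not something shown in the diff):

    interface TableStub {
      _id?: string
      sourceId?: string
      name: string
    }

    // Assumption: external table IDs look like "<datasourceId>__<tableName>",
    // which is what a breakExternalTableId-style helper would split apart.
    function breakExternalTableId(tableId: string): { datasourceId: string; tableName: string } {
      const [datasourceId, tableName] = tableId.split("__")
      return { datasourceId, tableName }
    }

    // Mirrors the new helper: prefer sourceId, else derive it from _id, else fail loudly.
    function getDatasourceId(table: TableStub): string {
      if (!table) {
        throw new Error("No table supplied")
      }
      if (table.sourceId) {
        return table.sourceId
      }
      if (!table._id) {
        throw new Error("No table ID supplied")
      }
      return breakExternalTableId(table._id).datasourceId
    }

    console.log(getDatasourceId({ name: "people", sourceId: "datasource_plus_abc123" }))
    console.log(getDatasourceId({ name: "orders", _id: "datasource_plus_abc123__orders" }))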

@@ -1,5 +1,6 @@
 import { populateExternalTableSchemas } from "./validation"
 import * as getters from "./getters"
+import * as create from "./create"
 import * as updates from "./update"
 import * as utils from "./utils"
 import { migrate } from "./migration"

@@ -7,6 +8,7 @@ import * as sqs from "./internal/sqs"

 export default {
   populateExternalTableSchemas,
+  ...create,
   ...updates,
   ...getters,
   ...utils,

@@ -5,7 +5,7 @@ import {
   ViewStatisticsSchema,
   ViewV2,
   Row,
-  ContextUser,
+  TableSourceType,
 } from "@budibase/types"
 import {
   hasTypeChanged,

@@ -16,18 +16,56 @@ import { EventType, updateLinks } from "../../../../db/linkedRows"
 import { cloneDeep } from "lodash/fp"
 import isEqual from "lodash/isEqual"
 import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
-import { context } from "@budibase/backend-core"
+import { context, HTTPError } from "@budibase/backend-core"
 import { findDuplicateInternalColumns } from "@budibase/shared-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
-import { getRowParams } from "../../../../db/utils"
+import { generateTableID, getRowParams } from "../../../../db/utils"
 import { quotas } from "@budibase/pro"

+export async function create(
+  table: Omit<Table, "_id" | "_rev">,
+  rows?: Row[],
+  userId?: string
+) {
+  const tableId = generateTableID()
+
+  let tableToSave: Table = {
+    _id: tableId,
+    ...table,
+    // Ensure these fields are populated, even if not sent in the request
+    type: table.type || "table",
+    sourceType: TableSourceType.INTERNAL,
+  }
+
+  const isImport = !!rows
+
+  if (!tableToSave.views) {
+    tableToSave.views = {}
+  }
+
+  try {
+    const { table } = await save(tableToSave, {
+      userId,
+      rowsToImport: rows,
+      isImport,
+    })
+
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      throw new HTTPError(err.message, 400)
+    } else {
+      throw new HTTPError(err.message || err, err.status || 500)
+    }
+  }
+}
+
 export async function save(
   table: Table,
   opts?: {
-    user?: ContextUser
+    userId?: string
     tableId?: string
     rowsToImport?: Row[]
     renaming?: RenameColumn

@@ -63,7 +101,7 @@ export async function save(
   // saving a table is a complex operation, involving many different steps, this
   // has been broken out into a utility to make it more obvious/easier to manipulate
   const tableSaveFunctions = new TableSaveFunctions({
-    user: opts?.user,
+    userId: opts?.userId,
     oldTable,
     importRows: opts?.rowsToImport,
   })

@@ -1,6 +1,8 @@
 import {
   CalculationType,
+  canGroupBy,
   FieldType,
+  isNumeric,
   PermissionLevel,
   RelationSchemaField,
   RenameColumn,

@@ -103,7 +105,7 @@ async function guardCalculationViewSchema(
     )
   }

-  if (!isCount && !helpers.schema.isNumeric(targetSchema)) {
+  if (!isCount && !isNumeric(targetSchema.type)) {
     throw new HTTPError(
       `Calculation field "${name}" references field "${schema.field}" which is not a numeric field`,
       400

@@ -120,6 +122,13 @@ async function guardCalculationViewSchema(
         400
       )
     }
+
+    if (!canGroupBy(targetSchema.type)) {
+      throw new HTTPError(
+        `Grouping by fields of type "${targetSchema.type}" is not supported`,
+        400
+      )
+    }
   }
 }

@@ -127,6 +127,26 @@ export const JsonTypes = [
   FieldType.ARRAY,
 ]

+export const NumericTypes = [FieldType.NUMBER, FieldType.BIGINT]
+
+export function isNumeric(type: FieldType) {
+  return NumericTypes.includes(type)
+}
+
+export const GroupByTypes = [
+  FieldType.STRING,
+  FieldType.LONGFORM,
+  FieldType.OPTIONS,
+  FieldType.NUMBER,
+  FieldType.BOOLEAN,
+  FieldType.DATETIME,
+  FieldType.BIGINT,
+]
+
+export function canGroupBy(type: FieldType) {
+  return GroupByTypes.includes(type)
+}
+
 export interface RowAttachment {
   size: number
   name: string
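
These shared helpers centralise which field types can be aggregated and which can be grouped on, so the view-schema guard and the tests agree on one list. A small usage sketch (the FieldType values below are a local stand-in subset defined for illustration; the guard function is an assumption modelled on guardCalculationViewSchema, not its real signature):

    // Local stand-in subset of the FieldType enum.
    enum FieldType {
      STRING = "string",
      NUMBER = "number",
      BIGINT = "bigint",
      JSON = "json",
    }

    const NumericTypes = [FieldType.NUMBER, FieldType.BIGINT]
    const GroupByTypes = [FieldType.STRING, FieldType.NUMBER, FieldType.BIGINT]

    function isNumeric(type: FieldType) {
      return NumericTypes.includes(type)
    }
    function canGroupBy(type: FieldType) {
      return GroupByTypes.includes(type)
    }

    // Sketch of the guard pattern: numeric calculations must target numeric fields,
    // and any non-calculation field in a calculation view must be groupable.
    function guardField(type: FieldType, usedForCalculation: boolean) {
      if (usedForCalculation && !isNumeric(type)) {
        throw new Error(`Field of type "${type}" is not a numeric field`)
      }
      if (!usedForCalculation && !canGroupBy(type)) {
        throw new Error(`Grouping by fields of type "${type}" is not supported`)
      }
    }

    guardField(FieldType.NUMBER, true) // ok: SUM over a number field
    // guardField(FieldType.JSON, false) // would throw: cannot group by json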