Merge branch 'cheeks-lab-day-binding-eval' of github.com:Budibase/budibase into cheeks-snippets-poc

commit ab9458c28a

@@ -1,5 +1,5 @@
{
"version": "2.20.14",
"version": "2.21.2",
"npmClient": "yarn",
"packages": [
"packages/*",

@@ -1 +1 @@
Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac

@@ -1,5 +1,6 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"

export async function errorHandling(ctx: any, next: any) {
try {

@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
console.error(err)
}

const error = errors.getPublicError(err)
const body: APIError = {
let error: APIError = {
message: err.message,
status: status,
validationErrors: err.validation,
error,
error: errors.getPublicError(err),
}

ctx.body = body
if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
// @ts-ignore
error.stack = err.stack
}

ctx.body = error
}
}
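The change above collapses the old separate `body` object and public error into a single `APIError` payload, attaching the stack only when a test explicitly asks for it. A minimal sketch of the resulting middleware shape, assuming a Koa-style ctx; the try/catch framing and the `status` derivation are filled in here and may differ from the real file:

import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"

export async function errorHandling(ctx: any, next: any) {
  try {
    await next()
  } catch (err: any) {
    // assumed fallback - the real handler derives status earlier in the catch block
    const status = err.status || 500
    let error: APIError = {
      message: err.message,
      status,
      validationErrors: err.validation,
      error: errors.getPublicError(err),
    }
    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
      // only expose the stack when a test explicitly requests it
      // @ts-ignore
      error.stack = err.stack
    }
    ctx.status = status
    ctx.body = error
  }
}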
@@ -174,7 +174,7 @@
complete.push(
autocompletion({
override: [...completions],
closeOnBlur: false,
closeOnBlur: true,
icons: false,
optionClass: () => "autocomplete-option",
})

@@ -462,7 +462,7 @@
overflow: hidden;
max-height: 480px;
}
.code-editor :global(.binding__example.helper) {
.code-editor :global(.binding__example) {
color: var(--spectrum-global-color-blue-700);
}
.code-editor :global(.binding__example span) {

@@ -166,7 +166,7 @@ export const buildBindingInfoNode = (completion, binding) => {
const ele = document.createElement("div")
ele.classList.add("info-bubble")

if (binding.valueHTML) {
if (binding.value != null && binding.valueHTML) {
ele.innerHTML = `<div class="binding__example">${binding.valueHTML}</div>`
return ele
}

@@ -147,6 +147,12 @@ export function createTablesStore() {
if (indexes) {
draft.indexes = indexes
}
// Add object to indicate if column is being added
if (draft.schema[field.name] === undefined) {
draft._add = {
name: field.name,
}
}
draft.schema = {
...draft.schema,
[field.name]: cloneDeep(field),

@@ -1,7 +1,7 @@
<script>
import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
import { fetchData, Utils } from "@budibase/frontend-core"
import { getContext } from "svelte"
import { getContext, onMount } from "svelte"
import Field from "./Field.svelte"
import { FieldTypes } from "../../../constants"

@@ -28,6 +28,7 @@
let tableDefinition
let searchTerm
let open
let initialValue

$: type =
datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE

@@ -109,7 +110,11 @@
}

$: forceFetchRows(filter)
$: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
$: debouncedFetchRows(
searchTerm,
primaryDisplay,
initialValue || defaultValue
)

const forceFetchRows = async () => {
// if the filter has changed, then we need to reset the options, clear the selection, and re-fetch

@@ -127,9 +132,13 @@
if (allRowsFetched || !primaryDisplay) {
return
}
if (defaultVal && !optionsObj[defaultVal]) {
// must be an array
if (defaultVal && !Array.isArray(defaultVal)) {
defaultVal = defaultVal.split(",")
}
if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
await fetch.update({
query: { equal: { _id: defaultVal } },
query: { oneOf: { _id: defaultVal } },
})
}

@@ -202,6 +211,16 @@
fetch.nextPage()
}
}

onMount(() => {
// if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
if (fieldState?.value) {
initialValue =
fieldSchema?.relationshipType !== "one-to-many"
? flatten(fieldState?.value) ?? []
: flatten(fieldState?.value)?.[0]
}
})
</script>

<Field
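The relationship-field hunks above normalise a default value that may arrive as a comma-separated string into an array and switch the missing-row lookup from an `equal` filter to `oneOf`, so every missing ID is fetched in a single query. A small sketch of that normalisation; `buildMissingIdQuery` is a hypothetical helper that mirrors the logic, with `optionsObj` assumed to be a map of already-fetched rows keyed by `_id`:

const buildMissingIdQuery = (
  defaultVal: string | string[] | undefined,
  optionsObj: Record<string, unknown>
) => {
  if (!defaultVal) {
    return null
  }
  // must be an array before each ID can be checked individually
  const ids = Array.isArray(defaultVal) ? defaultVal : defaultVal.split(",")
  const missing = ids.some(id => !optionsObj[id])
  // oneOf matches any of the supplied IDs in one fetch
  return missing ? { oneOf: { _id: ids } } : null
}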
@@ -87,8 +87,14 @@ const loadBudibase = async () => {
} else if (type === "request-context") {
const { selectedComponentInstance } = get(componentStore)
const context = selectedComponentInstance?.getDataContext()
let stringifiedContext = null
try {
stringifiedContext = JSON.stringify(context)
} catch (error) {
// Ignore - invalid context
}
eventStore.actions.dispatchEvent("provide-context", {
context: JSON.stringify(context),
context: stringifiedContext,
})
} else if (type === "hover-component") {
hoverStore.actions.hoverComponent(data)
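The guard above stops a non-serialisable data context (for example one containing circular references) from throwing inside the message handler; the context simply comes through as null instead. A minimal standalone sketch of the pattern:

const safeStringify = (value: unknown): string | null => {
  try {
    return JSON.stringify(value)
  } catch (error) {
    // Ignore - the value is not serialisable (e.g. circular references)
    return null
  }
}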
@@ -59,13 +59,13 @@
isReadonly: () => readonly,
getType: () => column.schema.type,
getValue: () => row[column.name],
setValue: (value, options = { save: true }) => {
setValue: (value, options = { apply: true }) => {
validation.actions.setError(cellId, null)
updateValue({
rowId: row._id,
column: column.name,
value,
save: options?.save,
apply: options?.apply,
})
},
}

@@ -217,14 +217,14 @@
const type = $focusedCellAPI.getType()
if (type === "number" && keyCodeIsNumber(keyCode)) {
// Update the value locally but don't save it yet
$focusedCellAPI.setValue(parseInt(key), { save: false })
$focusedCellAPI.setValue(parseInt(key), { apply: false })
$focusedCellAPI.focus()
} else if (
["string", "barcodeqr", "longform"].includes(type) &&
(keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode))
) {
// Update the value locally but don't save it yet
$focusedCellAPI.setValue(key, { save: false })
$focusedCellAPI.setValue(key, { apply: false })
$focusedCellAPI.focus()
}
}

@@ -327,29 +327,31 @@ export const createActions = context => {
get(fetch)?.getInitialData()
}

// Patches a row with some changes
const updateRow = async (rowId, changes, options = { save: true }) => {
// Checks if a changeset for a row actually mutates the row or not
const changesAreValid = (row, changes) => {
const columns = Object.keys(changes || {})
if (!row || !columns.length) {
return false
}

// Ensure there is at least 1 column that creates a difference
return columns.some(column => row[column] !== changes[column])
}

// Patches a row with some changes in local state, and returns whether a
// valid pending change was made or not
const stashRowChanges = (rowId, changes) => {
const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId]
const row = $rows[index]
if (index == null || !Object.keys(changes || {}).length) {
return

// Check this is a valid change
if (!row || !changesAreValid(row, changes)) {
return false
}

// Abandon if no changes
let same = true
for (let column of Object.keys(changes)) {
if (row[column] !== changes[column]) {
same = false
break
}
}
if (same) {
return
}

// Immediately update state so that the change is reflected
// Add change to cache
rowChangeCache.update(state => ({
...state,
[rowId]: {

@@ -357,26 +359,30 @@ export const createActions = context => {
...changes,
},
}))
return true
}

// Stop here if we don't want to persist the change
if (!options?.save) {
// Saves any pending changes to a row
const applyRowChanges = async rowId => {
const $rows = get(rows)
const $rowLookupMap = get(rowLookupMap)
const index = $rowLookupMap[rowId]
const row = $rows[index]
if (row == null) {
return
}

// Save change
try {
inProgressChanges.update(state => ({
...state,
[rowId]: true,
}))
// Mark as in progress
inProgressChanges.update(state => ({ ...state, [rowId]: true }))

// Update row
const saved = await datasource.actions.updateRow({
...cleanRow(row),
...get(rowChangeCache)[rowId],
})
const changes = get(rowChangeCache)[rowId]
const newRow = { ...cleanRow(row), ...changes }
const saved = await datasource.actions.updateRow(newRow)

// Update state after a successful change
// Update row state after a successful change
if (saved?._id) {
rows.update(state => {
state[index] = saved

@@ -386,6 +392,8 @@ export const createActions = context => {
// Handle users table edge case
await refreshRow(saved.id)
}

// Wipe row change cache now that we've saved the row
rowChangeCache.update(state => {
delete state[rowId]
return state

@@ -393,15 +401,17 @@ export const createActions = context => {
} catch (error) {
handleValidationError(rowId, error)
}
inProgressChanges.update(state => ({
...state,
[rowId]: false,
}))

// Mark as completed
inProgressChanges.update(state => ({ ...state, [rowId]: false }))
}

// Updates a value of a row
const updateValue = async ({ rowId, column, value, save = true }) => {
return await updateRow(rowId, { [column]: value }, { save })
const updateValue = async ({ rowId, column, value, apply = true }) => {
const success = stashRowChanges(rowId, { [column]: value })
if (success && apply) {
await applyRowChanges(rowId)
}
}

// Deletes an array of rows

@@ -411,9 +421,7 @@ export const createActions = context => {
}

// Actually delete rows
rowsToDelete.forEach(row => {
delete row.__idx
})
rowsToDelete.forEach(row => delete row.__idx)
await datasource.actions.deleteRows(rowsToDelete)

// Update state

@@ -433,7 +441,7 @@ export const createActions = context => {
newRow = newRows[i]

// Ensure we have a unique _id.
// This means generating one for non DS+, overriting any that may already
// This means generating one for non DS+, overwriting any that may already
// exist as we cannot allow duplicates.
if (!$isDatasourcePlus) {
newRow._id = Helpers.uuid()

@@ -494,7 +502,7 @@ export const createActions = context => {
duplicateRow,
getRow,
updateValue,
updateRow,
applyRowChanges,
deleteRows,
hasRow,
loadNextPage,

@@ -508,7 +516,14 @@ export const createActions = context => {
}

export const initialise = context => {
const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
const {
rowChangeCache,
inProgressChanges,
previousFocusedRowId,
previousFocusedCellId,
rows,
validation,
} = context

// Wipe the row change cache when changing row
previousFocusedRowId.subscribe(id => {

@@ -519,4 +534,15 @@ export const initialise = context => {
})
}
})

// Ensure any unsaved changes are saved when changing cell
previousFocusedCellId.subscribe(async id => {
const rowId = id?.split("-")[0]
const hasErrors = validation.actions.rowHasErrors(rowId)
const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0
const isSavingChanges = get(inProgressChanges)[rowId]
if (rowId && !hasErrors && hasChanges && !isSavingChanges) {
await rows.actions.applyRowChanges(rowId)
}
})
}
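The refactor above splits the old `updateRow(rowId, changes, { save })` into two explicit steps: `stashRowChanges` records a pending change in `rowChangeCache` and reports whether it actually differs from the stored row, while `applyRowChanges` persists whatever is cached. `updateValue` then only applies when asked to, which is what lets the grid stash keystrokes with `{ apply: false }` and flush them when focus leaves the cell. A condensed sketch of the flow, with the Svelte stores and persistence layer elided:

// Condensed sketch of the stash-then-apply flow; stores and saving are elided
type Row = Record<string, any>
const rowChangeCache: Record<string, Row> = {}

const stashRowChanges = (row: Row, changes: Row): boolean => {
  const columns = Object.keys(changes || {})
  // only changes that differ from the stored row count as pending
  if (!row || !columns.some(col => row[col] !== changes[col])) {
    return false
  }
  rowChangeCache[row._id] = { ...rowChangeCache[row._id], ...changes }
  return true
}

const updateValue = async (row: Row, column: string, value: any, apply = true) => {
  if (stashRowChanges(row, { [column]: value }) && apply) {
    const newRow = { ...row, ...rowChangeCache[row._id] }
    // persisting newRow and then clearing the cache is what applyRowChanges does above
    delete rowChangeCache[row._id]
  }
}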
@@ -16,6 +16,7 @@ export const createStores = context => {
const hoveredRowId = writable(null)
const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight)
const previousFocusedRowId = writable(null)
const previousFocusedCellId = writable(null)
const gridFocused = writable(false)
const isDragging = writable(false)
const buttonColumnWidth = writable(0)

@@ -48,6 +49,7 @@ export const createStores = context => {
focusedCellAPI,
focusedRowId,
previousFocusedRowId,
previousFocusedCellId,
hoveredRowId,
rowHeight,
gridFocused,

@@ -129,6 +131,7 @@ export const initialise = context => {
const {
focusedRowId,
previousFocusedRowId,
previousFocusedCellId,
rows,
focusedCellId,
selectedRows,

@@ -181,6 +184,13 @@ export const initialise = context => {
lastFocusedRowId = id
})

// Remember the last focused cell ID so that we can store the previous one
let lastFocusedCellId = null
focusedCellId.subscribe(id => {
previousFocusedCellId.set(lastFocusedCellId)
lastFocusedCellId = id
})

// Remove hovered row when a cell is selected
focusedCellId.subscribe(cell => {
if (cell && get(hoveredRowId)) {
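The pattern added above keeps `previousFocusedCellId` one step behind `focusedCellId` by capturing the last seen value in a closure before overwriting it, mirroring the existing `previousFocusedRowId` handling. A minimal standalone sketch:

import { writable } from "svelte/store"

const focusedCellId = writable<string | null>(null)
const previousFocusedCellId = writable<string | null>(null)

// publish the previously focused cell each time focus moves
let lastFocusedCellId: string | null = null
focusedCellId.subscribe(id => {
  previousFocusedCellId.set(lastFocusedCellId)
  lastFocusedCellId = id
})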
@@ -1,8 +1,23 @@
import { writable, get } from "svelte/store"
import { writable, get, derived } from "svelte/store"

// Normally we would break out actions into the explicit "createActions"
// function, but for validation all these actions are pure so can go into
// "createStores" instead to make dependency ordering simpler
export const createStores = () => {
const validation = writable({})

// Derive which rows have errors so that we can use that info later
const rowErrorMap = derived(validation, $validation => {
let map = {}
Object.entries($validation).forEach(([key, error]) => {
// Extract row ID from all errored cell IDs
if (error) {
map[key.split("-")[0]] = true
}
})
return map
})

const setError = (cellId, error) => {
if (!cellId) {
return

@@ -13,11 +28,16 @@ export const createStores = () => {
}))
}

const rowHasErrors = rowId => {
return get(rowErrorMap)[rowId]
}

return {
validation: {
...validation,
actions: {
setError,
rowHasErrors,
},
},
}
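Grid cell IDs encode the row ID before the first dash, so the derived `rowErrorMap` above collapses cell-level errors into a per-row flag that `rowHasErrors` can read synchronously before applying pending changes. A small usage sketch, assuming the "<rowId>-<column>" cell ID format used above:

import { writable, derived, get } from "svelte/store"

const validation = writable<Record<string, string | null>>({})

// true for any row that has at least one errored cell
const rowErrorMap = derived(validation, $validation => {
  const map: Record<string, boolean> = {}
  for (const [cellId, error] of Object.entries($validation)) {
    if (error) {
      map[cellId.split("-")[0]] = true
    }
  }
  return map
})

validation.set({ "row1-name": "Required field" })
console.log(get(rowErrorMap)["row1"]) // true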
@@ -1 +1 @@
Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec
Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781

@@ -10,6 +10,11 @@ CREATE TABLE Persons (
City varchar(255),
PRIMARY KEY (PersonID)
);
CREATE TABLE Person (
PersonID int NOT NULL AUTO_INCREMENT,
Name varchar(255),
PRIMARY KEY (PersonID)
);
CREATE TABLE Tasks (
TaskID int NOT NULL AUTO_INCREMENT,
PersonID INT,

@@ -27,6 +32,7 @@ CREATE TABLE Products (
);
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
INSERT INTO Person (Name) VALUES ('Elf');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

@@ -1,3 +1,3 @@
#!/bin/bash
docker-compose down
docker-compose down -v
docker volume prune -f

@@ -7,6 +7,10 @@ import {
GetResourcePermsResponse,
ResourcePermissionInfo,
GetDependantResourcesResponse,
AddPermissionResponse,
AddPermissionRequest,
RemovePermissionRequest,
RemovePermissionResponse,
} from "@budibase/types"
import { getRoleParams } from "../../db/utils"
import {

@@ -16,9 +20,9 @@ import {
import { removeFromArray } from "../../utilities"
import sdk from "../../sdk"

const PermissionUpdateType = {
REMOVE: "remove",
ADD: "add",
const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}

const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS

@@ -39,7 +43,7 @@ async function updatePermissionOnRole(
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: string
updateType: PermissionUpdateType
) {
const allowedAction = await sdk.permissions.resourceActionAllowed({
resourceId,

@@ -107,11 +111,15 @@ async function updatePermissionOnRole(
}

const response = await db.bulkDocs(docUpdates)
return response.map((resp: any) => {
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
resp._id = roles.getExternalRoleID(resp.id, version)
delete resp.id
return resp
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}

@@ -189,13 +197,14 @@ export async function getDependantResources(
}
}

export async function addPermission(ctx: UserCtx) {
ctx.body = await updatePermissionOnRole(ctx.params, PermissionUpdateType.ADD)
export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
const params: AddPermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
}

export async function removePermission(ctx: UserCtx) {
ctx.body = await updatePermissionOnRole(
ctx.params,
PermissionUpdateType.REMOVE
)
export async function removePermission(
ctx: UserCtx<void, RemovePermissionResponse>
) {
const params: RemovePermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
}
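Replacing the plain object with a `const enum` gives `updatePermissionOnRole` a closed set of values the compiler can check, while the emitted JavaScript still sees the literal strings because const enum members are inlined at the call site. A brief sketch of the behaviour:

const enum PermissionUpdateType {
  REMOVE = "remove",
  ADD = "add",
}

// the parameter type now rejects arbitrary strings at compile time
function updatePermission(updateType: PermissionUpdateType) {
  return updateType === PermissionUpdateType.ADD ? "adding" : "removing"
}

updatePermission(PermissionUpdateType.ADD) // compiles to updatePermission("add")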
@@ -17,10 +17,12 @@ import {
QueryPreview,
QuerySchema,
FieldType,
type ExecuteQueryRequest,
type ExecuteQueryResponse,
type Row,
ExecuteQueryRequest,
ExecuteQueryResponse,
Row,
QueryParameter,
PreviewQueryRequest,
PreviewQueryResponse,
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"

@@ -134,14 +136,16 @@ function enrichParameters(
return requestParameters
}

export async function preview(ctx: UserCtx) {
export async function preview(
ctx: UserCtx<PreviewQueryRequest, PreviewQueryResponse>
) {
const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
ctx.request.body.datasourceId
)
const query: QueryPreview = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query
const { fields, parameters, queryVerb, transformer, queryId, schema } =
ctx.request.body

let existingSchema = schema
if (queryId && !existingSchema) {

@@ -266,9 +270,7 @@ export async function preview(ctx: UserCtx) {
},
}

const { rows, keys, info, extra } = (await Runner.run(
inputs
)) as QueryResponse
const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)

// if existing schema, update to include any previous schema keys

@@ -281,7 +283,7 @@ export async function preview(ctx: UserCtx) {
}
// remove configuration before sending event
delete datasource.config
await events.query.previewed(datasource, query)
await events.query.previewed(datasource, ctx.request.body)
ctx.body = {
rows,
nestedSchemaFields,

@@ -295,7 +297,10 @@ export async function preview(ctx: UserCtx) {
}

async function execute(
ctx: UserCtx<ExecuteQueryRequest, ExecuteQueryResponse | Row[]>,
ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
opts: any = { rowsOnly: false, isAutomation: false }
) {
const db = context.getAppDB()

@@ -350,18 +355,23 @@ async function execute(
}
}

export async function executeV1(ctx: UserCtx) {
export async function executeV1(
ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
) {
return execute(ctx, { rowsOnly: true, isAutomation: false })
}

export async function executeV2(
ctx: UserCtx,
ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
{ isAutomation }: { isAutomation?: boolean } = {}
) {
return execute(ctx, { rowsOnly: false, isAutomation })
}

const removeDynamicVariables = async (queryId: any) => {
const removeDynamicVariables = async (queryId: string) => {
const db = context.getAppDB()
const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId)

@@ -384,7 +394,7 @@ const removeDynamicVariables = async (queryId: any) => {

export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const queryId = ctx.params.queryId
const queryId = ctx.params.queryId as string
await removeDynamicVariables(queryId)
const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId)
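The controller changes above follow one pattern: each handler declares its request and response body through the `UserCtx<RequestBody, ResponseBody>` generic instead of an untyped ctx, so `ctx.request.body` and `ctx.body` are checked at compile time. A hedged sketch of how such a generic context type works; the interface below is an assumed, simplified shape for illustration only, not the real definition in @budibase/types:

// Assumed shape for illustration only
interface UserCtx<RequestBody = unknown, ResponseBody = unknown> {
  request: { body: RequestBody }
  params: Record<string, string>
  body?: ResponseBody
}

interface ExampleRequest { name: string }
interface ExampleResponse { id: string; name: string }

async function save(ctx: UserCtx<ExampleRequest, ExampleResponse>) {
  const { name } = ctx.request.body // typed as ExampleRequest
  ctx.body = { id: "1", name }      // must match ExampleResponse
}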
@@ -7,6 +7,7 @@ import {
FilterType,
IncludeRelationship,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation,
PaginationJson,

@@ -18,6 +19,7 @@ import {
SortJson,
SortType,
Table,
isManyToOne,
} from "@budibase/types"
import {
breakExternalTableId,

@@ -32,7 +34,9 @@ import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { processDates, processFormulas } from "../../../utilities/rowProcessor"
import { db as dbCore } from "@budibase/backend-core"
import AliasTables from "./alias"
import sdk from "../../../sdk"
import env from "../../../environment"

export interface ManyRelationship {
tableId?: string

@@ -101,6 +105,39 @@ function buildFilters(
}
}

async function removeManyToManyRelationships(
rowId: string,
table: Table,
colName: string
) {
const tableId = table._id!
const filters = buildFilters(rowId, {}, table)
// safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.DELETE),
body: { [colName]: null },
filters,
})
} else {
return []
}
}

async function removeOneToManyRelationships(rowId: string, table: Table) {
const tableId = table._id!
const filters = buildFilters(rowId, {}, table)
// safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE),
filters,
})
} else {
return []
}
}

/**
* This function checks the incoming parameters to make sure all the inputs are
* valid based on on the table schema. The main thing this is looking for is when a

@@ -178,13 +215,13 @@ function generateIdForRow(

function getEndpoint(tableId: string | undefined, operation: string) {
if (!tableId) {
return {}
throw new Error("Cannot get endpoint information - no table ID specified")
}
const { datasourceId, tableName } = breakExternalTableId(tableId)
return {
datasourceId,
entityId: tableName,
operation,
datasourceId: datasourceId!,
entityId: tableName!,
operation: operation as Operation,
}
}

@@ -304,6 +341,18 @@ export class ExternalRequest<T extends Operation> {
}
}

async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
filters: buildFilters(rowId, {}, table),
})
if (Array.isArray(response) && response.length > 0) {
return response[0]
} else {
throw new Error(`Cannot fetch row by ID "${rowId}"`)
}
}

inputProcessing(row: Row | undefined, table: Table) {
if (!row) {
return { row, manyRelationships: [] }

@@ -571,7 +620,9 @@ export class ExternalRequest<T extends Operation> {
* information.
*/
async lookupRelations(tableId: string, row: Row) {
const related: { [key: string]: any } = {}
const related: {
[key: string]: { rows: Row[]; isMany: boolean; tableId: string }
} = {}
const { tableName } = breakExternalTableId(tableId)
if (!tableName) {
return related

@@ -589,14 +640,26 @@ export class ExternalRequest<T extends Operation> {
) {
continue
}
const isMany = field.relationshipType === RelationshipType.MANY_TO_MANY
const tableId = isMany ? field.through : field.tableId
let tableId: string | undefined,
lookupField: string | undefined,
fieldName: string | undefined
if (isManyToMany(field)) {
tableId = field.through
lookupField = primaryKey
fieldName = field.throughTo || primaryKey
} else if (isManyToOne(field)) {
tableId = field.tableId
lookupField = field.foreignKey
fieldName = field.fieldName
}
if (!tableId || !lookupField || !fieldName) {
throw new Error(
"Unable to lookup relationships - undefined column properties."
)
}
const { tableName: relatedTableName } = breakExternalTableId(tableId)
// @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0]

const lookupField = isMany ? primaryKey : field.foreignKey
const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName
if (!lookupField || !row[lookupField]) {
continue
}

@@ -609,9 +672,12 @@ export class ExternalRequest<T extends Operation> {
},
})
// this is the response from knex if no rows found
const rows = !response[0].read ? response : []
const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName
related[storeTo] = { rows, isMany, tableId }
const rows: Row[] =
!Array.isArray(response) || response?.[0].read ? [] : response
const storeTo = isManyToMany(field)
? field.throughFrom || linkPrimaryKey
: fieldName
related[storeTo] = { rows, isMany: isManyToMany(field), tableId }
}
return related
}

@@ -697,24 +763,43 @@ export class ExternalRequest<T extends Operation> {
continue
}
for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen
if (Object.keys(filters).length !== 0) {
const op = isMany ? Operation.DELETE : Operation.UPDATE
const body = isMany ? null : { [colName]: null }
promises.push(
getDatasourceAndQuery({
endpoint: getEndpoint(tableId, op),
body,
filters,
})
)
const rowId = generateIdForRow(row, table)
const promise: Promise<any> = isMany
? removeManyToManyRelationships(rowId, table, colName)
: removeOneToManyRelationships(rowId, table)
if (promise) {
promises.push(promise)
}
}
}
await Promise.all(promises)
}

async removeRelationshipsToRow(table: Table, rowId: string) {
const row = await this.getRow(table, rowId)
const related = await this.lookupRelations(table._id!, row)
for (let column of Object.values(table.schema)) {
const relationshipColumn = column as RelationshipFieldMetadata
if (!isManyToOne(relationshipColumn)) {
continue
}
const { rows, isMany, tableId } = related[relationshipColumn.fieldName]
const table = this.getTable(tableId)!
await Promise.all(
rows.map(row => {
const rowId = generateIdForRow(row, table)
return isMany
? removeManyToManyRelationships(
rowId,
table,
relationshipColumn.fieldName
)
: removeOneToManyRelationships(rowId, table)
})
)
}
}

/**
* This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
* you have column overlap in relationships, e.g. we join a few different tables and they all have the

@@ -804,7 +889,7 @@ export class ExternalRequest<T extends Operation> {
}
let json = {
endpoint: {
datasourceId,
datasourceId: datasourceId!,
entityId: tableName,
operation,
},

@@ -826,17 +911,30 @@ export class ExternalRequest<T extends Operation> {
},
}

// can't really use response right now
const response = await getDatasourceAndQuery(json)
// handle many to many relationships now if we know the ID (could be auto increment)
// remove any relationships that could block deletion
if (operation === Operation.DELETE && id) {
await this.removeRelationshipsToRow(table, generateRowIdField(id))
}

// aliasing can be disabled fully if desired
let response
if (env.SQL_ALIASING_DISABLE) {
response = await getDatasourceAndQuery(json)
} else {
const aliasing = new AliasTables(Object.keys(this.tables))
response = await aliasing.queryWithAliasing(json)
}

const responseRows = Array.isArray(response) ? response : []
// handle many-to-many relationships now if we know the ID (could be auto increment)
if (operation !== Operation.READ) {
await this.handleManyRelationships(
table._id || "",
response[0],
responseRows[0],
processed.manyRelationships
)
}
const output = this.outputProcessing(response, table, relationships)
const output = this.outputProcessing(responseRows, table, relationships)
// if reading it'll just be an array of rows, return whole thing
if (operation === Operation.READ) {
return (
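Both relationship-cleanup helpers above share one safety rule: never send an UPDATE or DELETE to the datasource when the row filter came back empty, because an unfiltered mutation would touch every row in the table. A distilled sketch of that guard, with the query call passed in as a callback rather than using the real Budibase query helpers:

// Distilled guard: skip the mutation entirely when no filters could be built
async function mutateWithGuard(
  filters: Record<string, unknown>,
  runQuery: () => Promise<unknown[]>
): Promise<unknown[]> {
  if (Object.keys(filters).length === 0) {
    // nothing identifies the target row - bail out instead of mutating everything
    return []
  }
  return runQuery()
}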
@@ -0,0 +1,168 @@
import {
QueryJson,
SearchFilters,
Table,
Row,
DatasourcePlusQueryResponse,
} from "@budibase/types"
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { cloneDeep } from "lodash"

class CharSequence {
static alphabet = "abcdefghijklmnopqrstuvwxyz"
counters: number[]

constructor() {
this.counters = [0]
}

getCharacter(): string {
const char = this.counters.map(i => CharSequence.alphabet[i]).join("")
for (let i = this.counters.length - 1; i >= 0; i--) {
if (this.counters[i] < CharSequence.alphabet.length - 1) {
this.counters[i]++
return char
}
this.counters[i] = 0
}
this.counters.unshift(0)
return char
}
}

export default class AliasTables {
aliases: Record<string, string>
tableAliases: Record<string, string>
tableNames: string[]
charSeq: CharSequence

constructor(tableNames: string[]) {
this.tableNames = tableNames
this.aliases = {}
this.tableAliases = {}
this.charSeq = new CharSequence()
}

getAlias(tableName: string) {
if (this.aliases[tableName]) {
return this.aliases[tableName]
}
const char = this.charSeq.getCharacter()
this.aliases[tableName] = char
this.tableAliases[char] = tableName
return char
}

aliasField(field: string) {
const tableNames = this.tableNames
if (field.includes(".")) {
const [tableName, column] = field.split(".")
const foundTableName = tableNames.find(name => {
const idx = tableName.indexOf(name)
if (idx === -1 || idx > 1) {
return
}
// this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
// then the idx of the table name will be 1, and we should allow for it ending in a closing
// character - otherwise it should be the full length if the index is zero
const allowedCharacterDiff = idx * 2
return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
})
if (foundTableName) {
const aliasedTableName = tableName.replace(
foundTableName,
this.getAlias(foundTableName)
)
field = `${aliasedTableName}.${column}`
}
}
return field
}

reverse<T extends Row | Row[]>(rows: T): T {
const process = (row: Row) => {
const final: Row = {}
for (let [key, value] of Object.entries(row)) {
if (!key.includes(".")) {
final[key] = value
} else {
const [alias, column] = key.split(".")
const tableName = this.tableAliases[alias] || alias
final[`${tableName}.${column}`] = value
}
}
return final
}
if (Array.isArray(rows)) {
return rows.map(row => process(row)) as T
} else {
return process(rows) as T
}
}

aliasMap(tableNames: (string | undefined)[]) {
const map: Record<string, string> = {}
for (let tableName of tableNames) {
if (tableName) {
map[tableName] = this.getAlias(tableName)
}
}
return map
}

async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
const fieldLength = json.resource?.fields?.length
const aliasingEnabled = fieldLength && fieldLength > 0
if (aliasingEnabled) {
json = cloneDeep(json)
// run through the query json to update anywhere a table may be used
if (json.resource?.fields) {
json.resource.fields = json.resource.fields.map(field =>
this.aliasField(field)
)
}
if (json.filters) {
for (let [filterKey, filter] of Object.entries(json.filters)) {
if (typeof filter !== "object") {
continue
}
const aliasedFilters: typeof filter = {}
for (let key of Object.keys(filter)) {
aliasedFilters[this.aliasField(key)] = filter[key]
}
json.filters[filterKey as keyof SearchFilters] = aliasedFilters
}
}
if (json.meta?.table) {
this.getAlias(json.meta.table.name)
}
if (json.meta?.tables) {
Object.keys(json.meta.tables).forEach(tableName =>
this.getAlias(tableName)
)
}
if (json.relationships) {
json.relationships = json.relationships.map(relationship => ({
...relationship,
aliases: this.aliasMap([
relationship.through,
relationship.tableName,
json.endpoint.entityId,
]),
}))
}
// invert and return
const invertedTableAliases: Record<string, string> = {}
for (let [key, value] of Object.entries(this.tableAliases)) {
invertedTableAliases[value] = key
}
json.tableAliases = invertedTableAliases
}
const response = await getDatasourceAndQuery(json)
if (Array.isArray(response) && aliasingEnabled) {
return this.reverse(response)
} else {
return response
}
}
}
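The new AliasTables helper shortens long table names to generated aliases before the query JSON reaches the datasource and maps the aliased column keys back on the way out. CharSequence hands out "a" through "z", then "aa", "ab", and so on, so aliases stay unique no matter how many tables join the query. A short usage sketch based on the class above; the table names are illustrative:

const aliasing = new AliasTables(["persons", "tasks"])

aliasing.getAlias("persons")            // "a"
aliasing.getAlias("tasks")              // "b"
aliasing.getAlias("persons")            // still "a" - aliases are cached

aliasing.aliasField("persons.Name")     // "a.Name"
aliasing.reverse({ "a.Name": "Mike" })  // { "persons.Name": "Mike" }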
@@ -211,7 +211,7 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
}
}

export async function fetchEnrichedRow(ctx: any) {
export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
const tableId = utils.getTableId(ctx)
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
}

@@ -170,6 +170,7 @@ export const serveApp = async function (ctx: Ctx) {
if (!env.isJest()) {
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
const { head, html, css } = AppComponent.render({
title: branding?.platformTitle || `${appInfo.name}`,
metaImage:
branding?.metaImageUrl ||
"https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png",

@@ -6,6 +6,7 @@ import {
BulkImportRequest,
BulkImportResponse,
Operation,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
Table,

@@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
return breakExternalTableId(table._id).datasourceId
}

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const inputs = ctx.request.body
const renaming = inputs?._rename
const adding = inputs?._add
// can't do this right now
delete inputs.rows
const tableId = ctx.request.body._id

@@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const { datasource, table } = await sdk.tables.external.save(
datasourceId!,
inputs,
{ tableId, renaming }
{ tableId, renaming, adding }
)
builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table

@@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId
const table = ctx.request.body
const isImport = table.rows
const renaming = ctx.request.body._rename

let savedTable = await pickApi({ table }).save(ctx)
const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming)
if (!table._id) {
await events.table.created(savedTable)
savedTable = sdk.tables.enrichViewSchemas(savedTable)

@@ -12,11 +12,12 @@ import {
} from "@budibase/types"
import sdk from "../../../sdk"

export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const { rows, ...rest } = ctx.request.body
let tableToSave: Table & {
_rename?: RenameColumn
} = {
let tableToSave: Table = {
_id: generateTableID(),
...rest,
// Ensure these fields are populated, even if not sent in the request

@@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
tableToSave.views = {}
}

const renaming = tableToSave._rename
delete tableToSave._rename

try {
const { table } = await sdk.tables.internal.save(tableToSave, {
user: ctx.user,
rowsToImport: rows,
tableId: ctx.request.body._id,
renaming: renaming,
renaming,
})

return table
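Across the three table controllers above the rename and column-add metadata now travels explicitly: the shared handler reads `_rename` once, strips `_rename`/`_add` before the body reaches the CouchDB (internal) implementation, and passes `renaming` as a parameter instead of leaving it embedded in the table document. A condensed sketch of that dispatch, with the Budibase-specific types reduced to plain objects:

// Condensed sketch - names mirror the handlers above, types are simplified
interface RenameColumn { old: string; updated: string }
interface TableBody { _id?: string; _rename?: RenameColumn; _add?: { name: string } }

async function saveTable(
  body: TableBody,
  api: { save: (body: TableBody, renaming?: RenameColumn) => Promise<unknown> },
  isInternal: boolean
) {
  const renaming = body._rename
  if (isInternal) {
    // CouchDB tables must not persist the transient metadata fields
    delete body._rename
    delete body._add
  }
  return api.save(body, renaming)
}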
@@ -13,7 +13,7 @@ describe("/api/keys", () => {

describe("fetch", () => {
it("should allow fetching", async () => {
await setup.switchToSelfHosted(async () => {
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const res = await request
.get(`/api/keys`)
.set(config.defaultHeaders())

@@ -34,7 +34,7 @@ describe("/api/keys", () => {

describe("update", () => {
it("should allow updating a value", async () => {
await setup.switchToSelfHosted(async () => {
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const res = await request
.put(`/api/keys/TEST`)
.send({
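These tests swap `setup.switchToSelfHosted(...)` for `config.withEnv({ SELF_HOSTED: "true" }, ...)`, which scopes an environment override to a single callback and restores the previous value afterwards. The real helper lives in the test configuration; a rough sketch of what such a scoped override can look like, with the environment object passed in explicitly for the sake of a self-contained example:

// Rough sketch of a scoped env override; the real helper lives in TestConfiguration
async function withEnv<T>(
  env: Record<string, string | undefined>,
  overrides: Record<string, string>,
  task: () => Promise<T>
): Promise<T> {
  const previous: Record<string, string | undefined> = {}
  for (const key of Object.keys(overrides)) {
    previous[key] = env[key]
    env[key] = overrides[key]
  }
  try {
    return await task()
  } finally {
    // always restore, even if the task throws
    Object.assign(env, previous)
  }
}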
@@ -184,7 +184,7 @@ describe("/applications", () => {
it("app should not sync if production", async () => {
const { message } = await config.api.application.sync(
app.appId.replace("_dev", ""),
{ statusCode: 400 }
{ status: 400 }
)

expect(message).toEqual(

@@ -248,4 +248,13 @@ describe("/applications", () => {
expect(devLogs.data.length).toBe(0)
})
})

describe("permissions", () => {
it("should only return apps a user has access to", async () => {
const user = await config.createUser()

const apps = await config.api.application.fetch()
expect(apps.length).toBeGreaterThan(0)
})
})
})

@@ -29,7 +29,7 @@ describe("/api/applications/:appId/sync", () => {
let resp = (await config.api.attachment.process(
"ohno.exe",
Buffer.from([0]),
{ expectStatus: 400 }
{ status: 400 }
)) as unknown as APIError
expect(resp.message).toContain("invalid extension")
})

@@ -40,7 +40,7 @@ describe("/api/applications/:appId/sync", () => {
let resp = (await config.api.attachment.process(
"OHNO.EXE",
Buffer.from([0]),
{ expectStatus: 400 }
{ status: 400 }
)) as unknown as APIError
expect(resp.message).toContain("invalid extension")
})

@@ -51,7 +51,7 @@ describe("/api/applications/:appId/sync", () => {
undefined as any,
undefined as any,
{
expectStatus: 400,
status: 400,
}
)) as unknown as APIError
expect(resp.message).toContain("No file provided")

@@ -19,11 +19,8 @@ describe("/backups", () => {

describe("/api/backups/export", () => {
it("should be able to export app", async () => {
const { body, headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
const body = await config.api.backup.exportBasicBackup(config.getAppId()!)
expect(body instanceof Buffer).toBe(true)
expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1)
})

@@ -38,15 +35,13 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE)

const { headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)

expect(headers["content-disposition"]).toEqual(
`attachment; filename="${
config.getApp().name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
)
await config.api.backup.exportBasicBackup(config.getAppId()!, {
headers: {
"content-disposition": `attachment; filename="${
config.getApp().name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`,
},
})
})
})

@@ -45,7 +45,7 @@ describe("/permission", () => {
table = (await config.createTable()) as typeof table
row = await config.createRow()
view = await config.api.viewV2.create({ tableId: table._id })
perms = await config.api.permission.set({
perms = await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.READ,

@@ -88,13 +88,13 @@ describe("/permission", () => {
})

it("should get resource permissions with multiple roles", async () => {
perms = await config.api.permission.set({
perms = await config.api.permission.add({
roleId: HIGHER_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.WRITE,
})
const res = await config.api.permission.get(table._id)
expect(res.body).toEqual({
expect(res).toEqual({
permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },

@@ -117,16 +117,19 @@ describe("/permission", () => {
level: PermissionLevel.READ,
})

const response = await config.api.permission.set(
await config.api.permission.add(
{
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.EXECUTE,
},
{ expectStatus: 403 }
)
expect(response.message).toEqual(
"You are not allowed to 'read' the resource type 'datasource'"
{
status: 403,
body: {
message:
"You are not allowed to 'read' the resource type 'datasource'",
},
}
)
})
})

@@ -138,9 +141,9 @@ describe("/permission", () => {
resourceId: table._id,
level: PermissionLevel.READ,
})
expect(res.body[0]._id).toEqual(STD_ROLE_ID)
expect(res[0]._id).toEqual(STD_ROLE_ID)
const permsRes = await config.api.permission.get(table._id)
expect(permsRes.body[STD_ROLE_ID]).toBeUndefined()
expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
})

it("throw forbidden if the action is not allowed for the resource", async () => {

@@ -156,10 +159,13 @@ describe("/permission", () => {
resourceId: table._id,
level: PermissionLevel.EXECUTE,
},
{ expectStatus: 403 }
)
expect(response.body.message).toEqual(
"You are not allowed to 'read' the resource type 'datasource'"
{
status: 403,
body: {
message:
"You are not allowed to 'read' the resource type 'datasource'",
},
}
)
})
})

@@ -181,10 +187,8 @@ describe("/permission", () => {
// replicate changes before checking permissions
await config.publish()

const res = await config.api.viewV2.search(view.id, undefined, {
usePublicUser: true,
})
expect(res.body.rows[0]._id).toEqual(row._id)
const res = await config.api.viewV2.publicSearch(view.id)
expect(res.rows[0]._id).toEqual(row._id)
})

it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {

@@ -196,14 +200,11 @@ describe("/permission", () => {
// replicate changes before checking permissions
await config.publish()

await config.api.viewV2.search(view.id, undefined, {
expectStatus: 403,
usePublicUser: true,
})
await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 })
})

it("should ignore the view permissions if the flag is not on", async () => {
await config.api.permission.set({
await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,

@@ -216,15 +217,14 @@ describe("/permission", () => {
// replicate changes before checking permissions
await config.publish()

await config.api.viewV2.search(view.id, undefined, {
expectStatus: 403,
usePublicUser: true,
await config.api.viewV2.publicSearch(view.id, undefined, {
status: 403,
})
})

it("should use the view permissions if the flag is on", async () => {
mocks.licenses.useViewPermissions()
await config.api.permission.set({
await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,

@@ -237,10 +237,8 @@ describe("/permission", () => {
// replicate changes before checking permissions
await config.publish()

const res = await config.api.viewV2.search(view.id, undefined, {
usePublicUser: true,
})
expect(res.body.rows[0]._id).toEqual(row._id)
const res = await config.api.viewV2.publicSearch(view.id)
expect(res.rows[0]._id).toEqual(row._id)
})

it("shouldn't allow writing from a public user", async () => {

@@ -277,7 +275,7 @@ describe("/permission", () => {

const res = await config.api.permission.get(legacyView.name)

expect(res.body).toEqual({
expect(res).toEqual({
permissions: {
read: {
permissionType: "BASE",
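The test churn above follows a single convention change: request helpers such as `config.api.permission.add` and `config.api.viewV2.publicSearch` now take an expectations object (status, body, headers) and return the parsed response body directly, instead of the old `{ expectStatus }` option plus a raw supertest response. A rough sketch of the idea, with the helper shape assumed for illustration rather than taken from the real test framework:

// Assumed helper shape for illustration - the real one lives in the test framework
interface Expectations {
  status?: number
  body?: Record<string, unknown>
}

async function expectRequest<T>(
  doRequest: () => Promise<{ status: number; body: T }>,
  expectations: Expectations = {}
): Promise<T> {
  const res = await doRequest()
  expect(res.status).toEqual(expectations.status ?? 200)
  if (expectations.body) {
    expect(res.body).toEqual(expect.objectContaining(expectations.body))
  }
  // callers get the parsed body back, not the raw response
  return res.body
}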
@@ -157,7 +157,7 @@ describe("/queries", () => {
})

it("should find a query in cloud", async () => {
await setup.switchToSelfHosted(async () => {
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
const query = await config.createQuery()
const res = await request
.get(`/api/queries/${query._id}`)

@@ -397,15 +397,16 @@ describe("/queries", () => {
})

it("should fail with invalid integration type", async () => {
const response = await config.api.datasource.create(
{
...basicDatasource().datasource,
source: "INVALID_INTEGRATION" as SourceName,
const datasource: Datasource = {
...basicDatasource().datasource,
source: "INVALID_INTEGRATION" as SourceName,
}
await config.api.datasource.create(datasource, {
status: 500,
body: {
message: "No datasource implementation found.",
},
{ expectStatus: 500, rawResponse: true }
)

expect(response.body.message).toBe("No datasource implementation found.")
})
})
})

@@ -93,7 +93,7 @@ describe("/roles", () => {

it("should be able to get the role with a permission added", async () => {
const table = await config.createTable()
await config.api.permission.set({
await config.api.permission.add({
roleId: BUILTIN_ROLE_IDS.POWER,
resourceId: table._id,
level: PermissionLevel.READ,

@@ -7,6 +7,7 @@ import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import {
AutoFieldSubType,
DeleteRow,
FieldSchema,
FieldType,
FieldTypeSubtypes,

@@ -106,9 +107,6 @@ describe.each([
mocks.licenses.useCloudFree()
})

const loadRow = (id: string, tbl_Id: string, status = 200) =>
config.api.row.get(tbl_Id, id, { expectStatus: status })

const getRowUsage = async () => {
const { total } = await config.doInContext(undefined, () =>
quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS)

@@ -235,7 +233,7 @@ describe.each([

const res = await config.api.row.get(tableId, existing._id!)

expect(res.body).toEqual({
expect(res).toEqual({
...existing,
...defaultRowFields,
})

@@ -265,7 +263,7 @@ describe.each([
await config.createRow()

await config.api.row.get(tableId, "1234567", {
expectStatus: 404,
status: 404,
})
})

@@ -395,7 +393,7 @@ describe.each([
const createdRow = await config.createRow(row)
const id = createdRow._id!

const saved = (await loadRow(id, table._id!)).body
const saved = await config.api.row.get(table._id!, id)

expect(saved.stringUndefined).toBe(undefined)
expect(saved.stringNull).toBe(null)

@@ -476,8 +474,8 @@ describe.each([
)

const row = await config.api.row.get(table._id!, createRowResponse._id!)
expect(row.body.Story).toBeUndefined()
expect(row.body).toEqual({
expect(row.Story).toBeUndefined()
expect(row).toEqual({
...defaultRowFields,
OrderID: 1111,
Country: "Aussy",

@@ -524,10 +522,10 @@ describe.each([
expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description)

const savedRow = await loadRow(row._id!, table._id!)
const savedRow = await config.api.row.get(table._id!, row._id!)

expect(savedRow.body.description).toEqual(existing.description)
expect(savedRow.body.name).toEqual("Updated Name")
expect(savedRow.description).toEqual(existing.description)
expect(savedRow.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})

@@ -543,7 +541,7 @@ describe.each([
tableId: table._id!,
name: 1,
},
{ expectStatus: 400 }
{ status: 400 }
)

await assertRowUsage(rowUsage)

@@ -582,8 +580,8 @@ describe.each([
})

let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user1._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id)
expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.user2[0]._id).toEqual(user2._id)

let patchResp = await config.api.row.patch(table._id!, {
_id: row._id!,

@@ -595,8 +593,8 @@ describe.each([
expect(patchResp.user2[0]._id).toEqual(user2._id)

getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.body.user1[0]._id).toEqual(user2._id)
expect(getResp.body.user2[0]._id).toEqual(user2._id)
expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.user2[0]._id).toEqual(user2._id)
})

it("should be able to update relationships when both columns are same name", async () => {

@@ -609,7 +607,7 @@ describe.each([
description: "test",
relationship: [row._id],
})
row = (await config.api.row.get(table._id!, row._id!)).body
row = await config.api.row.get(table._id!, row._id!)
expect(row.relationship.length).toBe(1)
const resp = await config.api.row.patch(table._id!, {
_id: row._id!,

@@ -632,8 +630,10 @@ describe.each([
const createdRow = await config.createRow()
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(table._id!, [createdRow])
expect(res.body[0]._id).toEqual(createdRow._id)
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow],
})
expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage - 1)
})
})

@@ -682,10 +682,12 @@ describe.each([
const row2 = await config.createRow()
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(table._id!, [row1, row2])
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
})

expect(res.body.length).toEqual(2)
await loadRow(row1._id!, table._id!, 404)
expect(res.length).toEqual(2)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 2)
})

@@ -697,14 +699,12 @@ describe.each([
])
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(table._id!, [
row1,
row2._id,
{ _id: row3._id },
])
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2._id!, { _id: row3._id }],
})

expect(res.body.length).toEqual(3)
await loadRow(row1._id!, table._id!, 404)
expect(res.length).toEqual(3)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 3)
})

@@ -712,34 +712,36 @@ describe.each([
const row1 = await config.createRow()
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(table._id!, row1)
const res = await config.api.row.delete(table._id!, row1 as DeleteRow)

expect(res.body.id).toEqual(row1._id)
await loadRow(row1._id!, table._id!, 404)
expect(res.id).toEqual(row1._id)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 1)
})

it("Should ignore malformed/invalid delete requests", async () => {
const rowUsage = await getRowUsage()

const res = await config.api.row.delete(
table._id!,
{ not: "valid" },
{ expectStatus: 400 }
)
expect(res.body.message).toEqual("Invalid delete rows request")

const res2 = await config.api.row.delete(
table._id!,
{ rows: 123 },
{ expectStatus: 400 }
)
expect(res2.body.message).toEqual("Invalid delete rows request")

const res3 = await config.api.row.delete(table._id!, "invalid", {
expectStatus: 400,
await config.api.row.delete(table._id!, { not: "valid" } as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})

await config.api.row.delete(table._id!, { rows: 123 } as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})

await config.api.row.delete(table._id!, "invalid" as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
expect(res3.body.message).toEqual("Invalid delete rows request")

await assertRowUsage(rowUsage)
})

@@ -757,16 +759,16 @@ describe.each([
const row = await config.createRow()
const rowUsage = await getRowUsage()

const res = await config.api.legacyView.get(table._id!)
expect(res.body.length).toEqual(1)
expect(res.body[0]._id).toEqual(row._id)
const rows = await config.api.legacyView.get(table._id!)
expect(rows.length).toEqual(1)
expect(rows[0]._id).toEqual(row._id)
await assertRowUsage(rowUsage)
})

it("should throw an error if view doesn't exist", async () => {
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.legacyView.get("derp", { expectStatus: 404 })
|
||||
await config.api.legacyView.get("derp", { status: 404 })
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
@ -781,9 +783,9 @@ describe.each([
|
|||
const row = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
const res = await config.api.legacyView.get(view.name)
|
||||
expect(res.body.length).toEqual(1)
|
||||
expect(res.body[0]._id).toEqual(row._id)
|
||||
const rows = await config.api.legacyView.get(view.name)
|
||||
expect(rows.length).toEqual(1)
|
||||
expect(rows[0]._id).toEqual(row._id)
|
||||
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
|
@ -841,8 +843,8 @@ describe.each([
|
|||
linkedTable._id!,
|
||||
secondRow._id!
|
||||
)
|
||||
expect(resBasic.body.link.length).toBe(1)
|
||||
expect(resBasic.body.link[0]).toEqual({
|
||||
expect(resBasic.link.length).toBe(1)
|
||||
expect(resBasic.link[0]).toEqual({
|
||||
_id: firstRow._id,
|
||||
primaryDisplay: firstRow.name,
|
||||
})
|
||||
|
@ -852,10 +854,10 @@ describe.each([
|
|||
linkedTable._id!,
|
||||
secondRow._id!
|
||||
)
|
||||
expect(resEnriched.body.link.length).toBe(1)
|
||||
expect(resEnriched.body.link[0]._id).toBe(firstRow._id)
|
||||
expect(resEnriched.body.link[0].name).toBe("Test Contact")
|
||||
expect(resEnriched.body.link[0].description).toBe("original description")
|
||||
expect(resEnriched.link.length).toBe(1)
|
||||
expect(resEnriched.link[0]._id).toBe(firstRow._id)
|
||||
expect(resEnriched.link[0].name).toBe("Test Contact")
|
||||
expect(resEnriched.link[0].description).toBe("original description")
|
||||
await assertRowUsage(rowUsage)
|
||||
})
|
||||
})
|
||||
|
@ -880,8 +882,7 @@ describe.each([
|
|||
],
|
||||
tableId: table._id,
|
||||
})
|
||||
// the environment needs configured for this
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
return context.doInAppContext(config.getAppId(), async () => {
|
||||
const enriched = await outputProcessing(table, [row])
|
||||
expect((enriched as Row[])[0].attachment[0].url).toBe(
|
||||
|
@ -903,7 +904,7 @@ describe.each([
|
|||
const res = await config.api.row.exportRows(table._id!, {
|
||||
rows: [existing._id!],
|
||||
})
|
||||
const results = JSON.parse(res.text)
|
||||
const results = JSON.parse(res)
|
||||
expect(results.length).toEqual(1)
|
||||
const row = results[0]
|
||||
|
||||
|
@ -922,7 +923,7 @@ describe.each([
|
|||
rows: [existing._id!],
|
||||
columns: ["_id"],
|
||||
})
|
||||
const results = JSON.parse(res.text)
|
||||
const results = JSON.parse(res)
|
||||
expect(results.length).toEqual(1)
|
||||
const row = results[0]
|
||||
|
||||
|
@ -1000,7 +1001,7 @@ describe.each([
|
|||
})
|
||||
|
||||
const row = await config.api.row.get(table._id!, newRow._id!)
|
||||
expect(row.body).toEqual({
|
||||
expect(row).toEqual({
|
||||
name: data.name,
|
||||
surname: data.surname,
|
||||
address: data.address,
|
||||
|
@ -1010,9 +1011,9 @@ describe.each([
|
|||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row.body._viewId).toBeUndefined()
|
||||
expect(row.body.age).toBeUndefined()
|
||||
expect(row.body.jobTitle).toBeUndefined()
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1042,7 +1043,7 @@ describe.each([
|
|||
})
|
||||
|
||||
const row = await config.api.row.get(tableId, newRow._id!)
|
||||
expect(row.body).toEqual({
|
||||
expect(row).toEqual({
|
||||
...newRow,
|
||||
name: newData.name,
|
||||
address: newData.address,
|
||||
|
@ -1051,9 +1052,9 @@ describe.each([
|
|||
id: newRow.id,
|
||||
...defaultRowFields,
|
||||
})
|
||||
expect(row.body._viewId).toBeUndefined()
|
||||
expect(row.body.age).toBeUndefined()
|
||||
expect(row.body.jobTitle).toBeUndefined()
|
||||
expect(row._viewId).toBeUndefined()
|
||||
expect(row.age).toBeUndefined()
|
||||
expect(row.jobTitle).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1071,12 +1072,12 @@ describe.each([
|
|||
const createdRow = await config.createRow()
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.delete(view.id, [createdRow])
|
||||
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
|
||||
|
||||
await assertRowUsage(rowUsage - 1)
|
||||
|
||||
await config.api.row.get(tableId, createdRow._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1097,17 +1098,17 @@ describe.each([
|
|||
])
|
||||
const rowUsage = await getRowUsage()
|
||||
|
||||
await config.api.row.delete(view.id, [rows[0], rows[2]])
|
||||
await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
|
||||
|
||||
await assertRowUsage(rowUsage - 2)
|
||||
|
||||
await config.api.row.get(tableId, rows[0]._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[2]._id!, {
|
||||
expectStatus: 404,
|
||||
status: 404,
|
||||
})
|
||||
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
|
||||
await config.api.row.get(tableId, rows[1]._id!, { status: 200 })
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -1154,8 +1155,8 @@ describe.each([
|
|||
const createViewResponse = await config.createView()
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.body).toEqual({
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
_viewId: createViewResponse.id,
|
||||
|
@ -1206,8 +1207,8 @@ describe.each([
|
|||
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(5)
|
||||
expect(response.body).toEqual({
|
||||
expect(response.rows).toHaveLength(5)
|
||||
expect(response).toEqual({
|
||||
rows: expect.arrayContaining(
|
||||
expectedRows.map(r => ({
|
||||
_viewId: createViewResponse.id,
|
||||
|
@ -1328,8 +1329,8 @@ describe.each([
|
|||
createViewResponse.id
|
||||
)
|
||||
|
||||
expect(response.body.rows).toHaveLength(4)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
|
@ -1357,8 +1358,8 @@ describe.each([
|
|||
}
|
||||
)
|
||||
|
||||
expect(response.body.rows).toHaveLength(4)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(4)
|
||||
expect(response.rows).toEqual(
|
||||
expected.map(name => expect.objectContaining({ name }))
|
||||
)
|
||||
}
|
||||
|
@ -1382,8 +1383,8 @@ describe.each([
|
|||
})
|
||||
const response = await config.api.viewV2.search(view.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.body.rows).toEqual(
|
||||
expect(response.rows).toHaveLength(10)
|
||||
expect(response.rows).toEqual(
|
||||
expect.arrayContaining(
|
||||
rows.map(r => ({
|
||||
...(isInternal
|
||||
|
@ -1402,7 +1403,7 @@ describe.each([
|
|||
const createViewResponse = await config.createView()
|
||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
||||
|
||||
expect(response.body.rows).toHaveLength(0)
|
||||
expect(response.rows).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("respects the limit parameter", async () => {
|
||||
|
@ -1417,7 +1418,7 @@ describe.each([
|
|||
query: {},
|
||||
})
|
||||
|
||||
expect(response.body.rows).toHaveLength(limit)
|
||||
expect(response.rows).toHaveLength(limit)
|
||||
})
|
||||
|
||||
it("can handle pagination", async () => {
|
||||
|
@ -1426,7 +1427,7 @@ describe.each([
|
|||
|
||||
const createViewResponse = await config.createView()
|
||||
const allRows = (await config.api.viewV2.search(createViewResponse.id))
|
||||
.body.rows
|
||||
.rows
|
||||
|
||||
const firstPageResponse = await config.api.viewV2.search(
|
||||
createViewResponse.id,
|
||||
|
@ -1436,7 +1437,7 @@ describe.each([
|
|||
query: {},
|
||||
}
|
||||
)
|
||||
expect(firstPageResponse.body).toEqual({
|
||||
expect(firstPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(0, 4)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
|
@ -1448,12 +1449,12 @@ describe.each([
|
|||
{
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: firstPageResponse.body.bookmark,
|
||||
bookmark: firstPageResponse.bookmark,
|
||||
|
||||
query: {},
|
||||
}
|
||||
)
|
||||
expect(secondPageResponse.body).toEqual({
|
||||
expect(secondPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(4, 8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: true,
|
||||
|
@ -1465,11 +1466,11 @@ describe.each([
|
|||
{
|
||||
paginate: true,
|
||||
limit: 4,
|
||||
bookmark: secondPageResponse.body.bookmark,
|
||||
bookmark: secondPageResponse.bookmark,
|
||||
query: {},
|
||||
}
|
||||
)
|
||||
expect(lastPageResponse.body).toEqual({
|
||||
expect(lastPageResponse).toEqual({
|
||||
rows: expect.arrayContaining(allRows.slice(8)),
|
||||
totalRows: isInternal ? 10 : undefined,
|
||||
hasNextPage: false,
|
||||
|
@ -1489,7 +1490,7 @@ describe.each([
|
|||
email: "joe@joe.com",
|
||||
roles: {},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
expect(response.message).toBe("Cannot create new user entry.")
|
||||
})
|
||||
|
@ -1516,58 +1517,52 @@ describe.each([
|
|||
|
||||
it("does not allow public users to fetch by default", async () => {
|
||||
await config.publish()
|
||||
await config.api.viewV2.search(viewId, undefined, {
|
||||
expectStatus: 403,
|
||||
usePublicUser: true,
|
||||
await config.api.viewV2.publicSearch(viewId, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are explicit", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: viewId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
const response = await config.api.viewV2.publicSearch(viewId)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("allow public users to fetch when permissions are inherited", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: tableId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
const response = await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
})
|
||||
const response = await config.api.viewV2.publicSearch(viewId)
|
||||
|
||||
expect(response.body.rows).toHaveLength(10)
|
||||
expect(response.rows).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("respects inherited permissions, not allowing not public views from public tables", async () => {
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: tableId,
|
||||
})
|
||||
await config.api.permission.set({
|
||||
await config.api.permission.add({
|
||||
roleId: roles.BUILTIN_ROLE_IDS.POWER,
|
||||
level: PermissionLevel.READ,
|
||||
resourceId: viewId,
|
||||
})
|
||||
await config.publish()
|
||||
|
||||
await config.api.viewV2.search(viewId, undefined, {
|
||||
usePublicUser: true,
|
||||
expectStatus: 403,
|
||||
await config.api.viewV2.publicSearch(viewId, undefined, {
|
||||
status: 403,
|
||||
})
|
||||
})
|
||||
})
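These permission tests switch from `permission.set` to `permission.add` and from `viewV2.search(..., { usePublicUser: true })` to a dedicated `viewV2.publicSearch` helper. A short sketch of the new pattern, assuming the roles and PermissionLevel imports used above:

  await config.api.permission.add({
    roleId: roles.BUILTIN_ROLE_IDS.PUBLIC,
    level: PermissionLevel.READ,
    resourceId: viewId,
  })
  await config.publish()

  // public requests go through the dedicated helper and accept the same
  // expectations object as the other API helpers
  const response = await config.api.viewV2.publicSearch(viewId)
  expect(response.rows).toHaveLength(10)
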
|
||||
|
@ -1754,7 +1749,7 @@ describe.each([
|
|||
}
|
||||
const row = await config.api.row.save(tableId, rowData)
|
||||
|
||||
const { body: retrieved } = await config.api.row.get(tableId, row._id!)
|
||||
const retrieved = await config.api.row.get(tableId, row._id!)
|
||||
expect(retrieved).toEqual({
|
||||
name: rowData.name,
|
||||
description: rowData.description,
|
||||
|
@ -1781,7 +1776,7 @@ describe.each([
|
|||
}
|
||||
const row = await config.api.row.save(tableId, rowData)
|
||||
|
||||
const { body: retrieved } = await config.api.row.get(tableId, row._id!)
|
||||
const retrieved = await config.api.row.get(tableId, row._id!)
|
||||
expect(retrieved).toEqual({
|
||||
name: rowData.name,
|
||||
description: rowData.description,
|
||||
|
|
|
@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
|
|||
tk.freeze(mocks.date.MOCK_DATE)
|
||||
|
||||
const { basicTable } = setup.structures
|
||||
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
|
||||
|
||||
describe("/tables", () => {
|
||||
let request = setup.getRequest()
|
||||
|
@ -285,6 +286,35 @@ describe("/tables", () => {
expect(res.body.schema.roleId).toBeDefined()
})
})

it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
_add: {
name: "NEW_COLUMN",
},
...basicTable(),
}

const response = await request
.post(`/api/tables`)
.send(saveTableRequest)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)

const expectedResponse = {
...saveTableRequest,
_rev: expect.stringMatching(/^\d-.+/),
_id: expect.stringMatching(/^ta_.+/),
createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {},
}
delete expectedResponse._add

expect(response.status).toBe(200)
expect(response.body).toEqual(expectedResponse)
})
})
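The `_add` property is a transient marker on the save request telling the table endpoint which column was just added; the server consumes it and does not echo it back. A sketch of the request shape, assuming the SaveTableRequest type and raw supertest request used in the test above:

  const saveTableRequest: SaveTableRequest = {
    ...basicTable(),
    _add: {
      name: "NEW_COLUMN",
    },
  }
  const response = await request
    .post(`/api/tables`)
    .send(saveTableRequest)
    .set(config.defaultHeaders())
    .expect(200)
  // the marker is consumed server-side and stripped from the response
  expect(response.body._add).toBeUndefined()
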
|
||||
|
||||
describe("import", () => {
|
||||
|
@ -663,8 +693,7 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const resp = await config.api.row.get(table._id!, testRow._id!)
|
||||
const migratedRow = resp.body as Row
|
||||
const migratedRow = await config.api.row.get(table._id!, testRow._id!)
|
||||
|
||||
expect(migratedRow["user column"]).toBeDefined()
|
||||
expect(migratedRow["user relationship"]).not.toBeDefined()
|
||||
|
@ -716,15 +745,13 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
const row1Migrated = await config.api.row.get(table._id!, row1._id!)
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
const row2Migrated = await config.api.row.get(table._id!, row2._id!)
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[1]._id, users[2]._id])
|
||||
|
@ -773,15 +800,13 @@ describe("/tables", () => {
|
|||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
const row1Migrated = await config.api.row.get(table._id!, row1._id!)
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
const row2Migrated = await config.api.row.get(table._id!, row2._id!)
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
|
||||
users[2]._id,
|
||||
|
@ -831,7 +856,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -846,7 +871,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -861,7 +886,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -880,7 +905,7 @@ describe("/tables", () => {
|
|||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
{ status: 400 }
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -90,7 +90,7 @@ describe("/users", () => {
|
|||
})
|
||||
await config.api.user.update(
|
||||
{ ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER },
|
||||
{ expectStatus: 409 }
|
||||
{ status: 409 }
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -77,21 +77,3 @@ export function getConfig() {
}
return config!
}

export async function switchToSelfHosted(func: any) {
// self hosted stops any attempts to Dynamo
env._set("NODE_ENV", "production")
env._set("SELF_HOSTED", true)
let error
try {
await func()
} catch (err) {
error = err
}
env._set("NODE_ENV", "jest")
env._set("SELF_HOSTED", false)
// don't throw error until after reset
if (error) {
throw error
}
}
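switchToSelfHosted is removed here in favour of the generic `config.withEnv` helper used elsewhere in this diff, which applies the overrides only for the duration of the callback. A minimal sketch of the replacement pattern, assuming the TestConfiguration used by these suites; `doSelfHostedWork` is a hypothetical stand-in for the test body:

  await config.withEnv({ SELF_HOSTED: "true" }, async () => {
    // environment reports self hosted only inside this callback
    await doSelfHostedWork()
  })
  // outside the callback the original environment values are restored
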
|
||||
|
|
|
@ -177,7 +177,7 @@ describe.each([
|
|||
}
|
||||
|
||||
await config.api.viewV2.create(newView, {
|
||||
expectStatus: 201,
|
||||
status: 201,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
@ -275,7 +275,7 @@ describe.each([
|
|||
const tableId = table._id!
|
||||
await config.api.viewV2.update(
|
||||
{ ...view, id: generator.guid() },
|
||||
{ expectStatus: 404 }
|
||||
{ status: 404 }
|
||||
)
|
||||
|
||||
expect(await config.api.table.get(tableId)).toEqual(
|
||||
|
@ -304,7 +304,7 @@ describe.each([
|
|||
},
|
||||
],
|
||||
},
|
||||
{ expectStatus: 404 }
|
||||
{ status: 404 }
|
||||
)
|
||||
|
||||
expect(await config.api.table.get(tableId)).toEqual(
|
||||
|
@ -326,12 +326,10 @@ describe.each([
|
|||
...viewV1,
|
||||
},
|
||||
{
|
||||
expectStatus: 400,
|
||||
handleResponse: r => {
|
||||
expect(r.body).toEqual({
|
||||
message: "Only views V2 can be updated",
|
||||
status: 400,
|
||||
})
|
||||
status: 400,
|
||||
body: {
|
||||
message: "Only views V2 can be updated",
|
||||
status: 400,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
@ -403,7 +401,7 @@ describe.each([
|
|||
} as Record<string, FieldSchema>,
|
||||
},
|
||||
{
|
||||
expectStatus: 200,
|
||||
status: 200,
|
||||
}
|
||||
)
|
||||
})
|
||||
|
|
|
@ -30,9 +30,9 @@ describe("migrations", () => {
|
|||
|
||||
const appId = config.getAppId()
|
||||
|
||||
const response = await config.api.application.getRaw(appId)
|
||||
|
||||
expect(response.headers[Header.MIGRATING_APP]).toBeUndefined()
|
||||
await config.api.application.get(appId, {
|
||||
headersNotPresent: [Header.MIGRATING_APP],
|
||||
})
|
||||
})
|
||||
|
||||
it("accessing an app that has pending migrations will attach the migrating header", async () => {
|
||||
|
@ -46,8 +46,10 @@ describe("migrations", () => {
|
|||
func: async () => {},
|
||||
})
|
||||
|
||||
const response = await config.api.application.getRaw(appId)
|
||||
|
||||
expect(response.headers[Header.MIGRATING_APP]).toEqual(appId)
|
||||
await config.api.application.get(appId, {
|
||||
headers: {
|
||||
[Header.MIGRATING_APP]: appId,
|
||||
},
|
||||
})
|
||||
})
|
||||
})
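The migration tests now assert on response headers through the expectations object instead of reading the raw supertest response. A sketch of both directions, assuming the `config.api.application.get` helper shown above:

  // no pending migrations: the header must be absent
  await config.api.application.get(appId, {
    headersNotPresent: [Header.MIGRATING_APP],
  })
  // pending migrations: the header must echo the app id
  await config.api.application.get(appId, {
    headers: { [Header.MIGRATING_APP]: appId },
  })
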
|
||||
|
|
|
@ -24,7 +24,7 @@ describe("test the create row action", () => {
|
|||
expect(res.id).toBeDefined()
|
||||
expect(res.revision).toBeDefined()
|
||||
expect(res.success).toEqual(true)
|
||||
const gottenRow = await config.getRow(table._id, res.id)
|
||||
const gottenRow = await config.api.row.get(table._id, res.id)
|
||||
expect(gottenRow.name).toEqual("test")
|
||||
expect(gottenRow.description).toEqual("test")
|
||||
})
|
||||
|
|
|
@ -36,7 +36,7 @@ describe("test the update row action", () => {
|
|||
it("should be able to run the action", async () => {
|
||||
const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs)
|
||||
expect(res.success).toEqual(true)
|
||||
const updatedRow = await config.getRow(table._id!, res.id)
|
||||
const updatedRow = await config.api.row.get(table._id!, res.id)
|
||||
expect(updatedRow.name).toEqual("Updated name")
|
||||
expect(updatedRow.description).not.toEqual("")
|
||||
})
|
||||
|
@ -87,8 +87,8 @@ describe("test the update row action", () => {
|
|||
})
|
||||
|
||||
let getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
|
@ -103,8 +103,8 @@ describe("test the update row action", () => {
|
|||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
})
|
||||
|
||||
it("should overwrite links if those links are not set and we ask it do", async () => {
|
||||
|
@ -140,8 +140,8 @@ describe("test the update row action", () => {
|
|||
})
|
||||
|
||||
let getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.body.user2[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user1[0]._id).toEqual(user1._id)
|
||||
expect(getResp.user2[0]._id).toEqual(user2._id)
|
||||
|
||||
let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, {
|
||||
rowId: row._id,
|
||||
|
@ -163,7 +163,7 @@ describe("test the update row action", () => {
|
|||
expect(stepResp.success).toEqual(true)
|
||||
|
||||
getResp = await config.api.row.get(table._id!, row._id!)
|
||||
expect(getResp.body.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.body.user2).toBeUndefined()
|
||||
expect(getResp.user1[0]._id).toEqual(user2._id)
|
||||
expect(getResp.user2).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
|
|
@ -100,7 +100,7 @@ describe("test the link controller", () => {
|
|||
const { _id } = await config.createRow(
|
||||
basicLinkedRow(t1._id!, row._id!, linkField)
|
||||
)
|
||||
return config.getRow(t1._id!, _id!)
|
||||
return config.api.row.get(t1._id!, _id!)
|
||||
}
|
||||
|
||||
it("should be able to confirm if two table schemas are equal", async () => {
|
||||
|
|
|
@ -76,13 +76,16 @@ const environment = {
DEFAULTS.AUTOMATION_THREAD_TIMEOUT > QUERY_THREAD_TIMEOUT
? DEFAULTS.AUTOMATION_THREAD_TIMEOUT
: QUERY_THREAD_TIMEOUT,
SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR,
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB,
SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS,
// SQL
SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
// flags
ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
DISABLE_THREADING: process.env.DISABLE_THREADING,
|
|
|
@ -0,0 +1,363 @@
|
|||
import fetch from "node-fetch"
|
||||
import {
|
||||
generateMakeRequest,
|
||||
MakeRequestResponse,
|
||||
} from "../api/routes/public/tests/utils"
|
||||
import { v4 as uuidv4 } from "uuid"
|
||||
import * as setup from "../api/routes/tests/utilities"
|
||||
import {
|
||||
Datasource,
|
||||
FieldType,
|
||||
Table,
|
||||
TableRequest,
|
||||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
import _ from "lodash"
|
||||
import { databaseTestProviders } from "../integrations/tests/utils"
|
||||
import mysql from "mysql2/promise"
|
||||
import { builderSocket } from "../websockets"
|
||||
// @ts-ignore
|
||||
fetch.mockSearch()
|
||||
|
||||
const config = setup.getConfig()!
|
||||
|
||||
jest.unmock("mysql2/promise")
|
||||
jest.mock("../websockets", () => ({
|
||||
clientAppSocket: jest.fn(),
|
||||
gridAppSocket: jest.fn(),
|
||||
initialise: jest.fn(),
|
||||
builderSocket: {
|
||||
emitTableUpdate: jest.fn(),
|
||||
emitTableDeletion: jest.fn(),
|
||||
emitDatasourceUpdate: jest.fn(),
|
||||
emitDatasourceDeletion: jest.fn(),
|
||||
emitScreenUpdate: jest.fn(),
|
||||
emitAppMetadataUpdate: jest.fn(),
|
||||
emitAppPublish: jest.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
describe("mysql integrations", () => {
|
||||
let makeRequest: MakeRequestResponse,
|
||||
mysqlDatasource: Datasource,
|
||||
primaryMySqlTable: Table
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
const apiKey = await config.generateApiKey()
|
||||
|
||||
makeRequest = generateMakeRequest(apiKey, true)
|
||||
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.mysql.stop()
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
primaryMySqlTable = await config.createTable({
|
||||
name: uuidv4(),
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
name: "id",
|
||||
type: FieldType.AUTO,
|
||||
autocolumn: true,
|
||||
},
|
||||
name: {
|
||||
name: "name",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
description: {
|
||||
name: "description",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
value: {
|
||||
name: "value",
|
||||
type: FieldType.NUMBER,
|
||||
},
|
||||
},
|
||||
sourceId: mysqlDatasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
})
|
||||
|
||||
afterAll(config.end)
|
||||
|
||||
it("validate table schema", async () => {
|
||||
const res = await makeRequest(
|
||||
"get",
|
||||
`/api/datasources/${mysqlDatasource._id}`
|
||||
)
|
||||
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual({
|
||||
config: {
|
||||
database: "mysql",
|
||||
host: mysqlDatasource.config!.host,
|
||||
password: "--secret-value--",
|
||||
port: mysqlDatasource.config!.port,
|
||||
user: "root",
|
||||
},
|
||||
plus: true,
|
||||
source: "MYSQL",
|
||||
type: "datasource_plus",
|
||||
_id: expect.any(String),
|
||||
_rev: expect.any(String),
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
entities: expect.any(Object),
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/datasources/verify", () => {
|
||||
it("should be able to verify the connection", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.mysql.datasource(),
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.mysql.datasource()
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: false,
|
||||
error:
|
||||
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/datasources/info", () => {
|
||||
it("should fetch information about mysql datasource", async () => {
|
||||
const primaryName = primaryMySqlTable.name
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: mysqlDatasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("Integration compatibility with mysql search_path", () => {
|
||||
let client: mysql.Connection, pathDatasource: Datasource
|
||||
const database = "test1"
|
||||
const database2 = "test-2"
|
||||
|
||||
beforeAll(async () => {
|
||||
const dsConfig = await databaseTestProviders.mysql.datasource()
|
||||
const dbConfig = dsConfig.config!
|
||||
|
||||
client = await mysql.createConnection(dbConfig)
|
||||
await client.query(`CREATE DATABASE \`${database}\`;`)
|
||||
await client.query(`CREATE DATABASE \`${database2}\`;`)
|
||||
|
||||
const pathConfig: any = {
|
||||
...dsConfig,
|
||||
config: {
|
||||
...dbConfig,
|
||||
database,
|
||||
},
|
||||
}
|
||||
pathDatasource = await config.api.datasource.create(pathConfig)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await client.query(`DROP DATABASE \`${database}\`;`)
|
||||
await client.query(`DROP DATABASE \`${database2}\`;`)
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("discovers tables from any schema in search path", async () => {
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: pathDatasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
expect(response.body.tableNames).toEqual(
|
||||
expect.arrayContaining(["table1"])
|
||||
)
|
||||
})
|
||||
|
||||
it("does not mix columns from different tables", async () => {
|
||||
const repeated_table_name = "table_same_name"
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
|
||||
)
|
||||
await client.query(
|
||||
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
|
||||
)
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${pathDatasource._id}/schema`,
|
||||
{
|
||||
tablesFilter: [repeated_table_name],
|
||||
}
|
||||
)
|
||||
expect(response.status).toBe(200)
|
||||
expect(
|
||||
response.body.datasource.entities[repeated_table_name].schema
|
||||
).toBeDefined()
|
||||
const schema =
|
||||
response.body.datasource.entities[repeated_table_name].schema
|
||||
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
|
||||
})
|
||||
})
|
||||
|
||||
describe("POST /api/tables/", () => {
|
||||
let client: mysql.Connection
|
||||
const emitDatasourceUpdateMock = jest.fn()
|
||||
|
||||
beforeEach(async () => {
|
||||
client = await mysql.createConnection(
|
||||
(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
).config!
|
||||
)
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
|
||||
const addColumnToTable: TableRequest = {
|
||||
type: "table",
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
name: "table",
|
||||
sourceId: mysqlDatasource._id!,
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
type: FieldType.AUTO,
|
||||
name: "id",
|
||||
autocolumn: true,
|
||||
},
|
||||
new_column: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "new_column",
|
||||
},
|
||||
},
|
||||
_add: {
|
||||
name: "new_column",
|
||||
},
|
||||
}
|
||||
|
||||
jest
|
||||
.spyOn(builderSocket!, "emitDatasourceUpdate")
|
||||
.mockImplementation(emitDatasourceUpdateMock)
|
||||
|
||||
await makeRequest("post", "/api/tables/", addColumnToTable)
|
||||
|
||||
const expectedTable: TableRequest = {
|
||||
...addColumnToTable,
|
||||
schema: {
|
||||
id: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "id",
|
||||
autocolumn: true,
|
||||
constraints: {
|
||||
presence: false,
|
||||
},
|
||||
externalType: "int unsigned",
|
||||
},
|
||||
new_column: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "new_column",
|
||||
autocolumn: false,
|
||||
constraints: {
|
||||
presence: false,
|
||||
},
|
||||
externalType: "float(8,2)",
|
||||
},
|
||||
},
|
||||
created: true,
|
||||
_id: `${mysqlDatasource._id}__table`,
|
||||
}
|
||||
delete expectedTable._add
|
||||
|
||||
expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
|
||||
const emittedDatasource: Datasource =
|
||||
emitDatasourceUpdateMock.mock.calls[0][1]
|
||||
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
|
||||
})
|
||||
|
||||
it("will rename a column", async () => {
|
||||
await makeRequest("post", "/api/tables/", primaryMySqlTable)
|
||||
|
||||
let renameColumnOnTable: TableRequest = {
|
||||
...primaryMySqlTable,
|
||||
schema: {
|
||||
id: {
|
||||
name: "id",
|
||||
type: FieldType.AUTO,
|
||||
autocolumn: true,
|
||||
externalType: "unsigned integer",
|
||||
},
|
||||
name: {
|
||||
name: "name",
|
||||
type: FieldType.STRING,
|
||||
externalType: "text",
|
||||
},
|
||||
description: {
|
||||
name: "description",
|
||||
type: FieldType.STRING,
|
||||
externalType: "text",
|
||||
},
|
||||
age: {
|
||||
name: "age",
|
||||
type: FieldType.NUMBER,
|
||||
externalType: "float(8,2)",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
"/api/tables/",
|
||||
renameColumnOnTable
|
||||
)
|
||||
mysqlDatasource = (
|
||||
await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${mysqlDatasource._id}/schema`
|
||||
)
|
||||
).body.datasource
|
||||
|
||||
expect(response.status).toEqual(200)
|
||||
expect(
|
||||
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
|
||||
).toEqual(["id", "name", "description", "age"])
|
||||
})
|
||||
})
|
||||
})
|
|
@ -398,7 +398,7 @@ describe("postgres integrations", () => {
|
|||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual(updatedRow)
|
||||
|
||||
const persistedRow = await config.getRow(
|
||||
const persistedRow = await config.api.row.get(
|
||||
primaryPostgresTable._id!,
|
||||
row.id
|
||||
)
|
||||
|
@ -1040,28 +1040,37 @@ describe("postgres integrations", () => {
|
|||
|
||||
describe("POST /api/datasources/verify", () => {
|
||||
it("should be able to verify the connection", async () => {
|
||||
const response = await config.api.datasource.verify({
|
||||
datasource: await databaseTestProviders.postgres.datasource(),
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.connected).toBe(true)
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.postgres.datasource(),
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.postgres.datasource()
|
||||
const response = await config.api.datasource.verify({
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.connected).toBe(false)
|
||||
expect(response.body.error).toBeDefined()
|
||||
{
|
||||
body: {
|
||||
connected: false,
|
||||
error: 'password authentication failed for user "postgres"',
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -1,11 +1,15 @@
import { QueryJson, Datasource } from "@budibase/types"
import {
QueryJson,
Datasource,
DatasourcePlusQueryResponse,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"

export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
) {
): DatasourcePlusQueryResponse {
datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source)
// query is the opinionated function
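The explicit DatasourcePlusQueryResponse return type (also applied to the datasource-plus query() implementations later in this diff) replaces an implicit any on callers. A hedged caller sketch, assuming the promise resolves to the row array the callers in this diff treat it as; `datasource` and `queryJson` come from surrounding context:

  import { Row } from "@budibase/types"

  const rows = (await makeExternalQuery(datasource, queryJson)) as Row[]
  for (const row of rows) {
    console.log(row._id)
  }
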
|
||||
|
|
|
@ -12,12 +12,13 @@ import {
|
|||
} from "@budibase/types"
|
||||
import environment from "../../environment"
|
||||
|
||||
type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
|
||||
|
||||
const envLimit = environment.SQL_MAX_ROWS
|
||||
? parseInt(environment.SQL_MAX_ROWS)
|
||||
: null
|
||||
const BASE_LIMIT = envLimit || 5000
|
||||
|
||||
type KnexQuery = Knex.QueryBuilder | Knex
|
||||
// these are invalid dates sent by the client, need to convert them to a real max date
|
||||
const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
|
||||
const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
|
||||
|
@ -127,10 +128,15 @@ class InternalBuilder {

// right now we only do filters on the specific table being queried
addFilters(
query: KnexQuery,
query: Knex.QueryBuilder,
filters: SearchFilters | undefined,
opts: { relationship?: boolean; tableName?: string }
): KnexQuery {
tableName: string,
opts: { aliases?: Record<string, string>; relationship?: boolean }
): Knex.QueryBuilder {
function getTableName(name: string) {
const alias = opts.aliases?.[name]
return alias || name
}
function iterate(
structure: { [key: string]: any },
fn: (key: string, value: any) => void

@ -139,10 +145,11 @@ class InternalBuilder {
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
if (!opts.relationship && !isRelationshipField) {
fn(`${opts.tableName}.${updatedKey}`, value)
fn(`${getTableName(tableName)}.${updatedKey}`, value)
}
if (opts.relationship && isRelationshipField) {
fn(updatedKey, value)
const [filterTableName, property] = updatedKey.split(".")
fn(`${getTableName(filterTableName)}.${property}`, value)
}
}
}
@ -314,32 +321,47 @@ class InternalBuilder {
|
|||
return query
|
||||
}
|
||||
|
||||
addSorting(query: KnexQuery, json: QueryJson): KnexQuery {
|
||||
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
|
||||
let { sort, paginate } = json
|
||||
const table = json.meta?.table
|
||||
const aliases = json.tableAliases
|
||||
const aliased =
|
||||
table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
|
||||
if (sort && Object.keys(sort || {}).length > 0) {
|
||||
for (let [key, value] of Object.entries(sort)) {
|
||||
const direction =
|
||||
value.direction === SortDirection.ASCENDING ? "asc" : "desc"
|
||||
query = query.orderBy(`${table?.name}.${key}`, direction)
|
||||
query = query.orderBy(`${aliased}.${key}`, direction)
|
||||
}
|
||||
} else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
|
||||
// @ts-ignore
|
||||
query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
|
||||
query = query.orderBy(`${aliased}.${table?.primary[0]}`)
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
||||
tableNameWithSchema(
|
||||
tableName: string,
|
||||
opts?: { alias?: string; schema?: string }
|
||||
) {
|
||||
let withSchema = opts?.schema ? `${opts.schema}.${tableName}` : tableName
|
||||
if (opts?.alias) {
|
||||
withSchema += ` as ${opts.alias}`
|
||||
}
|
||||
return withSchema
|
||||
}
|
||||
|
||||
addRelationships(
|
||||
query: KnexQuery,
|
||||
query: Knex.QueryBuilder,
|
||||
fromTable: string,
|
||||
relationships: RelationshipsJson[] | undefined,
|
||||
schema: string | undefined
|
||||
): KnexQuery {
|
||||
schema: string | undefined,
|
||||
aliases?: Record<string, string>
|
||||
): Knex.QueryBuilder {
|
||||
if (!relationships) {
|
||||
return query
|
||||
}
|
||||
const tableSets: Record<string, [any]> = {}
|
||||
const tableSets: Record<string, [RelationshipsJson]> = {}
|
||||
// aggregate into table sets (all the same to tables)
|
||||
for (let relationship of relationships) {
|
||||
const keyObj: { toTable: string; throughTable: string | undefined } = {
|
||||
|
@ -358,10 +380,17 @@ class InternalBuilder {
|
|||
}
|
||||
for (let [key, relationships] of Object.entries(tableSets)) {
|
||||
const { toTable, throughTable } = JSON.parse(key)
|
||||
const toTableWithSchema = schema ? `${schema}.${toTable}` : toTable
|
||||
const throughTableWithSchema = schema
|
||||
? `${schema}.${throughTable}`
|
||||
: throughTable
|
||||
const toAlias = aliases?.[toTable] || toTable,
|
||||
throughAlias = aliases?.[throughTable] || throughTable,
|
||||
fromAlias = aliases?.[fromTable] || fromTable
|
||||
let toTableWithSchema = this.tableNameWithSchema(toTable, {
|
||||
alias: toAlias,
|
||||
schema,
|
||||
})
|
||||
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
|
||||
alias: throughAlias,
|
||||
schema,
|
||||
})
|
||||
if (!throughTable) {
|
||||
// @ts-ignore
|
||||
query = query.leftJoin(toTableWithSchema, function () {
|
||||
|
@ -369,7 +398,7 @@ class InternalBuilder {
|
|||
const from = relationship.from,
|
||||
to = relationship.to
|
||||
// @ts-ignore
|
||||
this.orOn(`${fromTable}.${from}`, "=", `${toTable}.${to}`)
|
||||
this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
|
@ -381,9 +410,9 @@ class InternalBuilder {
|
|||
const from = relationship.from
|
||||
// @ts-ignore
|
||||
this.orOn(
|
||||
`${fromTable}.${fromPrimary}`,
|
||||
`${fromAlias}.${fromPrimary}`,
|
||||
"=",
|
||||
`${throughTable}.${from}`
|
||||
`${throughAlias}.${from}`
|
||||
)
|
||||
}
|
||||
})
|
||||
|
@ -392,7 +421,7 @@ class InternalBuilder {
|
|||
const toPrimary = relationship.toPrimary
|
||||
const to = relationship.to
|
||||
// @ts-ignore
|
||||
this.orOn(`${toTable}.${toPrimary}`, `${throughTable}.${to}`)
|
||||
this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -400,12 +429,25 @@ class InternalBuilder {
return query.limit(BASE_LIMIT)
}

create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body } = json
let query: KnexQuery = knex(endpoint.entityId)
knexWithAlias(
knex: Knex,
endpoint: QueryJson["endpoint"],
aliases?: QueryJson["tableAliases"]
): Knex.QueryBuilder {
const tableName = endpoint.entityId
const tableAliased = aliases?.[tableName]
? `${tableName} as ${aliases?.[tableName]}`
: tableName
let query = knex(tableAliased)
if (endpoint.schema) {
query = query.withSchema(endpoint.schema)
}
return query
}

create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body } = json
let query = this.knexWithAlias(knex, endpoint)
const parsedBody = parseBody(body)
// make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) {
@ -424,10 +466,7 @@ class InternalBuilder {
|
|||
|
||||
bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder {
|
||||
const { endpoint, body } = json
|
||||
let query: KnexQuery = knex(endpoint.entityId)
|
||||
if (endpoint.schema) {
|
||||
query = query.withSchema(endpoint.schema)
|
||||
}
|
||||
let query = this.knexWithAlias(knex, endpoint)
|
||||
if (!Array.isArray(body)) {
|
||||
return query
|
||||
}
|
||||
|
@ -435,8 +474,10 @@ class InternalBuilder {
|
|||
return query.insert(parsedBody)
|
||||
}
|
||||
|
||||
read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
|
||||
let { endpoint, resource, filters, paginate, relationships } = json
|
||||
read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
|
||||
let { endpoint, resource, filters, paginate, relationships, tableAliases } =
|
||||
json
|
||||
|
||||
const tableName = endpoint.entityId
|
||||
// select all if not specified
|
||||
if (!resource) {
|
||||
|
@ -462,21 +503,20 @@ class InternalBuilder {
|
|||
foundLimit = paginate.limit
|
||||
}
|
||||
// start building the query
|
||||
let query: KnexQuery = knex(tableName).limit(foundLimit)
|
||||
if (endpoint.schema) {
|
||||
query = query.withSchema(endpoint.schema)
|
||||
}
|
||||
let query = this.knexWithAlias(knex, endpoint, tableAliases)
|
||||
query = query.limit(foundLimit)
|
||||
if (foundOffset) {
|
||||
query = query.offset(foundOffset)
|
||||
}
|
||||
query = this.addFilters(query, filters, { tableName })
|
||||
query = this.addFilters(query, filters, tableName, {
|
||||
aliases: tableAliases,
|
||||
})
|
||||
// add sorting to pre-query
|
||||
query = this.addSorting(query, json)
|
||||
// @ts-ignore
|
||||
let preQuery: KnexQuery = knex({
|
||||
// @ts-ignore
|
||||
[tableName]: query,
|
||||
}).select(selectStatement)
|
||||
const alias = tableAliases?.[tableName] || tableName
|
||||
let preQuery = knex({
|
||||
[alias]: query,
|
||||
} as any).select(selectStatement) as any
|
||||
// have to add after as well (this breaks MS-SQL)
|
||||
if (this.client !== SqlClient.MS_SQL) {
|
||||
preQuery = this.addSorting(preQuery, json)
|
||||
|
@ -486,19 +526,22 @@ class InternalBuilder {
|
|||
preQuery,
|
||||
tableName,
|
||||
relationships,
|
||||
endpoint.schema
|
||||
endpoint.schema,
|
||||
tableAliases
|
||||
)
|
||||
return this.addFilters(query, filters, { relationship: true })
|
||||
return this.addFilters(query, filters, tableName, {
|
||||
relationship: true,
|
||||
aliases: tableAliases,
|
||||
})
|
||||
}
|
||||
|
||||
update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
|
||||
const { endpoint, body, filters } = json
|
||||
let query: KnexQuery = knex(endpoint.entityId)
|
||||
if (endpoint.schema) {
|
||||
query = query.withSchema(endpoint.schema)
|
||||
}
|
||||
const { endpoint, body, filters, tableAliases } = json
|
||||
let query = this.knexWithAlias(knex, endpoint, tableAliases)
|
||||
const parsedBody = parseBody(body)
|
||||
query = this.addFilters(query, filters, { tableName: endpoint.entityId })
|
||||
query = this.addFilters(query, filters, endpoint.entityId, {
|
||||
aliases: tableAliases,
|
||||
})
|
||||
// mysql can't use returning
|
||||
if (opts.disableReturning) {
|
||||
return query.update(parsedBody)
|
||||
|
@ -508,12 +551,11 @@ class InternalBuilder {
|
|||
}
|
||||
|
||||
delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
|
||||
const { endpoint, filters } = json
|
||||
let query: KnexQuery = knex(endpoint.entityId)
|
||||
if (endpoint.schema) {
|
||||
query = query.withSchema(endpoint.schema)
|
||||
}
|
||||
query = this.addFilters(query, filters, { tableName: endpoint.entityId })
|
||||
const { endpoint, filters, tableAliases } = json
|
||||
let query = this.knexWithAlias(knex, endpoint, tableAliases)
|
||||
query = this.addFilters(query, filters, endpoint.entityId, {
|
||||
aliases: tableAliases,
|
||||
})
|
||||
// mysql can't use returning
|
||||
if (opts.disableReturning) {
|
||||
return query.delete()
|
||||
|
@ -547,7 +589,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
query = builder.create(client, json, opts)
|
||||
break
|
||||
case Operation.READ:
|
||||
query = builder.read(client, json, this.limit) as Knex.QueryBuilder
|
||||
query = builder.read(client, json, this.limit)
|
||||
break
|
||||
case Operation.UPDATE:
|
||||
query = builder.update(client, json, opts)
|
||||
|
@ -568,7 +610,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
return query.toSQL().toNative()
|
||||
}
|
||||
|
||||
async getReturningRow(queryFn: Function, json: QueryJson) {
|
||||
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
|
||||
if (!json.extra || !json.extra.idFilter) {
|
||||
return {}
|
||||
}
|
||||
|
@ -580,7 +622,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
resource: {
|
||||
fields: [],
|
||||
},
|
||||
filters: json.extra.idFilter,
|
||||
filters: json.extra?.idFilter,
|
||||
paginate: {
|
||||
limit: 1,
|
||||
},
|
||||
|
@ -609,7 +651,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
// this function recreates the returning functionality of postgres
|
||||
async queryWithReturning(
|
||||
json: QueryJson,
|
||||
queryFn: Function,
|
||||
queryFn: QueryFunction,
|
||||
processFn: Function = (result: any) => result
|
||||
) {
|
||||
const sqlClient = this.getSqlClient()
|
||||
|
@ -646,6 +688,18 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
}
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}

log(query: string, values?: any[]) {
if (!environment.SQL_LOGGING_ENABLE) {
return
}
const sqlClient = this.getSqlClient()
let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"`
if (values) {
string += ` values="${values.join(", ")}"`
}
console.log(string)
}
}

export default SqlQueryBuilder
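Two of the additions above are easiest to see with a concrete QueryJson: tableAliases makes the builder emit `"products" as "a"` style FROM/JOIN clauses and qualify filters with the alias, and log() prints the generated statement when SQL_LOGGING_ENABLE is set. A hedged sketch only; the table, alias and import paths are illustrative (the relative paths mirror the sql.spec.ts file at the end of this diff), and the constructor/`_query` usage is assumed from how the integrations call the builder:

  import { SqlClient } from "../utils"
  import SqlQueryBuilder from "../base/sql"
  import { Operation, QueryJson, SqlQuery } from "@budibase/types"

  // hypothetical table: "products" queried through the alias "a"
  const json = {
    endpoint: { datasourceId: "ds_1", entityId: "products", operation: Operation.READ },
    resource: { fields: ["a.name", "a.price"] },
    filters: { equal: { name: "widget" } },
    tableAliases: { products: "a" },
  } as QueryJson

  const builder = new SqlQueryBuilder(SqlClient.POSTGRES)
  const { sql, bindings } = builder._query(json) as SqlQuery
  // with SQL_LOGGING_ENABLE set, builder.log(sql, bindings) prints the statement
  // and bindings in the `[SQL] [<CLIENT>] query="..." values="..."` format above
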
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
Table,
|
||||
TableRequest,
|
||||
TableSourceType,
|
||||
DatasourcePlusQueryResponse,
|
||||
} from "@budibase/types"
|
||||
import { OAuth2Client } from "google-auth-library"
|
||||
import {
|
||||
|
@ -334,7 +335,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
|
|||
return { tables: externalTables, errors }
|
||||
}
|
||||
|
||||
async query(json: QueryJson) {
|
||||
async query(json: QueryJson): DatasourcePlusQueryResponse {
|
||||
const sheet = json.endpoint.entityId
|
||||
switch (json.endpoint.operation) {
|
||||
case Operation.CREATE:
|
||||
|
@ -384,7 +385,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
|
|||
}
|
||||
try {
|
||||
await this.connect()
|
||||
return await this.client.addSheet({ title: name, headerValues: [name] })
|
||||
await this.client.addSheet({ title: name, headerValues: [name] })
|
||||
} catch (err) {
|
||||
console.error("Error creating new table in google sheets", err)
|
||||
throw err
|
||||
|
@ -450,7 +451,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
|
|||
try {
|
||||
await this.connect()
|
||||
const sheetToDelete = this.client.sheetsByTitle[sheet]
|
||||
return await sheetToDelete.delete()
|
||||
await sheetToDelete.delete()
|
||||
} catch (err) {
|
||||
console.error("Error deleting table in google sheets", err)
|
||||
throw err
|
||||
|
|
|
@ -13,6 +13,7 @@ import {
|
|||
SourceName,
|
||||
Schema,
|
||||
TableSourceType,
|
||||
DatasourcePlusQueryResponse,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
getSqlQuery,
|
||||
|
@ -329,6 +330,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
|
|||
operation === Operation.CREATE
|
||||
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
|
||||
: query.sql
|
||||
this.log(sql, query.bindings)
|
||||
return await request.query(sql)
|
||||
} catch (err: any) {
|
||||
let readableMessage = getReadableErrorMessage(
|
||||
|
@ -492,7 +494,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
|
|||
return response.recordset || [{ deleted: true }]
|
||||
}
|
||||
|
||||
async query(json: QueryJson) {
|
||||
async query(json: QueryJson): DatasourcePlusQueryResponse {
|
||||
const schema = this.config.schema
|
||||
await this.connect()
|
||||
if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
|
||||
|
|
|
@ -12,6 +12,7 @@ import {
  SourceName,
  Schema,
  TableSourceType,
  DatasourcePlusQueryResponse,
} from "@budibase/types"
import {
  getSqlQuery,

@ -260,6 +261,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
      const bindings = opts?.disableCoercion
        ? baseBindings
        : bindingTypeCoerce(baseBindings)
      this.log(query.sql, bindings)
      // Node MySQL is callback based, so we must wrap our call in a promise
      const response = await this.client!.query(query.sql, bindings)
      return response[0]

@ -379,7 +381,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
    return results.length ? results : [{ deleted: true }]
  }

  async query(json: QueryJson) {
  async query(json: QueryJson): DatasourcePlusQueryResponse {
    await this.connect()
    try {
      const queryFn = (query: any) =>

@ -12,6 +12,8 @@ import {
  ConnectionInfo,
  Schema,
  TableSourceType,
  Row,
  DatasourcePlusQueryResponse,
} from "@budibase/types"
import {
  buildExternalTableId,

@ -368,6 +370,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      const options: ExecuteOptions = { autoCommit: true }
      const bindings: BindParameters = query.bindings || []

      this.log(query.sql, bindings)
      return await connection.execute<T>(query.sql, bindings, options)
    } finally {
      if (connection) {

@ -419,7 +422,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      : [{ deleted: true }]
  }

  async query(json: QueryJson) {
  async query(json: QueryJson): DatasourcePlusQueryResponse {
    const operation = this._operation(json)
    const input = this._query(json, { disableReturning: true }) as SqlQuery
    if (Array.isArray(input)) {

@ -443,7 +446,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      if (deletedRows?.rows?.length) {
        return deletedRows.rows
      } else if (response.rows?.length) {
        return response.rows
        return response.rows as Row[]
      } else {
        // get the last row that was updated
        if (

@ -454,7 +457,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
          const lastRow = await this.internalQuery({
            sql: `SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`,
          })
          return lastRow.rows
          return lastRow.rows as Row[]
        } else {
          return [{ [operation.toLowerCase()]: true }]
        }

@ -12,6 +12,7 @@ import {
  SourceName,
  Schema,
  TableSourceType,
  DatasourcePlusQueryResponse,
} from "@budibase/types"
import {
  getSqlQuery,

@ -268,7 +269,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
      }
    }
    try {
      return await client.query(query.sql, query.bindings || [])
      const bindings = query.bindings || []
      this.log(query.sql, bindings)
      return await client.query(query.sql, bindings)
    } catch (err: any) {
      await this.closeConnection()
      let readableMessage = getReadableErrorMessage(

@ -417,7 +420,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
    return response.rows.length ? response.rows : [{ deleted: true }]
  }

  async query(json: QueryJson) {
  async query(json: QueryJson): DatasourcePlusQueryResponse {
    const operation = this._operation(json).toLowerCase()
    const input = this._query(json) as SqlQuery
    if (Array.isArray(input)) {

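All of the SQL integrations above now declare the same `DatasourcePlusQueryResponse` return type, so callers can treat the result uniformly. A rough sketch of the call shape, assuming an already-configured integration instance and a valid QueryJson; the function and interface names here are illustrative, and the exact resolved value still depends on the operation being run:

import { QueryJson, Row, DatasourcePlusQueryResponse } from "@budibase/types"

// Stands in for any of the DatasourcePlus implementations above
// (Postgres, MySQL, MS SQL, Oracle, Google Sheets).
interface QueryableIntegration {
  query(json: QueryJson): DatasourcePlusQueryResponse
}

async function runQuery(integration: QueryableIntegration, json: QueryJson) {
  const result = await integration.query(json)
  // READs resolve to the matching rows; writes may resolve to markers such as [{ created: true }].
  return result as Row[]
}
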
@ -1,3 +1,5 @@
|
|||
import { SqlClient } from "../utils"
|
||||
import Sql from "../base/sql"
|
||||
import {
|
||||
Operation,
|
||||
QueryJson,
|
||||
|
@ -6,9 +8,6 @@ import {
|
|||
FieldType,
|
||||
} from "@budibase/types"
|
||||
|
||||
const Sql = require("../base/sql").default
|
||||
const { SqlClient } = require("../utils")
|
||||
|
||||
const TABLE_NAME = "test"
|
||||
|
||||
function endpoint(table: any, operation: any) {
|
||||
|
@ -42,7 +41,7 @@ function generateReadJson({
|
|||
schema: {},
|
||||
name: table || TABLE_NAME,
|
||||
primary: ["id"],
|
||||
},
|
||||
} as any,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -519,7 +518,7 @@ describe("SQL query builder", () => {
|
|||
const query = sql._query(generateRelationshipJson({ schema: "production" }))
|
||||
expect(query).toEqual({
|
||||
bindings: [500, 5000],
|
||||
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
|
||||
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -527,7 +526,7 @@ describe("SQL query builder", () => {
|
|||
const query = sql._query(generateRelationshipJson())
|
||||
expect(query).toEqual({
|
||||
bindings: [500, 5000],
|
||||
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
|
||||
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -537,7 +536,7 @@ describe("SQL query builder", () => {
|
|||
)
|
||||
expect(query).toEqual({
|
||||
bindings: [500, 5000],
|
||||
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" on "products"."product_id" = "stocks"."product_id" limit $2`,
|
||||
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`,
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -733,7 +732,7 @@ describe("SQL query builder", () => {
|
|||
},
|
||||
meta: {
|
||||
table: oldTable,
|
||||
tables: [oldTable],
|
||||
tables: { [oldTable.name]: oldTable },
|
||||
renamed: {
|
||||
old: "name",
|
||||
updated: "first_name",
|
||||
|
|
|
@ -0,0 +1,227 @@
|
|||
import { QueryJson } from "@budibase/types"
|
||||
import { join } from "path"
|
||||
import Sql from "../base/sql"
|
||||
import { SqlClient } from "../utils"
|
||||
import AliasTables from "../../api/controllers/row/alias"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import { Knex } from "knex"
|
||||
|
||||
function multiline(sql: string) {
|
||||
return sql.replace(/\n/g, "").replace(/ +/g, " ")
|
||||
}
|
||||
|
||||
describe("Captures of real examples", () => {
|
||||
const limit = 5000
|
||||
const relationshipLimit = 100
|
||||
|
||||
function getJson(name: string): QueryJson {
|
||||
return require(join(__dirname, "sqlQueryJson", name)) as QueryJson
|
||||
}
|
||||
|
||||
describe("create", () => {
|
||||
it("should create a row with relationships", () => {
|
||||
const queryJson = getJson("createWithRelationships.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: ["A Street", 34, "London", "A", "B", "designer", 1990],
|
||||
sql: multiline(`insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year")
|
||||
values ($1, $2, $3, $4, $5, $6, $7) returning *`),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("read", () => {
|
||||
it("should handle basic retrieval with relationships", () => {
|
||||
const queryJson = getJson("basicFetchWithRelationships.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [relationshipLimit, limit],
|
||||
sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
|
||||
"a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
|
||||
"a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
|
||||
"b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
|
||||
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
|
||||
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
|
||||
from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a"
|
||||
left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
|
||||
order by "a"."firstname" asc limit $2`),
|
||||
})
|
||||
})
|
||||
|
||||
it("should handle filtering by relationship", () => {
|
||||
const queryJson = getJson("filterByRelationship.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [relationshipLimit, "assembling", limit],
|
||||
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
|
||||
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
|
||||
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
|
||||
from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a"
|
||||
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
|
||||
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2
|
||||
order by "a"."productname" asc limit $3`),
|
||||
})
|
||||
})
|
||||
|
||||
it("should handle fetching many to many relationships", () => {
|
||||
const queryJson = getJson("fetchManyToMany.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [relationshipLimit, limit],
|
||||
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
|
||||
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
|
||||
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
|
||||
from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a"
|
||||
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
|
||||
left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
|
||||
order by "a"."productname" asc limit $2`),
|
||||
})
|
||||
})
|
||||
|
||||
it("should handle enrichment of rows", () => {
|
||||
const queryJson = getJson("enrichRelationship.json")
|
||||
const filters = queryJson.filters?.oneOf?.taskid as number[]
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [...filters, limit, limit],
|
||||
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname",
|
||||
"a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
|
||||
"b"."productname" as "b.productname", "b"."productid" as "b.productid"
|
||||
from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a"
|
||||
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
|
||||
left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`),
|
||||
})
|
||||
})
|
||||
|
||||
it("should manage query with many relationship filters", () => {
|
||||
const queryJson = getJson("manyRelationshipFilters.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
const filters = queryJson.filters
|
||||
const notEqualsValue = Object.values(filters?.notEqual!)[0]
|
||||
const rangeValue = Object.values(filters?.range!)[0]
|
||||
const equalValue = Object.values(filters?.equal!)[0]
|
||||
|
||||
expect(query).toEqual({
|
||||
bindings: [
|
||||
notEqualsValue,
|
||||
relationshipLimit,
|
||||
rangeValue.low,
|
||||
rangeValue.high,
|
||||
equalValue,
|
||||
limit,
|
||||
],
|
||||
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid",
|
||||
"a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname",
|
||||
"b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
|
||||
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
|
||||
"c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
|
||||
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
|
||||
"c"."city" as "c.city", "c"."lastname" as "c.lastname"
|
||||
from (select * from "tasks" as "a" where not "a"."completed" = $1
|
||||
order by "a"."taskname" asc limit $2) as "a"
|
||||
left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
|
||||
left join "products" as "b" on "b"."productid" = "d"."productid"
|
||||
left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
|
||||
where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("update", () => {
|
||||
it("should handle performing a simple update", () => {
|
||||
const queryJson = getJson("updateSimple.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5],
|
||||
sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4,
|
||||
"type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`),
|
||||
})
|
||||
})
|
||||
|
||||
it("should handle performing an update of relationships", () => {
|
||||
const queryJson = getJson("updateRelationship.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5],
|
||||
sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4,
|
||||
"type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("delete", () => {
|
||||
it("should handle deleting with relationships", () => {
|
||||
const queryJson = getJson("deleteSimple.json")
|
||||
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
|
||||
expect(query).toEqual({
|
||||
bindings: ["ddd", ""],
|
||||
sql: multiline(`delete from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2
|
||||
returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("returning (everything bar Postgres)", () => {
|
||||
it("should be able to handle row returning", () => {
|
||||
const queryJson = getJson("createSimple.json")
|
||||
const SQL = new Sql(SqlClient.MS_SQL, limit)
|
||||
let query = SQL._query(queryJson, { disableReturning: true })
|
||||
expect(query).toEqual({
|
||||
sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
|
||||
bindings: [22, "Test"],
|
||||
})
|
||||
|
||||
// now check returning
|
||||
let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
|
||||
SQL.getReturningRow((input: Knex.SqlNative) => {
|
||||
returningQuery = input
|
||||
}, queryJson)
|
||||
expect(returningQuery).toEqual({
|
||||
sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
|
||||
bindings: [1, "Test", 22],
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("check max character aliasing", () => {
|
||||
it("should handle over 'z' max character alias", () => {
|
||||
const tableNames = []
|
||||
for (let i = 0; i < 100; i++) {
|
||||
tableNames.push(generator.guid())
|
||||
}
|
||||
const aliasing = new AliasTables(tableNames)
|
||||
let alias: string = ""
|
||||
for (let table of tableNames) {
|
||||
alias = aliasing.getAlias(table)
|
||||
}
|
||||
expect(alias).toEqual("cv")
|
||||
})
|
||||
})
|
||||
|
||||
describe("check some edge cases", () => {
|
||||
const tableNames = ["hello", "world"]
|
||||
|
||||
it("should handle quoted table names", () => {
|
||||
const aliasing = new AliasTables(tableNames)
|
||||
const aliased = aliasing.aliasField(`"hello"."field"`)
|
||||
expect(aliased).toEqual(`"a"."field"`)
|
||||
})
|
||||
|
||||
it("should handle quoted table names with graves", () => {
|
||||
const aliasing = new AliasTables(tableNames)
|
||||
const aliased = aliasing.aliasField("`hello`.`world`")
|
||||
expect(aliased).toEqual("`a`.`world`")
|
||||
})
|
||||
|
||||
it("should handle table names in table names correctly", () => {
|
||||
const tableNames = ["he", "hell", "hello"]
|
||||
const aliasing = new AliasTables(tableNames)
|
||||
const aliased1 = aliasing.aliasField("`he`.`world`")
|
||||
const aliased2 = aliasing.aliasField("`hell`.`world`")
|
||||
const aliased3 = aliasing.aliasField("`hello`.`world`")
|
||||
expect(aliased1).toEqual("`a`.`world`")
|
||||
expect(aliased2).toEqual("`b`.`world`")
|
||||
expect(aliased3).toEqual("`c`.`world`")
|
||||
})
|
||||
})
|
||||
})
|
|
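The aliasing behaviour exercised in the tests above can be summarised with a short sketch (import path and constructor usage as in the test file, expected values taken from its assertions):

import AliasTables from "../../api/controllers/row/alias"

const aliasing = new AliasTables(["hello", "world"])
aliasing.aliasField(`"hello"."field"`) // => `"a"."field"`
aliasing.aliasField("`hello`.`world`") // => "`a`.`world`"
// Aliases are assigned per table starting at "a" and, presumably, roll over to
// two-character aliases past "z" - which is why 100 distinct tables end on "cv"
// in the max-character test above.
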
@ -0,0 +1,183 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "persons",
|
||||
"operation": "READ"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.year",
|
||||
"a.firstname",
|
||||
"a.personid",
|
||||
"a.address",
|
||||
"a.age",
|
||||
"a.type",
|
||||
"a.city",
|
||||
"a.lastname",
|
||||
"b.executorid",
|
||||
"b.taskname",
|
||||
"b.taskid",
|
||||
"b.completed",
|
||||
"b.qaid",
|
||||
"b.executorid",
|
||||
"b.taskname",
|
||||
"b.taskid",
|
||||
"b.completed",
|
||||
"b.qaid"
|
||||
]
|
||||
},
|
||||
"filters": {},
|
||||
"sort": {
|
||||
"firstname": {
|
||||
"direction": "ASCENDING"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
"limit": 100,
|
||||
"page": 1
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "QA",
|
||||
"from": "personid",
|
||||
"to": "qaid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "executor",
|
||||
"from": "personid",
|
||||
"to": "executorid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"extra": {
|
||||
"idFilter": {}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
|
||||
"primary": [
|
||||
"personid"
|
||||
],
|
||||
"name": "persons",
|
||||
"schema": {
|
||||
"year": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "year",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"firstname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "firstname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"personid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "personid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"address": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "address",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"age": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "age",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"type": "options",
|
||||
"externalType": "USER-DEFINED",
|
||||
"autocolumn": false,
|
||||
"name": "type",
|
||||
"constraints": {
|
||||
"presence": false,
|
||||
"inclusion": [
|
||||
"support",
|
||||
"designer",
|
||||
"programmer",
|
||||
"qa"
|
||||
]
|
||||
}
|
||||
},
|
||||
"city": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "city",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"lastname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "lastname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"QA": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "QA",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "qaid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "ccb68481c80c34217a4540a2c6c27fe46",
|
||||
"foreignKey": "personid"
|
||||
},
|
||||
"executor": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "executor",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "executorid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "c89530b9770d94bec851e062b5cff3001",
|
||||
"foreignKey": "personid",
|
||||
"tableName": "persons"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "firstname",
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"persons": "a",
|
||||
"tasks": "b"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
|
||||
"entityId": "people",
|
||||
"operation": "CREATE"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.name",
|
||||
"a.age"
|
||||
]
|
||||
},
|
||||
"filters": {},
|
||||
"relationships": [],
|
||||
"body": {
|
||||
"name": "Test",
|
||||
"age": 22
|
||||
},
|
||||
"extra": {
|
||||
"idFilter": {
|
||||
"equal": {
|
||||
"name": "Test",
|
||||
"age": 22
|
||||
}
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
|
||||
"type": "table",
|
||||
"sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
|
||||
"sourceType": "external",
|
||||
"primary": [
|
||||
"name",
|
||||
"age"
|
||||
],
|
||||
"name": "people",
|
||||
"schema": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"externalType": "varchar",
|
||||
"autocolumn": false,
|
||||
"name": "name",
|
||||
"constraints": {
|
||||
"presence": true
|
||||
}
|
||||
},
|
||||
"age": {
|
||||
"type": "number",
|
||||
"externalType": "int",
|
||||
"autocolumn": false,
|
||||
"name": "age",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"primaryDisplay": "name"
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"people": "a"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,173 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "persons",
|
||||
"operation": "CREATE"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.year",
|
||||
"a.firstname",
|
||||
"a.personid",
|
||||
"a.address",
|
||||
"a.age",
|
||||
"a.type",
|
||||
"a.city",
|
||||
"a.lastname"
|
||||
]
|
||||
},
|
||||
"filters": {},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "QA",
|
||||
"from": "personid",
|
||||
"to": "qaid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "executor",
|
||||
"from": "personid",
|
||||
"to": "executorid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
"year": 1990,
|
||||
"firstname": "A",
|
||||
"address": "A Street",
|
||||
"age": 34,
|
||||
"type": "designer",
|
||||
"city": "London",
|
||||
"lastname": "B"
|
||||
},
|
||||
"extra": {
|
||||
"idFilter": {}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
|
||||
"primary": [
|
||||
"personid"
|
||||
],
|
||||
"name": "persons",
|
||||
"schema": {
|
||||
"year": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "year",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"firstname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "firstname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"personid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "personid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"address": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "address",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"age": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "age",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"type": "options",
|
||||
"externalType": "USER-DEFINED",
|
||||
"autocolumn": false,
|
||||
"name": "type",
|
||||
"constraints": {
|
||||
"presence": false,
|
||||
"inclusion": [
|
||||
"support",
|
||||
"designer",
|
||||
"programmer",
|
||||
"qa"
|
||||
]
|
||||
}
|
||||
},
|
||||
"city": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "city",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"lastname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "lastname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"QA": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "QA",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "qaid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "ccb68481c80c34217a4540a2c6c27fe46",
|
||||
"foreignKey": "personid"
|
||||
},
|
||||
"executor": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "executor",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "executorid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "c89530b9770d94bec851e062b5cff3001",
|
||||
"foreignKey": "personid",
|
||||
"tableName": "persons"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "firstname",
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"persons": "a",
|
||||
"tasks": "b"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "compositetable",
|
||||
"operation": "DELETE"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.keyparttwo",
|
||||
"a.keypartone",
|
||||
"a.name"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"equal": {
|
||||
"keypartone": "ddd",
|
||||
"keyparttwo": ""
|
||||
}
|
||||
},
|
||||
"relationships": [],
|
||||
"extra": {
|
||||
"idFilter": {
|
||||
"equal": {
|
||||
"keypartone": "ddd",
|
||||
"keyparttwo": ""
|
||||
}
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__compositetable",
|
||||
"primary": [
|
||||
"keypartone",
|
||||
"keyparttwo"
|
||||
],
|
||||
"name": "compositetable",
|
||||
"schema": {
|
||||
"keyparttwo": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "keyparttwo",
|
||||
"constraints": {
|
||||
"presence": true
|
||||
}
|
||||
},
|
||||
"keypartone": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "keypartone",
|
||||
"constraints": {
|
||||
"presence": true
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "name",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "keypartone"
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"compositetable": "a"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"entityId": "tasks",
|
||||
"operation": "READ"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.executorid",
|
||||
"a.taskname",
|
||||
"a.taskid",
|
||||
"a.completed",
|
||||
"a.qaid",
|
||||
"b.productname",
|
||||
"b.productid"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"oneOf": {
|
||||
"taskid": [
|
||||
1,
|
||||
2
|
||||
]
|
||||
}
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "products",
|
||||
"column": "products",
|
||||
"through": "products_tasks",
|
||||
"from": "taskid",
|
||||
"to": "productid",
|
||||
"fromPrimary": "taskid",
|
||||
"toPrimary": "productid",
|
||||
"aliases": {
|
||||
"products_tasks": "c",
|
||||
"products": "b",
|
||||
"tasks": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"extra": {
|
||||
"idFilter": {}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
|
||||
"primary": [
|
||||
"taskid"
|
||||
],
|
||||
"name": "tasks",
|
||||
"schema": {
|
||||
"executorid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "executorid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"taskname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "taskname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"taskid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "taskid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"completed": {
|
||||
"type": "boolean",
|
||||
"externalType": "boolean",
|
||||
"autocolumn": false,
|
||||
"name": "completed",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"qaid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "qaid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"products": {
|
||||
"tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
|
||||
"name": "products",
|
||||
"relationshipType": "many-to-many",
|
||||
"through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks",
|
||||
"type": "link",
|
||||
"_id": "c3b91d00cd36c4cc1a347794725b9adbd",
|
||||
"fieldName": "productid",
|
||||
"throughFrom": "productid",
|
||||
"throughTo": "taskid"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "taskname",
|
||||
"sql": true,
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"tasks": "a",
|
||||
"products": "b",
|
||||
"products_tasks": "c"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,109 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"entityId": "products",
|
||||
"operation": "READ"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.productname",
|
||||
"a.productid",
|
||||
"b.executorid",
|
||||
"b.taskname",
|
||||
"b.taskid",
|
||||
"b.completed",
|
||||
"b.qaid"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"string": {},
|
||||
"fuzzy": {},
|
||||
"range": {},
|
||||
"equal": {},
|
||||
"notEqual": {},
|
||||
"empty": {},
|
||||
"notEmpty": {},
|
||||
"contains": {},
|
||||
"notContains": {},
|
||||
"oneOf": {},
|
||||
"containsAny": {}
|
||||
},
|
||||
"sort": {
|
||||
"productname": {
|
||||
"direction": "ASCENDING"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
"limit": 100,
|
||||
"page": 1
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "tasks",
|
||||
"through": "products_tasks",
|
||||
"from": "productid",
|
||||
"to": "taskid",
|
||||
"fromPrimary": "productid",
|
||||
"toPrimary": "taskid",
|
||||
"aliases": {
|
||||
"products_tasks": "c",
|
||||
"tasks": "b",
|
||||
"products": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"extra": {
|
||||
"idFilter": {}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
|
||||
"primary": [
|
||||
"productid"
|
||||
],
|
||||
"name": "products",
|
||||
"schema": {
|
||||
"productname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "productname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"productid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "productid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"tasks": {
|
||||
"tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
|
||||
"name": "tasks",
|
||||
"relationshipType": "many-to-many",
|
||||
"fieldName": "taskid",
|
||||
"through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks",
|
||||
"throughFrom": "taskid",
|
||||
"throughTo": "productid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "c3b91d00cd36c4cc1a347794725b9adbd"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "productname"
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"products": "a",
|
||||
"tasks": "b",
|
||||
"products_tasks": "c"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,94 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "products",
|
||||
"operation": "READ"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.productname",
|
||||
"a.productid",
|
||||
"b.executorid",
|
||||
"b.taskname",
|
||||
"b.taskid",
|
||||
"b.completed",
|
||||
"b.qaid"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"equal": {
|
||||
"1:tasks.taskname": "assembling"
|
||||
},
|
||||
"onEmptyFilter": "all"
|
||||
},
|
||||
"sort": {
|
||||
"productname": {
|
||||
"direction": "ASCENDING"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
"limit": 100,
|
||||
"page": 1
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "tasks",
|
||||
"through": "products_tasks",
|
||||
"from": "productid",
|
||||
"to": "taskid",
|
||||
"fromPrimary": "productid",
|
||||
"toPrimary": "taskid"
|
||||
}
|
||||
],
|
||||
"tableAliases": {
|
||||
"products_tasks": "c",
|
||||
"tasks": "b",
|
||||
"products": "a"
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products",
|
||||
"primary": [
|
||||
"productid"
|
||||
],
|
||||
"name": "products",
|
||||
"schema": {
|
||||
"productname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "productname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"productid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "productid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"tasks": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "tasks",
|
||||
"relationshipType": "many-to-many",
|
||||
"fieldName": "taskid",
|
||||
"through": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks",
|
||||
"throughFrom": "taskid",
|
||||
"throughTo": "productid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "ca6862d9ba09146dd8a68e3b5b7055a09"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "productname"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,202 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"entityId": "tasks",
|
||||
"operation": "READ"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.executorid",
|
||||
"a.taskname",
|
||||
"a.taskid",
|
||||
"a.completed",
|
||||
"a.qaid",
|
||||
"b.productname",
|
||||
"b.productid",
|
||||
"c.year",
|
||||
"c.firstname",
|
||||
"c.personid",
|
||||
"c.address",
|
||||
"c.age",
|
||||
"c.type",
|
||||
"c.city",
|
||||
"c.lastname",
|
||||
"c.year",
|
||||
"c.firstname",
|
||||
"c.personid",
|
||||
"c.address",
|
||||
"c.age",
|
||||
"c.type",
|
||||
"c.city",
|
||||
"c.lastname"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"string": {},
|
||||
"fuzzy": {},
|
||||
"range": {
|
||||
"1:persons.year": {
|
||||
"low": 1990,
|
||||
"high": 2147483647
|
||||
}
|
||||
},
|
||||
"equal": {
|
||||
"2:products.productname": "Computers"
|
||||
},
|
||||
"notEqual": {
|
||||
"3:completed": true
|
||||
},
|
||||
"empty": {},
|
||||
"notEmpty": {},
|
||||
"contains": {},
|
||||
"notContains": {},
|
||||
"oneOf": {},
|
||||
"containsAny": {},
|
||||
"onEmptyFilter": "all"
|
||||
},
|
||||
"sort": {
|
||||
"taskname": {
|
||||
"direction": "ASCENDING"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
"limit": 100,
|
||||
"page": 1
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "products",
|
||||
"column": "products",
|
||||
"through": "products_tasks",
|
||||
"from": "taskid",
|
||||
"to": "productid",
|
||||
"fromPrimary": "taskid",
|
||||
"toPrimary": "productid",
|
||||
"aliases": {
|
||||
"products_tasks": "d",
|
||||
"products": "b",
|
||||
"tasks": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "persons",
|
||||
"column": "tasksToExecute",
|
||||
"from": "executorid",
|
||||
"to": "personid",
|
||||
"aliases": {
|
||||
"persons": "c",
|
||||
"tasks": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "persons",
|
||||
"column": "tasksToQA",
|
||||
"from": "qaid",
|
||||
"to": "personid",
|
||||
"aliases": {
|
||||
"persons": "c",
|
||||
"tasks": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"extra": {
|
||||
"idFilter": {}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
|
||||
"primary": [
|
||||
"taskid"
|
||||
],
|
||||
"name": "tasks",
|
||||
"schema": {
|
||||
"executorid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"name": "executorid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
},
|
||||
"autocolumn": true,
|
||||
"autoReason": "foreign_key"
|
||||
},
|
||||
"taskname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "taskname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"taskid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "taskid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"completed": {
|
||||
"type": "boolean",
|
||||
"externalType": "boolean",
|
||||
"autocolumn": false,
|
||||
"name": "completed",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"qaid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"name": "qaid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"products": {
|
||||
"tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
|
||||
"name": "products",
|
||||
"relationshipType": "many-to-many",
|
||||
"through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks",
|
||||
"type": "link",
|
||||
"_id": "c3b91d00cd36c4cc1a347794725b9adbd",
|
||||
"fieldName": "productid",
|
||||
"throughFrom": "productid",
|
||||
"throughTo": "taskid"
|
||||
},
|
||||
"tasksToExecute": {
|
||||
"tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons",
|
||||
"name": "tasksToExecute",
|
||||
"relationshipType": "one-to-many",
|
||||
"type": "link",
|
||||
"_id": "c0f440590bda04f28846242156c1dd60b",
|
||||
"foreignKey": "executorid",
|
||||
"fieldName": "personid"
|
||||
},
|
||||
"tasksToQA": {
|
||||
"tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons",
|
||||
"name": "tasksToQA",
|
||||
"relationshipType": "one-to-many",
|
||||
"type": "link",
|
||||
"_id": "c5fdf453a0ba743d58e29491d174c974b",
|
||||
"foreignKey": "qaid",
|
||||
"fieldName": "personid"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "taskname",
|
||||
"sql": true,
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"tasks": "a",
|
||||
"products": "b",
|
||||
"persons": "c",
|
||||
"products_tasks": "d"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,181 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "persons",
|
||||
"operation": "UPDATE"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.year",
|
||||
"a.firstname",
|
||||
"a.personid",
|
||||
"a.address",
|
||||
"a.age",
|
||||
"a.type",
|
||||
"a.city",
|
||||
"a.lastname"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"equal": {
|
||||
"personid": 5
|
||||
}
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "QA",
|
||||
"from": "personid",
|
||||
"to": "qaid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "executor",
|
||||
"from": "personid",
|
||||
"to": "executorid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
"year": 1990,
|
||||
"firstname": "C",
|
||||
"address": "A Street",
|
||||
"age": 34,
|
||||
"type": "designer",
|
||||
"city": "London",
|
||||
"lastname": "B"
|
||||
},
|
||||
"extra": {
|
||||
"idFilter": {
|
||||
"equal": {
|
||||
"personid": 5
|
||||
}
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
|
||||
"primary": [
|
||||
"personid"
|
||||
],
|
||||
"name": "persons",
|
||||
"schema": {
|
||||
"year": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "year",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"firstname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "firstname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"personid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "personid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"address": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "address",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"age": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "age",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"type": "options",
|
||||
"externalType": "USER-DEFINED",
|
||||
"autocolumn": false,
|
||||
"name": "type",
|
||||
"constraints": {
|
||||
"presence": false,
|
||||
"inclusion": [
|
||||
"support",
|
||||
"designer",
|
||||
"programmer",
|
||||
"qa"
|
||||
]
|
||||
}
|
||||
},
|
||||
"city": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "city",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"lastname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "lastname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"QA": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "QA",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "qaid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "ccb68481c80c34217a4540a2c6c27fe46",
|
||||
"foreignKey": "personid"
|
||||
},
|
||||
"executor": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "executor",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "executorid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "c89530b9770d94bec851e062b5cff3001",
|
||||
"foreignKey": "personid",
|
||||
"tableName": "persons"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "firstname",
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"persons": "a",
|
||||
"tasks": "b"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,181 @@
|
|||
{
|
||||
"endpoint": {
|
||||
"datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"entityId": "persons",
|
||||
"operation": "UPDATE"
|
||||
},
|
||||
"resource": {
|
||||
"fields": [
|
||||
"a.year",
|
||||
"a.firstname",
|
||||
"a.personid",
|
||||
"a.address",
|
||||
"a.age",
|
||||
"a.type",
|
||||
"a.city",
|
||||
"a.lastname"
|
||||
]
|
||||
},
|
||||
"filters": {
|
||||
"equal": {
|
||||
"personid": 5
|
||||
}
|
||||
},
|
||||
"relationships": [
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "QA",
|
||||
"from": "personid",
|
||||
"to": "qaid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"tableName": "tasks",
|
||||
"column": "executor",
|
||||
"from": "personid",
|
||||
"to": "executorid",
|
||||
"aliases": {
|
||||
"tasks": "b",
|
||||
"persons": "a"
|
||||
}
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
"year": 1990,
|
||||
"firstname": "C",
|
||||
"address": "A Street",
|
||||
"age": 34,
|
||||
"type": "designer",
|
||||
"city": "London",
|
||||
"lastname": "B"
|
||||
},
|
||||
"extra": {
|
||||
"idFilter": {
|
||||
"equal": {
|
||||
"personid": 5
|
||||
}
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
|
||||
"primary": [
|
||||
"personid"
|
||||
],
|
||||
"name": "persons",
|
||||
"schema": {
|
||||
"year": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "year",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"firstname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "firstname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"personid": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": true,
|
||||
"name": "personid",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"address": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "address",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"age": {
|
||||
"type": "number",
|
||||
"externalType": "integer",
|
||||
"autocolumn": false,
|
||||
"name": "age",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"type": {
|
||||
"type": "options",
|
||||
"externalType": "USER-DEFINED",
|
||||
"autocolumn": false,
|
||||
"name": "type",
|
||||
"constraints": {
|
||||
"presence": false,
|
||||
"inclusion": [
|
||||
"support",
|
||||
"designer",
|
||||
"programmer",
|
||||
"qa"
|
||||
]
|
||||
}
|
||||
},
|
||||
"city": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "city",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"lastname": {
|
||||
"type": "string",
|
||||
"externalType": "character varying",
|
||||
"autocolumn": false,
|
||||
"name": "lastname",
|
||||
"constraints": {
|
||||
"presence": false
|
||||
}
|
||||
},
|
||||
"QA": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "QA",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "qaid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "ccb68481c80c34217a4540a2c6c27fe46",
|
||||
"foreignKey": "personid"
|
||||
},
|
||||
"executor": {
|
||||
"tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks",
|
||||
"name": "executor",
|
||||
"relationshipType": "many-to-one",
|
||||
"fieldName": "executorid",
|
||||
"type": "link",
|
||||
"main": true,
|
||||
"_id": "c89530b9770d94bec851e062b5cff3001",
|
||||
"foreignKey": "personid",
|
||||
"tableName": "persons"
|
||||
}
|
||||
},
|
||||
"sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7",
|
||||
"sourceType": "external",
|
||||
"primaryDisplay": "firstname",
|
||||
"views": {}
|
||||
}
|
||||
},
|
||||
"tableAliases": {
|
||||
"persons": "a",
|
||||
"tasks": "b"
|
||||
}
|
||||
}
|
|
@ -3,12 +3,33 @@ import {
  DatasourcePlus,
  IntegrationBase,
  Schema,
  Table,
} from "@budibase/types"
import * as datasources from "./datasources"
import tableSdk from "../tables"
import { getIntegration } from "../../../integrations"
import { context } from "@budibase/backend-core"

function checkForSchemaErrors(schema: Record<string, Table>) {
  const errors: Record<string, string> = {}
  for (let [tableName, table] of Object.entries(schema)) {
    if (tableName.includes(".")) {
      errors[tableName] = "Table names containing dots are not supported."
    } else {
      const columnNames = Object.keys(table.schema)
      const invalidColumnName = columnNames.find(columnName =>
        columnName.includes(".")
      )
      if (invalidColumnName) {
        errors[
          tableName
        ] = `Column '${invalidColumnName}' is not supported as it contains a dot.`
      }
    }
  }
  return errors
}

export async function buildFilteredSchema(
  datasource: Datasource,
  filter?: string[]

@ -30,16 +51,19 @@ export async function buildFilteredSchema(
      filteredSchema.errors[key] = schema.errors[key]
    }
  }
  return filteredSchema

  return {
    ...filteredSchema,
    errors: {
      ...filteredSchema.errors,
      ...checkForSchemaErrors(filteredSchema.tables),
    },
  }
}

async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
  const connector = (await getConnector(datasource)) as DatasourcePlus
  const externalSchema = await connector.buildSchema(
    datasource._id!,
    datasource.entities!
  )
  return externalSchema
  return await connector.buildSchema(datasource._id!, datasource.entities!)
}

export async function getConnector(

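To make the new validation concrete, a small sketch of what checkForSchemaErrors reports; the function is not exported, so this would only run inside that module, and the table and column names below are made up for illustration:

import { Table } from "@budibase/types"

// Hypothetical schema fetched from an external datasource.
const schema = {
  "sales.orders": { name: "sales.orders", schema: {} },
  customers: {
    name: "customers",
    schema: { "billing.address": { name: "billing.address", type: "string" } },
  },
} as unknown as Record<string, Table>

const errors = checkForSchemaErrors(schema)
// errors => {
//   "sales.orders": "Table names containing dots are not supported.",
//   customers: "Column 'billing.address' is not supported as it contains a dot.",
// }
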
@ -11,7 +11,10 @@ import {
import * as exporters from "../../../../api/controllers/view/exporters"
import sdk from "../../../../sdk"
import { handleRequest } from "../../../../api/controllers/row/external"
import { breakExternalTableId } from "../../../../integrations/utils"
import {
  breakExternalTableId,
  breakRowIdField,
} from "../../../../integrations/utils"
import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "../search"

@ -52,6 +55,15 @@ export async function search(options: SearchParams) {
    }
  }

  // Make sure oneOf _id queries decode the Row IDs
  if (query?.oneOf?._id) {
    const rowIds = query.oneOf._id
    query.oneOf._id = rowIds.map((row: string) => {
      const ids = breakRowIdField(row)
      return ids[0]
    })
  }

  try {
    const table = await sdk.tables.getTable(tableId)
    options = searchInputMapping(table, options)

@ -119,9 +131,7 @@ export async function exportRows(
    requestQuery = {
      oneOf: {
        _id: rowIds.map((row: string) => {
          const ids = JSON.parse(
            decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
          )
          const ids = breakRowIdField(row)
          if (ids.length > 1) {
            throw new HTTPError(
              "Export data does not support composite keys.",

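A quick illustration of the oneOf _id decoding, inferred from the expectations in the new external-search test further down rather than from the helper's implementation:

import { breakRowIdField } from "../../../../integrations/utils"

// "%5B1%5D" is the URI-encoded form of "[1]", i.e. a single-part external row ID.
const encoded = ["%5B1%5D", "%5B4%5D", "%5B8%5D"]
const decoded = encoded.map(row => breakRowIdField(row)[0])
// decoded => [1, 4, 8], matching the ids the test below expects back from search()
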
@ -21,10 +21,11 @@ jest.unmock("mysql2/promise")
|
|||
|
||||
jest.setTimeout(30000)
|
||||
|
||||
describe.skip("external", () => {
|
||||
describe("external search", () => {
|
||||
const config = new TestConfiguration()
|
||||
|
||||
let externalDatasource: Datasource, tableData: Table
|
||||
const rows: Row[] = []
|
||||
|
||||
beforeAll(async () => {
|
||||
const container = await new GenericContainer("mysql")
|
||||
|
@ -89,67 +90,81 @@ describe.skip("external", () => {
        },
      },
    }

    const table = await config.createExternalTable({
      ...tableData,
      sourceId: externalDatasource._id,
    })
    for (let i = 0; i < 10; i++) {
      rows.push(
        await config.createRow({
          tableId: table._id,
          name: generator.first(),
          surname: generator.last(),
          age: generator.age(),
          address: generator.address(),
        })
      )
    }
  })

  describe("search", () => {
    const rows: Row[] = []
    beforeAll(async () => {
      const table = await config.createExternalTable({
        ...tableData,
        sourceId: externalDatasource._id,
      })
      for (let i = 0; i < 10; i++) {
        rows.push(
          await config.createRow({
            tableId: table._id,
            name: generator.first(),
            surname: generator.last(),
            age: generator.age(),
            address: generator.address(),
          })
        )
    it("default search returns all the data", async () => {
      await config.doInContext(config.appId, async () => {
        const tableId = config.table!._id!

        const searchParams: SearchParams = {
          tableId,
          query: {},
        }
        const result = await search(searchParams)

        expect(result.rows).toHaveLength(10)
        expect(result.rows).toEqual(
          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
        )
      })
    })

    it("default search returns all the data", async () => {
      await config.doInContext(config.appId, async () => {
        const tableId = config.table!._id!
    it("querying by fields will always return data attribute columns", async () => {
      await config.doInContext(config.appId, async () => {
        const tableId = config.table!._id!

        const searchParams: SearchParams = {
          tableId,
          query: {},
        }
        const result = await search(searchParams)
        const searchParams: SearchParams = {
          tableId,
          query: {},
          fields: ["name", "age"],
        }
        const result = await search(searchParams)

        expect(result.rows).toHaveLength(10)
        expect(result.rows).toEqual(
          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
        expect(result.rows).toHaveLength(10)
        expect(result.rows).toEqual(
          expect.arrayContaining(
            rows.map(r => ({
              ...expectAnyExternalColsAttributes,
              name: r.name,
              age: r.age,
            }))
        )
      })
        )
      })
    })

    it("querying by fields will always return data attribute columns", async () => {
      await config.doInContext(config.appId, async () => {
        const tableId = config.table!._id!
    it("will decode _id in oneOf query", async () => {
      await config.doInContext(config.appId, async () => {
        const tableId = config.table!._id!

        const searchParams: SearchParams = {
          tableId,
          query: {},
          fields: ["name", "age"],
        }
        const result = await search(searchParams)
        const searchParams: SearchParams = {
          tableId,
          query: {
            oneOf: {
              _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
            },
          },
        }
        const result = await search(searchParams)

        expect(result.rows).toHaveLength(10)
        expect(result.rows).toEqual(
          expect.arrayContaining(
            rows.map(r => ({
              ...expectAnyExternalColsAttributes,
              name: r.name,
              age: r.age,
            }))
          )
        )
      })
        expect(result.rows).toHaveLength(3)
        expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
      })
    })
  })
@ -1,6 +1,5 @@
import {
  FieldType,
  FieldTypeSubtypes,
  SearchParams,
  Table,
  DocumentType,
@ -98,7 +98,10 @@ describe("sdk >> rows >> internal", () => {
        },
      })

      const persistedRow = await config.getRow(table._id!, response.row._id!)
      const persistedRow = await config.api.row.get(
        table._id!,
        response.row._id!
      )
      expect(persistedRow).toEqual({
        ...row,
        type: "row",

@ -157,7 +160,10 @@ describe("sdk >> rows >> internal", () => {
        },
      })

      const persistedRow = await config.getRow(table._id!, response.row._id!)
      const persistedRow = await config.api.row.get(
        table._id!,
        response.row._id!
      )
      expect(persistedRow).toEqual({
        ...row,
        type: "row",
@ -1,12 +1,21 @@
import cloneDeep from "lodash/cloneDeep"
import validateJs from "validate.js"
import { FieldType, Row, Table, TableSchema } from "@budibase/types"
import {
  FieldType,
  QueryJson,
  Row,
  Table,
  TableSchema,
  DatasourcePlusQueryResponse,
} from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.."
import { isRelationshipColumn } from "../../../db/utils"

export async function getDatasourceAndQuery(json: any) {
export async function getDatasourceAndQuery(
  json: QueryJson
): DatasourcePlusQueryResponse {
  const datasourceId = json.endpoint.datasourceId
  const datasource = await sdk.datasources.get(datasourceId)
  return makeExternalQuery(datasource, json)
@ -3,6 +3,7 @@ import {
  Operation,
  RelationshipType,
  RenameColumn,
  AddColumn,
  Table,
  TableRequest,
  ViewV2,

@ -32,7 +33,7 @@ import * as viewSdk from "../../views"
export async function save(
  datasourceId: string,
  update: Table,
  opts?: { tableId?: string; renaming?: RenameColumn }
  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
) {
  let tableToSave: TableRequest = {
    ...update,

@ -165,8 +166,17 @@ export async function save(

  // remove the rename prop
  delete tableToSave._rename

  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
  if (opts?.adding) {
    datasource.entities[tableToSave.name] = (
      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
    ).tables[tableToSave.name]
  } else {
    datasource.entities[tableToSave.name] = tableToSave
  }

  // store it into couch now for budibase reference
  datasource.entities[tableToSave.name] = tableToSave
  await db.put(populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
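For context, an illustrative call of the updated save() signature (a sketch only; the table and column names here are made up):

// Hypothetical call site: the new `adding` option tells save() to rebuild the
// filtered schema so the added column picks up its externalType.
await save(datasource._id!, updatedTable, {
  tableId: updatedTable._id,
  adding: { name: "newColumn" },
})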
@ -712,11 +712,6 @@ export default class TestConfiguration {
    return this.api.row.save(tableId, config)
  }

  async getRow(tableId: string, rowId: string): Promise<Row> {
    const res = await this.api.row.get(tableId, rowId)
    return res.body
  }

  async getRows(tableId: string) {
    if (!tableId && this.table) {
      tableId = this.table._id!
@ -1,193 +1,133 @@
|
|||
import { Response } from "supertest"
|
||||
import {
|
||||
App,
|
||||
PublishResponse,
|
||||
type CreateAppRequest,
|
||||
type FetchAppDefinitionResponse,
|
||||
type FetchAppPackageResponse,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
import { AppStatus } from "../../../db/utils"
|
||||
import { constants } from "@budibase/backend-core"
|
||||
|
||||
export class ApplicationAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
create = async (
|
||||
app: CreateAppRequest,
|
||||
expectations?: Expectations
|
||||
): Promise<App> => {
|
||||
const files = app.templateFile ? { templateFile: app.templateFile } : {}
|
||||
delete app.templateFile
|
||||
return await this._post<App>("/api/applications", {
|
||||
fields: app,
|
||||
files,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
create = async (app: CreateAppRequest): Promise<App> => {
|
||||
const request = this.request
|
||||
.post("/api/applications")
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
for (const key of Object.keys(app)) {
|
||||
request.field(key, (app as any)[key])
|
||||
}
|
||||
|
||||
if (app.templateFile) {
|
||||
request.attach("templateFile", app.templateFile)
|
||||
}
|
||||
|
||||
const result = await request
|
||||
|
||||
if (result.statusCode !== 200) {
|
||||
throw new Error(JSON.stringify(result.body))
|
||||
}
|
||||
|
||||
return result.body as App
|
||||
delete = async (
|
||||
appId: string,
|
||||
expectations?: Expectations
|
||||
): Promise<void> => {
|
||||
await this._delete(`/api/applications/${appId}`, { expectations })
|
||||
}
|
||||
|
||||
delete = async (appId: string): Promise<void> => {
|
||||
await this.request
|
||||
.delete(`/api/applications/${appId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(200)
|
||||
}
|
||||
|
||||
publish = async (
|
||||
appId: string
|
||||
): Promise<{ _id: string; status: string; appUrl: string }> => {
|
||||
// While the publish endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
let headers = {
|
||||
...this.config.defaultHeaders(),
|
||||
[constants.Header.APP_ID]: appId,
|
||||
}
|
||||
const result = await this.request
|
||||
.post(`/api/applications/${appId}/publish`)
|
||||
.set(headers)
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return result.body as { _id: string; status: string; appUrl: string }
|
||||
publish = async (appId: string): Promise<PublishResponse> => {
|
||||
return await this._post<PublishResponse>(
|
||||
`/api/applications/${appId}/publish`,
|
||||
{
|
||||
// While the publish endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
headers: {
|
||||
[constants.Header.APP_ID]: appId,
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
unpublish = async (appId: string): Promise<void> => {
|
||||
await this.request
|
||||
.post(`/api/applications/${appId}/unpublish`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(204)
|
||||
await this._post(`/api/applications/${appId}/unpublish`, {
|
||||
expectations: { status: 204 },
|
||||
})
|
||||
}
|
||||
|
||||
sync = async (
|
||||
appId: string,
|
||||
{ statusCode }: { statusCode: number } = { statusCode: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<{ message: string }> => {
|
||||
const result = await this.request
|
||||
.post(`/api/applications/${appId}/sync`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(statusCode)
|
||||
return result.body
|
||||
return await this._post<{ message: string }>(
|
||||
`/api/applications/${appId}/sync`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
|
||||
getRaw = async (appId: string): Promise<Response> => {
|
||||
// While the appPackage endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
let headers = {
|
||||
...this.config.defaultHeaders(),
|
||||
[constants.Header.APP_ID]: appId,
|
||||
}
|
||||
const result = await this.request
|
||||
.get(`/api/applications/${appId}/appPackage`)
|
||||
.set(headers)
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return result
|
||||
}
|
||||
|
||||
get = async (appId: string): Promise<App> => {
|
||||
const result = await this.getRaw(appId)
|
||||
return result.body.application as App
|
||||
get = async (appId: string, expectations?: Expectations): Promise<App> => {
|
||||
return await this._get<App>(`/api/applications/${appId}`, {
|
||||
// While the get endpoint does take an :appId parameter, it doesn't use
|
||||
// it. It uses the appId from the context.
|
||||
headers: {
|
||||
[constants.Header.APP_ID]: appId,
|
||||
},
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
getDefinition = async (
|
||||
appId: string
|
||||
appId: string,
|
||||
expectations?: Expectations
|
||||
): Promise<FetchAppDefinitionResponse> => {
|
||||
const result = await this.request
|
||||
.get(`/api/applications/${appId}/definition`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return result.body as FetchAppDefinitionResponse
|
||||
return await this._get<FetchAppDefinitionResponse>(
|
||||
`/api/applications/${appId}/definition`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
|
||||
getAppPackage = async (appId: string): Promise<FetchAppPackageResponse> => {
|
||||
const result = await this.request
|
||||
.get(`/api/applications/${appId}/appPackage`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return result.body
|
||||
getAppPackage = async (
|
||||
appId: string,
|
||||
expectations?: Expectations
|
||||
): Promise<FetchAppPackageResponse> => {
|
||||
return await this._get<FetchAppPackageResponse>(
|
||||
`/api/applications/${appId}/appPackage`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
|
||||
update = async (
|
||||
appId: string,
|
||||
app: { name?: string; url?: string }
|
||||
app: { name?: string; url?: string },
|
||||
expectations?: Expectations
|
||||
): Promise<App> => {
|
||||
const request = this.request
|
||||
.put(`/api/applications/${appId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
for (const key of Object.keys(app)) {
|
||||
request.field(key, (app as any)[key])
|
||||
}
|
||||
|
||||
const result = await request
|
||||
|
||||
if (result.statusCode !== 200) {
|
||||
throw new Error(JSON.stringify(result.body))
|
||||
}
|
||||
|
||||
return result.body as App
|
||||
return await this._put<App>(`/api/applications/${appId}`, {
|
||||
fields: app,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
updateClient = async (appId: string): Promise<void> => {
|
||||
// While the updateClient endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
let headers = {
|
||||
...this.config.defaultHeaders(),
|
||||
[constants.Header.APP_ID]: appId,
|
||||
}
|
||||
const response = await this.request
|
||||
.post(`/api/applications/${appId}/client/update`)
|
||||
.set(headers)
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (response.statusCode !== 200) {
|
||||
throw new Error(JSON.stringify(response.body))
|
||||
}
|
||||
updateClient = async (
|
||||
appId: string,
|
||||
expectations?: Expectations
|
||||
): Promise<void> => {
|
||||
await this._post(`/api/applications/${appId}/client/update`, {
|
||||
// While the updateClient endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
headers: {
|
||||
[constants.Header.APP_ID]: appId,
|
||||
},
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
revertClient = async (appId: string): Promise<void> => {
|
||||
// While the revertClient endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
let headers = {
|
||||
...this.config.defaultHeaders(),
|
||||
[constants.Header.APP_ID]: appId,
|
||||
}
|
||||
const response = await this.request
|
||||
.post(`/api/applications/${appId}/client/revert`)
|
||||
.set(headers)
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (response.statusCode !== 200) {
|
||||
throw new Error(JSON.stringify(response.body))
|
||||
}
|
||||
await this._post(`/api/applications/${appId}/client/revert`, {
|
||||
// While the revertClient endpoint does take an :appId parameter, it doesn't
|
||||
// use it. It uses the appId from the context.
|
||||
headers: {
|
||||
[constants.Header.APP_ID]: appId,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fetch = async ({ status }: { status?: AppStatus } = {}): Promise<App[]> => {
|
||||
let query = []
|
||||
if (status) {
|
||||
query.push(`status=${status}`)
|
||||
}
|
||||
|
||||
const result = await this.request
|
||||
.get(`/api/applications${query.length ? `?${query.join("&")}` : ""}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return result.body as App[]
|
||||
return await this._get<App[]>("/api/applications", {
|
||||
query: { status },
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,35 +1,16 @@
import {
  APIError,
  Datasource,
  ProcessAttachmentResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
import { ProcessAttachmentResponse } from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import fs from "fs"

export class AttachmentAPI extends TestAPI {
  constructor(config: TestConfiguration) {
    super(config)
  }

  process = async (
    name: string,
    file: Buffer | fs.ReadStream | string,
    { expectStatus } = { expectStatus: 200 }
    expectations?: Expectations
  ): Promise<ProcessAttachmentResponse> => {
    const result = await this.request
      .post(`/api/attachments/process`)
      .attach("file", file, name)
      .set(this.config.defaultHeaders())

    if (result.statusCode !== expectStatus) {
      throw new Error(
        `Expected status ${expectStatus} but got ${
          result.statusCode
        }, body: ${JSON.stringify(result.body)}`
      )
    }

    return result.body
    return await this._post(`/api/attachments/process`, {
      files: { file: { name, file } },
      expectations,
    })
  }
}
@ -2,42 +2,38 @@ import {
  CreateAppBackupResponse,
  ImportAppBackupResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
import { Expectations, TestAPI } from "./base"

export class BackupAPI extends TestAPI {
  constructor(config: TestConfiguration) {
    super(config)
  }

  exportBasicBackup = async (appId: string) => {
    const result = await this.request
      .post(`/api/backups/export?appId=${appId}`)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /application\/gzip/)
      .expect(200)
    return {
      body: result.body as Buffer,
      headers: result.headers,
  exportBasicBackup = async (appId: string, expectations?: Expectations) => {
    const exp = {
      ...expectations,
      headers: {
        ...expectations?.headers,
        "Content-Type": "application/gzip",
      },
    }
    return await this._post<Buffer>(`/api/backups/export`, {
      query: { appId },
      expectations: exp,
    })
  }

  createBackup = async (appId: string) => {
    const result = await this.request
      .post(`/api/apps/${appId}/backups`)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)
    return result.body as CreateAppBackupResponse
  createBackup = async (appId: string, expectations?: Expectations) => {
    return await this._post<CreateAppBackupResponse>(
      `/api/apps/${appId}/backups`,
      { expectations }
    )
  }

  waitForBackupToComplete = async (appId: string, backupId: string) => {
    for (let i = 0; i < 10; i++) {
      await new Promise(resolve => setTimeout(resolve, 1000))
      const result = await this.request
        .get(`/api/apps/${appId}/backups/${backupId}/file`)
        .set(this.config.defaultHeaders())
      if (result.status === 200) {
      const response = await this._requestRaw(
        "get",
        `/api/apps/${appId}/backups/${backupId}/file`
      )
      if (response.status === 200) {
        return
      }
    }

@ -46,13 +42,12 @@ export class BackupAPI {

  importBackup = async (
    appId: string,
    backupId: string
    backupId: string,
    expectations?: Expectations
  ): Promise<ImportAppBackupResponse> => {
    const result = await this.request
      .post(`/api/apps/${appId}/backups/${backupId}/import`)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)
    return result.body as ImportAppBackupResponse
    return await this._post<ImportAppBackupResponse>(
      `/api/apps/${appId}/backups/${backupId}/import`,
      { expectations }
    )
  }
}
@ -1,17 +1,196 @@
import TestConfiguration from "../TestConfiguration"
import { SuperTest, Test } from "supertest"
import { SuperTest, Test, Response } from "supertest"
import { ReadStream } from "fs"

export interface TestAPIOpts {
  headers?: any
type Headers = Record<string, string | string[] | undefined>
type Method = "get" | "post" | "put" | "patch" | "delete"

export interface AttachedFile {
  name: string
  file: Buffer | ReadStream | string
}

function isAttachedFile(file: any): file is AttachedFile {
  if (file === undefined) {
    return false
  }
  const attachedFile = file as AttachedFile
  return (
    Object.hasOwnProperty.call(attachedFile, "file") &&
    Object.hasOwnProperty.call(attachedFile, "name")
  )
}

export interface Expectations {
  status?: number
  headers?: Record<string, string | RegExp>
  headersNotPresent?: string[]
  body?: Record<string, any>
}

export interface RequestOpts {
  headers?: Headers
  query?: Record<string, string | undefined>
  body?: Record<string, any>
  fields?: Record<string, any>
  files?: Record<
    string,
    Buffer | ReadStream | string | AttachedFile | undefined
  >
  expectations?: Expectations
  publicUser?: boolean
}

export abstract class TestAPI {
  config: TestConfiguration
  request: SuperTest<Test>

  protected constructor(config: TestConfiguration) {
  constructor(config: TestConfiguration) {
    this.config = config
    this.request = config.request!
  }

  protected _get = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
    return await this._request<T>("get", url, opts)
  }

  protected _post = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
    return await this._request<T>("post", url, opts)
  }

  protected _put = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
    return await this._request<T>("put", url, opts)
  }

  protected _patch = async <T>(url: string, opts?: RequestOpts): Promise<T> => {
    return await this._request<T>("patch", url, opts)
  }

  protected _delete = async <T>(
    url: string,
    opts?: RequestOpts
  ): Promise<T> => {
    return await this._request<T>("delete", url, opts)
  }

  protected _requestRaw = async (
    method: "get" | "post" | "put" | "patch" | "delete",
    url: string,
    opts?: RequestOpts
  ): Promise<Response> => {
    const {
      headers = {},
      query = {},
      body,
      fields = {},
      files = {},
      expectations,
      publicUser = false,
    } = opts || {}
    const { status = 200 } = expectations || {}
    const expectHeaders = expectations?.headers || {}

    if (status !== 204 && !expectHeaders["Content-Type"]) {
      expectHeaders["Content-Type"] = /^application\/json/
    }

    let queryParams = []
    for (const [key, value] of Object.entries(query)) {
      if (value) {
        queryParams.push(`${key}=${value}`)
      }
    }
    if (queryParams.length) {
      url += `?${queryParams.join("&")}`
    }

    const headersFn = publicUser
      ? this.config.publicHeaders.bind(this.config)
      : this.config.defaultHeaders.bind(this.config)
    let request = this.request[method](url).set(
      headersFn({
        "x-budibase-include-stacktrace": "true",
      })
    )
    if (headers) {
      request = request.set(headers)
    }
    if (body) {
      request = request.send(body)
    }
    for (const [key, value] of Object.entries(fields)) {
      request = request.field(key, value)
    }

    for (const [key, value] of Object.entries(files)) {
      if (isAttachedFile(value)) {
        request = request.attach(key, value.file, value.name)
      } else {
        request = request.attach(key, value as any)
      }
    }
    if (expectations?.headers) {
      for (const [key, value] of Object.entries(expectations.headers)) {
        if (value === undefined) {
          throw new Error(
            `Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
          )
        }
        request = request.expect(key, value as any)
      }
    }

    return await request
  }

  protected _request = async <T>(
    method: Method,
    url: string,
    opts?: RequestOpts
  ): Promise<T> => {
    const { expectations } = opts || {}
    const { status = 200 } = expectations || {}

    const response = await this._requestRaw(method, url, opts)

    if (response.status !== status) {
      let message = `Expected status ${status} but got ${response.status}`

      const stack = response.body.stack
      delete response.body.stack

      if (response.body) {
        message += `\n\nBody:`
        const body = JSON.stringify(response.body, null, 2)
        for (const line of body.split("\n")) {
          message += `\n⏐ ${line}`
        }
      }

      if (stack) {
        message += `\n\nStack from request handler:`
        for (const line of stack.split("\n")) {
          message += `\n⏐ ${line}`
        }
      }

      throw new Error(message)
    }

    if (expectations?.headersNotPresent) {
      for (const header of expectations.headersNotPresent) {
        if (response.headers[header]) {
          throw new Error(
            `Expected header ${header} not to be present, found value "${response.headers[header]}"`
          )
        }
      }
    }

    if (expectations?.body) {
      expect(response.body).toMatchObject(expectations.body)
    }

    return response.body
  }
}
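As a usage sketch, a test built on these new helpers can assert status and body in one call, and a mismatch surfaces the server stack trace captured via the x-budibase-include-stacktrace header (illustrative only; assumes a table and newRow fixture already exist in the test):

// Illustrative test snippet using the Expectations object defined above.
const saved = await config.api.row.save(table._id!, newRow, {
  status: 200,
  body: { tableId: table._id! },
})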
@ -1,63 +1,48 @@
|
|||
import {
|
||||
CreateDatasourceRequest,
|
||||
Datasource,
|
||||
VerifyDatasourceRequest,
|
||||
CreateDatasourceResponse,
|
||||
UpdateDatasourceResponse,
|
||||
UpdateDatasourceRequest,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import supertest from "supertest"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
|
||||
export class DatasourceAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
create = async <B extends boolean = false>(
|
||||
create = async (
|
||||
config: Datasource,
|
||||
{
|
||||
expectStatus,
|
||||
rawResponse,
|
||||
}: { expectStatus?: number; rawResponse?: B } = {}
|
||||
): Promise<B extends false ? Datasource : supertest.Response> => {
|
||||
const body: CreateDatasourceRequest = {
|
||||
datasource: config,
|
||||
tablesFilter: [],
|
||||
}
|
||||
const result = await this.request
|
||||
.post(`/api/datasources`)
|
||||
.send(body)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus || 200)
|
||||
if (rawResponse) {
|
||||
return result as any
|
||||
}
|
||||
return result.body.datasource
|
||||
expectations?: Expectations
|
||||
): Promise<Datasource> => {
|
||||
const response = await this._post<CreateDatasourceResponse>(
|
||||
`/api/datasources`,
|
||||
{
|
||||
body: {
|
||||
datasource: config,
|
||||
tablesFilter: [],
|
||||
},
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
return response.datasource
|
||||
}
|
||||
|
||||
update = async (
|
||||
datasource: Datasource,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
datasource: UpdateDatasourceRequest,
|
||||
expectations?: Expectations
|
||||
): Promise<Datasource> => {
|
||||
const result = await this.request
|
||||
.put(`/api/datasources/${datasource._id}`)
|
||||
.send(datasource)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return result.body.datasource as Datasource
|
||||
const response = await this._put<UpdateDatasourceResponse>(
|
||||
`/api/datasources/${datasource._id}`,
|
||||
{ body: datasource, expectations }
|
||||
)
|
||||
return response.datasource
|
||||
}
|
||||
|
||||
verify = async (
|
||||
data: VerifyDatasourceRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
const result = await this.request
|
||||
.post(`/api/datasources/verify`)
|
||||
.send(data)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return result
|
||||
return await this._post(`/api/datasources/verify`, {
|
||||
body: data,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,16 +1,8 @@
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
import { Expectations, TestAPI } from "./base"
import { Row } from "@budibase/types"

export class LegacyViewAPI extends TestAPI {
  constructor(config: TestConfiguration) {
    super(config)
  }

  get = async (id: string, { expectStatus } = { expectStatus: 200 }) => {
    return await this.request
      .get(`/api/views/${id}`)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(expectStatus)
  get = async (id: string, expectations?: Expectations) => {
    return await this._get<Row[]>(`/api/views/${id}`, { expectations })
  }
}
@ -1,52 +1,39 @@
|
|||
import { AnyDocument, PermissionLevel } from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import {
|
||||
AddPermissionRequest,
|
||||
AddPermissionResponse,
|
||||
GetResourcePermsResponse,
|
||||
RemovePermissionRequest,
|
||||
RemovePermissionResponse,
|
||||
} from "@budibase/types"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
|
||||
export class PermissionAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
get = async (resourceId: string, expectations?: Expectations) => {
|
||||
return await this._get<GetResourcePermsResponse>(
|
||||
`/api/permission/${resourceId}`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
|
||||
get = async (
|
||||
resourceId: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
) => {
|
||||
return this.request
|
||||
.get(`/api/permission/${resourceId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
}
|
||||
|
||||
set = async (
|
||||
{
|
||||
roleId,
|
||||
resourceId,
|
||||
level,
|
||||
}: { roleId: string; resourceId: string; level: PermissionLevel },
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<any> => {
|
||||
const res = await this.request
|
||||
.post(`/api/permission/${roleId}/${resourceId}/${level}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return res.body
|
||||
add = async (
|
||||
request: AddPermissionRequest,
|
||||
expectations?: Expectations
|
||||
): Promise<AddPermissionResponse> => {
|
||||
const { roleId, resourceId, level } = request
|
||||
return await this._post<AddPermissionResponse>(
|
||||
`/api/permission/${roleId}/${resourceId}/${level}`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
|
||||
revoke = async (
|
||||
{
|
||||
roleId,
|
||||
resourceId,
|
||||
level,
|
||||
}: { roleId: string; resourceId: string; level: PermissionLevel },
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
request: RemovePermissionRequest,
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
const res = await this.request
|
||||
.delete(`/api/permission/${roleId}/${resourceId}/${level}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return res
|
||||
const { roleId, resourceId, level } = request
|
||||
return await this._delete<RemovePermissionResponse>(
|
||||
`/api/permission/${roleId}/${resourceId}/${level}`,
|
||||
{ expectations }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,60 +1,32 @@
import TestConfiguration from "../TestConfiguration"
import {
  Query,
  QueryPreview,
  type ExecuteQueryRequest,
  type ExecuteQueryResponse,
  ExecuteQueryRequest,
  ExecuteQueryResponse,
  PreviewQueryRequest,
  PreviewQueryResponse,
} from "@budibase/types"
import { TestAPI } from "./base"

export class QueryAPI extends TestAPI {
  constructor(config: TestConfiguration) {
    super(config)
  }

  create = async (body: Query): Promise<Query> => {
    const res = await this.request
      .post(`/api/queries`)
      .set(this.config.defaultHeaders())
      .send(body)
      .expect("Content-Type", /json/)

    if (res.status !== 200) {
      throw new Error(JSON.stringify(res.body))
    }

    return res.body as Query
    return await this._post<Query>(`/api/queries`, { body })
  }

  execute = async (
    queryId: string,
    body?: ExecuteQueryRequest
  ): Promise<ExecuteQueryResponse> => {
    const res = await this.request
      .post(`/api/v2/queries/${queryId}`)
      .set(this.config.defaultHeaders())
      .send(body)
      .expect("Content-Type", /json/)

    if (res.status !== 200) {
      throw new Error(JSON.stringify(res.body))
    }

    return res.body
    return await this._post<ExecuteQueryResponse>(
      `/api/v2/queries/${queryId}`,
      {
        body,
      }
    )
  }

  previewQuery = async (queryPreview: QueryPreview) => {
    const res = await this.request
      .post(`/api/queries/preview`)
      .send(queryPreview)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)

    if (res.status !== 200) {
      throw new Error(JSON.stringify(res.body))
    }

    return res.body
  previewQuery = async (queryPreview: PreviewQueryRequest) => {
    return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
      body: queryPreview,
    })
  }
}
@ -8,162 +8,140 @@ import {
|
|||
BulkImportResponse,
|
||||
SearchRowResponse,
|
||||
SearchParams,
|
||||
DeleteRowRequest,
|
||||
DeleteRows,
|
||||
DeleteRow,
|
||||
ExportRowsResponse,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
|
||||
export class RowAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
get = async (
|
||||
sourceId: string,
|
||||
rowId: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
const request = this.request
|
||||
.get(`/api/${sourceId}/rows/${rowId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
if (expectStatus !== 404) {
|
||||
request.expect("Content-Type", /json/)
|
||||
}
|
||||
return request
|
||||
return await this._get<Row>(`/api/${sourceId}/rows/${rowId}`, {
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
getEnriched = async (
|
||||
sourceId: string,
|
||||
rowId: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
const request = this.request
|
||||
.get(`/api/${sourceId}/${rowId}/enrich`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
if (expectStatus !== 404) {
|
||||
request.expect("Content-Type", /json/)
|
||||
}
|
||||
return request
|
||||
return await this._get<Row>(`/api/${sourceId}/${rowId}/enrich`, {
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
save = async (
|
||||
tableId: string,
|
||||
row: SaveRowRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<Row> => {
|
||||
const resp = await this.request
|
||||
.post(`/api/${tableId}/rows`)
|
||||
.send(row)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
if (resp.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
resp.status
|
||||
}, body: ${JSON.stringify(resp.body)}`
|
||||
)
|
||||
}
|
||||
return resp.body as Row
|
||||
return await this._post<Row>(`/api/${tableId}/rows`, {
|
||||
body: row,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
validate = async (
|
||||
sourceId: string,
|
||||
row: SaveRowRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<ValidateResponse> => {
|
||||
const resp = await this.request
|
||||
.post(`/api/${sourceId}/rows/validate`)
|
||||
.send(row)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return resp.body as ValidateResponse
|
||||
return await this._post<ValidateResponse>(
|
||||
`/api/${sourceId}/rows/validate`,
|
||||
{
|
||||
body: row,
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
patch = async (
|
||||
sourceId: string,
|
||||
row: PatchRowRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<Row> => {
|
||||
let resp = await this.request
|
||||
.patch(`/api/${sourceId}/rows`)
|
||||
.send(row)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
if (resp.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
resp.status
|
||||
}, body: ${JSON.stringify(resp.body)}`
|
||||
)
|
||||
}
|
||||
return resp.body as Row
|
||||
return await this._patch<Row>(`/api/${sourceId}/rows`, {
|
||||
body: row,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
delete = async (
|
||||
sourceId: string,
|
||||
rows: Row | string | (Row | string)[],
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
row: DeleteRow,
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
return this.request
|
||||
.delete(`/api/${sourceId}/rows`)
|
||||
.send(Array.isArray(rows) ? { rows } : rows)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return await this._delete<Row>(`/api/${sourceId}/rows`, {
|
||||
body: row,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
bulkDelete = async (
|
||||
sourceId: string,
|
||||
body: DeleteRows,
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
return await this._delete<Row[]>(`/api/${sourceId}/rows`, {
|
||||
body,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
fetch = async (
|
||||
sourceId: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<Row[]> => {
|
||||
const request = this.request
|
||||
.get(`/api/${sourceId}/rows`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
|
||||
return (await request).body
|
||||
return await this._get<Row[]>(`/api/${sourceId}/rows`, {
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
exportRows = async (
|
||||
tableId: string,
|
||||
body: ExportRowsRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
const request = this.request
|
||||
.post(`/api/${tableId}/rows/exportRows?format=json`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.send(body)
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return request
|
||||
const response = await this._requestRaw(
|
||||
"post",
|
||||
`/api/${tableId}/rows/exportRows`,
|
||||
{
|
||||
body,
|
||||
query: { format: "json" },
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
return response.text
|
||||
}
|
||||
|
||||
bulkImport = async (
|
||||
tableId: string,
|
||||
body: BulkImportRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<BulkImportResponse> => {
|
||||
let request = this.request
|
||||
.post(`/api/tables/${tableId}/import`)
|
||||
.send(body)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
return (await request).body
|
||||
return await this._post<BulkImportResponse>(
|
||||
`/api/tables/${tableId}/import`,
|
||||
{
|
||||
body,
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
search = async (
|
||||
sourceId: string,
|
||||
params?: SearchParams,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<SearchRowResponse> => {
|
||||
const request = this.request
|
||||
.post(`/api/${sourceId}/search`)
|
||||
.send(params)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
|
||||
return (await request).body
|
||||
return await this._post<SearchRowResponse>(`/api/${sourceId}/search`, {
|
||||
body: params,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,18 +1,8 @@
import TestConfiguration from "../TestConfiguration"
import { Screen } from "@budibase/types"
import { TestAPI } from "./base"
import { Expectations, TestAPI } from "./base"

export class ScreenAPI extends TestAPI {
  constructor(config: TestConfiguration) {
    super(config)
  }

  list = async (): Promise<Screen[]> => {
    const res = await this.request
      .get(`/api/screens`)
      .set(this.config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)
    return res.body as Screen[]
  list = async (expectations?: Expectations): Promise<Screen[]> => {
    return await this._get<Screen[]>(`/api/screens`, { expectations })
  }
}
@ -5,74 +5,38 @@ import {
|
|||
SaveTableResponse,
|
||||
Table,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
|
||||
export class TableAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
save = async (
|
||||
data: SaveTableRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<SaveTableResponse> => {
|
||||
const res = await this.request
|
||||
.post(`/api/tables`)
|
||||
.send(data)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body
|
||||
return await this._post<SaveTableResponse>("/api/tables", {
|
||||
body: data,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
fetch = async (
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<Table[]> => {
|
||||
const res = await this.request
|
||||
.get(`/api/tables`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return res.body
|
||||
fetch = async (expectations?: Expectations): Promise<Table[]> => {
|
||||
return await this._get<Table[]>("/api/tables", { expectations })
|
||||
}
|
||||
|
||||
get = async (
|
||||
tableId: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<Table> => {
|
||||
const res = await this.request
|
||||
.get(`/api/tables/${tableId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return res.body
|
||||
return await this._get<Table>(`/api/tables/${tableId}`, { expectations })
|
||||
}
|
||||
|
||||
migrate = async (
|
||||
tableId: string,
|
||||
data: MigrateRequest,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<MigrateResponse> => {
|
||||
const res = await this.request
|
||||
.post(`/api/tables/${tableId}/migrate`)
|
||||
.send(data)
|
||||
.set(this.config.defaultHeaders())
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
return res.body
|
||||
return await this._post<MigrateResponse>(`/api/tables/${tableId}/migrate`, {
|
||||
body: data,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,154 +4,79 @@ import {
|
|||
Flags,
|
||||
UserMetadata,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
import { DocumentInsertResponse } from "@budibase/nano"
|
||||
|
||||
export class UserAPI extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
fetch = async (
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<FetchUserMetadataResponse> => {
|
||||
const res = await this.request
|
||||
.get(`/api/users/metadata`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body
|
||||
return await this._get<FetchUserMetadataResponse>("/api/users/metadata", {
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
find = async (
|
||||
id: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<FindUserMetadataResponse> => {
|
||||
const res = await this.request
|
||||
.get(`/api/users/metadata/${id}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body
|
||||
return await this._get<FindUserMetadataResponse>(
|
||||
`/api/users/metadata/${id}`,
|
||||
{
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
update = async (
|
||||
user: UserMetadata,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<DocumentInsertResponse> => {
|
||||
const res = await this.request
|
||||
.put(`/api/users/metadata`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.send(user)
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body as DocumentInsertResponse
|
||||
return await this._put<DocumentInsertResponse>("/api/users/metadata", {
|
||||
body: user,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
updateSelf = async (
|
||||
user: UserMetadata,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<DocumentInsertResponse> => {
|
||||
const res = await this.request
|
||||
.post(`/api/users/metadata/self`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.send(user)
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body as DocumentInsertResponse
|
||||
return await this._post<DocumentInsertResponse>(
|
||||
"/api/users/metadata/self",
|
||||
{
|
||||
body: user,
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
destroy = async (
|
||||
id: string,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<{ message: string }> => {
|
||||
const res = await this.request
|
||||
.delete(`/api/users/metadata/${id}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body as { message: string }
|
||||
return await this._delete<{ message: string }>(
|
||||
`/api/users/metadata/${id}`,
|
||||
{
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
setFlag = async (
|
||||
flag: string,
|
||||
value: any,
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<{ message: string }> => {
|
||||
const res = await this.request
|
||||
.post(`/api/users/flags`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.send({ flag, value })
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body as { message: string }
|
||||
return await this._post<{ message: string }>(`/api/users/flags`, {
|
||||
body: { flag, value },
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
getFlags = async (
|
||||
{ expectStatus } = { expectStatus: 200 }
|
||||
): Promise<Flags> => {
|
||||
const res = await this.request
|
||||
.get(`/api/users/flags`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
|
||||
if (res.status !== expectStatus) {
|
||||
throw new Error(
|
||||
`Expected status ${expectStatus} but got ${
|
||||
res.status
|
||||
} with body ${JSON.stringify(res.body)}`
|
||||
)
|
||||
}
|
||||
|
||||
return res.body as Flags
|
||||
getFlags = async (expectations?: Expectations): Promise<Flags> => {
|
||||
return await this._get<Flags>(`/api/users/flags`, {
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,21 +3,16 @@ import {
|
|||
UpdateViewRequest,
|
||||
ViewV2,
|
||||
SearchViewRowRequest,
|
||||
PaginatedSearchRowResponse,
|
||||
} from "@budibase/types"
|
||||
import TestConfiguration from "../TestConfiguration"
|
||||
import { TestAPI } from "./base"
|
||||
import { Expectations, TestAPI } from "./base"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import { Response } from "superagent"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
export class ViewV2API extends TestAPI {
|
||||
constructor(config: TestConfiguration) {
|
||||
super(config)
|
||||
}
|
||||
|
||||
create = async (
|
||||
viewData?: Partial<CreateViewRequest>,
|
||||
{ expectStatus } = { expectStatus: 201 }
|
||||
expectations?: Expectations
|
||||
): Promise<ViewV2> => {
|
||||
let tableId = viewData?.tableId
|
||||
if (!tableId && !this.config.table) {
|
||||
|
@ -30,43 +25,36 @@ export class ViewV2API extends TestAPI {
|
|||
name: generator.guid(),
|
||||
...viewData,
|
||||
}
|
||||
const result = await this.request
|
||||
.post(`/api/v2/views`)
|
||||
.send(view)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return result.body.data as ViewV2
|
||||
|
||||
const exp: Expectations = {
|
||||
status: 201,
|
||||
...expectations,
|
||||
}
|
||||
|
||||
const resp = await this._post<{ data: ViewV2 }>("/api/v2/views", {
|
||||
body: view,
|
||||
expectations: exp,
|
||||
})
|
||||
return resp.data
|
||||
}
|
||||
|
||||
update = async (
|
||||
view: UpdateViewRequest,
|
||||
{
|
||||
expectStatus,
|
||||
handleResponse,
|
||||
}: {
|
||||
expectStatus: number
|
||||
handleResponse?: (response: Response) => void
|
||||
} = { expectStatus: 200 }
|
||||
expectations?: Expectations
|
||||
): Promise<ViewV2> => {
|
||||
const result = await this.request
|
||||
.put(`/api/v2/views/${view.id}`)
|
||||
.send(view)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
|
||||
if (handleResponse) {
|
||||
handleResponse(result)
|
||||
}
|
||||
return result.body.data as ViewV2
|
||||
const resp = await this._put<{ data: ViewV2 }>(`/api/v2/views/${view.id}`, {
|
||||
body: view,
|
||||
expectations,
|
||||
})
|
||||
return resp.data
|
||||
}
|
||||
|
||||
delete = async (viewId: string, { expectStatus } = { expectStatus: 204 }) => {
|
||||
return this.request
|
||||
.delete(`/api/v2/views/${viewId}`)
|
||||
.set(this.config.defaultHeaders())
|
||||
.expect(expectStatus)
|
||||
delete = async (viewId: string, expectations?: Expectations) => {
|
||||
const exp = {
|
||||
status: 204,
|
||||
...expectations,
|
||||
}
|
||||
return await this._delete(`/api/v2/views/${viewId}`, { expectations: exp })
|
||||
}
|
||||
|
||||
get = async (viewId: string) => {
|
||||
|
@ -78,17 +66,29 @@ export class ViewV2API extends TestAPI {
|
|||
search = async (
|
||||
viewId: string,
|
||||
params?: SearchViewRowRequest,
|
||||
{ expectStatus = 200, usePublicUser = false } = {}
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
return this.request
|
||||
.post(`/api/v2/views/${viewId}/search`)
|
||||
.send(params)
|
||||
.set(
|
||||
usePublicUser
|
||||
? this.config.publicHeaders()
|
||||
: this.config.defaultHeaders()
|
||||
)
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(expectStatus)
|
||||
return await this._post<PaginatedSearchRowResponse>(
|
||||
`/api/v2/views/${viewId}/search`,
|
||||
{
|
||||
body: params,
|
||||
expectations,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
publicSearch = async (
|
||||
viewId: string,
|
||||
params?: SearchViewRowRequest,
|
||||
expectations?: Expectations
|
||||
) => {
|
||||
return await this._post<PaginatedSearchRowResponse>(
|
||||
`/api/v2/views/${viewId}/search`,
|
||||
{
|
||||
body: params,
|
||||
expectations,
|
||||
publicUser: true,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
import { PlanType } from "../../../sdk"
import { PermissionLevel, PlanType } from "../../../sdk"

export interface ResourcePermissionInfo {
  role: string

@ -14,3 +14,21 @@ export interface GetResourcePermsResponse {
export interface GetDependantResourcesResponse {
  resourceByType?: Record<string, number>
}

export interface AddedPermission {
  _id?: string
  rev?: string
  error?: string
  reason?: string
}

export type AddPermissionResponse = AddedPermission[]

export interface AddPermissionRequest {
  roleId: string
  resourceId: string
  level: PermissionLevel
}

export interface RemovePermissionRequest extends AddPermissionRequest {}
export interface RemovePermissionResponse extends AddPermissionResponse {}
@ -1,6 +1,6 @@
import { SearchFilters, SearchParams } from "../../../sdk"
import { Row } from "../../../documents"
import { SortOrder } from "../../../api"
import { PaginationResponse, SortOrder } from "../../../api"
import { ReadStream } from "fs"

export interface SaveRowRequest extends Row {}

@ -31,6 +31,10 @@ export interface SearchRowResponse {
  rows: any[]
}

export interface PaginatedSearchRowResponse
  extends SearchRowResponse,
    PaginationResponse {}

export interface ExportRowsRequest {
  rows: string[]
  columns?: string[]
@ -27,3 +27,9 @@ export interface FetchAppPackageResponse {
  clientLibPath: string
  hasLock: boolean
}

export interface PublishResponse {
  _id: string
  status: string
  appUrl: string
}
@ -13,3 +13,4 @@ export * from "./searchFilter"
export * from "./cookies"
export * from "./automation"
export * from "./layout"
export * from "./query"
@ -0,0 +1,20 @@
import { QueryPreview, QuerySchema } from "../../documents"

export interface PreviewQueryRequest extends QueryPreview {}

export interface PreviewQueryResponse {
  rows: any[]
  nestedSchemaFields: { [key: string]: { [key: string]: string | QuerySchema } }
  schema: { [key: string]: string | QuerySchema }
  info: any
  extra: any
}

export interface ExecuteQueryRequest {
  parameters?: { [key: string]: string }
  pagination?: any
}

export interface ExecuteQueryResponse {
  data: Record<string, any>[]
}
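A usage sketch tying these types to the test client's QueryAPI above (illustrative only; assumes the query client is exposed as config.api.query and a saved query fixture exists):

// Hypothetical test snippet: execute a saved query with bound parameters.
const request: ExecuteQueryRequest = { parameters: { id: "123" } }
const response: ExecuteQueryResponse = await config.api.query.execute(
  query._id!,
  request
)
expect(response.data.length).toBeGreaterThan(0)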
@ -62,22 +62,6 @@ export interface PaginationValues {
  limit: number | null
}

export interface PreviewQueryRequest extends Omit<Query, "parameters"> {
  parameters: {}
  flags?: {
    urlName?: boolean
  }
}

export interface ExecuteQueryRequest {
  parameters?: { [key: string]: string }
  pagination?: any
}

export interface ExecuteQueryResponse {
  data: Row[]
}

export enum HttpMethod {
  GET = "GET",
  POST = "POST",
@@ -1,6 +1,6 @@
 import { Document } from "../../document"
 import { View, ViewV2 } from "../view"
-import { RenameColumn } from "../../../sdk"
+import { AddColumn, RenameColumn } from "../../../sdk"
 import { TableSchema } from "./schema"

 export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"

@@ -29,5 +29,6 @@ export interface Table extends Document {

 export interface TableRequest extends Table {
   _rename?: RenameColumn
+  _add?: AddColumn
   created?: boolean
 }

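A minimal sketch of the new _add marker on a table save. existingTable and the "status" column are placeholders; the added column is assumed to already be present in the schema that is being saved.

import { Table, TableRequest } from "@budibase/types"

// Mark "status" as a newly added column for this save; AddColumn carries just
// the column name.
function markColumnAdded(existingTable: Table): TableRequest {
  return {
    ...existingTable,
    _add: { name: "status" },
  }
}
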
@@ -1,4 +1,5 @@
-import { Table } from "../documents"
+import { Table, Row } from "../documents"
 import { QueryJson } from "./search"

 export const PASSWORD_REPLACEMENT = "--secret-value--"

@@ -181,11 +182,24 @@ export interface Schema {
   errors: Record<string, string>
 }

+// return these when an operation occurred but we got no response
+enum DSPlusOperation {
+  CREATE = "create",
+  READ = "read",
+  UPDATE = "update",
+  DELETE = "delete",
+}
+
+export type DatasourcePlusQueryResponse = Promise<
+  Row[] | Record<DSPlusOperation, boolean>[] | void
+>
+
 export interface DatasourcePlus extends IntegrationBase {
   // if the datasource supports the use of bindings directly (to protect against SQL injection)
   // this returns the format of the identifier
   getBindingIdentifier(): string
   getStringConcat(parts: string[]): string
   query(json: QueryJson): DatasourcePlusQueryResponse
   buildSchema(
     datasourceId: string,
     entities: Record<string, Table>

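To make the new response union concrete, here is a rough sketch of a query implementation that satisfies it. runSql stands in for a real driver call, and the re-export of these types from @budibase/types is assumed.

import { DatasourcePlusQueryResponse, QueryJson, Row } from "@budibase/types"

// Stand-in for whatever call a concrete integration's driver exposes.
declare function runSql(sql: string): Promise<Row[] | undefined>

// Reads resolve with rows; writes that return no result set resolve with the
// per-operation flag records allowed by the new union.
function query(json: QueryJson): DatasourcePlusQueryResponse {
  return runSql("SELECT 1 /* built from json */").then(rows =>
    rows ? rows : [{ create: true, read: false, update: false, delete: false }]
  )
}
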
@@ -60,6 +60,10 @@ export interface RenameColumn {
   updated: string
 }

+export interface AddColumn {
+  name: string
+}
+
 export interface RelationshipsJson {
   through?: string
   from?: string

@@ -94,6 +98,7 @@ export interface QueryJson {
     idFilter?: SearchFilters
   }
   relationships?: RelationshipsJson[]
+  tableAliases?: Record<string, string>
 }

 export interface SqlQuery {

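For context on the new tableAliases field, a sketch of a partial QueryJson carrying aliases. Most QueryJson fields are omitted because they are not shown in this diff.

import { QueryJson } from "@budibase/types"

// Presumably maps real table names to the short aliases used when SQL is
// generated for the query.
const aliasedQuery: Partial<QueryJson> = {
  relationships: [],
  tableAliases: {
    very_long_table_name: "t1",
    other_table: "t2",
  },
}
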
yarn.lock (384)

The lockfile hunks amount to the following dependency changes:

- @babel/highlight@^7.23.4: integrity rewritten to the plain sha512 form (version unchanged at 7.23.4)
- @babel/runtime: the ^7.13.10 and ^7.21.0 specs move onto the ^7.10.5 entry at 7.23.9; the 7.23.8 entry keeps ^7.12.5, ^7.15.4, ^7.8.4 and ^7.9.2
- @mongodb-js/saslprep@^1.1.0: 1.1.1 -> 1.1.4
- @rollup/rollup-* native builds (android-arm-eabi, android-arm64, darwin-arm64, darwin-x64, linux-arm-gnueabihf, linux-arm64-gnu, linux-arm64-musl, linux-riscv64-gnu, linux-x64-gnu, linux-x64-musl, win32-arm64-msvc, win32-ia32-msvc, win32-x64-msvc): 4.10.0 -> 4.12.0
- @types/chai-subset@^1.3.3: 1.3.5 -> 1.3.3
- @types/chai@*, @types/chai@^4.3.4: 4.3.11 -> 4.3.9
- @types/node@* (and >=10.0.0, >=12.12.47, >=13.13.4, >=13.7.0): 20.11.2 -> 20.10.7, with >=8.1.0 split into its own entry at 20.11.10
- @types/node@^18.11.18: 18.19.13 -> 18.19.10
- @types/whatwg-url@^11.0.2: 11.0.3 -> 11.0.4
- acorn-walk: the ^8.2.0 spec joins the 8.2.0 entry and the separate 8.3.2 entry is removed
- acorn: the ^8.11.3 spec is dropped and the group moves 8.11.3 -> 8.11.2
- asn1: the ^0.2.4 spec joins the 0.2.6 entry
- async: ^3.2.4 splits out to 3.2.5; ^3.2.1 and ^3.2.3 stay on 3.2.4
- buildcheck@0.0.3: entry added (alongside the existing ~0.0.6)
- chai@^4.3.7: 4.4.1 -> 4.3.10
- cpu-features@~0.0.4: entry added at 0.0.4 (alongside the existing ~0.0.9)
- diff@^5.1.0: 5.2.0 -> 5.1.0
- dockerode: ^3.3.5 splits out to 3.3.5; ^3.2.1 stays on 3.3.4
- dotenv@^16.3.1: 16.3.1 -> 16.4.1
- fast-xml-parser: ^4.2.2 and ^4.2.5 split out to 4.3.2; ^4.1.3 stays on 4.3.3
- fflate: the ^0.4.8 spec joins the 0.4.8 entry
- mlly: the separate ^1.1.0 entry at 1.6.0 is removed and the spec joins the 1.4.2 entry
- nan@^2.15.0, nan@^2.16.0: entry added at 2.17.0 (the ^2.17.0/^2.18.0 entry stays on 2.18.0)
- pathe@^1.1.2: entry at 1.1.2 removed
- posthog-js@^1.13.4: 1.100.0 -> 1.103.1 (now depends on fflate ^0.4.8 and preact ^10.19.3); ^1.36.0 splits out to 1.96.1
- preact@^10.19.3: entry added at 10.19.3
- rollup@^4.9.6: 4.10.0 -> 4.12.0
- rxjs: ^7.8.1 splits out to 7.8.1; ^7.5.5 stays on 7.8.0
- ssh2: ^1.4.0 splits out to 1.15.0; ^1.11.0 stays on 1.11.0 (asn1 ^0.2.4, cpu-features ~0.0.4, nan ^2.16.0)
- std-env@^3.3.1: 3.7.0 -> 3.4.3
- svelte@^4.2.10: 4.2.10 -> 4.2.12
- tinybench@^2.3.1: 2.6.0 -> 2.5.1
- ufo@^1.3.2: entry at 1.4.0 removed
- undici-types@^6.0.1: 6.0.1 -> 6.6.2
- undici@^6.0.1: 6.0.1 -> 6.6.2
- vite: the "^3.0.0 || ^4.0.0" entry at 4.5.2 is removed and the spec joins the ^4.5.0 entry at 4.5.0
- yaml: ^2.2.2 splits out to 2.3.4; ^2.1.1 stays on 2.3.2