Merge branch 'master' into fix/sql-pagination-fixes

Michael Drury 2024-08-02 13:21:22 +01:00 committed by GitHub
commit c443ad5d5b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
38 changed files with 1709 additions and 1015 deletions

View File

@ -1,9 +1,9 @@
<script>
import { Select, Icon } from "@budibase/bbui"
import { FIELDS } from "constants/backend"
import { canBeDisplayColumn, utils } from "@budibase/shared-core"
import { API } from "api"
import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = []
export let schema = {}
@ -97,6 +97,8 @@
let errors = {}
let selectedColumnTypes = {}
let rawRows = []
$: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column] && canBeDisplayColumn(schema[column].type)
})
@ -106,6 +108,8 @@
}
$: {
rows = rawRows.map(row => utils.trimOtherProps(row, Object.keys(schema)))
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
if (newValidateHash !== validateHash) {
@ -122,7 +126,7 @@
try {
const response = await parseFile(e)
rows = response.rows
rawRows = response.rows
schema = response.schema
fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
@ -188,7 +192,7 @@
type="file"
on:change={handleFile}
/>
<label for="file-upload" class:uploaded={rows.length > 0}>
<label for="file-upload" class:uploaded={rawRows.length > 0}>
{#if error}
Error: {error}
{:else if fileName}
@ -198,7 +202,7 @@
{/if}
</label>
</div>
{#if rows.length > 0 && !error}
{#if rawRows.length > 0 && !error}
<div class="schema-fields">
{#each Object.entries(schema) as [name, column]}
<div class="field">
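
A note on the validateHash guard above: Svelte re-runs reactive blocks whenever a bound value is reassigned, so a consumer binding can re-trigger validation with identical inputs. A minimal sketch of the pattern, with a hypothetical validate() step:

type Row = Record<string, any>

let validateHash = ""

function maybeValidate(rows: Row[], schema: Record<string, any>) {
  // Cheap change detection: serialize the inputs and compare to the last run.
  const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
  if (newValidateHash === validateHash) {
    return // re-render with unchanged inputs; skip the redundant validation
  }
  validateHash = newValidateHash
  // validate(rows, schema) would run here (hypothetical step)
}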

View File

@ -78,7 +78,7 @@
await datasources.fetch()
await afterSave(table)
} catch (e) {
notifications.error(e)
notifications.error(e.message || e)
// reload in case the table was created
await tables.fetch()
}

View File

@ -1,206 +0,0 @@
// @ts-ignore
import fs from "fs"
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module FetchMock {
// @ts-ignore
const fetch = jest.requireActual("node-fetch")
let failCount = 0
let mockSearch = false
const func = async (url: any, opts: any) => {
const { host, pathname } = new URL(url)
function json(body: any, status = 200) {
return {
status,
headers: {
raw: () => {
return { "content-type": ["application/json"] }
},
get: (name: string) => {
if (name.toLowerCase() === "content-type") {
return ["application/json"]
}
},
},
json: async () => {
//x-www-form-urlencoded bodies are URLSearchParams instances
//calling JSON.stringify on one leaves the body blank
if (body?.opts?.body instanceof URLSearchParams) {
const paramArray = Array.from(body.opts.body.entries())
body.opts.body = paramArray.reduce((acc: any, pair: any) => {
acc[pair[0]] = pair[1]
return acc
}, {})
}
return body
},
}
}
if (pathname.includes("/api/global")) {
const user = {
email: "test@example.com",
_id: "us_test@example.com",
status: "active",
roles: {},
builder: {
global: false,
},
admin: {
global: false,
},
}
return pathname.endsWith("/users") && opts.method === "GET"
? json([user])
: json(user)
}
// mocked data based on url
else if (pathname.includes("api/apps")) {
return json({
app1: {
url: "/app1",
},
})
} else if (host.includes("example.com")) {
return json({
body: opts.body,
url,
method: opts.method,
})
} else if (host.includes("invalid.com")) {
return json(
{
invalid: true,
},
404
)
} else if (mockSearch && pathname.includes("_search")) {
const body = opts.body
const parts = body.split("tableId:")
let tableId
if (parts && parts[1]) {
tableId = parts[1].split('"')[0]
}
return json({
rows: [
{
doc: {
_id: "test",
tableId: tableId,
query: opts.body,
},
},
],
bookmark: "test",
})
} else if (host.includes("google.com")) {
return json({
url,
opts,
value:
'<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="en-GB"></html>',
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box"
) {
return Promise.resolve({
json: () => {
return {
name: "budibase-comment-box",
releases_url:
"https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
}
},
})
} else if (
url === "https://api.github.com/repos/my-repo/budibase-comment-box/latest"
) {
return Promise.resolve({
json: () => {
return {
assets: [
{
content_type: "application/gzip",
browser_download_url:
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
},
],
}
},
})
} else if (
url ===
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.npmjs.com/package/budibase-component") {
return Promise.resolve({
status: 200,
json: () => {
return {
name: "budibase-component",
"dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz",
},
},
},
}
},
})
} else if (
url ===
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.2.tgz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/budibase-component-1.0.2.tgz"
),
ok: true,
})
} else if (
url === "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz"
) {
return Promise.resolve({
body: fs.createReadStream(
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
),
ok: true,
})
} else if (url === "https://www.googleapis.com/oauth2/v4/token") {
// any valid response
return json({})
} else if (host.includes("failonce.com")) {
failCount++
if (failCount === 1) {
return json({ message: "error" }, 500)
} else {
return json({
fails: failCount - 1,
url,
opts,
})
}
}
return fetch(url, opts)
}
func.Headers = fetch.Headers
func.mockSearch = () => {
mockSearch = true
}
module.exports = func
}
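
The hand-rolled fetch mock above is deleted by this commit in favour of per-test nock interceptors, visible in the test diffs below. A minimal sketch of the replacement pattern, assuming jest globals and node-fetch:

import nock from "nock"
import fetch from "node-fetch"

beforeEach(() => {
  nock.cleanAll() // interceptors can't leak between tests
})

it("mocks one specific call instead of branching on every URL", async () => {
  nock("http://www.example.com")
    .get("/api/apps")
    .reply(200, { app1: { url: "/app1" } })

  const res = await fetch("http://www.example.com/api/apps")
  const body = (await res.json()) as any
  expect(body.app1.url).toEqual("/app1")
})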

View File

@ -1,6 +1,13 @@
import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core"
import { PluginType, FileType, PluginSource } from "@budibase/types"
import {
PluginType,
FileType,
PluginSource,
Ctx,
CreatePluginRequest,
CreatePluginResponse,
} from "@budibase/types"
import env from "../../../environment"
import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk"
@ -29,7 +36,9 @@ export async function upload(ctx: any) {
}
}
export async function create(ctx: any) {
export async function create(
ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
) {
const { source, url, headers, githubToken } = ctx.request.body
try {
@ -75,14 +84,9 @@ export async function create(ctx: any) {
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket?.emit("plugins-update", { name, hash: doc.hash })
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],
}
ctx.body = { plugin: doc }
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
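
Typing the handler as Ctx<CreatePluginRequest, CreatePluginResponse> replaces ctx: any with compile-time checks on both the parsed body and the response shape. A simplified sketch with stand-in types (the real ones come from @budibase/types):

interface CreatePluginRequest {
  source: string
  url: string
  githubToken?: string
}

interface CreatePluginResponse {
  plugin: { _id: string }
}

// Stand-in for the Koa-style context type exported by @budibase/types.
interface Ctx<RequestBody, ResponseBody> {
  request: { body: RequestBody }
  body: ResponseBody
  throw(status: number, message: string): never
}

async function create(ctx: Ctx<CreatePluginRequest, CreatePluginResponse>) {
  const { source, url } = ctx.request.body // typed destructuring, no any
  if (!url) {
    ctx.throw(400, `Failed to import plugin from ${source}`)
  }
  ctx.body = { plugin: { _id: "plg_example" } } // checked against the response type
}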

View File

@ -34,7 +34,11 @@ import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
import { helpers } from "@budibase/shared-core"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && isExternalTable(table)) {
@ -167,7 +171,7 @@ export async function validateNewTableImport(
if (isRows(rows) && isSchema(schema)) {
ctx.status = 200
ctx.body = validateSchema(rows, schema)
ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS)
} else {
ctx.status = 422
}
@ -180,6 +184,7 @@ export async function validateExistingTableImport(
let schema = null
let protectedColumnNames
if (tableId) {
const table = await sdk.tables.getTable(tableId)
schema = table.schema
@ -189,6 +194,9 @@ export async function validateExistingTableImport(
name: "_id",
type: FieldType.STRING,
}
protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
} else {
protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS
}
} else {
ctx.status = 422
@ -197,7 +205,7 @@ export async function validateExistingTableImport(
if (tableId && isRows(rows) && isSchema(schema)) {
ctx.status = 200
ctx.body = validateSchema(rows, schema)
ctx.body = validateSchema(rows, schema, protectedColumnNames)
} else {
ctx.status = 422
}
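
The new third argument to validateSchema carries the reserved column names: internal tables filter "_id" back out so exported rows can be re-imported with their identity, while external SQL tables use their own protected set. A minimal sketch of the check, with an assumed subset of the constants (the real lists live in @budibase/shared-core):

const PROTECTED_INTERNAL_COLUMNS = ["_id", "_rev", "type", "tableId"] as const // assumed subset

function protectedColumnErrors(
  schema: Record<string, unknown>,
  protectedColumnNames: readonly string[]
): Record<string, string> {
  const lowered = protectedColumnNames.map(x => x.toLowerCase())
  const errors: Record<string, string> = {}
  for (const column of Object.keys(schema)) {
    if (lowered.includes(column.toLowerCase())) {
      errors[column] = `${column} is a protected column name`
    }
  }
  return errors
}

// Existing internal table: "_id" is allowed so imports keep row identity.
const allowed = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id")
console.log(protectedColumnErrors({ _rev: {}, name: {} }, allowed))
// => { _rev: "_rev is a protected column name" }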

View File

@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
export function isFormat(format: any): format is RowExportFormat {
return Object.values(RowExportFormat).includes(format as RowExportFormat)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value) as T
}

View File

@ -20,6 +20,7 @@ import { type App } from "@budibase/types"
import tk from "timekeeper"
import * as uuid from "uuid"
import { structures } from "@budibase/backend-core/tests"
import nock from "nock"
describe("/applications", () => {
let config = setup.getConfig()
@ -35,6 +36,7 @@ describe("/applications", () => {
throw new Error("Failed to publish app")
}
jest.clearAllMocks()
nock.cleanAll()
})
// These need to go first for the app totals to make sense
@ -324,18 +326,33 @@ describe("/applications", () => {
describe("delete", () => {
it("should delete published app and dev apps with dev app ID", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(app.appId)
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should delete published app and dev app with prod app ID", async () => {
await config.api.application.delete(app.appId.replace("_dev", ""))
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.api.application.delete(prodAppId)
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
await config.api.application.delete(app.appId)
})

View File

@ -19,6 +19,7 @@ import {
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"
describe("/datasources", () => {
const config = setup.getConfig()
@ -37,6 +38,7 @@ describe("/datasources", () => {
config: {},
})
jest.clearAllMocks()
nock.cleanAll()
})
describe("create", () => {
@ -71,6 +73,12 @@ describe("/datasources", () => {
describe("dynamic variables", () => {
it("should invalidate changed or removed variables", async () => {
nock("http://www.example.com/")
.get("/")
.reply(200, [{ value: "test" }])
.get("/?test=test")
.reply(200, [{ value: 1 }])
let datasource = await config.api.datasource.create({
type: "datasource",
name: "Rest",
@ -81,7 +89,7 @@ describe("/datasources", () => {
const query = await config.api.query.save({
datasourceId: datasource._id!,
fields: {
path: "www.google.com",
path: "www.example.com",
},
parameters: [],
transformer: null,

View File

@ -15,6 +15,8 @@ jest.mock("@budibase/backend-core", () => {
import { events, objectStore } from "@budibase/backend-core"
import * as setup from "./utilities"
import nock from "nock"
import { PluginSource } from "@budibase/types"
const mockUploadDirectory = objectStore.uploadDirectory as jest.Mock
const mockDeleteFolder = objectStore.deleteFolder as jest.Mock
@ -28,6 +30,7 @@ describe("/plugins", () => {
beforeEach(async () => {
await config.init()
jest.clearAllMocks()
nock.cleanAll()
})
const createPlugin = async (status?: number) => {
@ -112,67 +115,108 @@ describe("/plugins", () => {
})
describe("github", () => {
const createGithubPlugin = async (status?: number, url?: string) => {
return await request
.post(`/api/plugin`)
.send({
source: "Github",
url,
githubToken: "token",
beforeEach(async () => {
nock("https://api.github.com")
.get("/repos/my-repo/budibase-comment-box")
.reply(200, {
name: "budibase-comment-box",
releases_url:
"https://api.github.com/repos/my-repo/budibase-comment-box{/id}",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(status ? status : 200)
}
it("should be able to create a plugin from github", async () => {
const res = await createGithubPlugin(
200,
"https://github.com/my-repo/budibase-comment-box.git"
)
expect(res.body).toBeDefined()
expect(res.body.plugin).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_comment-box")
.get("/repos/my-repo/budibase-comment-box/latest")
.reply(200, {
assets: [
{
content_type: "application/gzip",
browser_download_url:
"https://github.com/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz",
},
],
})
nock("https://github.com")
.get(
"/my-repo/budibase-comment-box/releases/download/v1.0.2/comment-box-1.0.2.tar.gz"
)
.replyWithFile(
200,
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
)
})
it("should be able to create a plugin from github", async () => {
const { plugin } = await config.api.plugin.create({
source: PluginSource.GITHUB,
url: "https://github.com/my-repo/budibase-comment-box.git",
githubToken: "token",
})
expect(plugin._id).toEqual("plg_comment-box")
})
it("should fail if the url is not from github", async () => {
const res = await createGithubPlugin(
400,
"https://notgithub.com/my-repo/budibase-comment-box"
)
expect(res.body.message).toEqual(
"Failed to import plugin: The plugin origin must be from Github"
await config.api.plugin.create(
{
source: PluginSource.GITHUB,
url: "https://notgithub.com/my-repo/budibase-comment-box",
githubToken: "token",
},
{
status: 400,
body: {
message:
"Failed to import plugin: The plugin origin must be from Github",
},
}
)
})
})
describe("npm", () => {
it("should be able to create a plugin from npm", async () => {
const res = await request
.post(`/api/plugin`)
.send({
source: "NPM",
url: "https://www.npmjs.com/package/budibase-component",
nock("https://registry.npmjs.org")
.get("/budibase-component")
.reply(200, {
name: "budibase-component",
"dist-tags": {
latest: "1.0.0",
},
versions: {
"1.0.0": {
dist: {
tarball:
"https://registry.npmjs.org/budibase-component/-/budibase-component-1.0.1.tgz",
},
},
},
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_budibase-component")
.get("/budibase-component/-/budibase-component-1.0.1.tgz")
.replyWithFile(
200,
"src/api/routes/tests/data/budibase-component-1.0.1.tgz"
)
const { plugin } = await config.api.plugin.create({
source: PluginSource.NPM,
url: "https://www.npmjs.com/package/budibase-component",
})
expect(plugin._id).toEqual("plg_budibase-component")
expect(events.plugin.imported).toHaveBeenCalled()
})
})
describe("url", () => {
it("should be able to create a plugin from a URL", async () => {
const res = await request
.post(`/api/plugin`)
.send({
source: "URL",
url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toBeDefined()
expect(res.body.plugin._id).toEqual("plg_comment-box")
nock("https://www.someurl.com")
.get("/comment-box/comment-box-1.0.2.tar.gz")
.replyWithFile(
200,
"src/api/routes/tests/data/comment-box-1.0.2.tar.gz"
)
const { plugin } = await config.api.plugin.create({
source: PluginSource.URL,
url: "https://www.someurl.com/comment-box/comment-box-1.0.2.tar.gz",
})
expect(plugin._id).toEqual("plg_comment-box")
expect(events.plugin.imported).toHaveBeenCalledTimes(1)
})
})

View File

@ -5,8 +5,6 @@ import { getCachedVariable } from "../../../../threads/utils"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"
jest.unmock("node-fetch")
describe("rest", () => {
let config: TestConfiguration
let datasource: Datasource

View File

@ -33,6 +33,7 @@ import {
UpdatedRowEventEmitter,
TableSchema,
JsonFieldSubType,
RowExportFormat,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
@ -1811,6 +1812,7 @@ describe.each([
await config.api.row.exportRows(
"1234567",
{ rows: [existing._id!] },
RowExportFormat.JSON,
{ status: 404 }
)
})
@ -1849,6 +1851,202 @@ describe.each([
const results = JSON.parse(res)
expect(results.length).toEqual(3)
})
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const matchingObject = (key: string, value: any, isArray: boolean) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
})
let o2mTable: Table
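
The stringified and matchingObject helpers in the CSV test above encode how the exporter serializes complex values: objects and arrays are JSON.stringified with double quotes swapped for single quotes so they survive CSV quoting. A small worked example of the round-trip:

const original = { name: "Adam", age: 9 }
const exported = JSON.stringify(original).replace(/"/g, "'")
// exported === "{'name':'Adam','age':9}", which is what lands in the CSV cell
const reimported = JSON.parse(exported.replace(/'/g, '"'))
// reimported deep-equals original, so re-imported rows match the source rows
console.log(exported, reimported)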

View File

@ -1,4 +1,8 @@
import { context, docIds, events } from "@budibase/backend-core"
import {
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import {
AutoFieldSubType,
BBReferenceFieldSubType,
@ -13,8 +17,10 @@ import {
TableSchema,
TableSourceType,
User,
ValidateTableImportResponse,
ViewCalculation,
ViewV2Enriched,
RowExportFormat,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
@ -119,6 +125,64 @@ describe.each([
body: basicTable(),
})
})
it("does not persist the row fields that are not on the table schema", async () => {
const table: SaveTableRequest = basicTable()
table.rows = [
{
name: "test-name",
description: "test-desc",
nonValid: "test-non-valid",
},
]
const res = await config.api.table.save(table)
const persistedRows = await config.api.row.search(res._id!)
expect(persistedRows.rows).toEqual([
expect.objectContaining({
name: "test-name",
description: "test-desc",
}),
])
expect(persistedRows.rows[0].nonValid).toBeUndefined()
})
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)(
"cannot use protected column names (%s) while importing a table",
async columnName => {
const table: SaveTableRequest = basicTable()
table.rows = [
{
name: "test-name",
description: "test-desc",
},
]
await config.api.table.save(
{
...table,
schema: {
...table.schema,
[columnName]: {
name: columnName,
type: FieldType.STRING,
},
},
},
{
status: 400,
body: {
message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`,
status: 400,
},
}
)
}
)
})
describe("update", () => {
@ -1024,7 +1088,10 @@ describe.each([
})
})
describe("import validation", () => {
describe.each([
[RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
[RowExportFormat.JSON, (val: any) => val],
])("import validation (%s)", (_, userParser) => {
const basicSchema: TableSchema = {
id: {
type: FieldType.NUMBER,
@ -1036,9 +1103,41 @@ describe.each([
},
}
describe("validateNewTableImport", () => {
it("can validate basic imports", async () => {
const result = await config.api.table.validateNewTableImport(
const importCases: [
string,
(rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
][] = [
[
"validateNewTableImport",
async (rows: Row[], schema: TableSchema) => {
const result = await config.api.table.validateNewTableImport({
rows,
schema,
})
return result
},
],
[
"validateExistingTableImport",
async (rows: Row[], schema: TableSchema) => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows,
})
return result
},
],
]
describe.each(importCases)("%s", (_, testDelegate) => {
it("validates basic imports", async () => {
const result = await testDelegate(
[{ id: generator.natural(), name: generator.first() }],
basicSchema
)
@ -1053,21 +1152,54 @@ describe.each([
},
})
})
})
describe("validateExistingTableImport", () => {
it("can validate basic imports", async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema: basicSchema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows: [{ id: generator.natural(), name: generator.first() }],
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in schema (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
[columnName]: generator.word(),
},
],
schema: {
...basicSchema,
},
})
expect(result).toEqual({
allValid: false,
errors: {
[columnName]: `${columnName} is a protected column name`,
},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
[columnName]: false,
},
})
})
it("does not allow imports without rows", async () => {
const result = await testDelegate([], basicSchema)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {},
})
})
it("validates imports with some empty rows", async () => {
const result = await testDelegate(
[{}, { id: generator.natural(), name: generator.first() }, {}],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
@ -1079,6 +1211,217 @@ describe.each([
})
})
isInternal &&
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
)("don't allow protected names in the rows (%s)", async columnName => {
const result = await config.api.table.validateNewTableImport({
rows: [
{
id: generator.natural(),
name: generator.first(),
},
],
schema: {
...basicSchema,
[columnName]: {
name: columnName,
type: FieldType.STRING,
},
},
})
expect(result).toEqual({
allValid: false,
errors: {
[columnName]: `${columnName} is a protected column name`,
},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
[columnName]: false,
},
})
})
it("validates required fields and valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: generator.first() },
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
it("validates required fields and non-valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: "" },
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: false,
},
})
})
describe("bb references", () => {
const getUserValues = () => ({
_id: docIds.generateGlobalUserID(),
primaryDisplay: generator.first(),
email: generator.email({}),
})
it("can validate user column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
it("can validate user column imports with invalid data", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
{
id: generator.natural(),
name: generator.first(),
user: "no valid user data",
},
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: false,
},
})
})
it("can validate users column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
externalType: "array",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser([
getUserValues(),
getUserValues(),
getUserValues(),
]),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
})
})
describe("validateExistingTableImport", () => {
isInternal &&
it("can reimport _id fields for internal tables", async () => {
const table = await config.api.table.save(

View File

@ -1,26 +0,0 @@
const setup = require("./utilities")
const fetch = require("node-fetch")
jest.mock("node-fetch")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeAll(async () => {
await config.init()
inputs = {
username: "joe_bloggs",
url: "http://www.example.com",
}
})
afterAll(setup.afterAll)
it("should be able to run the action", async () => {
const res = await setup.runStep(setup.actions.discord.stepId, inputs)
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.success).toEqual(true)
})
})

View File

@ -0,0 +1,26 @@
import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
import nock from "nock"
describe("test the outgoing webhook action", () => {
let config = getConfig()
beforeAll(async () => {
await config.init()
})
afterAll(_afterAll)
beforeEach(() => {
nock.cleanAll()
})
it("should be able to run the action", async () => {
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
const res = await runStep(actions.discord.stepId, {
url: "http://www.example.com",
username: "joe_bloggs",
})
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
})

View File

@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"
describe("test the outgoing webhook action", () => {
let config = getConfig()
@ -9,42 +10,45 @@ describe("test the outgoing webhook action", () => {
afterAll()
beforeEach(() => {
nock.cleanAll()
})
it("should be able to run the action", async () => {
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
const res = await runStep(actions.integromat.stepId, {
value1: "test",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
it("should add the payload props when a JSON string is provided", async () => {
const payload = `{"value1":1,"value2":2,"value3":3,"value4":4,"value5":5,"name":"Adam","age":9}`
const payload = {
value1: 1,
value2: 2,
value3: 3,
value4: 4,
value5: 5,
name: "Adam",
age: 9,
}
nock("http://www.example.com/")
.post("/", payload)
.reply(200, { foo: "bar" })
const res = await runStep(actions.integromat.stepId, {
value1: "ONE",
value2: "TWO",
value3: "THREE",
value4: "FOUR",
value5: "FIVE",
body: {
value: payload,
},
body: { value: JSON.stringify(payload) },
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.body).toEqual(payload)
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
it("should return a 400 if the JSON payload string is malformed", async () => {
const payload = `{ value1 1 }`
const res = await runStep(actions.integromat.stepId, {
value1: "ONE",
body: {
value: payload,
},
body: { value: "{ invalid json }" },
url: "http://www.example.com",
})
expect(res.httpStatus).toEqual(400)

View File

@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"
describe("test the outgoing webhook action", () => {
let config = getConfig()
@ -9,31 +10,33 @@ describe("test the outgoing webhook action", () => {
afterAll()
beforeEach(() => {
nock.cleanAll()
})
it("should be able to run the action and default to 'get'", async () => {
nock("http://www.example.com/").get("/").reply(200, { foo: "bar" })
const res = await runStep(actions.n8n.stepId, {
url: "http://www.example.com",
body: {
test: "IGNORE_ME",
},
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("GET")
expect(res.response.body).toBeUndefined()
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
it("should add the payload props when a JSON string is provided", async () => {
const payload = `{ "name": "Adam", "age": 9 }`
nock("http://www.example.com/")
.post("/", { name: "Adam", age: 9 })
.reply(200)
const res = await runStep(actions.n8n.stepId, {
body: {
value: payload,
value: JSON.stringify({ name: "Adam", age: 9 }),
},
method: "POST",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("POST")
expect(res.response.body).toEqual(`{"name":"Adam","age":9}`)
expect(res.success).toEqual(true)
})
@ -53,6 +56,9 @@ describe("test the outgoing webhook action", () => {
})
it("should not append the body if the method is HEAD", async () => {
nock("http://www.example.com/")
.head("/", body => body === "")
.reply(200)
const res = await runStep(actions.n8n.stepId, {
url: "http://www.example.com",
method: "HEAD",
@ -60,9 +66,6 @@ describe("test the outgoing webhook action", () => {
test: "IGNORE_ME",
},
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("HEAD")
expect(res.response.body).toBeUndefined()
expect(res.success).toEqual(true)
})
})

View File

@ -1,41 +0,0 @@
const setup = require("./utilities")
const fetch = require("node-fetch")
jest.mock("node-fetch")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeAll(async () => {
await config.init()
inputs = {
requestMethod: "POST",
url: "www.example.com",
requestBody: JSON.stringify({
a: 1,
}),
}
})
afterAll(setup.afterAll)
it("should be able to run the action", async () => {
const res = await setup.runStep(
setup.actions.OUTGOING_WEBHOOK.stepId,
inputs
)
expect(res.success).toEqual(true)
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("POST")
expect(JSON.parse(res.response.body).a).toEqual(1)
})
it("should return an error if something goes wrong in fetch", async () => {
const res = await setup.runStep(setup.actions.OUTGOING_WEBHOOK.stepId, {
requestMethod: "GET",
url: "www.invalid.com",
})
expect(res.success).toEqual(false)
})
})

View File

@ -0,0 +1,37 @@
import { getConfig, afterAll as _afterAll, runStep, actions } from "./utilities"
import nock from "nock"
describe("test the outgoing webhook action", () => {
const config = getConfig()
beforeAll(async () => {
await config.init()
})
afterAll(_afterAll)
beforeEach(() => {
nock.cleanAll()
})
it("should be able to run the action", async () => {
nock("http://www.example.com")
.post("/", { a: 1 })
.reply(200, { foo: "bar" })
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
requestMethod: "POST",
url: "www.example.com",
requestBody: JSON.stringify({ a: 1 }),
})
expect(res.success).toEqual(true)
expect(res.response.foo).toEqual("bar")
})
it("should return an error if something goes wrong in fetch", async () => {
const res = await runStep(actions.OUTGOING_WEBHOOK.stepId, {
requestMethod: "GET",
url: "www.invalid.com",
})
expect(res.success).toEqual(false)
})
})

View File

@ -1,4 +1,5 @@
import { getConfig, afterAll, runStep, actions } from "./utilities"
import nock from "nock"
describe("test the outgoing webhook action", () => {
let config = getConfig()
@ -9,44 +10,45 @@ describe("test the outgoing webhook action", () => {
afterAll()
beforeEach(() => {
nock.cleanAll()
})
it("should be able to run the action", async () => {
nock("http://www.example.com/").post("/").reply(200, { foo: "bar" })
const res = await runStep(actions.zapier.stepId, {
value1: "test",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
it("should add the payload props when a JSON string is provided", async () => {
const payload = `{ "value1": 1, "value2": 2, "value3": 3, "value4": 4, "value5": 5, "name": "Adam", "age": 9 }`
const payload = {
value1: 1,
value2: 2,
value3: 3,
value4: 4,
value5: 5,
name: "Adam",
age: 9,
}
nock("http://www.example.com/")
.post("/", { ...payload, platform: "budibase" })
.reply(200, { foo: "bar" })
const res = await runStep(actions.zapier.stepId, {
value1: "ONE",
value2: "TWO",
value3: "THREE",
value4: "FOUR",
value5: "FIVE",
body: {
value: payload,
},
body: { value: JSON.stringify(payload) },
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.body).toEqual(
`{"platform":"budibase","value1":1,"value2":2,"value3":3,"value4":4,"value5":5,"name":"Adam","age":9}`
)
expect(res.response.foo).toEqual("bar")
expect(res.success).toEqual(true)
})
it("should return a 400 if the JSON payload string is malformed", async () => {
const payload = `{ value1 1 }`
const res = await runStep(actions.zapier.stepId, {
value1: "ONE",
body: {
value: payload,
},
body: { value: "{ invalid json }" },
url: "http://www.example.com",
})
expect(res.httpStatus).toEqual(400)
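
The Zapier and Integromat steps tested above share one payload convention: body.value is a JSON string that the step parses and merges into the outgoing POST body, and the Zapier step additionally stamps platform: "budibase" (see the nock matcher above). A hedged, illustrative sketch of that handling, not the step implementation itself:

function buildPayload(bodyValue: string): { httpStatus: number; payload?: object } {
  try {
    const parsed = JSON.parse(bodyValue)
    // Merge the user-supplied props over the platform stamp.
    return { httpStatus: 200, payload: { platform: "budibase", ...parsed } }
  } catch {
    return { httpStatus: 400 } // malformed strings like "{ invalid json }" land here
  }
}

console.log(buildPayload(`{"name":"Adam","age":9}`))
console.log(buildPayload("{ invalid json }"))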

View File

@ -1,4 +1,5 @@
import {
BodyType,
DatasourceFieldType,
HttpMethod,
Integration,
@ -15,7 +16,7 @@ import {
import get from "lodash/get"
import * as https from "https"
import qs from "querystring"
import type { Response } from "node-fetch"
import type { Response, RequestInit } from "node-fetch"
import fetch from "node-fetch"
import { formatBytes } from "../utilities"
import { performance } from "perf_hooks"
@ -28,15 +29,6 @@ import path from "path"
import { Builder as XmlBuilder } from "xml2js"
import { getAttachmentHeaders } from "./utils/restUtils"
enum BodyType {
NONE = "none",
FORM_DATA = "form",
XML = "xml",
ENCODED = "encoded",
JSON = "json",
TEXT = "text",
}
const coreFields = {
path: {
type: DatasourceFieldType.STRING,
@ -127,7 +119,23 @@ const SCHEMA: Integration = {
},
}
class RestIntegration implements IntegrationBase {
interface ParsedResponse {
data: any
info: {
code: number
size: string
time: string
}
extra?: {
raw: string | undefined
headers: Record<string, string[] | string>
}
pagination?: {
cursor: any
}
}
export class RestIntegration implements IntegrationBase {
private config: RestConfig
private headers: {
[key: string]: string
@ -138,7 +146,10 @@ class RestIntegration implements IntegrationBase {
this.config = config
}
async parseResponse(response: Response, pagination: PaginationConfig | null) {
async parseResponse(
response: Response,
pagination?: PaginationConfig
): Promise<ParsedResponse> {
let data: any[] | string | undefined,
raw: string | undefined,
headers: Record<string, string[] | string> = {},
@ -235,8 +246,8 @@ class RestIntegration implements IntegrationBase {
getUrl(
path: string,
queryString: string,
pagination: PaginationConfig | null,
paginationValues: PaginationValues | null
pagination?: PaginationConfig,
paginationValues?: PaginationValues
): string {
// Add pagination params to query string if required
if (pagination?.location === "query" && paginationValues) {
@ -279,10 +290,10 @@ class RestIntegration implements IntegrationBase {
addBody(
bodyType: string,
body: string | any,
input: any,
pagination: PaginationConfig | null,
paginationValues: PaginationValues | null
) {
input: RequestInit,
pagination?: PaginationConfig,
paginationValues?: PaginationValues
): RequestInit {
if (!input.headers) {
input.headers = {}
}
@ -345,6 +356,7 @@ class RestIntegration implements IntegrationBase {
string = new XmlBuilder().buildObject(object)
}
input.body = string
// @ts-ignore
input.headers["Content-Type"] = "application/xml"
break
case BodyType.JSON:
@ -356,13 +368,14 @@ class RestIntegration implements IntegrationBase {
object[key] = value
})
input.body = JSON.stringify(object)
// @ts-ignore
input.headers["Content-Type"] = "application/json"
break
}
return input
}
getAuthHeaders(authConfigId: string): { [key: string]: any } {
getAuthHeaders(authConfigId?: string): { [key: string]: any } {
let headers: any = {}
if (this.config.authConfigs && authConfigId) {
@ -398,7 +411,7 @@ class RestIntegration implements IntegrationBase {
headers = {},
method = HttpMethod.GET,
disabledHeaders,
bodyType,
bodyType = BodyType.NONE,
requestBody,
authConfigId,
pagination,
@ -407,7 +420,7 @@ class RestIntegration implements IntegrationBase {
const authHeaders = this.getAuthHeaders(authConfigId)
this.headers = {
...this.config.defaultHeaders,
...(this.config.defaultHeaders || {}),
...headers,
...authHeaders,
}
@ -420,7 +433,7 @@ class RestIntegration implements IntegrationBase {
}
}
let input: any = { method, headers: this.headers }
let input: RequestInit = { method, headers: this.headers }
input = this.addBody(
bodyType,
requestBody,
@ -437,7 +450,12 @@ class RestIntegration implements IntegrationBase {
// Deprecated by rejectUnauthorized
if (this.config.legacyHttpParser) {
// NOTE(samwho): it seems like this code doesn't actually work because it requires
// node-fetch >=3, and we're not on that because upgrading to it produces errors to
// do with ESM that are above my pay grade.
// https://github.com/nodejs/node/issues/43798
// @ts-ignore
input.extraHttpOptions = { insecureHTTPParser: true }
}
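
For reference, the query-string branch at the top of getUrl resolves as sketched below; a simplified version of that merge, with parameter shapes following the PaginationConfig and PaginationValues types:

import qs from "querystring"

function appendPaginationToQuery(
  path: string,
  queryString: string,
  pagination?: { location?: string; pageParam?: string; sizeParam?: string },
  paginationValues?: { page?: string | number; limit?: number }
): string {
  if (pagination?.location === "query" && paginationValues) {
    const params: Record<string, any> = qs.parse(queryString)
    if (pagination.pageParam) {
      params[pagination.pageParam] = paginationValues.page
    }
    if (pagination.sizeParam) {
      params[pagination.sizeParam] = paginationValues.limit
    }
    queryString = qs.stringify(params)
  }
  return queryString ? `${path}?${queryString}` : path
}

// appendPaginationToQuery("/users", "active=true", { location: "query", pageParam: "page" }, { page: 2 })
// => "/users?active=true&page=2"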

View File

@ -1,4 +1,5 @@
import type { GoogleSpreadsheetWorksheet } from "google-spreadsheet"
import nock from "nock"
jest.mock("google-auth-library")
const { OAuth2Client } = require("google-auth-library")
@ -62,6 +63,13 @@ describe("Google Sheets Integration", () => {
await config.init()
jest.clearAllMocks()
nock.cleanAll()
nock("https://www.googleapis.com/").post("/oauth2/v4/token").reply(200, {
grant_type: "client_credentials",
client_id: "your-client-id",
client_secret: "your-client-secret",
})
})
function createBasicTable(name: string, columns: string[]): Table {

File diff suppressed because it is too large

View File

@ -48,9 +48,7 @@ export async function save(
}
// check for case sensitivity - we don't want to allow duplicated columns
const duplicateColumn = findDuplicateInternalColumns(table, {
ignoreProtectedColumnNames: !oldTable && !!opts?.isImport,
})
const duplicateColumn = findDuplicateInternalColumns(table)
if (duplicateColumn.length) {
throw new Error(
`Column(s) "${duplicateColumn.join(

View File

@ -1,6 +1,7 @@
import TestConfiguration from "../../tests/utilities/TestConfiguration"
import { startup } from "../index"
import { users, utils, tenancy } from "@budibase/backend-core"
import nock from "nock"
describe("check BB_ADMIN environment variables", () => {
const config = new TestConfiguration()
@ -8,7 +9,17 @@ describe("check BB_ADMIN environment variables", () => {
await config.init()
})
beforeEach(() => {
nock.cleanAll()
})
it("should be able to create a user with the BB_ADMIN environment variables", async () => {
nock("http://localhost:10000")
.get("/api/global/configs/checklist")
.reply(200, {})
.get("/api/global/self/api_key")
.reply(200, {})
const EMAIL = "budibase@budibase.com",
PASSWORD = "budibase"
await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {

View File

@ -1,6 +1,7 @@
import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"
import nock from "nock"
if (!process.env.CI) {
// set a longer timeout in dev for debugging 100 seconds
@ -9,6 +10,15 @@ if (!process.env.CI) {
jest.setTimeout(30 * 1000)
}
nock.disableNetConnect()
nock.enableNetConnect(host => {
return (
host.includes("localhost") ||
host.includes("127.0.0.1") ||
host.includes("::1")
)
})
testContainerUtils.setupEnv(env, coreEnv)
afterAll(() => {

View File

@ -15,6 +15,7 @@ import { RoleAPI } from "./role"
import { TemplateAPI } from "./template"
import { RowActionAPI } from "./rowAction"
import { AutomationAPI } from "./automation"
import { PluginAPI } from "./plugin"
export default class API {
table: TableAPI
@ -33,6 +34,7 @@ export default class API {
templates: TemplateAPI
rowAction: RowActionAPI
automation: AutomationAPI
plugin: PluginAPI
constructor(config: TestConfiguration) {
this.table = new TableAPI(config)
@ -51,5 +53,6 @@ export default class API {
this.templates = new TemplateAPI(config)
this.rowAction = new RowActionAPI(config)
this.automation = new AutomationAPI(config)
this.plugin = new PluginAPI(config)
}
}

View File

@ -0,0 +1,11 @@
import { Expectations, TestAPI } from "./base"
import { CreatePluginRequest, CreatePluginResponse } from "@budibase/types"
export class PluginAPI extends TestAPI {
create = async (body: CreatePluginRequest, expectations?: Expectations) => {
return await this._post<CreatePluginResponse>(`/api/plugin`, {
body,
expectations,
})
}
}

View File

@ -11,6 +11,7 @@ import {
DeleteRows,
DeleteRow,
PaginatedSearchRowResponse,
RowExportFormat,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
exportRows = async (
tableId: string,
body: ExportRowsRequest,
format: RowExportFormat = RowExportFormat.JSON,
expectations?: Expectations
) => {
const response = await this._requestRaw(
@ -112,7 +114,7 @@ export class RowAPI extends TestAPI {
`/api/${tableId}/rows/exportRows`,
{
body,
query: { format: "json" },
query: { format },
expectations,
}
)

View File

@ -1,13 +1,14 @@
import {
BulkImportRequest,
BulkImportResponse,
CsvToJsonRequest,
CsvToJsonResponse,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest,
SaveTableResponse,
Table,
TableSchema,
ValidateNewTableImportRequest,
ValidateTableImportRequest,
ValidateTableImportResponse,
} from "@budibase/types"
@ -71,17 +72,13 @@ export class TableAPI extends TestAPI {
}
validateNewTableImport = async (
rows: Row[],
schema: TableSchema,
body: ValidateNewTableImportRequest,
expectations?: Expectations
): Promise<ValidateTableImportResponse> => {
return await this._post<ValidateTableImportResponse>(
`/api/tables/validateNewTableImport`,
{
body: {
rows,
schema,
},
body,
expectations,
}
)
@ -99,4 +96,14 @@ export class TableAPI extends TestAPI {
}
)
}
csvToJson = async (
body: CsvToJsonRequest,
expectations?: Expectations
): Promise<CsvToJsonResponse> => {
return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
body,
expectations,
})
}
}

View File

@ -26,6 +26,10 @@ import {
WebhookActionType,
AutomationEventType,
LoopStepType,
FieldSchema,
BBReferenceFieldSubType,
JsonFieldSubType,
AutoFieldSubType,
} from "@budibase/types"
import { LoopInput } from "../../definitions/automations"
import { merge } from "lodash"
@ -573,3 +577,161 @@ export function basicEnvironmentVariable(
development: dev || prod,
}
}
export function fullSchemaWithoutLinks({
allRequired,
}: {
allRequired?: boolean
}) {
const schema: {
[type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: allRequired,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
constraints: {
presence: allRequired,
},
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: allRequired,
inclusion: ["option 1", "option 2", "option 3", "option 4"],
},
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
presence: allRequired,
type: JsonFieldSubType.ARRAY,
inclusion: ["options 1", "options 2", "options 3", "options 4"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
constraints: {
presence: allRequired,
},
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
constraints: {
presence: allRequired,
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
constraints: {
presence: allRequired,
},
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
constraints: {
presence: allRequired,
},
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
constraints: {
presence: allRequired,
},
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE]: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "users",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENTS]: {
name: "attachments",
type: FieldType.ATTACHMENTS,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENT_SINGLE]: {
name: "attachment_single",
type: FieldType.ATTACHMENT_SINGLE,
constraints: {
presence: allRequired,
},
},
[FieldType.AUTO]: {
name: "auto",
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
presence: allRequired,
},
},
[FieldType.JSON]: {
name: "json",
type: FieldType.JSON,
constraints: {
presence: allRequired,
},
},
[FieldType.INTERNAL]: {
name: "internal",
type: FieldType.INTERNAL,
constraints: {
presence: allRequired,
},
},
[FieldType.SIGNATURE_SINGLE]: {
name: "signature_single",
type: FieldType.SIGNATURE_SINGLE,
constraints: {
presence: allRequired,
},
},
}
return schema
}
export function basicAttachment() {
return {
key: generator.guid(),
name: generator.word(),
extension: generator.word(),
size: generator.natural(),
url: `/${generator.guid()}`,
}
}

View File

@ -8,7 +8,6 @@ import {
} from "@budibase/types"
import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
import { parseCsvExport } from "../api/controllers/view/exporters"
type Rows = Array<Row>
@ -41,7 +40,11 @@ export function isRows(rows: any): rows is Rows {
return Array.isArray(rows) && rows.every(row => typeof row === "object")
}
export function validate(rows: Rows, schema: TableSchema): ValidationResults {
export function validate(
rows: Rows,
schema: TableSchema,
protectedColumnNames: readonly string[]
): ValidationResults {
const results: ValidationResults = {
schemaValidation: {},
allValid: false,
@ -49,6 +52,8 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
errors: {},
}
protectedColumnNames = protectedColumnNames.map(x => x.toLowerCase())
rows.forEach(row => {
Object.entries(row).forEach(([columnName, columnData]) => {
const {
@ -63,6 +68,12 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
return
}
if (protectedColumnNames.includes(columnName.toLowerCase())) {
results.schemaValidation[columnName] = false
results.errors[columnName] = `${columnName} is a protected column name`
return
}
// If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
if (typeof columnType !== "string") {
results.invalidColumns.push(columnName)
@ -73,7 +84,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
"Column names can't contain special characters"
} else if (
columnData == null &&
!schema[columnName].constraints?.presence
!helpers.schema.isRequired(constraints)
) {
results.schemaValidation[columnName] = true
} else if (
@ -83,6 +94,12 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
isAutoColumn
) {
return
} else if (
[FieldType.STRING].includes(columnType) &&
!columnData &&
helpers.schema.isRequired(constraints)
) {
results.schemaValidation[columnName] = false
} else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
// If provided must be a valid number
results.schemaValidation[columnName] = false
@ -109,6 +126,13 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
})
})
for (const schemaField of Object.keys(schema)) {
if (protectedColumnNames.includes(schemaField.toLowerCase())) {
results.schemaValidation[schemaField] = false
results.errors[schemaField] = `${schemaField} is a protected column name`
}
}
results.allValid =
Object.values(results.schemaValidation).length > 0 &&
Object.values(results.schemaValidation).every(column => column)
@ -140,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
const columnSchema = schema[columnName]
const { type: columnType } = columnSchema
if (columnType === FieldType.NUMBER) {
if ([FieldType.NUMBER].includes(columnType)) {
// If provided must be a valid number
parsedRow[columnName] = columnData ? Number(columnData) : columnData
} else if (
@ -152,16 +176,23 @@ export function parse(rows: Rows, table: Table): Rows {
parsedRow[columnName] = columnData
? new Date(columnData).toISOString()
: columnData
} else if (
columnType === FieldType.JSON &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseJsonExport(columnData)
} else if (columnType === FieldType.BB_REFERENCE) {
let parsedValues: { _id: string }[] = columnData || []
if (columnData) {
parsedValues = parseCsvExport<{ _id: string }[]>(columnData)
if (columnData && typeof columnData === "string") {
parsedValues = parseJsonExport<{ _id: string }[]>(columnData)
}
parsedRow[columnName] = parsedValues?.map(u => u._id)
} else if (columnType === FieldType.BB_REFERENCE_SINGLE) {
const parsedValue =
columnData && parseCsvExport<{ _id: string }>(columnData)
let parsedValue = columnData
if (columnData && typeof columnData === "string") {
parsedValue = parseJsonExport<{ _id: string }>(columnData)
}
parsedRow[columnName] = parsedValue?._id
} else if (
(columnType === FieldType.ATTACHMENTS ||
@ -169,7 +200,7 @@ export function parse(rows: Rows, table: Table): Rows {
columnType === FieldType.SIGNATURE_SINGLE) &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseCsvExport(columnData)
parsedRow[columnName] = parseJsonExport(columnData)
} else {
parsedRow[columnName] = columnData
}
@ -185,32 +216,54 @@ function isValidBBReference(
subtype: BBReferenceFieldSubType,
isRequired: boolean
): boolean {
if (typeof data !== "string") {
try {
if (type === FieldType.BB_REFERENCE_SINGLE) {
if (!data) {
return !isRequired
}
const user = parseJsonExport<{ _id: string }>(data)
return db.isGlobalUserID(user._id)
}
switch (subtype) {
case BBReferenceFieldSubType.USER:
case BBReferenceFieldSubType.USERS: {
const userArray = parseJsonExport<{ _id: string }[]>(data)
if (!Array.isArray(userArray)) {
return false
}
const containsWrongId = userArray.find(
user => !db.isGlobalUserID(user._id)
)
return !containsWrongId
}
default:
throw utils.unreachable(subtype)
}
} catch {
return false
}
}
if (type === FieldType.BB_REFERENCE_SINGLE) {
if (!data) {
return !isRequired
}
const user = parseCsvExport<{ _id: string }>(data)
return db.isGlobalUserID(user._id)
function parseJsonExport<T>(value: any) {
if (typeof value !== "string") {
return value
}
try {
const parsed = JSON.parse(value)
switch (subtype) {
case BBReferenceFieldSubType.USER:
case BBReferenceFieldSubType.USERS: {
const userArray = parseCsvExport<{ _id: string }[]>(data)
if (!Array.isArray(userArray)) {
return false
}
const containsWrongId = userArray.find(
user => !db.isGlobalUserID(user._id)
)
return !containsWrongId
return parsed as T
} catch (e: any) {
if (
e.message.startsWith("Expected property name or '}' in JSON at position ")
) {
// The value was probably exported as CSV and uses single quotes instead of double quotes
const parsed = JSON.parse(value.replace(/'/g, '"'))
return parsed as T
}
default:
throw utils.unreachable(subtype)
// It is not valid JSON
throw e
}
}
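For reference, a minimal standalone sketch of the single-quote fallback in parseJsonExport (re-implemented here for illustration; the error-message check mirrors the helper above and relies on the V8 JSON.parse message format used by Node):

  function parseLoosely<T>(value: string): T {
    try {
      return JSON.parse(value) as T
    } catch (e: any) {
      if (
        e.message.startsWith(
          "Expected property name or '}' in JSON at position "
        )
      ) {
        // CSV exports can wrap keys and values in single quotes,
        // e.g. {'_id': 'us_abc'} - retry with quotes normalised
        return JSON.parse(value.replace(/'/g, '"')) as T
      }
      throw e
    }
  }

  // parseLoosely("{'_id': 'us_abc'}") -> { _id: "us_abc" }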

View File

@ -53,10 +53,7 @@ export function canBeSortColumn(type: FieldType): boolean {
return !!allowSortColumnByType[type]
}
export function findDuplicateInternalColumns(
table: Table,
opts?: { ignoreProtectedColumnNames: boolean }
): string[] {
export function findDuplicateInternalColumns(table: Table): string[] {
// maintains the case of keys
const casedKeys = Object.keys(table.schema)
// get the column names
@ -72,11 +69,10 @@ export function findDuplicateInternalColumns(
}
}
}
if (!opts?.ignoreProtectedColumnNames) {
for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
if (casedKeys.find(key => key === internalColumn)) {
duplicates.push(internalColumn)
}
for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) {
if (casedKeys.find(key => key === internalColumn)) {
duplicates.push(internalColumn)
}
}
return duplicates

View File

@ -67,3 +67,13 @@ export function hasSchema(test: any) {
Object.keys(test).length > 0
)
}
// Returns a shallow copy of the object containing only the allowed properties
export function trimOtherProps(object: any, allowedProps: string[]) {
const result = Object.keys(object)
.filter(key => allowedProps.includes(key))
.reduce<Record<string, any>>(
(acc, key) => ({ ...acc, [key]: object[key] }),
{}
)
return result
}
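A quick usage sketch (hypothetical row and schema keys):

  // Properties not present in the schema are dropped
  const row = { name: "Alice", age: 30, unknownCol: "dropped" }
  const trimmed = trimOtherProps(row, ["name", "age"])
  // -> { name: "Alice", age: 30 }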

View File

@ -15,3 +15,4 @@ export * from "./automation"
export * from "./layout"
export * from "./query"
export * from "./role"
export * from "./plugins"

View File

@ -0,0 +1,12 @@
import { PluginSource } from "../../documents"
export interface CreatePluginRequest {
source: PluginSource
url: string
githubToken?: string
headers?: { [key: string]: string }
}
export interface CreatePluginResponse {
plugin: any
}
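A hypothetical request body built against these types (the PluginSource.GITHUB member is an assumption for illustration; only source and url are required):

  const body: CreatePluginRequest = {
    source: PluginSource.GITHUB, // assumed enum member
    url: "https://github.com/example/budibase-plugin",
    githubToken: process.env.GITHUB_TOKEN,
  }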

View File

@ -45,15 +45,15 @@ export interface DynamicVariable {
export interface RestConfig {
url: string
rejectUnauthorized: boolean
rejectUnauthorized?: boolean
downloadImages?: boolean
defaultHeaders: {
defaultHeaders?: {
[key: string]: any
}
legacyHttpParser: boolean
authConfigs: RestAuthConfig[]
staticVariables: {
legacyHttpParser?: boolean
authConfigs?: RestAuthConfig[]
staticVariables?: {
[key: string]: string
}
dynamicVariables: DynamicVariable[]
dynamicVariables?: DynamicVariable[]
}
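With everything except url now optional, a bare-minimum config typechecks (sketch, assuming RestConfig is in scope):

  const minimal: RestConfig = {
    url: "https://api.example.com",
  }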

View File

@ -36,31 +36,39 @@ export interface QueryResponse {
pagination: any
}
export enum BodyType {
NONE = "none",
FORM_DATA = "form",
XML = "xml",
ENCODED = "encoded",
JSON = "json",
TEXT = "text",
}
export interface RestQueryFields {
path: string
path?: string
queryString?: string
headers: { [key: string]: any }
disabledHeaders: { [key: string]: any }
requestBody: any
bodyType: string
json: object
method: string
authConfigId: string
pagination: PaginationConfig | null
paginationValues: PaginationValues | null
headers?: { [key: string]: any }
disabledHeaders?: { [key: string]: any }
requestBody?: any
bodyType?: BodyType
method?: string
authConfigId?: string
pagination?: PaginationConfig
paginationValues?: PaginationValues
}
export interface PaginationConfig {
type: string
location: string
pageParam: string
sizeParam: string | null
responseParam: string | null
sizeParam?: string
responseParam?: string
}
export interface PaginationValues {
page: string | number | null
limit: number | null
page?: string | number
limit?: number
}
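A sketch of a minimal query under the loosened types: pagination params are now optional rather than nullable, and bodyType is the BodyType enum instead of a bare string (assumes the interfaces above are in scope):

  const fields: RestQueryFields = {
    path: "users",
    method: "GET",
    bodyType: BodyType.NONE,
    pagination: {
      type: "page",
      location: "query",
      pageParam: "page",
      sizeParam: "limit",
    },
  }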
export enum HttpMethod {

yarn.lock
View File

@ -6707,22 +6707,39 @@ acorn-import-assertions@^1.9.0:
resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac"
integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==
acorn-jsx@^5.3.2:
acorn-jsx-walk@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/acorn-jsx-walk/-/acorn-jsx-walk-2.0.0.tgz#a5ed648264e68282d7c2aead80216bfdf232573a"
integrity sha512-uuo6iJj4D4ygkdzd6jPtcxs8vZgDX9YFIkqczGImoypX2fQ4dVImmu3UzA4ynixCIMTrEOWW+95M2HuBaCEOVA==
acorn-jsx@5.3.2, acorn-jsx@^5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
acorn-loose@8.4.0:
version "8.4.0"
resolved "https://registry.yarnpkg.com/acorn-loose/-/acorn-loose-8.4.0.tgz#26d3e219756d1e180d006f5bcc8d261a28530f55"
integrity sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ==
dependencies:
acorn "^8.11.0"
acorn-walk@8.3.3, acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0, acorn-walk@^8.3.2:
version "8.3.3"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e"
integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==
dependencies:
acorn "^8.11.0"
acorn-walk@^7.1.1:
version "7.2.0"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0, acorn-walk@^8.3.2:
version "8.3.3"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e"
integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==
dependencies:
acorn "^8.11.0"
acorn@8.12.1:
version "8.12.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248"
integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==
acorn@^5.2.1, acorn@^5.7.3:
version "5.7.4"
@ -6791,6 +6808,16 @@ ajv-formats@^2.0.2:
dependencies:
ajv "^8.0.0"
ajv@8.17.1:
version "8.17.1"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6"
integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==
dependencies:
fast-deep-equal "^3.1.3"
fast-uri "^3.0.1"
json-schema-traverse "^1.0.0"
require-from-string "^2.0.2"
ajv@^6.12.3, ajv@^6.12.4:
version "6.12.6"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
@ -8484,6 +8511,11 @@ combos@^0.2.0:
resolved "https://registry.yarnpkg.com/combos/-/combos-0.2.0.tgz#dc31c5a899b42293d55fe19c064d3e6e207ba4f7"
integrity sha512-Z6YfvgiTCERWJTj3wQiXamFhssdvz1n4ok447rS330lw3uL72WAx8IvrLU7xiE71uyb5WF8JEP+BWB5KhOoGeg==
commander@12.1.0:
version "12.1.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3"
integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==
commander@6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.0.tgz#b990bfb8ac030aedc6d11bc04d1488ffef56db75"
@ -9551,6 +9583,34 @@ depd@^1.1.0, depd@~1.1.2:
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==
dependency-cruiser@^16.3.7:
version "16.3.10"
resolved "https://registry.yarnpkg.com/dependency-cruiser/-/dependency-cruiser-16.3.10.tgz#fe26a50d5e10a4496bc2b70d027fca6ded48814f"
integrity sha512-WkCnibHBfvaiaQ+S46LZ6h4AR6oj42Vsf5/0Vgtrwdwn7ZekMJdZ/ALoTwNp/RaGlKW+MbV/fhSZOvmhAWVWzQ==
dependencies:
acorn "8.12.1"
acorn-jsx "5.3.2"
acorn-jsx-walk "2.0.0"
acorn-loose "8.4.0"
acorn-walk "8.3.3"
ajv "8.17.1"
commander "12.1.0"
enhanced-resolve "5.17.1"
ignore "5.3.1"
interpret "^3.1.1"
is-installed-globally "1.0.0"
json5 "2.2.3"
memoize "10.0.0"
picocolors "1.0.1"
picomatch "4.0.2"
prompts "2.4.2"
rechoir "^0.8.0"
safe-regex "2.1.1"
semver "^7.6.3"
teamcity-service-messages "0.1.14"
tsconfig-paths-webpack-plugin "4.1.0"
watskeburt "4.1.0"
dependency-tree@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/dependency-tree/-/dependency-tree-9.0.0.tgz#9288dd6daf35f6510c1ea30d9894b75369aa50a2"
@ -10221,6 +10281,14 @@ engine.io@~6.5.2:
engine.io-parser "~5.2.1"
ws "~8.17.1"
enhanced-resolve@5.17.1, enhanced-resolve@^5.7.0:
version "5.17.1"
resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15"
integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==
dependencies:
graceful-fs "^4.2.4"
tapable "^2.2.0"
enhanced-resolve@^5.8.3:
version "5.14.1"
resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.14.1.tgz#de684b6803724477a4af5d74ccae5de52c25f6b3"
@ -11016,6 +11084,11 @@ fast-text-encoding@^1.0.0:
resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz#0aa25f7f638222e3396d72bf936afcf1d42d6867"
integrity sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==
fast-uri@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.1.tgz#cddd2eecfc83a71c1be2cc2ef2061331be8a7134"
integrity sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==
fast-url-parser@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d"
@ -11877,6 +11950,13 @@ global-agent@3.0.0:
semver "^7.3.2"
serialize-error "^7.0.1"
global-directory@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/global-directory/-/global-directory-4.0.1.tgz#4d7ac7cfd2cb73f304c53b8810891748df5e361e"
integrity sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==
dependencies:
ini "4.1.1"
global-dirs@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.1.tgz#0c488971f066baceda21447aecb1a8b911d22485"
@ -12541,6 +12621,11 @@ ignore-walk@^6.0.0:
dependencies:
minimatch "^7.4.2"
ignore@5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef"
integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==
ignore@^5.0.4, ignore@^5.2.0, ignore@^5.2.4:
version "5.3.0"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78"
@ -12666,6 +12751,11 @@ ini@2.0.0:
resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
ini@4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/ini/-/ini-4.1.1.tgz#d95b3d843b1e906e56d6747d5447904ff50ce7a1"
integrity sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==
ini@^1.3.2, ini@^1.3.4, ini@^1.3.8, ini@~1.3.0:
version "1.3.8"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
@ -12743,6 +12833,11 @@ interpret@^2.2.0:
resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
interpret@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/interpret/-/interpret-3.1.1.tgz#5be0ceed67ca79c6c4bc5cf0d7ee843dcea110c4"
integrity sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==
into-stream@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-3.1.0.tgz#96fb0a936c12babd6ff1752a17d05616abd094c6"
@ -12973,6 +13068,14 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
dependencies:
is-extglob "^2.1.1"
is-installed-globally@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-1.0.0.tgz#08952c43758c33d815692392f7f8437b9e436d5a"
integrity sha512-K55T22lfpQ63N4KEN57jZUAaAYqYHEe8veb/TycJRk9DdSCLLcovXz/mL6mOnhQaZsQGwPhuFopdQIlqGSEjiQ==
dependencies:
global-directory "^4.0.1"
is-path-inside "^4.0.0"
is-installed-globally@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520"
@ -13060,6 +13163,11 @@ is-path-inside@^3.0.2, is-path-inside@^3.0.3:
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
is-path-inside@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-4.0.0.tgz#805aeb62c47c1b12fc3fd13bfb3ed1e7430071db"
integrity sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==
is-plain-obj@^1.0.0, is-plain-obj@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
@ -14084,6 +14192,11 @@ json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==
json5@2.2.3, json5@^2.2.1, json5@^2.2.2, json5@^2.2.3:
version "2.2.3"
resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==
json5@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593"
@ -14091,11 +14204,6 @@ json5@^1.0.2:
dependencies:
minimist "^1.2.0"
json5@^2.2.1, json5@^2.2.2, json5@^2.2.3:
version "2.2.3"
resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==
jsonc-parser@3.2.0, jsonc-parser@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76"
@ -15441,6 +15549,13 @@ memdown@^5.1.0:
ltgt "~2.2.0"
safe-buffer "~5.2.0"
memoize@10.0.0:
version "10.0.0"
resolved "https://registry.yarnpkg.com/memoize/-/memoize-10.0.0.tgz#43fa66b2022363c7c50cf5dfab732a808a3d7147"
integrity sha512-H6cBLgsi6vMWOcCpvVCdFFnl3kerEXbrYh9q+lY6VXvQSmM6CkmV08VOwT+WE2tzIEqRPFfAq3fm4v/UIW6mSA==
dependencies:
mimic-function "^5.0.0"
memory-pager@^1.0.2:
version "1.5.0"
resolved "https://registry.yarnpkg.com/memory-pager/-/memory-pager-1.5.0.tgz#d8751655d22d384682741c972f2c3d6dfa3e66b5"
@ -15549,6 +15664,11 @@ mimic-fn@^4.0.0:
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc"
integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==
mimic-function@^5.0.0:
version "5.0.1"
resolved "https://registry.yarnpkg.com/mimic-function/-/mimic-function-5.0.1.tgz#acbe2b3349f99b9deaca7fb70e48b83e94e67076"
integrity sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==
mimic-response@^1.0.0, mimic-response@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
@ -17412,15 +17532,20 @@ phin@^2.9.1:
resolved "https://registry.yarnpkg.com/phin/-/phin-2.9.3.tgz#f9b6ac10a035636fb65dfc576aaaa17b8743125c"
integrity sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==
picocolors@1.0.1, picocolors@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1"
integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==
picocolors@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
picocolors@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1"
integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==
picomatch@4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab"
integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==
picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1:
version "2.3.1"
@ -18385,7 +18510,7 @@ promise.series@^0.2.0:
resolved "https://registry.yarnpkg.com/promise.series/-/promise.series-0.2.0.tgz#2cc7ebe959fc3a6619c04ab4dbdc9e452d864bbd"
integrity sha512-VWQJyU2bcDTgZw8kpfBpB/ejZASlCrzwz5f2hjb/zlujOEB4oeiAhHygAWq8ubsX2GVkD4kCU5V2dwOTaCY5EQ==
prompts@^2.0.1:
prompts@2.4.2, prompts@^2.0.1:
version "2.4.2"
resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==
@ -18952,6 +19077,11 @@ regenerator-transform@^0.15.1:
dependencies:
"@babel/runtime" "^7.8.4"
regexp-tree@~0.1.1:
version "0.1.27"
resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd"
integrity sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==
regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334"
@ -19492,6 +19622,13 @@ safe-regex-test@^1.0.3:
es-errors "^1.3.0"
is-regex "^1.1.4"
safe-regex@2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-2.1.1.tgz#f7128f00d056e2fe5c11e81a1324dd974aadced2"
integrity sha512-rx+x8AMzKb5Q5lQ95Zoi6ZbJqwCLkqi3XuJXp5P3rT8OEc6sZCJG5AE5dU3lsgRr/F4Bs31jSlVN+j5KrsGu9A==
dependencies:
regexp-tree "~0.1.1"
safe-stable-stringify@^2.1.0, safe-stable-stringify@^2.3.1:
version "2.4.3"
resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886"
@ -19603,7 +19740,7 @@ semver-diff@^3.1.1:
dependencies:
semver "^6.3.0"
"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~2.3.1:
"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3, semver@~2.3.1:
version "7.5.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e"
integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==
@ -20867,6 +21004,11 @@ tarn@^3.0.1, tarn@^3.0.2:
resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693"
integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==
teamcity-service-messages@0.1.14:
version "0.1.14"
resolved "https://registry.yarnpkg.com/teamcity-service-messages/-/teamcity-service-messages-0.1.14.tgz#193d420a5e4aef8e5e50b8c39e7865e08fbb5d8a"
integrity sha512-29aQwaHqm8RMX74u2o/h1KbMLP89FjNiMxD9wbF2BbWOnbM+q+d1sCEC+MqCc4QW3NJykn77OMpTFw/xTHIc0w==
tedious@^16.4.0:
version "16.7.1"
resolved "https://registry.yarnpkg.com/tedious/-/tedious-16.7.1.tgz#1190f30fd99a413f1dc9250dee4835cf0788b650"
@ -21258,6 +21400,15 @@ ts-node@10.8.1:
v8-compile-cache-lib "^3.0.1"
yn "3.1.1"
tsconfig-paths-webpack-plugin@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-4.1.0.tgz#3c6892c5e7319c146eee1e7302ed9e6f2be4f763"
integrity sha512-xWFISjviPydmtmgeUAuXp4N1fky+VCtfhOkDUFIv5ea7p4wuTomI4QTrXvFBX2S4jZsmyTSrStQl+E+4w+RzxA==
dependencies:
chalk "^4.1.0"
enhanced-resolve "^5.7.0"
tsconfig-paths "^4.1.2"
tsconfig-paths@4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.0.0.tgz#1082f5d99fd127b72397eef4809e4dd06d229b64"
@ -22037,6 +22188,11 @@ walker@^1.0.8:
dependencies:
makeerror "1.0.12"
watskeburt@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/watskeburt/-/watskeburt-4.1.0.tgz#3c0227669be646a97424b631164b1afe3d4d5344"
integrity sha512-KkY5H51ajqy9HYYI+u9SIURcWnqeVVhdH0I+ab6aXPGHfZYxgRCwnR6Lm3+TYB6jJVt5jFqw4GAKmwf1zHmGQw==
wcwidth@^1.0.0, wcwidth@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8"