Merge branch 'master' into add-select-states-to-dropdown-data-provider-select
commit b2aca0c4a6
@@ -3,6 +3,7 @@ const mockS3 = {
   deleteObject: jest.fn().mockReturnThis(),
   deleteObjects: jest.fn().mockReturnThis(),
   createBucket: jest.fn().mockReturnThis(),
+  getObject: jest.fn().mockReturnThis(),
   listObject: jest.fn().mockReturnThis(),
   getSignedUrl: jest.fn((operation: string, params: any) => {
     return `http://s3.example.com/${params.Bucket}/${params.Key}`
@@ -249,7 +249,8 @@ export const paginatedUsers = async ({
   limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
-  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
+  const pageSize = limit ?? PAGE_LIMIT
+  const pageLimit = pageSize + 1
   // get one extra document, to have the next page
   const opts: DatabaseQueryOpts = {
     include_docs: true,

@@ -276,7 +277,7 @@ export const paginatedUsers = async ({
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, pageLimit, {
+  return pagination(userList, pageSize, {
     paginate: true,
     property,
     getKey,
@@ -1,2 +1,3 @@
 export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
+
 export const MOCK_DATE_TIMESTAMP = 1577836800000
@@ -3,7 +3,6 @@
   Heading,
   Body,
   Button,
-  ButtonGroup,
   Table,
   Layout,
   Modal,
@@ -46,6 +45,10 @@
     datasource: {
       type: "user",
     },
+    options: {
+      paginate: true,
+      limit: 10,
+    },
   })

   let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@@ -65,10 +68,12 @@
     { column: "role", component: RoleTableRenderer },
   ]
   let userData = []
+  let invitesLoaded = false
+  let pendingInvites = []
+  let parsedInvites = []

   $: isOwner = $auth.accountPortalAccess && $admin.cloud
   $: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled

   $: debouncedUpdateFetch(searchEmail)
   $: schema = {
     email: {
@@ -88,16 +93,6 @@
       width: "1fr",
     },
   }
-
-  const getPendingSchema = tblSchema => {
-    if (!tblSchema) {
-      return {}
-    }
-    let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
-    pendingSchema.email.displayName = "Pending Invites"
-    return pendingSchema
-  }
-
   $: pendingSchema = getPendingSchema(schema)
   $: userData = []
   $: inviteUsersResponse = { successful: [], unsuccessful: [] }
@@ -121,9 +116,15 @@
       }
     })
   }
-  let invitesLoaded = false
-  let pendingInvites = []
-  let parsedInvites = []
+
+  const getPendingSchema = tblSchema => {
+    if (!tblSchema) {
+      return {}
+    }
+    let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
+    pendingSchema.email.displayName = "Pending Invites"
+    return pendingSchema
+  }

   const invitesToSchema = invites => {
     return invites.map(invite => {
@@ -143,7 +144,9 @@
   const updateFetch = email => {
     fetch.update({
       query: {
-        email,
+        string: {
+          email,
+        },
       },
     })
   }
@@ -296,7 +299,7 @@
   {/if}
   <div class="controls">
     {#if !readonly}
-      <ButtonGroup>
+      <div class="buttons">
        <Button
          disabled={readonly}
          on:click={$licensing.userLimitReached
@@ -315,7 +318,7 @@
         >
           Import
         </Button>
-      </ButtonGroup>
+      </div>
     {:else}
       <ScimBanner />
     {/if}
@@ -390,12 +393,15 @@
 </Modal>

 <style>
+  .buttons {
+    display: flex;
+    gap: 10px;
+  }
   .pagination {
     display: flex;
     flex-direction: row;
     justify-content: flex-end;
   }

   .controls {
     display: flex;
     flex-direction: row;
@@ -403,7 +409,6 @@
     align-items: center;
     gap: var(--spacing-xl);
   }
-
   .controls-right {
     display: flex;
     flex-direction: row;
@@ -411,7 +416,6 @@
     align-items: center;
     gap: var(--spacing-xl);
   }
-
   .controls-right :global(.spectrum-Search) {
     width: 200px;
   }
@@ -70,6 +70,13 @@ module AwsMock {
         Contents: {},
       })
     )
+
+    // @ts-ignore
+    this.getObject = jest.fn(
+      response({
+        Body: "",
+      })
+    )
   }

   aws.DynamoDB = { DocumentClient }
@@ -32,11 +32,8 @@ import {
   tenancy,
   users,
 } from "@budibase/backend-core"
-import { USERS_TABLE_SCHEMA } from "../../constants"
-import {
-  buildDefaultDocs,
-  DEFAULT_BB_DATASOURCE_ID,
-} from "../../db/defaultData/datasource_bb_default"
+import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
+import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
 import { removeAppFromUserRoles } from "../../utilities/workerRequests"
 import { stringToReadStream } from "../../utilities"
 import { doesUserHaveLock } from "../../utilities/redis"
@@ -5,6 +5,8 @@ import sdk from "../../../sdk"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import { mocks } from "@budibase/backend-core/tests"

+mocks.licenses.useBackups()
+
 describe("/backups", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
@@ -12,16 +14,17 @@ describe("/backups", () => {
   afterAll(setup.afterAll)

   beforeEach(async () => {
+    tk.reset()
     await config.init()
   })

-  describe("exportAppDump", () => {
+  describe("/api/backups/export", () => {
     it("should be able to export app", async () => {
-      const res = await request
-        .post(`/api/backups/export?appId=${config.getAppId()}`)
-        .set(config.defaultHeaders())
-        .expect(200)
-      expect(res.headers["content-type"]).toEqual("application/gzip")
+      const { body, headers } = await config.api.backup.exportBasicBackup(
+        config.getAppId()!
+      )
+      expect(body instanceof Buffer).toBe(true)
+      expect(headers["content-type"]).toEqual("application/gzip")
       expect(events.app.exported).toBeCalledTimes(1)
     })
@@ -36,11 +39,11 @@ describe("/backups", () => {
     it("should infer the app name from the app", async () => {
       tk.freeze(mocks.date.MOCK_DATE)

-      const res = await request
-        .post(`/api/backups/export?appId=${config.getAppId()}`)
-        .set(config.defaultHeaders())
+      const { headers } = await config.api.backup.exportBasicBackup(
+        config.getAppId()!
+      )

-      expect(res.headers["content-disposition"]).toEqual(
+      expect(headers["content-disposition"]).toEqual(
         `attachment; filename="${
           config.getApp()!.name
         }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@@ -48,6 +51,21 @@ describe("/backups", () => {
     })
   })

+  describe("/api/backups/import", () => {
+    it("should be able to import an app", async () => {
+      const appId = config.getAppId()!
+      const automation = await config.createAutomation()
+      await config.createAutomationLog(automation, appId)
+      await config.createScreen()
+      const exportRes = await config.api.backup.createBackup(appId)
+      expect(exportRes.backupId).toBeDefined()
+      const importRes = await config.api.backup.importBackup(
+        appId,
+        exportRes.backupId
+      )
+    })
+  })
+
   describe("calculateBackupStats", () => {
     it("should be able to calculate the backup statistics", async () => {
       await config.createAutomation()
@@ -172,3 +172,8 @@ export enum AutomationErrors {
 export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
 export const MAX_AUTOMATION_RECURRING_ERRORS = 5
 export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
+export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
+export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
+export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
+export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
@@ -1,4 +1,12 @@
-import { AutoFieldSubTypes, FieldTypes } from "../../constants"
+import {
+  AutoFieldSubTypes,
+  FieldTypes,
+  DEFAULT_BB_DATASOURCE_ID,
+  DEFAULT_INVENTORY_TABLE_ID,
+  DEFAULT_EMPLOYEE_TABLE_ID,
+  DEFAULT_EXPENSES_TABLE_ID,
+  DEFAULT_JOBS_TABLE_ID,
+} from "../../constants"
 import { importToRows } from "../../api/controllers/table/utils"
 import { cloneDeep } from "lodash/fp"
 import LinkDocument from "../linkedRows/LinkDocument"
@@ -16,12 +24,6 @@ import {
   TableSourceType,
 } from "@budibase/types"

-export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
-export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
-export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
-export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
-export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
-
 const defaultDatasource = {
   _id: DEFAULT_BB_DATASOURCE_ID,
   type: dbCore.BUDIBASE_DATASOURCE_TYPE,
@@ -7,10 +7,12 @@ import {
   TableSourceType,
 } from "@budibase/types"
 import { DocumentType, SEPARATOR } from "../db/utils"
-import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
+import {
+  InvalidColumns,
+  NoEmptyFilterStrings,
+  DEFAULT_BB_DATASOURCE_ID,
+} from "../constants"
 import { helpers } from "@budibase/shared-core"
-import * as external from "../api/controllers/table/external"
-import * as internal from "../api/controllers/table/internal"

 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
@@ -96,7 +98,8 @@ export function isInternalTableID(tableId: string) {
 export function isExternalTable(table: Table) {
   if (
     table?.sourceId &&
-    table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR)
+    table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
+    table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
   ) {
     return true
   } else if (table?.sourceType === TableSourceType.EXTERNAL) {
@@ -26,7 +26,6 @@ export interface DBDumpOpts {
 export interface ExportOpts extends DBDumpOpts {
   tar?: boolean
   excludeRows?: boolean
-  excludeLogs?: boolean
   encryptPassword?: string
 }
@@ -83,14 +82,15 @@ export async function exportDB(
   })
 }

-function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
-  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
+function defineFilter(excludeRows?: boolean) {
+  const ids = [
+    USER_METDATA_PREFIX,
+    LINK_USER_METADATA_PREFIX,
+    AUTOMATION_LOG_PREFIX,
+  ]
   if (excludeRows) {
     ids.push(TABLE_ROW_PREFIX)
   }
-  if (excludeLogs) {
-    ids.push(AUTOMATION_LOG_PREFIX)
-  }
   return (doc: any) =>
     !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
 }
@@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
       fs.writeFileSync(join(tmpPath, path), contents)
     }
   }
-  // get all of the files
+  // get all the files
   else {
     tmpPath = await objectStore.retrieveDirectory(
       ObjectStoreBuckets.APPS,
@@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // enforce an export of app DB to the tmp path
   const dbPath = join(tmpPath, DB_EXPORT_FILE)
   await exportDB(appId, {
-    filter: defineFilter(config?.excludeRows, config?.excludeLogs),
+    filter: defineFilter(config?.excludeRows),
     exportPath: dbPath,
   })
@@ -191,7 +191,6 @@ export async function streamExportApp({
 }) {
   const tmpPath = await exportApp(appId, {
     excludeRows,
-    excludeLogs: true,
     tar: true,
     encryptPassword,
   })
@@ -805,8 +805,9 @@ class TestConfiguration {

   // AUTOMATION LOG

-  async createAutomationLog(automation: Automation) {
-    return await context.doInAppContext(this.getProdAppId(), async () => {
+  async createAutomationLog(automation: Automation, appId?: string) {
+    appId = appId || this.getProdAppId()
+    return await context.doInAppContext(appId!, async () => {
       return await pro.sdk.automations.logs.storeLog(
         automation,
         basicAutomationResults(automation._id!)
@@ -0,0 +1,45 @@
+import {
+  CreateAppBackupResponse,
+  ImportAppBackupResponse,
+} from "@budibase/types"
+import TestConfiguration from "../TestConfiguration"
+import { TestAPI } from "./base"
+
+export class BackupAPI extends TestAPI {
+  constructor(config: TestConfiguration) {
+    super(config)
+  }
+
+  exportBasicBackup = async (appId: string) => {
+    const result = await this.request
+      .post(`/api/backups/export?appId=${appId}`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /application\/gzip/)
+      .expect(200)
+    return {
+      body: result.body as Buffer,
+      headers: result.headers,
+    }
+  }
+
+  createBackup = async (appId: string) => {
+    const result = await this.request
+      .post(`/api/apps/${appId}/backups`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    return result.body as CreateAppBackupResponse
+  }
+
+  importBackup = async (
+    appId: string,
+    backupId: string
+  ): Promise<ImportAppBackupResponse> => {
+    const result = await this.request
+      .post(`/api/apps/${appId}/backups/${backupId}/import`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    return result.body as ImportAppBackupResponse
+  }
+}
@@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
 import { LegacyViewAPI } from "./legacyView"
 import { ScreenAPI } from "./screen"
 import { ApplicationAPI } from "./application"
+import { BackupAPI } from "./backup"
 import { AttachmentAPI } from "./attachment"

 export default class API {
@@ -18,6 +19,7 @@ export default class API {
   datasource: DatasourceAPI
   screen: ScreenAPI
   application: ApplicationAPI
+  backup: BackupAPI
   attachment: AttachmentAPI

   constructor(config: TestConfiguration) {
@@ -29,6 +31,7 @@ export default class API {
     this.datasource = new DatasourceAPI(config)
     this.screen = new ScreenAPI(config)
     this.application = new ApplicationAPI(config)
+    this.backup = new BackupAPI(config)
     this.attachment = new AttachmentAPI(config)
   }
 }
@@ -20,3 +20,8 @@ export interface CreateAppBackupResponse {
 export interface UpdateAppBackupRequest {
   name: string
 }
+
+export interface ImportAppBackupResponse {
+  restoreId: string
+  message: string
+}