Merge branch 'master' into add-select-states-to-dropdown-data-provider-select
This commit is contained in:
commit b2aca0c4a6
@@ -3,6 +3,7 @@ const mockS3 = {
   deleteObject: jest.fn().mockReturnThis(),
   deleteObjects: jest.fn().mockReturnThis(),
   createBucket: jest.fn().mockReturnThis(),
+  getObject: jest.fn().mockReturnThis(),
   listObject: jest.fn().mockReturnThis(),
   getSignedUrl: jest.fn((operation: string, params: any) => {
     return `http://s3.example.com/${params.Bucket}/${params.Key}`
@@ -249,7 +249,8 @@ export const paginatedUsers = async ({
   limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
-  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
+  const pageSize = limit ?? PAGE_LIMIT
+  const pageLimit = pageSize + 1
   // get one extra document, to have the next page
   const opts: DatabaseQueryOpts = {
     include_docs: true,
@@ -276,7 +277,7 @@ export const paginatedUsers = async ({
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, pageLimit, {
+  return pagination(userList, pageSize, {
     paginate: true,
     property,
     getKey,
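
Note: with this change the pagination helper receives the page size the caller asked for rather than the internal fetch limit; the query still fetches pageSize + 1 rows ("get one extra document") so the presence of a next page can be detected. A minimal sketch of that pattern, with illustrative names rather than the actual pagination helper:

    // illustrative sketch of the "fetch one extra row" pattern used above
    function paginate<T>(rows: T[], pageSize: number) {
      const hasNextPage = rows.length > pageSize
      return {
        data: rows.slice(0, pageSize), // drop the sentinel row fetched via pageLimit
        hasNextPage,
      }
    }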
@@ -1,2 +1,3 @@
 export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
 
+export const MOCK_DATE_TIMESTAMP = 1577836800000
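
Note: 1577836800000 is the Unix timestamp in milliseconds for 2020-01-01T00:00:00.000Z, so the new constant is the numeric twin of MOCK_DATE:

    // sanity check: evaluates to true
    new Date("2020-01-01T00:00:00.000Z").getTime() === 1577836800000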
@@ -3,7 +3,6 @@
   Heading,
   Body,
   Button,
-  ButtonGroup,
   Table,
   Layout,
   Modal,
@@ -46,6 +45,10 @@
     datasource: {
       type: "user",
     },
+    options: {
+      paginate: true,
+      limit: 10,
+    },
   })
 
   let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
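
Note: the added options block makes the user fetch paginated with a page size of 10. A sketch of the kind of fetch being configured, assuming the fetchData helper from @budibase/frontend-core that the builder uses elsewhere:

    // sketch under that assumption, not the full component code
    const fetch = fetchData({
      API,
      datasource: { type: "user" },
      options: {
        paginate: true, // page-navigation controls become usable
        limit: 10,      // rows per page
      },
    })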
@@ -65,10 +68,12 @@
     { column: "role", component: RoleTableRenderer },
   ]
   let userData = []
+  let invitesLoaded = false
+  let pendingInvites = []
+  let parsedInvites = []
 
   $: isOwner = $auth.accountPortalAccess && $admin.cloud
   $: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
 
   $: debouncedUpdateFetch(searchEmail)
   $: schema = {
     email: {
@@ -88,16 +93,6 @@
       width: "1fr",
     },
   }
 
-  const getPendingSchema = tblSchema => {
-    if (!tblSchema) {
-      return {}
-    }
-    let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
-    pendingSchema.email.displayName = "Pending Invites"
-    return pendingSchema
-  }
 
-  $: pendingSchema = getPendingSchema(schema)
-  $: userData = []
   $: inviteUsersResponse = { successful: [], unsuccessful: [] }
@@ -121,9 +116,15 @@
       }
     })
   }
-  let invitesLoaded = false
-  let pendingInvites = []
-  let parsedInvites = []
 
+  const getPendingSchema = tblSchema => {
+    if (!tblSchema) {
+      return {}
+    }
+    let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
+    pendingSchema.email.displayName = "Pending Invites"
+    return pendingSchema
+  }
+
   const invitesToSchema = invites => {
     return invites.map(invite => {
@@ -143,7 +144,9 @@
   const updateFetch = email => {
     fetch.update({
       query: {
-        email,
+        string: {
+          email,
+        },
       },
     })
   }
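
Note: the email filter is now nested under a string key, i.e. the structured search-filter shape Budibase queries use. Treating string as the starts-with style text match on the named column (an assumption about the filter semantics, not stated in this diff), the change looks like:

    // illustrative payloads, assuming starts-with semantics for "string" filters
    const before = { query: { email: "jo" } }
    const after = { query: { string: { email: "jo" } } }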
@@ -296,7 +299,7 @@
   {/if}
   <div class="controls">
     {#if !readonly}
-      <ButtonGroup>
+      <div class="buttons">
         <Button
           disabled={readonly}
           on:click={$licensing.userLimitReached
@@ -315,7 +318,7 @@
         >
           Import
         </Button>
-      </ButtonGroup>
+      </div>
     {:else}
       <ScimBanner />
     {/if}
@@ -390,12 +393,15 @@
 </Modal>
 
 <style>
+  .buttons {
+    display: flex;
+    gap: 10px;
+  }
   .pagination {
     display: flex;
     flex-direction: row;
     justify-content: flex-end;
   }
 
   .controls {
     display: flex;
     flex-direction: row;
@@ -403,7 +409,6 @@
     align-items: center;
     gap: var(--spacing-xl);
   }
 
   .controls-right {
     display: flex;
     flex-direction: row;
@@ -411,7 +416,6 @@
     align-items: center;
     gap: var(--spacing-xl);
   }
 
   .controls-right :global(.spectrum-Search) {
     width: 200px;
   }
@@ -70,6 +70,13 @@ module AwsMock {
         Contents: {},
       })
     )
+
+    // @ts-ignore
+    this.getObject = jest.fn(
+      response({
+        Body: "",
+      })
+    )
   }
 
   aws.DynamoDB = { DocumentClient }
@@ -32,11 +32,8 @@ import {
   tenancy,
   users,
 } from "@budibase/backend-core"
-import { USERS_TABLE_SCHEMA } from "../../constants"
-import {
-  buildDefaultDocs,
-  DEFAULT_BB_DATASOURCE_ID,
-} from "../../db/defaultData/datasource_bb_default"
+import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
+import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
 import { removeAppFromUserRoles } from "../../utilities/workerRequests"
 import { stringToReadStream } from "../../utilities"
 import { doesUserHaveLock } from "../../utilities/redis"
@@ -5,6 +5,8 @@ import sdk from "../../../sdk"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import { mocks } from "@budibase/backend-core/tests"
 
+mocks.licenses.useBackups()
+
 describe("/backups", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
@@ -12,16 +14,17 @@ describe("/backups", () => {
   afterAll(setup.afterAll)
 
   beforeEach(async () => {
+    tk.reset()
     await config.init()
   })
 
-  describe("exportAppDump", () => {
+  describe("/api/backups/export", () => {
     it("should be able to export app", async () => {
-      const res = await request
-        .post(`/api/backups/export?appId=${config.getAppId()}`)
-        .set(config.defaultHeaders())
-        .expect(200)
-      expect(res.headers["content-type"]).toEqual("application/gzip")
+      const { body, headers } = await config.api.backup.exportBasicBackup(
+        config.getAppId()!
+      )
+      expect(body instanceof Buffer).toBe(true)
+      expect(headers["content-type"]).toEqual("application/gzip")
       expect(events.app.exported).toBeCalledTimes(1)
     })
 
@@ -36,11 +39,11 @@ describe("/backups", () => {
     it("should infer the app name from the app", async () => {
       tk.freeze(mocks.date.MOCK_DATE)
 
-      const res = await request
-        .post(`/api/backups/export?appId=${config.getAppId()}`)
-        .set(config.defaultHeaders())
+      const { headers } = await config.api.backup.exportBasicBackup(
+        config.getAppId()!
+      )
 
-      expect(res.headers["content-disposition"]).toEqual(
+      expect(headers["content-disposition"]).toEqual(
         `attachment; filename="${
           config.getApp()!.name
         }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@@ -48,6 +51,21 @@ describe("/backups", () => {
     })
   })
 
+  describe("/api/backups/import", () => {
+    it("should be able to import an app", async () => {
+      const appId = config.getAppId()!
+      const automation = await config.createAutomation()
+      await config.createAutomationLog(automation, appId)
+      await config.createScreen()
+      const exportRes = await config.api.backup.createBackup(appId)
+      expect(exportRes.backupId).toBeDefined()
+      const importRes = await config.api.backup.importBackup(
+        appId,
+        exportRes.backupId
+      )
+    })
+  })
+
   describe("calculateBackupStats", () => {
     it("should be able to calculate the backup statistics", async () => {
       await config.createAutomation()
@@ -172,3 +172,8 @@ export enum AutomationErrors {
 export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
 export const MAX_AUTOMATION_RECURRING_ERRORS = 5
 export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
+export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
+export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
+export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
+export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
@@ -1,4 +1,12 @@
-import { AutoFieldSubTypes, FieldTypes } from "../../constants"
+import {
+  AutoFieldSubTypes,
+  FieldTypes,
+  DEFAULT_BB_DATASOURCE_ID,
+  DEFAULT_INVENTORY_TABLE_ID,
+  DEFAULT_EMPLOYEE_TABLE_ID,
+  DEFAULT_EXPENSES_TABLE_ID,
+  DEFAULT_JOBS_TABLE_ID,
+} from "../../constants"
 import { importToRows } from "../../api/controllers/table/utils"
 import { cloneDeep } from "lodash/fp"
 import LinkDocument from "../linkedRows/LinkDocument"

@@ -16,12 +24,6 @@ import {
   TableSourceType,
 } from "@budibase/types"
 
-export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
-export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
-export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
-export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
-export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
-
 const defaultDatasource = {
   _id: DEFAULT_BB_DATASOURCE_ID,
   type: dbCore.BUDIBASE_DATASOURCE_TYPE,
@@ -7,10 +7,12 @@ import {
   TableSourceType,
 } from "@budibase/types"
 import { DocumentType, SEPARATOR } from "../db/utils"
-import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
+import {
+  InvalidColumns,
+  NoEmptyFilterStrings,
+  DEFAULT_BB_DATASOURCE_ID,
+} from "../constants"
 import { helpers } from "@budibase/shared-core"
 import * as external from "../api/controllers/table/external"
 import * as internal from "../api/controllers/table/internal"
 
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
@@ -96,7 +98,8 @@ export function isInternalTableID(tableId: string) {
 export function isExternalTable(table: Table) {
   if (
     table?.sourceId &&
-    table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR)
+    table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
+    table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
   ) {
     return true
   } else if (table?.sourceType === TableSourceType.EXTERNAL) {
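
Note: the extra clause is needed because the built-in Budibase datasource ID also contains the datasource document prefix, so tables attached to it would otherwise be classified as external. Illustrative values (the external ID below is made up):

    // both contain the "datasource" document prefix plus separator
    const externalSourceId = "datasource_04b003a7b4a8428e" // hypothetical external datasource
    const builtinSourceId = "datasource_internal_bb_default" // DEFAULT_BB_DATASOURCE_ID
    // hence the explicit sourceId !== DEFAULT_BB_DATASOURCE_ID guard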
@@ -26,7 +26,6 @@ export interface DBDumpOpts {
 export interface ExportOpts extends DBDumpOpts {
   tar?: boolean
   excludeRows?: boolean
-  excludeLogs?: boolean
   encryptPassword?: string
 }
 
@@ -83,14 +82,15 @@ export async function exportDB(
   })
 }
 
-function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
-  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
+function defineFilter(excludeRows?: boolean) {
+  const ids = [
+    USER_METDATA_PREFIX,
+    LINK_USER_METADATA_PREFIX,
+    AUTOMATION_LOG_PREFIX,
+  ]
   if (excludeRows) {
     ids.push(TABLE_ROW_PREFIX)
   }
-  if (excludeLogs) {
-    ids.push(AUTOMATION_LOG_PREFIX)
-  }
   return (doc: any) =>
     !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
 }
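
Note: automation logs are now always excluded from exports (their prefix moved into the base ids list), which is why the excludeLogs flag disappears. The returned predicate keeps a document only when its _id matches none of the prefixes; the map().reduce() OR-fold is equivalent to Array.prototype.some:

    // equivalent, more direct form of the returned filter
    const filter = (doc: { _id: string }) =>
      !ids.some(key => doc._id.includes(key))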
@@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
       fs.writeFileSync(join(tmpPath, path), contents)
     }
   }
-  // get all of the files
+  // get all the files
   else {
     tmpPath = await objectStore.retrieveDirectory(
       ObjectStoreBuckets.APPS,
@@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // enforce an export of app DB to the tmp path
   const dbPath = join(tmpPath, DB_EXPORT_FILE)
   await exportDB(appId, {
-    filter: defineFilter(config?.excludeRows, config?.excludeLogs),
+    filter: defineFilter(config?.excludeRows),
     exportPath: dbPath,
   })
 
@@ -191,7 +191,6 @@ export async function streamExportApp({
 }) {
   const tmpPath = await exportApp(appId, {
     excludeRows,
-    excludeLogs: true,
     tar: true,
     encryptPassword,
   })
@@ -805,8 +805,9 @@ class TestConfiguration {
 
   // AUTOMATION LOG
 
-  async createAutomationLog(automation: Automation) {
-    return await context.doInAppContext(this.getProdAppId(), async () => {
+  async createAutomationLog(automation: Automation, appId?: string) {
+    appId = appId || this.getProdAppId()
+    return await context.doInAppContext(appId!, async () => {
       return await pro.sdk.automations.logs.storeLog(
         automation,
         basicAutomationResults(automation._id!)
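
Note: the optional appId parameter lets a test write the log against a specific app, as the backup import spec above does; omitting it keeps the old behaviour of using the prod app ID:

    // usage mirroring the import test above
    const automation = await config.createAutomation()
    await config.createAutomationLog(automation, config.getAppId()!)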
@@ -0,0 +1,45 @@
+import {
+  CreateAppBackupResponse,
+  ImportAppBackupResponse,
+} from "@budibase/types"
+import TestConfiguration from "../TestConfiguration"
+import { TestAPI } from "./base"
+
+export class BackupAPI extends TestAPI {
+  constructor(config: TestConfiguration) {
+    super(config)
+  }
+
+  exportBasicBackup = async (appId: string) => {
+    const result = await this.request
+      .post(`/api/backups/export?appId=${appId}`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /application\/gzip/)
+      .expect(200)
+    return {
+      body: result.body as Buffer,
+      headers: result.headers,
+    }
+  }
+
+  createBackup = async (appId: string) => {
+    const result = await this.request
+      .post(`/api/apps/${appId}/backups`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    return result.body as CreateAppBackupResponse
+  }
+
+  importBackup = async (
+    appId: string,
+    backupId: string
+  ): Promise<ImportAppBackupResponse> => {
+    const result = await this.request
+      .post(`/api/apps/${appId}/backups/${backupId}/import`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    return result.body as ImportAppBackupResponse
+  }
+}
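
Note: a sketch of how the new BackupAPI helper reads in a test, with response fields taken from this diff (backupId from CreateAppBackupResponse as used in the import spec, restoreId and message from ImportAppBackupResponse below):

    // inside a test where config is initialised
    const appId = config.getAppId()!
    const { body, headers } = await config.api.backup.exportBasicBackup(appId)
    const { backupId } = await config.api.backup.createBackup(appId)
    const { restoreId, message } = await config.api.backup.importBackup(appId, backupId)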
@@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
 import { LegacyViewAPI } from "./legacyView"
 import { ScreenAPI } from "./screen"
 import { ApplicationAPI } from "./application"
+import { BackupAPI } from "./backup"
 import { AttachmentAPI } from "./attachment"
 
 export default class API {
@@ -18,6 +19,7 @@ export default class API {
   datasource: DatasourceAPI
   screen: ScreenAPI
   application: ApplicationAPI
+  backup: BackupAPI
   attachment: AttachmentAPI
 
   constructor(config: TestConfiguration) {
@@ -29,6 +31,7 @@ export default class API {
     this.datasource = new DatasourceAPI(config)
     this.screen = new ScreenAPI(config)
     this.application = new ApplicationAPI(config)
+    this.backup = new BackupAPI(config)
     this.attachment = new AttachmentAPI(config)
   }
 }
@@ -20,3 +20,8 @@ export interface CreateAppBackupResponse {
 export interface UpdateAppBackupRequest {
   name: string
 }
+
+export interface ImportAppBackupResponse {
+  restoreId: string
+  message: string
+}