Merge branch 'master' into BUDI-8416/allow-updating-email-via-scim

Adria Navarro 2024-07-03 16:59:22 +02:00 committed by GitHub
commit 40d6cbbb59
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 64 additions and 58 deletions


@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.10",
+  "version": "2.29.11",
   "npmClient": "yarn",
   "packages": [
     "packages/*",


@@ -10,6 +10,6 @@ export const MIGRATIONS: AppMigration[] = [
   {
     id: "20240604153647_initial_sqs",
     func: m20240604153647_initial_sqs,
-    disabled: !env.SQS_SEARCH_ENABLE,
+    disabled: !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE),
   },
 ]
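
For context on the gate above (an illustrative sketch, not part of the commit): environment variables are read as strings, so the new disabled expression relies on plain JavaScript truthiness, and the migration stays enabled if either variable holds a non-empty value.

// Sketch only: the names mirror the diff, but this snippet is not repository code.
const env = {
  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE, // e.g. "1" or undefined
  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
}
// Disabled only when neither flag is set: "" and undefined are falsy, "1" is truthy.
const disabled = !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE)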


@@ -40,7 +40,7 @@ const migration = async () => {
   // only do initial search if environment is using SQS already
   // initial search makes sure that all the indexes have been created
   // and are ready to use, avoiding any initial waits for large tables
-  if (env.SQS_SEARCH_ENABLE) {
+  if (env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE) {
     const tables = await sdk.tables.getAllInternalTables()
     // do these one by one - running in parallel could cause problems
     for (let table of tables) {


@@ -66,64 +66,69 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
   }
 }
-async function sqsDisabled(cb: () => Promise<void>) {
-  await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
+type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"
+
+async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
+  await config.withEnv({ [envVar]: "" }, cb)
 }
-async function sqsEnabled(cb: () => Promise<void>) {
-  await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
+async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
+  await config.withEnv({ [envVar]: "1" }, cb)
 }
-beforeAll(async () => {
-  await sqsDisabled(async () => {
-    await config.init()
-    const table = await config.api.table.save(basicTable())
-    tableId = table._id!
-    const db = dbCore.getDB(config.appId!)
-    // old link document
-    await db.put(oldLinkDocument())
-  })
-})
-
-describe("SQS migration", () => {
-  it("test migration runs as expected against an older DB", async () => {
-    const db = dbCore.getDB(config.appId!)
-    // confirm nothing exists initially
-    await sqsDisabled(async () => {
-      let error: any | undefined
-      try {
-        await db.get(SQLITE_DESIGN_DOC_ID)
-      } catch (err: any) {
-        error = err
-      }
-      expect(error).toBeDefined()
-      expect(error.status).toBe(404)
-    })
-    await sqsEnabled(async () => {
-      await processMigrations(config.appId!, MIGRATIONS)
-      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
-      expect(designDoc.sql.tables).toBeDefined()
-      const mainTableDef = designDoc.sql.tables[tableId]
-      expect(mainTableDef).toBeDefined()
-      expect(mainTableDef.fields[prefix("name")]).toEqual({
-        field: "name",
-        type: SQLiteType.TEXT,
-      })
-      expect(mainTableDef.fields[prefix("description")]).toEqual({
-        field: "description",
-        type: SQLiteType.TEXT,
-      })
-      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
-      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
-      expect(linkDoc.tableId).toEqual(
-        generateJunctionTableID(tableId1, tableId2)
-      )
-      // should have swapped the documents
-      expect(linkDoc.doc1.tableId).toEqual(tableId2)
-      expect(linkDoc.doc1.rowId).toEqual(rowId2)
-      expect(linkDoc.doc2.tableId).toEqual(tableId1)
-      expect(linkDoc.doc2.rowId).toEqual(rowId1)
-    })
-  })
-})
+describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
+  "SQS migration with (%s)",
+  envVar => {
+    beforeAll(async () => {
+      await sqsDisabled(envVar, async () => {
+        await config.init()
+        const table = await config.api.table.save(basicTable())
+        tableId = table._id!
+        const db = dbCore.getDB(config.appId!)
+        // old link document
+        await db.put(oldLinkDocument())
+      })
+    })
+
+    it("test migration runs as expected against an older DB", async () => {
+      const db = dbCore.getDB(config.appId!)
+      // confirm nothing exists initially
+      await sqsDisabled(envVar, async () => {
+        let error: any | undefined
+        try {
+          await db.get(SQLITE_DESIGN_DOC_ID)
+        } catch (err: any) {
+          error = err
+        }
+        expect(error).toBeDefined()
+        expect(error.status).toBe(404)
+      })
+      await sqsEnabled(envVar, async () => {
+        await processMigrations(config.appId!, MIGRATIONS)
+        const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+        expect(designDoc.sql.tables).toBeDefined()
+        const mainTableDef = designDoc.sql.tables[tableId]
+        expect(mainTableDef).toBeDefined()
+        expect(mainTableDef.fields[prefix("name")]).toEqual({
+          field: "name",
+          type: SQLiteType.TEXT,
+        })
+        expect(mainTableDef.fields[prefix("description")]).toEqual({
+          field: "description",
+          type: SQLiteType.TEXT,
+        })
+        const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
+        const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
+        expect(linkDoc.tableId).toEqual(
+          generateJunctionTableID(tableId1, tableId2)
+        )
+        // should have swapped the documents
+        expect(linkDoc.doc1.tableId).toEqual(tableId2)
+        expect(linkDoc.doc1.rowId).toEqual(rowId2)
+        expect(linkDoc.doc2.tableId).toEqual(tableId1)
+        expect(linkDoc.doc2.rowId).toEqual(rowId1)
+      })
+    })
+  }
)
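
The test refactor above leans on Jest's describe.each, which runs the same suite once per entry in the table and interpolates each value into the suite name via "%s". Below is a minimal, self-contained sketch of that pattern; the withEnvVar helper is a hypothetical stand-in for the repository's config.withEnv, and only the describe.each call mirrors the diff.

type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

// Hypothetical stand-in for config.withEnv: set the variable, run the
// callback, then restore whatever value was there before.
async function withEnvVar(
  name: SQSEnvVar,
  value: string,
  cb: () => Promise<void>
) {
  const previous = process.env[name]
  process.env[name] = value
  try {
    await cb()
  } finally {
    if (previous === undefined) {
      delete process.env[name]
    } else {
      process.env[name] = previous
    }
  }
}

// Jest runs this block twice, once per array entry; "%s" becomes the variable name.
describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
  "SQS migration with (%s)",
  envVar => {
    it("sees the flag while the helper is active", async () => {
      await withEnvVar(envVar, "1", async () => {
        expect(process.env[envVar]).toBe("1")
      })
    })
  }
)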


@@ -92,6 +92,7 @@ const environment = {
   SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
   SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
   SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
+  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   // flags
   ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
   DISABLE_THREADING: process.env.DISABLE_THREADING,